hunyuanvideo lora support
Signed-off-by: Vladimir Mandic <[email protected]>
vladmandic committed Jan 7, 2025
1 parent 7461507 commit 2090a33
Showing 7 changed files with 35 additions and 14 deletions.
CHANGELOG.md (4 additions, 0 deletions)

@@ -10,6 +10,8 @@
 - [PixelSmith](https://github.com/Thanos-DB/Pixelsmith/)
   - available for SD-XL in txt2img and img2img workflows
   - select from *scripts -> pixelsmith*
+- [Hunyuan Video](https://github.com/Tencent/HunyuanVideo) LoRA support
+  - example: <https://huggingface.co/Cseti/HunyuanVideo-LoRA-Arcane_Jinx-v1>
 - **Logging**:
   - reverted enable debug by default
   - updated [debug wiki](https://github.com/vladmandic/automatic/wiki/debug)
@@ -47,6 +49,8 @@
 - controlnet with hires
 - controlnet with batch count
 - apply settings skip hidden settings
+- lora diffusers method apply only once
+- lora diffusers method set prompt tags and metadata

 ## Update for 2024-12-31

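Note: for context, loading a HunyuanVideo LoRA such as the one linked in the changelog entry above directly through diffusers looks roughly like the sketch below. This is a minimal sketch, not SD.Next code; it assumes a diffusers version recent enough to ship `HunyuanVideoPipeline`, and the base-model repo id and prompt are illustrative (only the LoRA repo id comes from the changelog).

```python
# Minimal sketch: load the HunyuanVideo base model in diffusers format,
# attach the example LoRA, and render a short clip.
import torch
from diffusers import HunyuanVideoPipeline
from diffusers.utils import export_to_video

pipe = HunyuanVideoPipeline.from_pretrained(
    'hunyuanvideo-community/HunyuanVideo',  # assumed diffusers-format mirror
    torch_dtype=torch.bfloat16,
).to('cuda')
pipe.load_lora_weights('Cseti/HunyuanVideo-LoRA-Arcane_Jinx-v1')  # LoRA from the changelog entry
frames = pipe(prompt='arcane style, Jinx walking through a neon-lit city', num_frames=49).frames[0]
export_to_video(frames, 'jinx.mp4', fps=15)
```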
installer.py (1 addition, 1 deletion)

@@ -459,7 +459,7 @@ def check_python(supported_minors=[9, 10, 11, 12], reason=None):
 def check_diffusers():
     if args.skip_all or args.skip_git:
         return
-    sha = 'b5726358cf125f2fa1a596dce321e91a225a57e4' # diffusers commit hash
+    sha = '03bcf5aefef13a064c34b605e489c0730052cca8' # diffusers commit hash
     pkg = pkg_resources.working_set.by_key.get('diffusers', None)
     minor = int(pkg.version.split('.')[1] if pkg is not None else 0)
     cur = opts.get('diffusers_version', '') if minor > 0 else ''
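Note: the pin above means the installer tracks diffusers at an exact git commit rather than a PyPI release, which is how unreleased features like HunyuanVideo LoRA loading become available. A standalone sketch of the same idea (the helper name is hypothetical; the actual install logic lives elsewhere in installer.py):

```python
# Hypothetical equivalent of the commit pin: pip can install a package
# from a specific git commit via the git+https...@<sha> syntax.
import subprocess
import sys

SHA = '03bcf5aefef13a064c34b605e489c0730052cca8'  # pinned diffusers commit

def ensure_diffusers(sha: str = SHA) -> None:
    subprocess.run(
        [sys.executable, '-m', 'pip', 'install', '--upgrade',
         f'git+https://github.com/huggingface/diffusers@{sha}'],
        check=True,  # raise if the install fails
    )
```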
modules/lora/extra_networks_lora.py (16 additions, 10 deletions)

@@ -2,7 +2,7 @@
 import os
 import re
 import numpy as np
-from modules.lora import networks
+from modules.lora import networks, network_overrides
 from modules import extra_networks, shared


@@ -155,17 +155,23 @@ def activate(self, p, params_list, step=0, include=[], exclude=[]):
         fn = f'{sys._getframe(2).f_code.co_name}:{sys._getframe(1).f_code.co_name}' # pylint: disable=protected-access
         debug_log(f'Load network: type=LoRA include={include} exclude={exclude} requested={requested} fn={fn}')

-        networks.network_load(names, te_multipliers, unet_multipliers, dyn_dims) # load
-        has_changed = self.changed(requested, include, exclude)
-        if has_changed:
-            networks.network_deactivate(include, exclude)
-            networks.network_activate(include, exclude)
-            debug_log(f'Load network: type=LoRA previous={[n.name for n in networks.previously_loaded_networks]} current={[n.name for n in networks.loaded_networks]} changed')
+        force_diffusers = network_overrides.check_override()
+        if force_diffusers:
+            has_changed = False # diffusers handle their own loading
+            if len(exclude) == 0:
+                networks.network_load(names, te_multipliers, unet_multipliers, dyn_dims) # load only on first call
+        else:
+            networks.network_load(names, te_multipliers, unet_multipliers, dyn_dims) # load
+            has_changed = self.changed(requested, include, exclude)
+            if has_changed:
+                networks.network_deactivate(include, exclude)
+                networks.network_activate(include, exclude)
+                debug_log(f'Load network: type=LoRA previous={[n.name for n in networks.previously_loaded_networks]} current={[n.name for n in networks.loaded_networks]} changed')

-        if len(networks.loaded_networks) > 0 and len(networks.applied_layers) > 0 and step == 0:
+        if len(networks.loaded_networks) > 0 and (len(networks.applied_layers) > 0 or force_diffusers) and step == 0:
             infotext(p)
             prompt(p)
-            if has_changed and len(include) == 0: # print only once
+            if (has_changed or force_diffusers) and len(include) == 0: # print only once
                 shared.log.info(f'Load network: type=LoRA apply={[n.name for n in networks.loaded_networks]} mode={"fuse" if shared.opts.lora_fuse_diffusers else "backup"} te={te_multipliers} unet={unet_multipliers} time={networks.timer.summary}')

     def deactivate(self, p):
modules/lora/network_overrides.py (8 additions, 0 deletions)

@@ -29,12 +29,17 @@
     # 'sd3',
     'kandinsky',
     'hunyuandit',
+    'hunyuanvideo',
     'auraflow',
 ]

 force_classes = [ # forced always
 ]

+fuse_ignore = [
+    'hunyuanvideo',
+]
+

 def check_override(shorthash=''):
     force = False
@@ -47,3 +52,6 @@ def check_override(shorthash=''):
     if force and shared.opts.lora_maybe_diffusers:
         shared.log.debug('LoRA override: force diffusers')
     return force
+
+def check_fuse():
+    return shared.sd_model_type in fuse_ignore
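Note: in isolation the new helper is a simple membership test; a self-contained illustration of its intent (app state mocked here, since `shared.sd_model_type` is set elsewhere):

```python
# Standalone illustration of check_fuse(): model types listed in fuse_ignore
# never get their LoRA weights fused into the base model, regardless of the
# user's lora_fuse_diffusers setting.
fuse_ignore = ['hunyuanvideo']

def check_fuse(sd_model_type: str) -> bool:
    return sd_model_type in fuse_ignore

assert check_fuse('hunyuanvideo')   # HunyuanVideo: fusing is skipped
assert not check_fuse('sdxl')       # other model types: fusing stays available
```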
modules/lora/networks.py (3 additions, 2 deletions)

@@ -267,7 +267,8 @@ def network_load(names, te_multipliers=None, unet_multipliers=None, dyn_dims=None):
             failed_to_load_networks.append(name)
             shared.log.error(f'Load network: type=LoRA name="{name}" detected={network_on_disk.sd_version if network_on_disk is not None else None} failed')
             continue
-        shared.sd_model.embedding_db.load_diffusers_embedding(None, net.bundle_embeddings)
+        if hasattr(shared.sd_model, 'embedding_db'):
+            shared.sd_model.embedding_db.load_diffusers_embedding(None, net.bundle_embeddings)
         net.te_multiplier = te_multipliers[i] if te_multipliers else shared.opts.extra_networks_default_multiplier
         net.unet_multiplier = unet_multipliers[i] if unet_multipliers else shared.opts.extra_networks_default_multiplier
         net.dyn_dim = dyn_dims[i] if dyn_dims else shared.opts.extra_networks_default_multiplier
@@ -282,7 +283,7 @@ def network_load(names, te_multipliers=None, unet_multipliers=None, dyn_dims=None):
         try:
             t0 = time.time()
             shared.sd_model.set_adapters(adapter_names=diffuser_loaded, adapter_weights=diffuser_scales)
-            if shared.opts.lora_fuse_diffusers:
+            if shared.opts.lora_fuse_diffusers and not network_overrides.check_fuse():
                 shared.sd_model.fuse_lora(adapter_names=diffuser_loaded, lora_scale=1.0, fuse_unet=True, fuse_text_encoder=True) # fuse uses fixed scale since later apply does the scaling
                 shared.sd_model.unload_lora_weights()
             timer.activate += time.time() - t0
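Note: the guarded branch above follows the usual diffusers fuse pattern: scale adapters with `set_adapters`, merge them into the base weights with `fuse_lora`, then drop the now-redundant adapter modules. The "backup" mode named in the log message keeps adapters separate instead. A generic sketch of the fuse pattern outside SD.Next (SD-XL used for illustration; exact `fuse_lora` kwargs vary across diffusers versions, and the LoRA repo id is hypothetical):

```python
# Generic diffusers fuse pattern: after fusing, the LoRA deltas live inside
# the base weights, so the adapter modules can be unloaded.
import torch
from diffusers import StableDiffusionXLPipeline

pipe = StableDiffusionXLPipeline.from_pretrained(
    'stabilityai/stable-diffusion-xl-base-1.0', torch_dtype=torch.float16
).to('cuda')
pipe.load_lora_weights('some-user/some-sdxl-lora', adapter_name='style')  # hypothetical repo
pipe.set_adapters(adapter_names=['style'], adapter_weights=[0.8])  # runtime scaling
pipe.fuse_lora(lora_scale=1.0)      # merge at fixed scale, as in the diff above
pipe.unload_lora_weights()          # adapter modules no longer needed once fused
```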
modules/modeldata.py (2 additions, 0 deletions)

@@ -37,6 +37,8 @@ def get_model_type(pipe):
         model_type = 'cogvideox'
     elif "Sana" in name:
         model_type = 'sana'
+    elif 'HunyuanVideoPipeline' in name:
+        model_type = 'hunyuanvideo'
     else:
         model_type = name
     return model_type
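Note: this is what makes `shared.sd_model_type` report `'hunyuanvideo'`, which the `check_override` and `check_fuse` paths above key off. A simplified standalone sketch of the dispatch (assuming `name` is the pipeline's class name, as the `'HunyuanVideoPipeline'` substring check implies; the real function covers many more pipeline families):

```python
# Simplified illustration of get_model_type(): map a diffusers pipeline
# instance to the short model-type string used throughout the codebase.
def get_model_type(pipe) -> str:
    name = pipe.__class__.__name__
    if 'HunyuanVideoPipeline' in name:
        return 'hunyuanvideo'
    return name  # fallback: unrecognized pipelines keep their class name
```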
wiki (1 addition, 1 deletion)

Submodule wiki updated from 779328 to 06094c
