Commit 6d4f2df: linting
Signed-off-by: Vladimir Mandic <[email protected]>
vladmandic committed Oct 23, 2024
1 parent 3cc78bd commit 6d4f2df
Showing 5 changed files with 7 additions and 6 deletions.
3 changes: 2 additions & 1 deletion .eslintrc.json
@@ -123,6 +123,7 @@
     "venv",
     "panzoom.js",
     "split.js",
-    "exifr.js"
+    "exifr.js",
+    "iframeResizer.min.js"
   ]
 }
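
The lint fix here is twofold: the previous last entry "exifr.js" gains the trailing comma it needs once it is no longer last, and the bundled minified script iframeResizer.min.js joins the ESLint ignore list (judging by the neighboring entries, an ignorePatterns-style array) so the linter skips vendored code it should not touch.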
4 changes: 2 additions & 2 deletions extensions-builtin/Lora/networks.py
@@ -321,7 +321,7 @@ def network_restore_weights_from_backup(self: Union[torch.nn.Conv2d, torch.nn.Li
     timer['restore'] += t1 - t0


-def maybe_backup_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.GroupNorm, torch.nn.LayerNorm, torch.nn.MultiheadAttention, diffusers.models.lora.LoRACompatibleLinear, diffusers.models.lora.LoRACompatibleConv], wanted_names, current_names):
+def maybe_backup_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.GroupNorm, torch.nn.LayerNorm, torch.nn.MultiheadAttention, diffusers.models.lora.LoRACompatibleLinear, diffusers.models.lora.LoRACompatibleConv], wanted_names, current_names): # pylint: disable=W0613
     weights_backup = getattr(self, "network_weights_backup", None)
     if weights_backup is None and wanted_names != (): # pylint: disable=C1803
         if isinstance(self, torch.nn.MultiheadAttention):
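
For context: W0613 is pylint's unused-argument check. maybe_backup_weights accepts current_names but never reads it, so the commit silences the warning on the definition line instead of narrowing the signature. A minimal sketch of the pattern, with hypothetical names:

    # Keeping an unused parameter for signature parity: every caller can pass
    # the same argument list, and W0613 is silenced on the definition line.
    def maybe_backup(module, wanted_names, current_names):  # pylint: disable=W0613
        # only wanted_names is consulted; current_names exists for the interface
        return getattr(module, "weights_backup", None) is None and wanted_names != ()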
@@ -362,7 +362,7 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
     t0 = time.time()
     current_names = getattr(self, "network_current_names", ())
     wanted_names = tuple((x.name, x.te_multiplier, x.unet_multiplier, x.dyn_dim) for x in loaded_networks)
-    if any([net.modules.get(network_layer_name, None) for net in loaded_networks]):
+    if any([net.modules.get(network_layer_name, None) for net in loaded_networks]): # noqa: C419 # pylint: disable=R1729
         maybe_backup_weights(self, wanted_names, current_names)
     if current_names != wanted_names:
         network_restore_weights_from_backup(self)
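
Both suppressions on the if any(...) line target the same pattern: pylint R1729 (use-a-generator) and the comprehension check C419 flag a list comprehension passed to any(), since a generator expression short-circuits at the first truthy element instead of materializing the whole list first. The commit keeps the list form and silences the checkers; a minimal sketch of the difference:

    nets = [{"layer_a": 1}, {"layer_b": 2}]
    # list form: builds every element before any() inspects the first one
    found = any([n.get("layer_a") for n in nets])
    # generator form: stops evaluating at the first truthy value
    found_gen = any(n.get("layer_a") for n in nets)
    assert found == found_gen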
2 changes: 1 addition & 1 deletion modules/linfusion/attention.py
@@ -44,7 +44,7 @@ def add_non_linear_model(self, mid_dim=None, **kwargs):
         self.to_q_ = get_none_linear_projection(query_dim, mid_dim, **kwargs)
         self.to_k_ = get_none_linear_projection(query_dim, mid_dim, **kwargs)

-    def forward(
+    def forward( # pylint: disable=unused-argument
         self,
         hidden_states,
         encoder_hidden_states=None,
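
The suppression on forward is the same unused-argument check (W0613) spelled out by name; the parameter list is truncated in this view, but the disable comment implies forward accepts arguments it does not use, presumably to stay drop-in compatible with the attention interface it replaces.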
2 changes: 1 addition & 1 deletion modules/linfusion/linfusion.py
@@ -86,7 +86,7 @@ def construct_for(
         if load_pretrained:
             # Load from pretrained
             if not pretrained_model_name_or_path:
-                pipe_name_path = pipe_name_path or pipeline._internal_dict._name_or_path
+                pipe_name_path = pipe_name_path or pipeline._internal_dict._name_or_path # pylint: disable=protected-access
                 pretrained_model_name_or_path = model_dict.get(pipe_name_path, None)
             if pretrained_model_name_or_path:
                 print(
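
protected-access is pylint W0212: it fires whenever an attribute whose name starts with an underscore is read from outside its own class, as with pipeline._internal_dict._name_or_path above. A minimal sketch of the rule, with hypothetical names and values:

    class Pipeline:
        def __init__(self):
            self._name_or_path = "example/model"  # hypothetical value

    pipe = Pipeline()
    # reading a _-prefixed attribute from outside the class trips W0212,
    # so the line is annotated rather than wrapped in a public accessor
    name = pipe._name_or_path  # pylint: disable=protected-access
    print(name)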
2 changes: 1 addition & 1 deletion modules/sd_samplers_diffusers.py
@@ -191,7 +191,7 @@ def __init__(self, name, constructor, model, **kwargs):
         if 'lower_order_final' in self.config:
             self.config['lower_order_final'] = shared.opts.schedulers_use_loworder
         if 'solver_order' in self.config and int(shared.opts.schedulers_solver_order) > 0:
-            self.config['solver_order'] = (shared.opts.schedulers_solver_order)
+            self.config['solver_order'] = int(shared.opts.schedulers_solver_order)
         if 'predict_x0' in self.config:
             self.config['solver_type'] = shared.opts.uni_pc_variant
         if 'beta_start' in self.config and shared.opts.schedulers_beta_start > 0:
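
This is the one change in the commit that alters behavior rather than annotations: the guard on the preceding line already casts schedulers_solver_order with int(), but the stored value previously kept its original type. Assuming the option can arrive as a float (its source is not shown here), the cast keeps self.config['solver_order'] an integer, which is what a solver-order parameter must be. A minimal sketch:

    solver_order = 2.0  # hypothetical raw option value
    config = {}
    if int(solver_order) > 0:
        config['solver_order'] = int(solver_order)  # store 2, not 2.0
    assert config['solver_order'] == 2 and isinstance(config['solver_order'], int)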
