Commit

fix for lazy backup
wkpark committed Nov 1, 2024
1 parent 412401b commit b783a96
Showing 1 changed file with 1 addition and 9 deletions.
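In short: the commit removes the two "no backup ... and current ... are not unchanged" RuntimeError guards from network_backup_weights and renames the now-unused current_names to _current_names; going by the commit title, those guards misfired once weight backups were taken lazily.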
10 changes: 1 addition & 9 deletions extensions-builtin/Lora/networks.py
@@ -426,7 +426,7 @@ def network_restore_weights_from_backup(self: Union[torch.nn.Conv2d, torch.nn.Li
 def network_backup_weights(self):
     network_layer_name = getattr(self, 'network_layer_name', None)
 
-    current_names = getattr(self, "network_current_names", ())
+    _current_names = getattr(self, "network_current_names", ())
     wanted_names = tuple((x.name, x.te_multiplier, x.unet_multiplier, x.dyn_dim) for x in loaded_networks)
 
     need_backup = False
@@ -443,9 +443,6 @@ def network_backup_weights(self):
 
     weights_backup = getattr(self, "network_weights_backup", None)
     if weights_backup is None and wanted_names != ():
-        if current_names != () and not allowed_layer_without_weight(self):
-            raise RuntimeError(f"{network_layer_name} - no backup weights found and current weights are not unchanged")
-
         if isinstance(self, torch.nn.MultiheadAttention):
             weights_backup = (store_weights_backup(self.in_proj_weight, self.org_dtype), store_weights_backup(self.out_proj.weight, self.org_dtype))
         else:
@@ -462,11 +459,6 @@ def network_backup_weights(self):
         else:
             bias_backup = None
 
-        # Unlike weight which always has value, some modules don't have bias.
-        # Only report if bias is not None and current bias are not unchanged.
-        if bias_backup is not None and current_names != ():
-            raise RuntimeError("no backup bias found and current bias are not unchanged")
-
         self.network_bias_backup = bias_backup

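For context, here is a minimal, self-contained sketch of the lazy-backup pattern this function serves: a layer's pristine weights are snapshotted only the first time a network is about to modify them, and restored from that snapshot later. Only the network_weights_backup and network_bias_backup attribute names come from the diff; the helper names (lazy_backup, restore) and the rest are illustrative, not the webui implementation.

import torch

def lazy_backup(module: torch.nn.Module) -> None:
    """Snapshot pristine weights/bias the first time they are needed."""
    if getattr(module, "network_weights_backup", None) is None:
        module.network_weights_backup = module.weight.detach().cpu().clone()
    if getattr(module, "network_bias_backup", None) is None:
        bias = getattr(module, "bias", None)
        # Unlike weight, some modules carry no bias at all; store None then.
        module.network_bias_backup = None if bias is None else bias.detach().cpu().clone()

def restore(module: torch.nn.Module) -> None:
    """Undo every applied network by copying the snapshots back in place."""
    with torch.no_grad():
        backup = getattr(module, "network_weights_backup", None)
        if backup is not None:
            module.weight.copy_(backup.to(module.weight.device))
        bias_backup = getattr(module, "network_bias_backup", None)
        if bias_backup is not None:
            module.bias.copy_(bias_backup.to(module.bias.device))

# Usage: back up lazily, mutate in place, restore later.
layer = torch.nn.Linear(4, 4)
lazy_backup(layer)            # first touch: snapshot taken
with torch.no_grad():
    layer.weight.mul_(0.5)    # a "network" patches the weights in place
restore(layer)                # pristine weights come back

Under this scheme a missing backup simply means no network has touched the layer yet, which would explain why the removed guards, raising whenever a backup was absent while network_current_names was already set, no longer apply.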