diff --git a/extensions-builtin/Lora/network.py b/extensions-builtin/Lora/network.py
index b7b890618..d8e8dfb7f 100644
--- a/extensions-builtin/Lora/network.py
+++ b/extensions-builtin/Lora/network.py
@@ -145,10 +145,10 @@ class NetworkModule:
         if orig_weight.size().numel() == updown.size().numel():
             updown = updown.reshape(orig_weight.shape)
 
-        if ex_bias is None:
-            ex_bias = 0
+        if ex_bias is not None:
+            ex_bias = ex_bias * self.multiplier()
 
-        return updown * self.calc_scale() * self.multiplier(), ex_bias * self.multiplier()
+        return updown * self.calc_scale() * self.multiplier(), ex_bias
 
     def calc_updown(self, target):
         raise NotImplementedError()
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index 74cefe435..ba6211390 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -322,7 +322,7 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
                             updown = torch.nn.functional.pad(updown, (0, 0, 0, 0, 0, 5))
 
                         self.weight += updown
-                        if getattr(self, 'bias', None) is not None:
+                        if ex_bias is not None and getattr(self, 'bias', None) is not None:
                             self.bias += ex_bias
 
                     continue