fix: lora-bias-backup don't reset cache
This commit is contained in:
parent
924642331b
commit
7d4d871d46
|
@ -418,6 +418,7 @@ def network_forward(module, input, original_forward):
|
|||
def network_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
    """Invalidate all cached network (LoRA) state stored on this layer.

    Clears the record of which networks were last applied and drops both the
    weight and bias backups, so the next forward pass recomputes/reapplies
    network weights from scratch.

    Args:
        self: the Conv2d/Linear layer whose cached network state is reset.
    """
    # Forget which networks were applied last.
    self.network_current_names = ()
    # Drop both cached backups (weight AND bias) so neither survives a reset.
    for cache_attr in ("network_weights_backup", "network_bias_backup"):
        setattr(self, cache_attr, None)
def network_Linear_forward(self, input):
|
||||
|
|
Loading…
Reference in New Issue