Merge pull request #9017 from camenduru/dev

convert to python v3.9
commit a336c7fe23
AUTOMATIC1111 2023-03-27 06:38:17 +03:00, committed by GitHub
1 changed file with 3 additions and 2 deletions


@@ -2,6 +2,7 @@ import glob
 import os
 import re
 import torch
+from typing import Union
 
 from modules import shared, devices, sd_models, errors
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
     return updown
 
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
     """
     Applies the currently selected set of Loras to the weights of torch layer self.
     If weights already have this particular set of loras applied, does nothing.
@@ -295,7 +296,7 @@ def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.Multih
     setattr(self, "lora_current_names", wanted_names)
 
-def lora_reset_cached_weight(self: torch.nn.Conv2d | torch.nn.Linear):
+def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
     setattr(self, "lora_current_names", ())
     setattr(self, "lora_weights_backup", None)