Disabled CoordinateDoWG for now because it's a bit unstable with SD training; we don't need the troubleshooting requests.
This commit is contained in:
parent
4861d96ec2
commit
e5b08f92b4
|
@ -303,12 +303,13 @@ class EveryDreamOptimizer():
|
|||
)
|
||||
elif optimizer_name == "adamw":
|
||||
opt_class = torch.optim.AdamW
|
||||
if "dowg" in optimizer_name:
|
||||
from dowg import CoordinateDoWG, ScalarDoWG
|
||||
if optimizer_name == "coordinate_dowg":
|
||||
opt_class = CoordinateDoWG
|
||||
elif optimizer_name == "scalar_dowg":
|
||||
opt_class = ScalarDoWG
|
||||
if "dowg" in optimizer_name:
|
||||
# coordinate_dowg, scalar_dowg require no additional parameters. Epsilon is overrideable but is unnecessary in all stable diffusion training situations.
|
||||
import dowg
|
||||
#if optimizer_name == "coordinate_dowg":
|
||||
#opt_class = dowg.CoordinateDoWG
|
||||
if optimizer_name == "scalar_dowg":
|
||||
opt_class = dowg.ScalarDoWG
|
||||
else:
|
||||
raise ValueError(f"Unknown DoWG optimizer {optimizer_name}. Available options are coordinate_dowg and scalar_dowg")
|
||||
elif optimizer_name in ["dadapt_adam", "dadapt_lion", "dadapt_sgd"]:
|
||||
|
|
Loading…
Reference in New Issue