diff --git a/optimizer/optimizers.py b/optimizer/optimizers.py
index 0a68486..cecf43d 100644
--- a/optimizer/optimizers.py
+++ b/optimizer/optimizers.py
@@ -257,8 +257,8 @@ class EveryDreamOptimizer():
             lr_scheduler = get_scheduler(
                 te_config.get("lr_scheduler", args.lr_scheduler),
                 optimizer=self.optimizer_te,
-                num_warmup_steps=int(te_config.get("lr_warmup_steps", None)) or unet_config["lr_warmup_steps"],
-                num_training_steps=int(te_config.get("lr_decay_steps", None)) or unet_config["lr_decay_steps"]
+                num_warmup_steps=int(te_config.get("lr_warmup_steps", None) or unet_config["lr_warmup_steps"]),
+                num_training_steps=int(te_config.get("lr_decay_steps", None) or unet_config["lr_decay_steps"])
             )
             ret_val.append(lr_scheduler)
 
@@ -453,10 +453,6 @@ class EveryDreamOptimizer():
                 growth_rate=growth_rate,
             )
 
-        else:
-            import bitsandbytes as bnb
-            opt_class = bnb.optim.AdamW8bit
-
         if not optimizer:
             optimizer = opt_class(
                 itertools.chain(parameters),
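
Note (not part of the patch): the first hunk moves the `or` fallback inside the `int()` call. In the old form, `int(None)` raises a TypeError before the fallback to `unet_config` can apply whenever the text-encoder config omits the value. A minimal standalone sketch of the difference, assuming a `te_config`/`unet_config` dict shaped like the ones used by EveryDreamOptimizer:

    # Hypothetical configs for illustration only
    te_config = {}                          # no per-text-encoder override set
    unet_config = {"lr_warmup_steps": 200}

    # Old form: int() runs on None first and raises
    # "TypeError: int() argument must be ... not 'NoneType'"
    # int(te_config.get("lr_warmup_steps", None)) or unet_config["lr_warmup_steps"]

    # New form: the `or` fallback resolves first, then the result is converted
    num_warmup_steps = int(te_config.get("lr_warmup_steps", None) or unet_config["lr_warmup_steps"])
    print(num_warmup_steps)  # 200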