bug in optimizer decay-warmup defaulting

Victor Hall 2023-11-15 15:46:28 -05:00
parent e95a8861e9
commit 20a9b3254f
1 changed file with 2 additions and 6 deletions


@@ -257,8 +257,8 @@ class EveryDreamOptimizer():
         lr_scheduler = get_scheduler(
             te_config.get("lr_scheduler", args.lr_scheduler),
             optimizer=self.optimizer_te,
-            num_warmup_steps=int(te_config.get("lr_warmup_steps", None)) or unet_config["lr_warmup_steps"],
-            num_training_steps=int(te_config.get("lr_decay_steps", None)) or unet_config["lr_decay_steps"]
+            num_warmup_steps=int(te_config.get("lr_warmup_steps", None) or unet_config["lr_warmup_steps"]),
+            num_training_steps=int(te_config.get("lr_decay_steps", None) or unet_config["lr_decay_steps"])
         )
         ret_val.append(lr_scheduler)
@@ -453,10 +453,6 @@ class EveryDreamOptimizer():
                 growth_rate=growth_rate,
             )
-        else:
-            import bitsandbytes as bnb
-            opt_class = bnb.optim.AdamW8bit
         if not optimizer:
             optimizer = opt_class(
                 itertools.chain(parameters),
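
The first hunk is a parenthesization fix: in the old code int() ran before the "or" fallback, so a text-encoder config with no lr_warmup_steps (or lr_decay_steps) override evaluated int(None) and raised a TypeError instead of defaulting to the U-Net value. A minimal sketch of the difference, using illustrative stand-in dicts rather than the real config objects:

te_config = {}                          # no per-text-encoder override present
unet_config = {"lr_warmup_steps": 100}  # U-Net value the code should fall back to

# Old form: int() is applied before the fallback, so a missing key crashes.
try:
    steps = int(te_config.get("lr_warmup_steps", None)) or unet_config["lr_warmup_steps"]
except TypeError as err:
    print("old form fails:", err)       # int() argument ... not 'NoneType'

# Fixed form: the fallback resolves first, then int() sees a real number.
steps = int(te_config.get("lr_warmup_steps", None) or unet_config["lr_warmup_steps"])
print("fixed form returns:", steps)     # 100

The same reasoning applies to the num_training_steps / lr_decay_steps argument on the second changed line.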