prevent OOM with disabled unet when gradient checkpointing is enabled
unet needs to be in train() mode for gradient checkpointing to work
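
Why this matters: diffusers-style UNet blocks typically route their forward pass through torch.utils.checkpoint only while self.training is true, so a unet left in eval() silently skips checkpointing and keeps full activations, which is the OOM this commit prevents. A minimal sketch of that gating, assuming a diffusers-like gradient_checkpointing attribute (CheckpointedBlock and its layers are illustrative, not the actual library code):

import torch
import torch.nn as nn
from torch.utils.checkpoint import checkpoint


class CheckpointedBlock(nn.Module):
    def __init__(self):
        super().__init__()
        # Toggled by an enable_gradient_checkpointing()-style call in real code.
        self.gradient_checkpointing = False
        self.layers = nn.Sequential(nn.Linear(64, 64), nn.GELU(), nn.Linear(64, 64))

    def forward(self, x):
        # In eval() mode self.training is False, so this branch is skipped
        # even when gradient_checkpointing is True: full activations are
        # stored and memory use grows accordingly.
        if self.training and self.gradient_checkpointing:
            return checkpoint(self.layers, x, use_reentrant=False)
        return self.layers(x)


block = CheckpointedBlock()
block.gradient_checkpointing = True
block.eval()   # checkpointing silently disabled -> full activation memory
block.train()  # checkpointing active -> activations recomputed, memory bounded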
parent e08d5ded98
commit 9fc6ae7a09

train.py: 2 changed lines (+1, -1)
@@ -891,7 +891,7 @@ def main(args):
     train_dataloader = build_torch_dataloader(train_batch, batch_size=args.batch_size)

-    unet.train() if not args.disable_unet_training else unet.eval()
+    unet.train() if (args.gradient_checkpointing or not args.disable_unet_training) else unet.eval()
     text_encoder.train() if not args.disable_textenc_training else text_encoder.eval()

     logging.info(f" unet device: {unet.device}, precision: {unet.dtype}, training: {unet.training}")
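
Note that putting a "disabled" unet in train() mode does not unfreeze it: train()/eval() only flip the training flag (affecting dropout, batchnorm, and the checkpoint gating above), while gradient flow is governed separately by requires_grad. A small runnable sketch of that distinction, with a stand-in module and flags mirroring the hypothetical args (not taken from train.py):

import torch.nn as nn

unet = nn.Linear(8, 8)          # stand-in for the real UNet
disable_unet_training = True    # mirrors args.disable_unet_training
gradient_checkpointing = True   # mirrors args.gradient_checkpointing

# Freezing is controlled by requires_grad, not by train()/eval():
if disable_unet_training:
    unet.requires_grad_(False)  # no grads accumulate; the optimizer skips it

# train() is still required so the checkpointing branch is taken and
# activation memory stays bounded while only the text encoder trains.
unet.train() if (gradient_checkpointing or not disable_unet_training) else unet.eval()

assert unet.training and not any(p.requires_grad for p in unet.parameters())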