From a26c12774005db488edcb5a6c1d033e610ba2fd8 Mon Sep 17 00:00:00 2001
From: Victor Hall
Date: Tue, 3 Jan 2023 15:17:24 -0500
Subject: [PATCH] last tweak

---
 train.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/train.py b/train.py
index a2ece9a..750ff11 100644
--- a/train.py
+++ b/train.py
@@ -651,6 +651,9 @@ def main(args):
             del timesteps, encoder_hidden_states, noisy_latents
             #with autocast(enabled=args.amp):
             loss = torch_functional.mse_loss(model_pred.float(), target.float(), reduction="mean")
+
+            del target, model_pred
+
             if batch["runt_size"] > 0:
                 grad_scale = batch["runt_size"] / args.batch_size
                 with torch.no_grad(): # not required? just in case for now, needs more testing
@@ -661,7 +664,6 @@ def main(args):
                     for param in text_encoder.parameters():
                         if param.grad is not None:
                             param.grad *= grad_scale
-            del target, model_pred
 
             if args.clip_grad_norm is not None:
                 torch.nn.utils.clip_grad_norm_(parameters=unet.parameters(), max_norm=args.clip_grad_norm)
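
Note on the pattern: the patch moves `del target, model_pred` up to immediately after the loss computation, instead of after the runt-batch gradient scaling. Below is a minimal sketch of the same pattern in a generic PyTorch training step; `model`, `optimizer`, and `training_step` are illustrative stand-ins, not the actual loop from this repo's train.py.

```python
import torch
import torch.nn.functional as F

def training_step(model, noisy_latents, target, optimizer):
    model_pred = model(noisy_latents)
    loss = F.mse_loss(model_pred.float(), target.float(), reduction="mean")

    # Drop the Python references as soon as the loss exists. `loss` still
    # holds the autograd graph (which references these tensors), so their
    # storage is released once backward() frees the graph, rather than
    # lingering through the rest of the step.
    del target, model_pred

    loss.backward()
    optimizer.step()
    optimizer.zero_grad(set_to_none=True)
    return loss.item()
```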