EveryDream2trainer/optimizer.json

{
"doc": {
"base": "base optimizer configuration for unet and text encoder",
"text_encoder_overrides": "text encoder config overrides",
"text_encoder_lr_scale": "if LR not set on text encoder, sets the Lr to a multiple of the Base LR. for example, if base `lr` is 2e-6 and `text_encoder_lr_scale` is 0.5, the text encoder's LR will be set to `1e-6`.",
"-----------------": "-----------------",
"optimizer": "adamw, adamw8bit, lion",
"optimizer_desc": "'adamw' in standard 32bit, 'adamw8bit' is bitsandbytes, 'lion' is lucidrains",
"lr": "learning rate, if null will use CLI or main JSON config value",
"lr_scheduler": "'constant' or 'cosine'",
"lr_warmup_steps": "number of steps to warmup LR to target LR, if null will use CLI or default a value based on max epochs",
"lr_decay_steps": "number of steps to decay LR to zero for cosine, if null will use CLI or default a value based on max epochs",
"betas": "exponential decay rates for the moment estimates",
"epsilon": "value added to denominator for numerical stability, unused for lion",
"weight_decay": "weight decay (L2 penalty)",
"------------------": "-----------------",
"unfreeze_last_n_layers": "if not null, freeze all parameters in the text encoder except for the last n layers and the final layer norm"
    },
"base": {
"optimizer": "adamw8bit",
"lr": 1e-6,
"lr_scheduler": "constant",
"lr_decay_steps": null,
"lr_warmup_steps": null,
"betas": [0.9, 0.999],
"epsilon": 1e-8,
"weight_decay": 0.010
},
"text_encoder_overrides": {
"optimizer": null,
"lr": 5e-7,
"lr_scheduler": "cosine",
"lr_decay_steps": null,
"lr_warmup_steps": null,
"betas": null,
"epsilon": null,
"weight_decay": null
    },
    "text_encoder_freezing": {
        "unfreeze_last_n_layers": null
    },
    "apply_grad_scaler_step_tweaks": true
}