{
  "doc": {
    "unet": "unet config",
    "text_encoder": "text encoder config, if properties are null copies from unet config",
    "text_encoder_lr_scale": "if LR not set on text encoder, sets the LR to a multiple of the Unet LR. for example, if unet `lr` is 2e-6 and `text_encoder_lr_scale` is 0.5, the text encoder's LR will be set to `1e-6`.",
    "-----------------": "-----------------",
    "optimizer": "adamw, adamw8bit, lion",
    "optimizer_desc": "'adamw' is standard 32bit, 'adamw8bit' is bitsandbytes, 'lion' is lucidrains",
    "lr": "learning rate, if null will use CLI or main JSON config value",
    "lr_scheduler": "overrides global lr scheduler from main config",
    "betas": "exponential decay rates for the moment estimates",
    "epsilon": "value added to denominator for numerical stability, unused for lion",
    "weight_decay": "weight decay (L2 penalty)"
  },
  "text_encoder_lr_scale": 0.5,
  "unet": {
    "optimizer": "adamw8bit",
    "lr": 1e-6,
    "lr_scheduler": null,
    "betas": [0.9, 0.999],
    "epsilon": 1e-8,
    "weight_decay": 0.01
  },
  "text_encoder": {
    "optimizer": null,
    "lr": null,
    "lr_scheduler": null,
    "betas": null,
    "epsilon": null,
    "weight_decay": null
  }
}