tweak runpod yaml

Victor Hall 2022-11-10 19:57:59 -05:00
parent 9c12cc6912
commit 9950abda3e
1 changed file with 3 additions and 3 deletions


@@ -65,8 +65,8 @@ model:
 data:
   target: main.DataModuleFromConfig
   params:
-    batch_size: 4
-    num_workers: 1
+    batch_size: 5
+    num_workers: 8
     wrap: falsegit
     train:
       target: ldm.data.every_dream.EveryDreamBatch
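
For context, here is a minimal sketch (not from this repo) of how the batch_size and num_workers values above typically reach a torch DataLoader inside a Lightning data module; the function name and the shuffle flag are illustrative assumptions, not the repo's actual code.

    from torch.utils.data import DataLoader, Dataset

    def make_train_loader(train_dataset: Dataset) -> DataLoader:
        # Mirrors the new config values: slightly larger batches, more loader processes.
        return DataLoader(
            train_dataset,
            batch_size=5,   # was 4 before this commit
            num_workers=8,  # was 1; extra workers keep the GPU fed on a RunPod instance
            shuffle=True,   # assumption; actual shuffling is decided by the repo's DataModule
        )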
@@ -88,7 +88,7 @@ lightning:
       params:
         every_n_epochs: 1 # produce a ckpt every epoch, leave 1!
         #every_n_train_steps: 1400 # can only use epoch or train step checkpoints, can use this if you want instead of every_n_epochs but suggest epochs
-        save_top_k: 2 # *** How many checkpoints you will get to try out, automatically keeps what it thinks are the best. ** REQUIRES ~15GB+ of VOLUME store per checkpoint!!! ***
+        save_top_k: 3 # *** How many checkpoints you will get to try out, automatically keeps what it thinks are the best. ** REQUIRES ~15GB+ of VOLUME store per checkpoint!!! ***
         # Above is important. It costs a lot of VOLUME store but keeps you from having to start over if you overtrain by giving you a few checkpoints to try out.
         save_last: False
         filename: "{epoch:02d}-{step:05d}"
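
These params feed PyTorch Lightning's ModelCheckpoint callback. A hedged sketch of the equivalent call is below; the variable name is an assumption, and monitor/dirpath are left at their defaults rather than taken from this config.

    from pytorch_lightning.callbacks import ModelCheckpoint

    checkpoint_cb = ModelCheckpoint(
        every_n_epochs=1,                   # produce a ckpt every epoch
        save_top_k=3,                       # keep the 3 "best" checkpoints (~15GB+ of volume store each)
        save_last=False,                    # skip writing an extra last.ckpt copy
        filename="{epoch:02d}-{step:05d}",  # e.g. 02-01400.ckpt
    )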