Merge pull request #80 from nawnie/main
Adds Sample_prompts.json toggle
commit d027525eac
@@ -68,7 +68,7 @@
 "outputs": [],
 "source": [
 "#@title Optional connect Gdrive\n",
-"#@markdown # but strongly recommended\n",
+"#@markdown # But strongly recommended\n",
 "#@markdown This will let you put all your training data and checkpoints directly on your drive. Much faster/easier to continue later, less setup time.\n",
 "\n",
 "#@markdown Creates /content/drive/MyDrive/everydreamlogs/ckpt\n",
@@ -82,8 +82,8 @@
 "cell_type": "code",
 "execution_count": null,
 "metadata": {
-"cellView": "form",
-"id": "hAuBbtSvGpau"
+"id": "hAuBbtSvGpau",
+"cellView": "form"
 },
 "outputs": [],
 "source": [
@@ -94,7 +94,7 @@
 "s = getoutput('nvidia-smi')\n",
 "!pip install -q torch==1.13.1+cu117 torchvision==0.14.1+cu117 --extra-index-url \"https://download.pytorch.org/whl/cu117\"\n",
 "!pip install -q transformers==4.25.1\n",
-"!pip install -q diffusers[torch]==0.10.2\n",
+"!pip install -q diffusers[torch]==0.13.0\n",
 "!pip install -q pynvml==11.4.1\n",
 "!pip install -q bitsandbytes==0.35.0\n",
 "!pip install -q ftfy==6.1.1\n",
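The only dependency that changes in this install cell is the diffusers pin, bumped from 0.10.2 to 0.13.0; every other pin stays as it was. If you want to verify the bump took effect after the cell runs, a minimal check (not part of the notebook, just one way you might sanity-test it) is:

import diffusers

# The PR pins diffusers[torch]==0.13.0; an older session may still have 0.10.2 loaded.
print("diffusers version:", diffusers.__version__)
assert diffusers.__version__.startswith("0.13"), "restart the runtime and rerun the install cell"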
@@ -329,7 +329,12 @@
 "#@markdown * Using the same seed each time you train allows for more accurate a/b comparison of models, leave at -1 for random\n",
 "#@markdown * The seed also effects your training samples, if you want the same seed each sample you will need to change it from -1\n",
 "Training_Seed = -1 #@param{type:\"integer\"}\n",
-"\n",
+"#@markdown * use this option to configure a sample_prompts.json\n",
+"#@markdown * check out /content/EveryDream2trainer/doc/logging.md. for more details\n",
+"Advance_Samples = False #@param{type:\"boolean\"}\n",
+"Sample_File = \"sample_prompts.txt\"\n",
+"if Advance_Samples:\n",
+" Sample_File = \"sample_prompts.json\"\n",
 "#@markdown * Checkpointing Saves Vram to allow larger batch sizes minor slow down on a single batch size but will can allow room for a higher traning resolution (suggested on Colab Free tier, turn off for A100)\n",
 "Gradient_checkpointing = True #@param{type:\"boolean\"}\n",
 "Disable_Xformers = False #@param{type:\"boolean\"}\n",
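This is the toggle the PR title refers to: an Advance_Samples checkbox in the Colab form that switches Sample_File from the plain-text prompt list to the JSON sampling config described in doc/logging.md. Stripped of the notebook plumbing, the cell's logic amounts to the sketch below (names copied from the diff; the #@param annotation only has an effect inside a Colab form):

# Sketch of the toggle added in this cell.
Advance_Samples = False  # @param{type:"boolean"} -- rendered as a checkbox in Colab

# Default to the simple prompt list; switch to the richer JSON config when enabled.
Sample_File = "sample_prompts.txt"
if Advance_Samples:
    Sample_File = "sample_prompts.json"

print("Sampling config handed to the trainer:", Sample_File)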
@@ -405,7 +410,7 @@
 " --max_epochs $Max_Epochs \\\n",
 " --project_name \"$Project_Name\" \\\n",
 " --resolution $Resolution \\\n",
-" --sample_prompts \"sample_prompts.txt\" \\\n",
+" --sample_prompts \"$Sample_File\" \\\n",
 " --sample_steps $Steps_between_samples \\\n",
 " --save_every_n_epoch $Save_every_N_epoch \\\n",
 " --seed $Training_Seed \\\n",
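Here the previously hard-coded "sample_prompts.txt" argument becomes "$Sample_File", so the toggle above actually reaches the trainer. The notebook passes it via shell-variable interpolation; a rough Python equivalent, with hypothetical values standing in for the other form fields, would be:

import shlex

# Values the form cells above would have set; the numbers here are hypothetical.
Sample_File = "sample_prompts.json"
Steps_between_samples = 300
Training_Seed = -1

# Tail of the trainer command line as assembled from the form values.
arg_tail = [
    "--sample_prompts", Sample_File,  # was the literal "sample_prompts.txt" before this PR
    "--sample_steps", str(Steps_between_samples),
    "--seed", str(Training_Seed),
]
print(" ".join(shlex.quote(a) for a in arg_tail))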
@@ -501,4 +506,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 0
-}
+}