Merge pull request #60 from qslug/rp-docs
Add screenshots to Runpod docs
commit eea15934ca
@ -47,3 +47,4 @@ Make sure to check out the [tools repo](https://github.com/victorchall/EveryDrea
[Free tier Google Colab notebook](https://colab.research.google.com/github/victorchall/EveryDream2trainer/blob/main/Train_Colab.ipynb)
[Runpod Installer](/installers/Runpod.ipynb)
@ -9,6 +9,11 @@
"\n",
"### [General Instructions](https://github.com/victorchall/EveryDream2trainer/blob/main/README.md)\n",
"\n",
"### Have you installed EveryDream2?\n",
"[Windows Setup](/doc/SETUP.md)\n",
"\n",
"[Runpod Installer](/installers/Runpod.ipynb)\n",
"\n",
"### What's your plan?\n",
"You will want to have your data prepared before starting, and have a rough training plan in mind. \n",
"\n",
@ -142,14 +147,30 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 3,
"id": "6f73fb86-ebef-41e2-9382-4aa11be84be6",
"metadata": {
"scrolled": true,
"tags": []
},
"outputs": [],
"outputs": [
{
"ename": "ModuleNotFoundError",
"evalue": "No module named 'torch'",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m~/Documents/Diffusion/tools/EveryDream2trainer/train.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 29\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mshutil\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 31\u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnn\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfunctional\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mF\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 32\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcuda\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mamp\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mautocast\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mGradScaler\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 33\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mtorchvision\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtransforms\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mtransforms\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'torch'"
]
}
],
"source": [
"wandb=\"\"\n",
"if wandb_token and wandb_token.value:\n",
" wandb=\"--wandb\"\n",
" \n",
"%run train.py --config train.json {wandb} \\\n",
"--resume_ckpt \"{ckpt_name}\" \\\n",
"--project_name \"sd1_mymodel\" \\\n",
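For context, the source block in this hunk appends --wandb to the training command only when a wandb token has been entered, then launches train.py with %run. A minimal sketch of the same optional-flag pattern outside Jupyter, where ckpt_name and use_wandb are hypothetical stand-ins for the notebook's widget values and not part of this commit:

# Sketch of the optional-flag pattern from the notebook cell, as a plain script.
# ckpt_name and use_wandb are illustrative stand-ins, not part of this commit.
import subprocess

ckpt_name = "sd_v1-5_vae.ckpt"  # assumed checkpoint filename
use_wandb = False               # set True when a wandb token has been entered

cmd = [
    "python", "train.py",
    "--config", "train.json",
    "--resume_ckpt", ckpt_name,
    "--project_name", "sd1_mymodel",
]
if use_wandb:
    cmd.append("--wandb")  # pass the flag only when wandb logging is wanted

subprocess.run(cmd, check=True)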
File diff suppressed because one or more lines are too long