Add civitai downloader + improvements

- Improve downloader
- Remove scripts from installer
- Better symlinking
- Link .ckpt and .safetensors/.safetensor/.st files
- Fix find+delete .ipynb_checkpoints
- Symlinking and xformers enabled by default
- Organize repository files
Drake Panzer 2023-01-22 19:39:55 -07:00
parent de22b9c8a4
commit 7117369ba1
No known key found for this signature in database
GPG Key ID: 194A1C358AACFC39
4 changed files with 210 additions and 220 deletions


@@ -40,25 +40,23 @@
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"**Where to store the models**\n", "**Where to store your files**\n",
"\n", "\n",
"`/storage/` is persistent storage shared across all machines on your account. Mounted to your machine.\n", "`/storage/` is persistent storage shared across all machines on your account. Mounted to your machine.\n",
"\n", "\n",
"`/notebooks/` is storage for this notebook only. This directory has to be copied into your machine which can increase start/stop times if the directory is very large. To avoid this, put large files in `/storage/`.\n", "`/notebooks/` is storage for this notebook only. This directory has to be copied into your machine which can increase start/stop times if it's very large. To avoid this, put large files in `/storage/`.\n",
"\n", "\n",
"`/tmp/` <mark style=\"background-color:lime\">is not a persistent directory, meaning your files there will be deleted when the machine turns off.</mark>\n", "`/tmp/` <mark style=\"background-color:lime\">is not a persistent directory, meaning your files there will be deleted when the machine turns off.</mark>\n",
"\n", "\n",
"<br>\n", "<br>\n",
"\n", "\n",
"<mark style=\"background-color: #ff780082\">If you are having storage issues</mark>, set `repo_storage_dir` to `/tmp/stable-diffusion`.\n", "<mark style=\"background-color: #ff780082\">If you are having storage issues</mark>, set `repo_storage_dir` to `/tmp/stable-diffusion`. Make sure `symlink_to_notebooks` is set to `True` so it gets linked back to `/notebooks/`.\n",
"\n", "\n",
"<br><br>\n", "<br>\n",
"\n", "\n",
"<mark>You must uncomment the correct section and run the block below or else the notebook won't work!</mark>\n", "<mark>You must uncomment the correct section and run the block below or else the notebook won't work!</mark>\n",
"\n", "\n",
"Select the section you want and do `ctrl + /` to uncomment.\n", "Select the section you want and do `ctrl + /` to uncomment. If you change any settings here, rerun this cell."
"\n",
"Make sure there isn't a space in front of any variable. Don't put a trailing slash on directory paths. If you change any settings here, rerun this cell."
] ]
}, },
{ {
@@ -67,31 +65,25 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"# Free tier\n", "model_storage_dir = '/storage/models' # Where to store your model checkpoints.\n",
"# symlink_to_notebooks = True # Enables the creation of symlinks back to /notebooks/\n",
"# model_storage_dir = '/tmp/stable-diffusion/models' # Where the models will be downloaded to.\n",
"# repo_storage_dir = '/notebooks' # Where the repository will be downloaded to.\n",
"# pip_cache_dir = None # Cache pip downloads to this directory so you don't have to re-download everything.\n",
"\n", "\n",
"# Paid Tier\n", "repo_storage_dir = '/storage/stable-diffusion' # Where to store your Stable Diffusion-related files.\n",
"# symlink_to_notebooks = False\n", "\n",
"# model_storage_dir = '/storage/models'\n", "pip_cache_dir = None # The installer can cache pip wheels so you don't have to re-download them\n",
"# repo_storage_dir = '/notebooks'\n", " # next time you start the machine. I recommed setting it to '/storage/pip/cache'.\n",
"# pip_cache_dir = '/storage/pip/cache'\n",
"\n", "\n",
"\n", "\n",
"# Other optional settings\n", "# Other optional settings\n",
"# You don't have to change these if you don't want to\n", "# You don't have to change these if you don't want to\n",
"\n", "\n",
"activate_xformers = False # Enables the xformers optimizations using pre-built wheels.\n", "symlink_to_notebooks = True # Enables the creation of symlinks back to /notebooks/\n",
"\n",
"activate_xformers = True # Enables the xformers optimizations using pre-built wheels.\n",
" # Setting to True will automatically set up your environment/machine for xformers. \n", " # Setting to True will automatically set up your environment/machine for xformers. \n",
"\n", "\n",
"link_novelai_anime_vae = False # Enables the linking of animevae.pt to each of the NovelAI models.\n", "link_novelai_anime_vae = True # Enables the linking of animevae.pt to each of the NovelAI models.\n",
" # Set to True if you've downloaded both the NovelAI models and hypernetworks.\n", " # Set to True if you've downloaded both the NovelAI models and hypernetworks.\n",
"\n", "\n",
"download_scripts = False # Download custom scripts? Only reason why you would leave it disabled is because it may\n",
" # take a while to complete.\n",
"\n",
"activate_deepdanbooru = False # Enable and install DeepDanbooru -> https://github.com/KichangKim/DeepDanbooru\n", "activate_deepdanbooru = False # Enable and install DeepDanbooru -> https://github.com/KichangKim/DeepDanbooru\n",
"\n", "\n",
"activate_medvram = True # Enable medvram option.\n", "activate_medvram = True # Enable medvram option.\n",
@@ -112,14 +104,14 @@
"ui_theme = None # Set the WEB UI theme. Values can be None (default) or 'dark'.\n", "ui_theme = None # Set the WEB UI theme. Values can be None (default) or 'dark'.\n",
"\n", "\n",
"insecure_extension_access = False # Force enable extensions without a password.\n", "insecure_extension_access = False # Force enable extensions without a password.\n",
" # If you don't set a password, anyone can install and run arbitrary code on your machine!\n", " # If you don't set a password anyone can install and run arbitrary code on your machine!\n",
" # Instead, use gradio_auth which will automatically enable extensions when set.\n", " # Instead, use gradio_auth which will automatically enable extensions when set.\n",
"\n", "\n",
"export_storage_dir = '/notebooks/exports' # Where the generated images will be exported to.\n", "export_storage_dir = '/notebooks/exports' # Where the generated images will be exported to.\n",
" \n", " \n",
"# ===================================================================================================\n", "# ===================================================================================================\n",
"# Save variables to Jupiter's temp storage so we can access it even if the kernel restarts.\n", "# Save variables to Jupiter's temp storage so we can access it even if the kernel restarts.\n",
"%store symlink_to_notebooks model_storage_dir repo_storage_dir export_storage_dir activate_xformers link_novelai_anime_vae download_scripts activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth search_paperspace_datasets ui_theme insecure_extension_access pip_cache_dir" "%store symlink_to_notebooks model_storage_dir repo_storage_dir export_storage_dir activate_xformers link_novelai_anime_vae activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth search_paperspace_datasets ui_theme insecure_extension_access pip_cache_dir"
] ]
}, },
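The `%store` magic above comes from IPython's storemagic extension: it persists variables to IPython's database so that later cells, or a restarted kernel, can restore them with `%store -r`. A minimal sketch of the round trip (variable name taken from the cell above):

    model_storage_dir = '/storage/models'
    %store model_storage_dir       # persist the variable to IPython's database
    # ...kernel restarts, namespace is wiped...
    %store -r model_storage_dir    # restore it into the current namespace
    print(model_storage_dir)       # -> /storage/models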
{ {
@@ -149,7 +141,6 @@
}, },
"outputs": [], "outputs": [],
"source": [ "source": [
"import os\n",
"# You'll see this little code block at the beginning of every cell.\n", "# You'll see this little code block at the beginning of every cell.\n",
"# It makes sure you have ran the first block that defines your settings.\n", "# It makes sure you have ran the first block that defines your settings.\n",
"try:\n", "try:\n",
@@ -161,33 +152,41 @@
" print('Error:', e)\n", " print('Error:', e)\n",
" import sys\n", " import sys\n",
" sys.exit(1)\n", " sys.exit(1)\n",
" \n",
"import os\n",
"from pathlib import Path\n",
"\n", "\n",
"%cd /notebooks/\n", "repo_storage_dir = Path(repo_storage_dir)\n",
"stable_diffusion_webui_path = repo_storage_dir / 'stable-diffusion-webui'\n",
"\n", "\n",
"def delete_broken_symlinks(path):\n", "if not stable_diffusion_webui_path.exists():\n",
" # make sure to pass this function a path without a trailing slash\n", " !mkdir -p \"{stable_diffusion_webui_path}\"\n",
" for file in os.listdir(path):\n", " !git clone https://github.com/AUTOMATIC1111/stable-diffusion-webui \"{stable_diffusion_webui_path}\"\n",
" if os.path.islink(f'{path}/{file}') and not os.path.exists(os.readlink(f'{path}/{file}')):\n", "else:\n",
" print(f'Symlink broken, removing: {file}')\n", " print('stable-diffusion-webui already downloaded, updating...')\n",
" os.unlink(f'{path}/{file}')\n", " !cd \"{stable_diffusion_webui_path}\" && git pull # no % so we don't interfere with the main process\n",
"\n", "\n",
"def update_repo_if_not_exists(path, repo_clone_url, pre=None):\n", "!mkdir -p \"{repo_storage_dir / 'stable-diffusion-webui' / 'outputs'}\"\n",
" if pre is not None:\n", "!mkdir -p \"{repo_storage_dir / 'stable-diffusion-webui' / 'logs'}\"\n",
" pre() \n",
" if not os.path.exists(path):\n",
" !git clone \"{repo_clone_url}\" \"{path}\"\n",
" else:\n",
" print(f'{repo_clone_url.split(\"/\")[-1]} already downloaded, updating...')\n",
" !cd \"{path}\" && git pull # no % so we don't interfere with the main process\n",
"\n", "\n",
"def init_free():\n", "symlinks = [\n",
" if (symlink_to_notebooks and repo_storage_dir != '/notebooks'):\n", " (repo_storage_dir / 'stable-diffusion-webui', Path('/notebooks/stable-diffusion-webui')),\n",
" delete_broken_symlinks('/notebooks/') # remove broken symlinks since it might have been installed in a non-persistent directory\n", " (repo_storage_dir / 'stable-diffusion-webui' / 'outputs', Path('/notebooks/outputs')),\n",
" if not os.path.exists(repo_storage_dir):\n", " (repo_storage_dir / 'stable-diffusion-webui' / 'logs', repo_storage_dir / 'stable-diffusion-webui' / 'outputs' / 'logs'),\n",
" !mkdir -p \"{repo_storage_dir}\"\n", " (Path('/storage'), Path('/notebooks/storage')),\n",
" !ln -s \"{repo_storage_dir}\" /notebooks/\n", " ]\n",
" !ls -la /notebooks/stable-diffusion\n", "\n",
"update_repo_if_not_exists(f'{repo_storage_dir}/stable-diffusion-webui', 'https://github.com/AUTOMATIC1111/stable-diffusion-webui', init_free)" "if symlink_to_notebooks and repo_storage_dir != '/notebooks':\n",
" print('\\nCreating Symlinks...')\n",
" for src, dest in symlinks:\n",
" # If `/notebooks/stable-diffusion-webui` is a broken symlink then remove it.\n",
" # The WebUI might have previously been installed in a non-persistent directory.\n",
" if dest.is_symlink() and not dest.exists(): # .exists() validates a symlink\n",
" print('Symlink broken, removing:', dest)\n",
" dest.unlink()\n",
" if not dest.exists():\n",
" os.symlink(src, dest)\n",
" print(src, '->', os.path.realpath(dest))"
] ]
}, },
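The broken-symlink check above relies on the fact that `Path.exists()` follows a symlink to its target while `Path.is_symlink()` does not, so a dangling link reports `is_symlink() == True` and `exists() == False`. A self-contained illustration (the /tmp paths are hypothetical):

    import os
    from pathlib import Path

    os.symlink('/tmp/does-not-exist', '/tmp/dangling')  # target is never created
    p = Path('/tmp/dangling')
    print(p.is_symlink(), p.exists())  # True False -> dangling, safe to unlink
    p.unlink()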
{ {
@@ -211,8 +210,8 @@
"outputs": [], "outputs": [],
"source": [ "source": [
"try:\n", "try:\n",
" %store -r symlink_to_notebooks model_storage_dir repo_storage_dir activate_xformers download_scripts activate_deepdanbooru pip_cache_dir\n", " %store -r symlink_to_notebooks model_storage_dir repo_storage_dir activate_xformers activate_deepdanbooru pip_cache_dir\n",
" test = [symlink_to_notebooks, model_storage_dir, repo_storage_dir, activate_xformers, download_scripts, activate_deepdanbooru, pip_cache_dir]\n", " test = [symlink_to_notebooks, model_storage_dir, repo_storage_dir, activate_xformers, activate_deepdanbooru, pip_cache_dir]\n",
"except NameError as e:\n", "except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n", " print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n", " print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
@@ -220,7 +219,7 @@
" import sys\n", " import sys\n",
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"%cd \"{repo_storage_dir}/stable-diffusion-webui\"\n", "%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"\n", "\n",
"!pip install --upgrade pip\n", "!pip install --upgrade pip\n",
"!pip install --upgrade wheel setuptools\n", "!pip install --upgrade wheel setuptools\n",
@@ -237,76 +236,19 @@
"import launch\n", "import launch\n",
"launch.prepare_environment()\n", "launch.prepare_environment()\n",
"\n", "\n",
"# Install things for this notebook\n",
"!pip install requests gdown bs4 markdownify\n",
"\n",
"# The installer isn't installing deepdanbooru right now so we'll do it manually.\n", "# The installer isn't installing deepdanbooru right now so we'll do it manually.\n",
"if activate_deepdanbooru:\n", "if activate_deepdanbooru:\n",
" !pip install \"git+https://github.com/KichangKim/DeepDanbooru.git@edf73df4cdaeea2cf00e9ac08bd8a9026b7a7b26#egg=deepdanbooru[tensorflow]\" # tensorflow==2.10.0 tensorflow-io==0.27.0 flatbuffers==1.12\n", " # https://github.com/KichangKim/DeepDanbooru/releases\n",
" !pip install \"git+https://github.com/KichangKim/DeepDanbooru.git@v3-20211112-sgd-e28#egg=deepdanbooru[tensorflow]\" # $(curl --silent \"https://api.github.com/KichangKim/DeepDanbooru/releases/latest\" | grep '\"tag_name\":' | sed -E 's/.*\"([^\"]+)\".*/\\1/')#egg=deepdanbooru[tensorflow]\" # tensorflow==2.10.0 tensorflow-io==0.27.0 flatbuffers==1.12\n",
"\n", "\n",
"# latent-diffusion is a requirement but launch.py isn't downloading it so we'll do it manually.\n", "# latent-diffusion is a requirement but launch.py isn't downloading it so we'll do it manually.\n",
"# TODO: can this be removed?\n", "# TODO: can this be removed?\n",
"if not os.path.exists(f'{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion'):\n", "# if not os.path.exists(f'{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion'):\n",
" !git clone https://github.com/crowsonkb/k-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/k-diffusion\"\n", "# !git clone https://github.com/crowsonkb/k-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/k-diffusion\"\n",
" !git clone https://github.com/Hafiidz/latent-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion\"\n", "# !git clone https://github.com/Hafiidz/latent-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion\"\n",
"\n",
"# For things in this notebook\n",
"!pip install requests\n",
"!pip install gdown\n",
"\n",
"# Download popular custom scripts. This is basically remote code execution so be careful.\n",
"# See https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Custom-Scripts\n",
"if download_scripts:\n",
" import shutil\n",
" import requests\n",
" !pip install moviepy==1.0.3\n",
" !apt update\n",
" !apt install -y potrace python3-tk\n",
"\n",
" def download_file_dir(url, output_dir):\n",
" # output_dir must have a trailing slash\n",
" local_filename = url.split('/')[-1]\n",
" with requests.get(url, stream=True) as r:\n",
" r.raise_for_status()\n",
" with open(f'{output_dir}{local_filename}', 'wb') as f:\n",
" for chunk in r.iter_content(chunk_size=8192):\n",
" f.write(chunk)\n",
" return local_filename\n",
" def do_script_download(scripts_list, domain, path):\n",
" for item in scripts_list:\n",
" download_file_dir(f'https://{domain}/{item}', path)\n",
" print(f'{item.split(\"/\")[-1]} downloaded...')\n",
"\n",
" do_script_download([\n",
" 'GRMrGecko/stable-diffusion-webui-automatic/advanced_matrix/scripts/advanced_prompt_matrix.py',\n",
" 'dfaker/stable-diffusion-webui-cv2-external-masking-script/main/external_masking.py',\n",
" 'memes-forever/Stable-diffusion-webui-video/main/videos.py',\n",
" 'yownas/seed_travel/main/scripts/seed_travel.py',\n",
" 'Animator-Anon/Animator/main/animation.py',\n",
" 'Filarius/stable-diffusion-webui/master/scripts/vid2vid.py',\n",
" 'GeorgLegato/Txt2Vectorgraphics/main/txt2vectorgfx.py',\n",
" 'yownas/shift-attention/main/scripts/shift_attention.py',\n",
" 'DiceOwl/StableDiffusionStuff/main/loopback_superimpose.py',\n",
" 'Engineer-of-Stuff/stable-diffusion-paperspace/main/lfs/save_steps.py',\n",
" 'Pfaeff/sd-web-ui-scripts/main/moisaic.py'\n",
" ], 'raw.githubusercontent.com', f'{repo_storage_dir}/stable-diffusion-webui/scripts/')\n",
"\n",
" do_script_download([\n",
" 'dfaker/f88aa62e3a14b559fe4e5f6b345db664/raw/791dabfa0ab26399aa2635bcbc1cf6267aa4ffc2/alternate_sampler_noise_schedules.py',\n",
" 'camenduru/9ec5f8141db9902e375967e93250860f/raw/c1a03eb447548adbef1858c0e69d3567a390d2f4/run_n_times.py'\n",
" ], 'gist.githubusercontent.com', f'{repo_storage_dir}/stable-diffusion-webui/scripts/')\n",
"\n",
" # Download and set up txt2img2img\n",
" update_repo_if_not_exists(f'{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root', 'https://github.com/ThereforeGames/txt2img2img.git')\n",
" !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root/scripts\" \"{repo_storage_dir}/stable-diffusion-webui\"\n",
" !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root/txt2img2img\" \"{repo_storage_dir}/stable-diffusion-webui\"\n",
" !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root/venv\" \"{repo_storage_dir}/stable-diffusion-webui\"\n",
"\n",
" # Download and set up txt2mask\n",
" update_repo_if_not_exists(f'{repo_storage_dir}/stable-diffusion-webui/txt2mask', 'https://github.com/ThereforeGames/txt2mask.git')\n",
" !echo \"Copying txt2mask...\"\n",
" !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2mask/repositories/clipseg\" \"{repo_storage_dir}/stable-diffusion-webui/repositories\"\n",
" !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2mask/scripts/\" \"{repo_storage_dir}/stable-diffusion-webui/\"\n",
"\n",
" # Install the dynamic-prompts/wildcard script\n",
" # !git clone https://github.com/adieyal/sd-dynamic-prompting/ extensions/dynamic-prompts\n",
"\n", "\n",
"if activate_xformers:\n", "if activate_xformers:\n",
" print('Installing xformers...')\n", " print('Installing xformers...')\n",
@@ -343,19 +285,12 @@
" xformers_whl = download_release('https://raw.githubusercontent.com/Cyberes/xformers-compiled/main/various/xformers-0.0.14.dev0-cp37-cp37m-linux_x86_64.whl')\n", " xformers_whl = download_release('https://raw.githubusercontent.com/Cyberes/xformers-compiled/main/various/xformers-0.0.14.dev0-cp37-cp37m-linux_x86_64.whl')\n",
" !pip install --force-reinstall \"{xformers_whl}\"\n", " !pip install --force-reinstall \"{xformers_whl}\"\n",
"\n", "\n",
"# Make sure your models storage directory exists\n", "# Make sure important directories exists\n",
"!mkdir -p \"{model_storage_dir}/hypernetworks\"\n", "!mkdir -p \"{model_storage_dir}/hypernetworks\"\n",
"!mkdir -p \"{model_storage_dir}/vae\"\n", "!mkdir -p \"{model_storage_dir}/vae\"\n",
"!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/models/hypernetworks\"\n", "!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/models/hypernetworks\"\n",
"\n", "!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/models/VAE\"\n",
"# Link the output folders to /notebooks/outputs\n",
"!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/log/images\"\n", "!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/log/images\"\n",
"!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/outputs\"\n",
"!ln -s \"{repo_storage_dir}/stable-diffusion-webui/outputs\" /notebooks/\n",
"!ln -s \"{repo_storage_dir}/stable-diffusion-webui/log\" \"{repo_storage_dir}/stable-diffusion-webui/outputs\"\n",
"\n",
"# Link /storage/ to /notebooks/\n",
"!ln -s /storage/ /notebooks/\n",
"\n", "\n",
"!echo -e \"\\n===================================\\nDone! If you're seeing this the process has exited successfully.\\n\"" "!echo -e \"\\n===================================\\nDone! If you're seeing this the process has exited successfully.\\n\""
] ]
@@ -432,7 +367,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-2/resolve/main/768-v-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-768-v-ema.ckpt\"\n", "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-2/resolve/main/768-v-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-768-v-ema.ckpt\"\n",
"!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml -O \"{model_storage_dir}/sd-v2-0-768-v-ema.yaml\"" "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml -O \"{model_storage_dir}/sd-v2-0-768-v-ema.yaml\""
] ]
}, },
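A note on this and the following download cells: the `dpkg-query ... | grep -c "ok installed"` guard installs aria2 only when it is missing, and the aria2c switches are standard documented options:

    # --file-allocation=none    don't pre-allocate the file on disk (faster start)
    # -c / --continue           resume a partially downloaded file (-c is the short
    #                           alias, so passing both is harmless but redundant)
    # -x 16                     up to 16 connections per server
    # -s 16                     split the download into 16 pieces
    # --summary-interval=0      suppress the periodic progress summary
    # --console-log-level=warn  only log warnings and errors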
@@ -462,7 +397,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-2-base/resolve/main/512-base-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-base-ema.ckpt\"\n", "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-2-base/resolve/main/512-base-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-base-ema.ckpt\"\n",
"!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference.yaml -O \"{model_storage_dir}/sd-v2-0-512-base-ema.yaml\"" "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference.yaml -O \"{model_storage_dir}/sd-v2-0-512-base-ema.yaml\""
] ]
}, },
@@ -490,7 +425,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-2-depth/resolve/main/512-depth-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-depth-ema.ckpt\"\n", "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-2-depth/resolve/main/512-depth-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-depth-ema.ckpt\"\n",
"!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-midas-inference.yaml -O \"{model_storage_dir}/sd-v2-0-512-depth-ema.yaml\"" "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-midas-inference.yaml -O \"{model_storage_dir}/sd-v2-0-512-depth-ema.yaml\""
] ]
}, },
@@ -518,7 +453,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler/resolve/main/x4-upscaler-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-x4-upscaler-ema.ckpt\"\n", "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler/resolve/main/x4-upscaler-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-x4-upscaler-ema.ckpt\"\n",
"!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/x4-upscaling.yaml -O \"{model_storage_dir}/sd-v2-0-x4-upscaler-ema.yaml\"" "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/x4-upscaling.yaml -O \"{model_storage_dir}/sd-v2-0-x4-upscaler-ema.yaml\""
] ]
}, },
@@ -736,7 +671,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step60000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step60000.ckpt\"" "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step60000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step60000.ckpt\""
] ]
}, },
{ {
@@ -763,7 +698,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step95000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step95000.ckpt\"" "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step95000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step95000.ckpt\""
] ]
}, },
{ {
@@ -790,7 +725,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step115000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step115000.ckpt\"" "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step115000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step115000.ckpt\""
] ]
}, },
{ {
@@ -819,7 +754,7 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_characters_19.2m_stable_diffusion_v1/resolve/main/trinart_characters_it4_v1.ckpt -d \"{model_storage_dir}\" -o \"trinart_characters_it4_v1.ckpt\"" "!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_characters_19.2m_stable_diffusion_v1/resolve/main/trinart_characters_it4_v1.ckpt -d \"{model_storage_dir}\" -o \"trinart_characters_it4_v1.ckpt\""
] ]
}, },
{ {
@@ -942,10 +877,9 @@
"!aria2c -d . --bt-metadata-only=true --bt-save-metadata=true --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"magnet:?xt=urn:btih:5bde442da86265b670a3e5ea3163afad2c6f8ecc&dn=novelaileak&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337%2Fannounce&tr=udp%3A%2F%2F9.rarbg.com%3A2810%2Fannounce&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A6969%2Fannounce&tr=http%3A%2F%2Ftracker.openbittorrent.com%3A80%2Fannounce&tr=udp%3A%2F%2Fopentracker.i2p.rocks%3A6969%2Fannounce\"\n", "!aria2c -d . --bt-metadata-only=true --bt-save-metadata=true --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"magnet:?xt=urn:btih:5bde442da86265b670a3e5ea3163afad2c6f8ecc&dn=novelaileak&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337%2Fannounce&tr=udp%3A%2F%2F9.rarbg.com%3A2810%2Fannounce&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A6969%2Fannounce&tr=http%3A%2F%2Ftracker.openbittorrent.com%3A80%2Fannounce&tr=udp%3A%2F%2Fopentracker.i2p.rocks%3A6969%2Fannounce\"\n",
"!aria2c --select-file=76,81,82,83,84,85,86,87,88,89,90,91,92,93 --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"{infohash}.torrent\"\n", "!aria2c --select-file=76,81,82,83,84,85,86,87,88,89,90,91,92,93 --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"{infohash}.torrent\"\n",
"# -exec mv doesn't work with python variables so we'll set an environment variable instead\n", "# -exec mv doesn't work with python variables so we'll set an environment variable instead\n",
"import os\n",
"os.environ[\"MODEL_STORAGE_DIR\"] = model_storage_dir\n",
"!rm novelaileak/stableckpt/extra-sd-prune/sd-prune/anime700k-64bs-0.1ucg-penultimate-1epoch-clip-ema-continue-76000.pt # aria2 downloads this file even though I told it not to\n", "!rm novelaileak/stableckpt/extra-sd-prune/sd-prune/anime700k-64bs-0.1ucg-penultimate-1epoch-clip-ema-continue-76000.pt # aria2 downloads this file even though I told it not to\n",
"!find novelaileak/ -type f -name '*.pt' -exec mv {} \"$MODEL_STORAGE_DIR/hypernetworks\" \\;" "import subprocess\n",
"s = subprocess.run(f'find novelaileak/ -type f -name \"*.pt\" -exec mv \"{{}}\" \"{Path(model_storage_dir, \"hypernetworks\")}\" \\;', shell=True)"
] ]
}, },
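The torrent cell above drives aria2 in two passes; a condensed sketch of the flow, with the magnet URI and file indices abbreviated (`{magnet_link}` and `{infohash}` stand in for the real values):

    # pass 1: fetch only the torrent metadata and save it as {infohash}.torrent
    !aria2c --bt-metadata-only=true --bt-save-metadata=true "{magnet_link}"
    # pass 2: download just the selected file indices (1-based, within the torrent)
    # from the saved metadata; --seed-time=0 stops seeding as soon as it finishes
    !aria2c --select-file=76,81 --seed-time=0 "{infohash}.torrent"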
{ {
@@ -974,7 +908,6 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"# Get some storage back\n", "# Get some storage back\n",
"\n",
"if not pip_cache_dir:\n", "if not pip_cache_dir:\n",
" !pip cache purge\n", " !pip cache purge\n",
" !echo \"Purged pip cache\"\n", " !echo \"Purged pip cache\"\n",
@@ -1014,104 +947,102 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"import os\n", "import os\n",
"import glob\n", "from glob import glob\n",
"import subprocess\n",
"import re\n",
"from pathlib import Path\n", "from pathlib import Path\n",
"import sys\n", "import sys\n",
"\n", "\n",
"if not os.path.isdir(model_storage_dir):\n", "model_storage_dir = Path(model_storage_dir)\n",
"\n",
"if not model_storage_dir.exists():\n",
" print('Your model storage directory does not exist:', model_storage_dir)\n", " print('Your model storage directory does not exist:', model_storage_dir)\n",
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"webui_model_path = Path(repo_storage_dir, 'stable-diffusion-webui/models')\n", "webui_root_model_path = Path(repo_storage_dir, 'stable-diffusion-webui/models')\n",
"webui_sd_models = Path(webui_model_path, 'Stable-diffusion')\n", "webui_sd_model_path = Path(webui_root_model_path, 'Stable-diffusion')\n",
"webui_hypernetworks = Path(webui_model_path, 'hypernetworks')\n", "webui_hypernetwork_path = Path(webui_root_model_path, 'hypernetworks')\n",
"webui_vaes = Path(webui_model_path, 'VAE')\n", "webui_vae_path = Path(webui_root_model_path, 'VAE')\n",
"\n", "\n",
"def delete_broken_symlinks(dir):\n", "def delete_broken_symlinks(dir):\n",
" deleted = False\n", " deleted = False\n",
" for file in os.listdir(dir):\n", " dir = Path(dir)\n",
" path = f'{dir}/{file}'\n", " for file in dir.iterdir():\n",
" if os.path.islink(path) and not os.path.exists(os.readlink(path)):\n", " if file.is_symlink() and not file.exists():\n",
" print(f'Symlink broken, removing: {file}')\n", " print('Symlink broken, removing:', file)\n",
" os.unlink(path)\n", " file.unlink()\n",
" deleted = True\n", " deleted = True\n",
" if deleted:\n", " if deleted:\n",
" print('')\n", " print('')\n",
" \n", "\n",
"def create_symlink(source, dest):\n", "def create_symlink(source, dest):\n",
" if os.path.isdir(dest):\n", " if os.path.isdir(dest):\n",
" dest = Path(dest, os.path.basename(source))\n", " dest = Path(dest, os.path.basename(source))\n",
" if not os.path.exists(dest):\n", " if not dest.exists():\n",
" !ln -s \"{source}\" \"{dest}\"\n", " os.symlink(source, dest)\n",
" print(source, '->', Path(dest).absolute())\n", " print(source, '->', Path(dest).absolute())\n",
"\n", "\n",
"# Check for broken symlinks and remove them\n", "# Check for broken symlinks and remove them\n",
"print('Removing broken symlinks...')\n", "print('Removing broken symlinks...')\n",
"delete_broken_symlinks(webui_sd_models)\n", "delete_broken_symlinks(webui_sd_model_path)\n",
"delete_broken_symlinks(webui_hypernetworks)\n", "delete_broken_symlinks(webui_hypernetwork_path)\n",
"delete_broken_symlinks(webui_vaes)\n", "delete_broken_symlinks(webui_vae_path)\n",
"\n", "\n",
"# Link .ckpt files (recursive)\n", "def link_ckpts(source_path):\n",
"print('\\nLinking .ckpt files...')\n", " # Link .ckpt and .safetensor/.st files (recursive)\n",
"for file in glob.glob(f'{model_storage_dir}/**/*.ckpt', recursive=True):\n", " print('\\nLinking .ckpt and .safetensor/.safetensors/.st files in', source_path)\n",
" if not Path(file).parent.parts[-1] in ['hypernetworks', 'vae'] :\n", " source_path = Path(source_path)\n",
" if not os.path.exists(file):\n", " for file in [p for p in source_path.rglob('*') if p.suffix in ['.ckpt', '.safetensor', '.safetensors', '.st']]:\n",
" print('New model:', os.path.basename(file))\n", " if Path(file).parent.parts[-1] not in ['hypernetworks', 'vae'] :\n",
" create_symlink(file, webui_sd_models)\n", " if not (webui_sd_model_path / file.name):\n",
" print('New model:', file.name)\n",
" create_symlink(file, webui_sd_model_path)\n",
" # Link config yaml files\n",
" print('\\nLinking config .yaml files in', source_path)\n",
" for file in model_storage_dir.glob('*.yaml'):\n",
" create_symlink(file, webui_sd_model_path)\n",
"\n", "\n",
"# Link config yaml files\n", "\n",
"print('\\nLinking config .yaml files...')\n", "link_ckpts(model_storage_dir)\n",
"for file in glob.glob(f'{model_storage_dir}/**/*.yaml', recursive=True):\n",
" create_symlink(file, webui_sd_models)\n",
"\n", "\n",
"# Link hypernetworks\n", "# Link hypernetworks\n",
"print('\\nLinking hypernetworks...')\n", "print('\\nLinking hypernetworks...')\n",
"source_hypernetworks = Path(model_storage_dir, 'hypernetworks')\n", "hypernetwork_source_path = Path(model_storage_dir, 'hypernetworks')\n",
"if os.path.isdir(source_hypernetworks):\n", "if hypernetwork_source_path.is_dir():\n",
" for file in os.listdir(source_hypernetworks):\n", " for file in hypernetwork_source_path.iterdir():\n",
" create_symlink(Path(source_hypernetworks, file), webui_hypernetworks)\n", " create_symlink(hypernetwork_source_path / file, webui_hypernetwork_path)\n",
"else:\n", "else:\n",
" print('Hypernetwork storage directory not found:', source_hypernetworks)\n", " print('Hypernetwork storage directory not found:', hypernetwork_source_path)\n",
"\n", "\n",
"# Link VAEs\n", "# Link VAEs\n",
"print('\\nLinking VAEs...')\n", "print('\\nLinking VAEs...')\n",
"source_vaes = Path(model_storage_dir, 'vae')\n", "vae_source_path = Path(model_storage_dir, 'vae')\n",
"if os.path.isdir(source_vaes):\n", "if vae_source_path.is_dir():\n",
" for file in os.listdir(source_vaes):\n", " for file in vae_source_path.iterdir():\n",
" create_symlink(Path(source_vaes, file), webui_vaes)\n", " create_symlink(vae_source_path / file, webui_vae_path)\n",
"else:\n", "else:\n",
" print('VAE storage directory not found:', source_vaes)\n", " print('VAE storage directory not found:', vae_source_path)\n",
"\n", "\n",
"# Link the NovelAI files for each of the NovelAI models\n", "# Link the NovelAI files for each of the NovelAI models\n",
"print('\\nLinking NovelAI files for each of the NovelAI models...')\n", "print('\\nLinking NovelAI files for each of the NovelAI models...')\n",
"for model in glob.glob(f'{model_storage_dir}/novelai-*.ckpt'):\n", "for model in model_storage_dir.glob('novelai-*.ckpt'):\n",
" yaml = model.replace('.ckpt', '.yaml')\n", " yaml = model.stem + '.yaml'\n",
" if os.path.exists(yaml):\n", " if os.path.exists(yaml):\n",
" # if not os.path.exists(yaml):\n", " print('New NovelAI model config:', yaml)\n",
" print(f'New NovelAI model config: {yaml}')\n", " create_symlink(yaml, webui_sd_model_path)\n",
" create_symlink(yaml, webui_sd_models)\n",
"\n", "\n",
"if link_novelai_anime_vae:\n", "if link_novelai_anime_vae:\n",
" print('\\nLinking NovelAI anime VAE...')\n", " print('\\nLinking NovelAI anime VAE...')\n",
" for model in glob.glob('novelai-*.ckpt'):\n", " for model in model_storage_dir.glob('novelai-*.ckpt'):\n",
" if os.path.exists(Path(model_storage_dir, 'hypernetworks/animevae.pt')):\n", " if (model_storage_dir / 'hypernetworks' / 'animevae.pt').is_file():\n",
" vae = model.replace('.ckpt', '.vae.pt')\n", " vae = model.stem + '.vae.pt'\n",
" if not os.path.exists(webui_vaes):\n", " if not os.path.exists(webui_vae_path):\n",
" print(f'Linking NovelAI {vae} and {model}')\n", " print(f'Linking NovelAI {vae} and {model}')\n",
" create_symlink(Path(model_storage_dir, 'hypernetworks/animevae.pt'), webui_vaes)\n", " create_symlink(model_storage_dir / 'hypernetworks' / 'animevae.pt', webui_vae_path)\n",
" else:\n", " else:\n",
" print(f'{model_storage_dir}/hypernetworks/animevae.pt NOT FOUND')\n", " print(f'{model_storage_dir}/hypernetworks/animevae.pt not found!')\n",
" \n",
"\n", "\n",
"if search_paperspace_datasets:\n", "if search_paperspace_datasets:\n",
" if os.path.isdir('/datasets'):\n", " if Path('/datasets').is_dir():\n",
" print('\\nSearching /datasets')\n", " link_ckpts('/datasets')\n",
" for file in glob.glob(f'/datasets/**/*.ckpt', recursive=True):\n",
" create_symlink(file, webui_sd_models)\n",
" for file in glob.glob(f'/datasets/**/*.yaml', recursive=True):\n",
" create_symlink(file, webui_sd_models)\n",
" else:\n", " else:\n",
" print('\\nNo datasets mounted!')" " print('\\nNo datasets mounted!')"
] ]
@@ -1163,7 +1094,7 @@
" import sys\n", " import sys\n",
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"%cd \"{repo_storage_dir}/stable-diffusion-webui\"\n", "%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"\n", "\n",
"# Code to set the options you want as defined in the very first block\n", "# Code to set the options you want as defined in the very first block\n",
"x_arg = '--xformers' if activate_xformers else ''\n", "x_arg = '--xformers' if activate_xformers else ''\n",
@@ -1215,19 +1146,22 @@
" sys.exit(1)\n", " sys.exit(1)\n",
"\n", "\n",
"import os\n", "import os\n",
"from pathlib import Path\n",
"import subprocess\n",
"\n", "\n",
"if not os.path.exists(export_storage_dir):\n", "repo_storage_dir = Path(repo_storage_dir)\n",
" os.makedirs(export_storage_dir)\n", "export_storage_dir = Path(export_storage_dir)\n",
"export_storage_dir.mkdir(exist_ok=True)\n",
"\n", "\n",
"!if [ $(dpkg-query -W -f='${Status}' p7zip-full 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y p7zip-full; fi # install 7z if it isn't already installed\n", "!if [ $(dpkg-query -W -f='${Status}' p7zip-full 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y p7zip-full; fi # install 7z if it isn't already installed\n",
"from datetime import datetime\n", "from datetime import datetime\n",
"datetime_str = datetime.now().strftime('%m-%d-%Y_%H:%M:%S')\n", "datetime_str = datetime.now().strftime('%m-%d-%Y_%H:%M:%S')\n",
"%cd \"{export_storage_dir}\"\n", "%cd \"{export_storage_dir}\"\n",
"!mkdir -p \"{datetime_str}/log\"\n", "!mkdir -p \"{datetime_str}/logs\"\n",
"!cd \"{repo_storage_dir}/stable-diffusion-webui/log/\" && mv * \"{export_storage_dir}/{datetime_str}/log\"\n", "!cd \"{repo_storage_dir / 'stable-diffusion-webui' / 'logs'}\" && mv * \"{export_storage_dir / datetime_str / 'logs'}\"\n",
"!cd \"{repo_storage_dir}/stable-diffusion-webui/outputs/\" && mv * \"{export_storage_dir}/{datetime_str}\"\n", "!cd \"{repo_storage_dir / 'stable-diffusion-webui' / 'outputs'}\" && mv * \"{export_storage_dir / datetime_str}\"\n",
"!find \"{export_storage_dir}/{datetime_str}\" -name .ipynb_checkpoints -exec rm -rf {{}} +\n", "s = subprocess.run(f'find \"{Path(export_storage_dir, datetime_str)}\" -type d -name .ipynb_checkpoints -exec rm -rv {{}} +', shell=True)\n",
"!7z a -t7z -m0=lzma2 -mx=9 -mfb=64 -md=32m -ms=on \"{datetime_str}.7z\" \"{export_storage_dir}/{datetime_str}/\"" "!7z a -t7z -m0=lzma2 -mx=9 -mfb=64 -md=32m -ms=on \"{datetime_str}.7z\" \"{export_storage_dir / datetime_str}\""
] ]
}, },
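For reference, the 7z switches used in the export cell above are standard p7zip options:

    # -t7z       use the 7z container format
    # -m0=lzma2  compress with the LZMA2 method
    # -mx=9      maximum compression level
    # -mfb=64    number of fast bytes for the match finder
    # -md=32m    32 MB dictionary size
    # -ms=on     solid archive (better ratio across many small files)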
{ {
@@ -1247,8 +1181,8 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"# !rm -rf \"{export_storage_dir}/{datetime_str}/\"\n", "# !rm -rf \"{export_storage_dir / datetime_str}\"\n",
"# !echo Deleted {export_storage_dir}/{datetime_str}/" "# !echo \"Deleted {export_storage_dir / datetime_str}\""
] ]
}, },
{ {
@@ -1327,7 +1261,8 @@
" print('Error:', e)\n", " print('Error:', e)\n",
" import sys\n", " import sys\n",
" sys.exit(1)\n", " sys.exit(1)\n",
"%cd \"{repo_storage_dir}/stable-diffusion-webui\"\n", "\n",
"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"!git reset --hard <commit>" "!git reset --hard <commit>"
] ]
}, },
@@ -1346,7 +1281,18 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"!find . -type d -name .ipynb_checkpoints -delete" "try:\n",
" %store -r model_storage_dir repo_storage_dir\n",
" test = [model_storage_dir, repo_storage_dir]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
" print('Error:', e)\n",
" import sys\n",
" sys.exit(1)\n",
"import subprocess\n",
"!find /notebooks/ -type d -name .ipynb_checkpoints -type d -exec rm -rv {} +\n",
"s = subprocess.run(f'find \"{repo_storage_dir}\" -type d -name .ipynb_checkpoints -exec rm -rv {{}} +', shell=True)"
] ]
}, },
{ {
@@ -1419,7 +1365,7 @@
"source": [ "source": [
"### Automated Model Downloader\n", "### Automated Model Downloader\n",
"\n", "\n",
"Here's a tool to download a model from a torrent magnet link, web link, Google Drive, or HuggingFace." "Here's a tool to download a model from a torrent magnet link, web link, Google Drive, HuggingFace, or CivitAI."
] ]
}, },
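The downloader cell below matches `model_uri` (set in an earlier cell, not shown in this hunk) against each source in turn: magnet link, HuggingFace, Google Drive, CivitAI, then any other web URL. Hypothetical example values:

    model_uri = 'magnet:?xt=urn:btih:...'                               # torrent magnet link
    model_uri = 'https://huggingface.co/org/repo/blob/main/model.ckpt'  # HuggingFace; /blob/ is rewritten to /resolve/
    model_uri = 'https://drive.google.com/file/d/<file-id>/view'        # Google Drive, handled by gdown
    model_uri = 'https://civitai.com/models/1234/example-model'         # CivitAI model page
    model_uri = 'https://example.com/some-model.safetensors'            # any other direct link, fetched with aria2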
{ {
@@ -1442,44 +1388,88 @@
"import re\n", "import re\n",
"import requests\n", "import requests\n",
"import gdown\n", "import gdown\n",
" \n", "import json\n",
"from bs4 import BeautifulSoup\n",
"from markdownify import markdownify\n",
"import urllib.request\n",
"from pathlib import Path\n",
"\n",
"user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36'\n",
"\n",
"def dl_web_file(web_dl_file):\n", "def dl_web_file(web_dl_file):\n",
" %cd \"{model_storage_dir}\"\n", " %cd \"{model_storage_dir}\"\n",
" # We're going to use aria2 to split the download into threads which will allow us to download\n", " # We're going to use aria2 to split the download into threads which will allow us to download\n",
" # the file very fast even if the site serves the file slow.\n", " # the file very fast even if the site serves the file slow.\n",
" !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", " !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
" !aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 \"{web_dl_file}\" \n", " !aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue --user-agent=\"{user_agent}\" \"{web_dl_file}\" \n",
"\n",
"def is_url(url_str):\n",
" return re.search(r'https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|www\\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9]+\\.[^\\s]{2,}|www\\.[a-zA-Z0-9]+\\.[^\\s]{2,}', url_str)\n",
"\n", "\n",
"magnet_match = re.search(r'magnet:\\?xt=urn:btih:[\\-_A-Za-z0-9&=%.]*', model_uri)\n", "magnet_match = re.search(r'magnet:\\?xt=urn:btih:[\\-_A-Za-z0-9&=%.]*', model_uri)\n",
"web_match = re.search(r'(https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|www\\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9]+\\.[^\\s]{2,}|www\\.[a-zA-Z0-9]+\\.[^\\s]{2,})', model_uri)\n", "civitai_match = re.search(r'^https?:\\/\\/(?:www\\.|(?!www))civitai\\.com\\/models\\/\\d*\\/.*?$', model_uri)\n",
"web_match = is_url(model_uri)\n",
"\n", "\n",
"if magnet_match:\n", "if magnet_match:\n",
" !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n", " !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
" %cd \"{model_storage_dir}\"\n", " %cd \"{model_storage_dir}\"\n",
" bash_var = magnet_match[0]\n", " bash_var = magnet_match[0]\n",
" !aria2c --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"{bash_var}\"\n", " !aria2c --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --console-log-level=warn --file-allocation=none \"{bash_var}\"\n",
" # clean exit here\n",
"elif 'https://huggingface.co/' in model_uri:\n", "elif 'https://huggingface.co/' in model_uri:\n",
" response = requests.head(web_match[0], allow_redirects=True)\n", " response = requests.head(model_uri, allow_redirects=True, headers={'User-Agent': user_agent})\n",
" if 'octet-stream' not in response.headers['content-type']:\n", " if 'octet-stream' not in response.headers['content-type']:\n",
" response = requests.head(web_match[0].replace('/blob/', '/resolve/'), allow_redirects=True)\n", " response = requests.head(model_uri.replace('/blob/', '/resolve/'), allow_redirects=True, headers={'User-Agent': user_agent})\n",
" if 'octet-stream' not in response.headers['content-type']:\n", " if 'octet-stream' not in response.headers['content-type']:\n",
" print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n", " print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n",
" # clean exit here\n", " # clean exit here\n",
" else:\n", " else:\n",
" dl_web_file(web_match[0].replace('/blob/', '/resolve/'))\n", " dl_web_file(model_uri.replace('/blob/', '/resolve/'))\n",
" # clean exit here\n",
" else:\n", " else:\n",
" dl_web_file(web_match[0])\n", " dl_web_file(model_uri)\n",
" # clean exit here\n",
"elif 'https://drive.google.com' in model_uri:\n", "elif 'https://drive.google.com' in model_uri:\n",
" gdrive_file_id, _ = gdown.parse_url.parse_url(web_match[0])\n", " gdrive_file_id, _ = gdown.parse_url.parse_url(model_uri)\n",
" %cd \"{model_storage_dir}\"\n", " %cd \"{model_storage_dir}\"\n",
" gdown.download(f\"https://drive.google.com/uc?id={gdrive_file_id}&confirm=t\") \n", " gdown.download(f\"https://drive.google.com/uc?id={gdrive_file_id}&confirm=t\")\n",
" # clean exit here\n",
"elif civitai_match:\n",
" if not is_url(civitai_match[0]):\n",
" print('URL does not match known civitai.com pattern.')\n",
" # clean exit here\n",
" else:\n",
" soup = BeautifulSoup(requests.get(model_uri, headers={'User-Agent': user_agent}).text, features=\"html.parser\")\n",
" data = json.loads(soup.find('script', {'id': '__NEXT_DATA__'}).text)\n",
" model_data = data[\"props\"][\"pageProps\"][\"trpcState\"][\"json\"][\"queries\"][0][\"state\"][\"data\"]\n",
" latest_model = model_data['modelVersions'][0]\n",
" latest_model_url = f\"https://civitai.com/api/download/models/{latest_model['id']}\"\n",
" print('Downloading model:', model_data['name'])\n",
" \n",
" # Download the description to a markdown file next to the checkpoint\n",
" desc = markdownify(model_data['description'])\n",
" req = urllib.request.Request(latest_model_url, data=None, headers={'User-Agent': user_agent})\n",
" content_disp = urllib.request.urlopen(req).getheader('Content-Disposition')\n",
" if content_disp:\n",
" filename = Path(re.match(r'attachment; filename=\"(.*?)\"', content_disp)[1]).stem\n",
" with open(Path(model_storage_dir, f'{filename}.md'), 'w') as file:\n",
" file.write(f\"# {model_data['name']} \\n\")\n",
" file.write(f'Original CivitAI URL: {model_uri} \\n\\n <br> \\nhttps://civitai.com/models/3950/art-and-eros-aeros-a-tribute-to-beauty\\n')\n",
" file.write(desc)\n",
" else:\n",
" print('Failed to get filename of checkpoint for markdown file')\n",
"\n",
" dl_web_file(latest_model_url)\n",
" # clean exit here\n",
"elif web_match:\n", "elif web_match:\n",
" response = requests.head(web_match[0], allow_redirects=True)\n", " # Always do the web match last\n",
" response = requests.head(model_uri, allow_redirects=True, headers={'User-Agent': user_agent})\n",
" if 'octet-stream' not in response.headers['content-type']:\n", " if 'octet-stream' not in response.headers['content-type']:\n",
" print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n", " print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n",
" # clean exit here\n", " # clean exit here\n",
" else:\n", " else:\n",
" dl_web_file(web_match[0])\n", " dl_web_file(model_uri)\n",
" # clean exit here\n",
"else:\n", "else:\n",
" print('Could not parse your URI.')\n", " print('Could not parse your URI.')\n",
" # clean exit here" " # clean exit here"