Add civitai downloader + improvements

- Improve downloader
- Remove scripts from installer
- Better symlinking
- Link .ckpt and .safetensors/.safetensor/.st files
- Fix find+delete .ipynb_checkpoints
- symlinking and xformers enabled by default
- organize repository files

parent de22b9c8a4 · commit 7117369ba1
@@ -40,25 +40,23 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
-"**Where to store the models**\n",
+"**Where to store your files**\n",
 "\n",
 "`/storage/` is persistent storage shared across all machines on your account. Mounted to your machine.\n",
 "\n",
-"`/notebooks/` is storage for this notebook only. This directory has to be copied into your machine which can increase start/stop times if the directory is very large. To avoid this, put large files in `/storage/`.\n",
+"`/notebooks/` is storage for this notebook only. This directory has to be copied into your machine which can increase start/stop times if it's very large. To avoid this, put large files in `/storage/`.\n",
 "\n",
 "`/tmp/` <mark style=\"background-color:lime\">is not a persistent directory, meaning your files there will be deleted when the machine turns off.</mark>\n",
 "\n",
 "<br>\n",
 "\n",
-"<mark style=\"background-color: #ff780082\">If you are having storage issues</mark>, set `repo_storage_dir` to `/tmp/stable-diffusion`.\n",
+"<mark style=\"background-color: #ff780082\">If you are having storage issues</mark>, set `repo_storage_dir` to `/tmp/stable-diffusion`. Make sure `symlink_to_notebooks` is set to `True` so it gets linked back to `/notebooks/`.\n",
 "\n",
-"<br><br>\n",
+"<br>\n",
 "\n",
 "<mark>You must uncomment the correct section and run the block below or else the notebook won't work!</mark>\n",
 "\n",
-"Select the section you want and do `ctrl + /` to uncomment.\n",
-"\n",
-"Make sure there isn't a space in front of any variable. Don't put a trailing slash on directory paths. If you change any settings here, rerun this cell."
+"Select the section you want and do `ctrl + /` to uncomment. If you change any settings here, rerun this cell."
 ]
},
{
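A quick way to check where a path actually resolves under this storage layout — a sketch only; `storage_kind` is a made-up helper, not part of the notebook:

```python
from pathlib import Path

def storage_kind(path):
    """Classify a path by where it ultimately resolves (follows symlinks)."""
    real = Path(path).resolve()
    if str(real).startswith('/storage'):
        return 'persistent (shared across machines)'
    if str(real).startswith('/tmp'):
        return 'temporary (wiped when the machine turns off)'
    return 'notebook-local (copied on start/stop)'

print(storage_kind('/notebooks/stable-diffusion'))  # follows the symlink, if any
```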
@@ -67,31 +65,25 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"# Free tier\n",
-"# symlink_to_notebooks = True # Enables the creation of symlinks back to /notebooks/\n",
-"# model_storage_dir = '/tmp/stable-diffusion/models' # Where the models will be downloaded to.\n",
-"# repo_storage_dir = '/notebooks' # Where the repository will be downloaded to.\n",
-"# pip_cache_dir = None # Cache pip downloads to this directory so you don't have to re-download everything.\n",
+"model_storage_dir = '/storage/models' # Where to store your model checkpoints.\n",
 "\n",
-"# Paid Tier\n",
-"# symlink_to_notebooks = False\n",
-"# model_storage_dir = '/storage/models'\n",
-"# repo_storage_dir = '/notebooks'\n",
-"# pip_cache_dir = '/storage/pip/cache'\n",
+"repo_storage_dir = '/storage/stable-diffusion' # Where to store your Stable Diffusion-related files.\n",
 "\n",
+"pip_cache_dir = None # The installer can cache pip wheels so you don't have to re-download them\n",
+"                     # next time you start the machine. I recommend setting it to '/storage/pip/cache'.\n",
 "\n",
 "\n",
 "# Other optional settings\n",
 "# You don't have to change these if you don't want to\n",
 "\n",
-"activate_xformers = False # Enables the xformers optimizations using pre-built wheels.\n",
+"symlink_to_notebooks = True # Enables the creation of symlinks back to /notebooks/\n",
+"\n",
+"activate_xformers = True # Enables the xformers optimizations using pre-built wheels.\n",
+"                         # Setting to True will automatically set up your environment/machine for xformers. \n",
 "\n",
-"link_novelai_anime_vae = False # Enables the linking of animevae.pt to each of the NovelAI models.\n",
+"link_novelai_anime_vae = True # Enables the linking of animevae.pt to each of the NovelAI models.\n",
 "                              # Set to True if you've downloaded both the NovelAI models and hypernetworks.\n",
 "\n",
-"download_scripts = False # Download custom scripts? Only reason why you would leave it disabled is because it may\n",
-"                         # take a while to complete.\n",
-"\n",
 "activate_deepdanbooru = False # Enable and install DeepDanbooru -> https://github.com/KichangKim/DeepDanbooru\n",
 "\n",
 "activate_medvram = True # Enable medvram option.\n",
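A minimal sketch of what `pip_cache_dir` buys you, assuming pip's standard `PIP_CACHE_DIR` environment variable (the variable is real pip behavior; wiring it up this way is an illustration, not the notebook's own code):

```python
import os

# Point pip's wheel/HTTP cache at persistent storage so downloads survive
# machine restarts. pip reads PIP_CACHE_DIR (equivalent to --cache-dir).
pip_cache_dir = '/storage/pip/cache'
if pip_cache_dir:
    os.makedirs(pip_cache_dir, exist_ok=True)
    os.environ['PIP_CACHE_DIR'] = pip_cache_dir  # inherited by later !pip calls
```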
@@ -112,14 +104,14 @@
 "ui_theme = None # Set the WEB UI theme. Values can be None (default) or 'dark'.\n",
 "\n",
 "insecure_extension_access = False # Force enable extensions without a password.\n",
-"                                  # If you don't set a password, anyone can install and run arbitrary code on your machine!\n",
+"                                  # If you don't set a password anyone can install and run arbitrary code on your machine!\n",
 "                                  # Instead, use gradio_auth which will automatically enable extensions when set.\n",
 "\n",
 "export_storage_dir = '/notebooks/exports' # Where the generated images will be exported to.\n",
 " \n",
 "# ===================================================================================================\n",
 "# Save variables to Jupyter's temp storage so we can access it even if the kernel restarts.\n",
-"%store symlink_to_notebooks model_storage_dir repo_storage_dir export_storage_dir activate_xformers link_novelai_anime_vae download_scripts activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth search_paperspace_datasets ui_theme insecure_extension_access pip_cache_dir"
+"%store symlink_to_notebooks model_storage_dir repo_storage_dir export_storage_dir activate_xformers link_novelai_anime_vae activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth search_paperspace_datasets ui_theme insecure_extension_access pip_cache_dir"
 ]
},
{
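`%store` here is IPython's storemagic: it writes the named variables to the profile's on-disk database, and `%store -r` reads them back after a kernel restart. A minimal round-trip:

```python
# Persist a variable across kernel restarts with IPython's %store magic.
gradio_port = 7860
%store gradio_port        # write to IPython's database

# ...after the kernel restarts...
%store -r gradio_port     # restore into the fresh kernel
print(gradio_port)
```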
@@ -149,7 +141,6 @@
 },
 "outputs": [],
 "source": [
-"import os\n",
 "# You'll see this little code block at the beginning of every cell.\n",
 "# It makes sure you have run the first block that defines your settings.\n",
 "try:\n",
@@ -162,32 +153,40 @@
 "    import sys\n",
 "    sys.exit(1)\n",
 "    \n",
 "%cd /notebooks/\n",
+"import os\n",
+"from pathlib import Path\n",
 "\n",
-"def delete_broken_symlinks(path):\n",
-"    # make sure to pass this function a path without a trailing slash\n",
-"    for file in os.listdir(path):\n",
-"        if os.path.islink(f'{path}/{file}') and not os.path.exists(os.readlink(f'{path}/{file}')):\n",
-"            print(f'Symlink broken, removing: {file}')\n",
-"            os.unlink(f'{path}/{file}')\n",
+"repo_storage_dir = Path(repo_storage_dir)\n",
+"stable_diffusion_webui_path = repo_storage_dir / 'stable-diffusion-webui'\n",
 "\n",
-"def update_repo_if_not_exists(path, repo_clone_url, pre=None):\n",
-"    if pre is not None:\n",
-"        pre() \n",
-"    if not os.path.exists(path):\n",
-"        !git clone \"{repo_clone_url}\" \"{path}\"\n",
-"    else:\n",
-"        print(f'{repo_clone_url.split(\"/\")[-1]} already downloaded, updating...')\n",
-"        !cd \"{path}\" && git pull # no % so we don't interfere with the main process\n",
+"if not stable_diffusion_webui_path.exists():\n",
+"    !mkdir -p \"{stable_diffusion_webui_path}\"\n",
+"    !git clone https://github.com/AUTOMATIC1111/stable-diffusion-webui \"{stable_diffusion_webui_path}\"\n",
+"else:\n",
+"    print('stable-diffusion-webui already downloaded, updating...')\n",
+"    !cd \"{stable_diffusion_webui_path}\" && git pull # no % so we don't interfere with the main process\n",
 "\n",
-"def init_free():\n",
-"    if (symlink_to_notebooks and repo_storage_dir != '/notebooks'):\n",
-"        delete_broken_symlinks('/notebooks/') # remove broken symlinks since it might have been installed in a non-persistent directory\n",
-"        if not os.path.exists(repo_storage_dir):\n",
-"            !mkdir -p \"{repo_storage_dir}\"\n",
-"        !ln -s \"{repo_storage_dir}\" /notebooks/\n",
-"        !ls -la /notebooks/stable-diffusion\n",
-"update_repo_if_not_exists(f'{repo_storage_dir}/stable-diffusion-webui', 'https://github.com/AUTOMATIC1111/stable-diffusion-webui', init_free)"
+"!mkdir -p \"{repo_storage_dir / 'stable-diffusion-webui' / 'outputs'}\"\n",
+"!mkdir -p \"{repo_storage_dir / 'stable-diffusion-webui' / 'logs'}\"\n",
+"\n",
+"symlinks = [\n",
+"    (repo_storage_dir / 'stable-diffusion-webui', Path('/notebooks/stable-diffusion-webui')),\n",
+"    (repo_storage_dir / 'stable-diffusion-webui' / 'outputs', Path('/notebooks/outputs')),\n",
+"    (repo_storage_dir / 'stable-diffusion-webui' / 'logs', repo_storage_dir / 'stable-diffusion-webui' / 'outputs' / 'logs'),\n",
+"    (Path('/storage'), Path('/notebooks/storage')),\n",
+"]\n",
+"\n",
+"if symlink_to_notebooks and repo_storage_dir != '/notebooks':\n",
+"    print('\\nCreating Symlinks...')\n",
+"    for src, dest in symlinks:\n",
+"        # If `/notebooks/stable-diffusion-webui` is a broken symlink then remove it.\n",
+"        # The WebUI might have previously been installed in a non-persistent directory.\n",
+"        if dest.is_symlink() and not dest.exists(): # .exists() validates a symlink\n",
+"            print('Symlink broken, removing:', dest)\n",
+"            dest.unlink()\n",
+"        if not dest.exists():\n",
+"            os.symlink(src, dest)\n",
+"        print(src, '->', os.path.realpath(dest))"
 ]
},
{
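The `dest.is_symlink() and not dest.exists()` test works because `Path.exists()` follows symlinks. A self-contained demo of that behavior:

```python
import os
import tempfile
from pathlib import Path

# Path.exists() follows the link to its target, so a link whose target is gone
# reports exists() == False while is_symlink() stays True.
with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / 'target.txt'
    link = Path(tmp) / 'link'
    target.write_text('hello')
    os.symlink(target, link)
    print(link.is_symlink(), link.exists())  # True True
    target.unlink()                          # break the link
    print(link.is_symlink(), link.exists())  # True False -> safe to unlink()
```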
@@ -211,8 +210,8 @@
 "outputs": [],
 "source": [
 "try:\n",
-"    %store -r symlink_to_notebooks model_storage_dir repo_storage_dir activate_xformers download_scripts activate_deepdanbooru pip_cache_dir\n",
-"    test = [symlink_to_notebooks, model_storage_dir, repo_storage_dir, activate_xformers, download_scripts, activate_deepdanbooru, pip_cache_dir]\n",
+"    %store -r symlink_to_notebooks model_storage_dir repo_storage_dir activate_xformers activate_deepdanbooru pip_cache_dir\n",
+"    test = [symlink_to_notebooks, model_storage_dir, repo_storage_dir, activate_xformers, activate_deepdanbooru, pip_cache_dir]\n",
 "except NameError as e:\n",
 "    print(\"There is an issue with your variables.\")\n",
 "    print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
@@ -220,7 +219,7 @@
 "    import sys\n",
 "    sys.exit(1)\n",
 "\n",
-"%cd \"{repo_storage_dir}/stable-diffusion-webui\"\n",
+"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
 "\n",
 "!pip install --upgrade pip\n",
 "!pip install --upgrade wheel setuptools\n",
@@ -237,76 +236,19 @@
 "import launch\n",
 "launch.prepare_environment()\n",
 "\n",
+"# Install things for this notebook\n",
+"!pip install requests gdown bs4 markdownify\n",
 "\n",
 "# The installer isn't installing deepdanbooru right now so we'll do it manually.\n",
 "if activate_deepdanbooru:\n",
-"    !pip install \"git+https://github.com/KichangKim/DeepDanbooru.git@edf73df4cdaeea2cf00e9ac08bd8a9026b7a7b26#egg=deepdanbooru[tensorflow]\" # tensorflow==2.10.0 tensorflow-io==0.27.0 flatbuffers==1.12\n",
+"    # https://github.com/KichangKim/DeepDanbooru/releases\n",
+"    !pip install \"git+https://github.com/KichangKim/DeepDanbooru.git@v3-20211112-sgd-e28#egg=deepdanbooru[tensorflow]\" # $(curl --silent \"https://api.github.com/KichangKim/DeepDanbooru/releases/latest\" | grep '\"tag_name\":' | sed -E 's/.*\"([^\"]+)\".*/\\1/')#egg=deepdanbooru[tensorflow]\" # tensorflow==2.10.0 tensorflow-io==0.27.0 flatbuffers==1.12\n",
 "\n",
 "# latent-diffusion is a requirement but launch.py isn't downloading it so we'll do it manually.\n",
-"if not os.path.exists(f'{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion'):\n",
-"    !git clone https://github.com/crowsonkb/k-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/k-diffusion\"\n",
-"    !git clone https://github.com/Hafiidz/latent-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion\"\n",
-"\n",
-"# For things in this notebook\n",
-"!pip install requests\n",
-"!pip install gdown\n",
-"\n",
-"# Download popular custom scripts. This is basically remote code execution so be careful.\n",
-"# See https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Custom-Scripts\n",
-"if download_scripts:\n",
-"    import shutil\n",
-"    import requests\n",
-"    !pip install moviepy==1.0.3\n",
-"    !apt update\n",
-"    !apt install -y potrace python3-tk\n",
-"\n",
-"    def download_file_dir(url, output_dir):\n",
-"        # output_dir must have a trailing slash\n",
-"        local_filename = url.split('/')[-1]\n",
-"        with requests.get(url, stream=True) as r:\n",
-"            r.raise_for_status()\n",
-"            with open(f'{output_dir}{local_filename}', 'wb') as f:\n",
-"                for chunk in r.iter_content(chunk_size=8192):\n",
-"                    f.write(chunk)\n",
-"        return local_filename\n",
-"    def do_script_download(scripts_list, domain, path):\n",
-"        for item in scripts_list:\n",
-"            download_file_dir(f'https://{domain}/{item}', path)\n",
-"            print(f'{item.split(\"/\")[-1]} downloaded...')\n",
-"\n",
-"    do_script_download([\n",
-"        'GRMrGecko/stable-diffusion-webui-automatic/advanced_matrix/scripts/advanced_prompt_matrix.py',\n",
-"        'dfaker/stable-diffusion-webui-cv2-external-masking-script/main/external_masking.py',\n",
-"        'memes-forever/Stable-diffusion-webui-video/main/videos.py',\n",
-"        'yownas/seed_travel/main/scripts/seed_travel.py',\n",
-"        'Animator-Anon/Animator/main/animation.py',\n",
-"        'Filarius/stable-diffusion-webui/master/scripts/vid2vid.py',\n",
-"        'GeorgLegato/Txt2Vectorgraphics/main/txt2vectorgfx.py',\n",
-"        'yownas/shift-attention/main/scripts/shift_attention.py',\n",
-"        'DiceOwl/StableDiffusionStuff/main/loopback_superimpose.py',\n",
-"        'Engineer-of-Stuff/stable-diffusion-paperspace/main/lfs/save_steps.py',\n",
-"        'Pfaeff/sd-web-ui-scripts/main/moisaic.py'\n",
-"    ], 'raw.githubusercontent.com', f'{repo_storage_dir}/stable-diffusion-webui/scripts/')\n",
-"\n",
-"    do_script_download([\n",
-"        'dfaker/f88aa62e3a14b559fe4e5f6b345db664/raw/791dabfa0ab26399aa2635bcbc1cf6267aa4ffc2/alternate_sampler_noise_schedules.py',\n",
-"        'camenduru/9ec5f8141db9902e375967e93250860f/raw/c1a03eb447548adbef1858c0e69d3567a390d2f4/run_n_times.py'\n",
-"    ], 'gist.githubusercontent.com', f'{repo_storage_dir}/stable-diffusion-webui/scripts/')\n",
-"\n",
-"    # Download and set up txt2img2img\n",
-"    update_repo_if_not_exists(f'{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root', 'https://github.com/ThereforeGames/txt2img2img.git')\n",
-"    !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root/scripts\" \"{repo_storage_dir}/stable-diffusion-webui\"\n",
-"    !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root/txt2img2img\" \"{repo_storage_dir}/stable-diffusion-webui\"\n",
-"    !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2img2img_root/venv\" \"{repo_storage_dir}/stable-diffusion-webui\"\n",
-"\n",
-"    # Download and set up txt2mask\n",
-"    update_repo_if_not_exists(f'{repo_storage_dir}/stable-diffusion-webui/txt2mask', 'https://github.com/ThereforeGames/txt2mask.git')\n",
-"    !echo \"Copying txt2mask...\"\n",
-"    !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2mask/repositories/clipseg\" \"{repo_storage_dir}/stable-diffusion-webui/repositories\"\n",
-"    !cp -r \"{repo_storage_dir}/stable-diffusion-webui/txt2mask/scripts/\" \"{repo_storage_dir}/stable-diffusion-webui/\"\n",
-"\n",
-"    # Install the dynamic-prompts/wildcard script\n",
-"    # !git clone https://github.com/adieyal/sd-dynamic-prompting/ extensions/dynamic-prompts\n",
+"# TODO: can this be removed?\n",
+"# if not os.path.exists(f'{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion'):\n",
+"#     !git clone https://github.com/crowsonkb/k-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/k-diffusion\"\n",
+"#     !git clone https://github.com/Hafiidz/latent-diffusion.git \"{repo_storage_dir}/stable-diffusion-webui/repositories/latent-diffusion\"\n",
 "\n",
 "if activate_xformers:\n",
 "    print('Installing xformers...')\n",
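The commented-out `curl` in the new deepdanbooru line hints at resolving the newest release tag at install time; note the canonical GitHub endpoint includes a `/repos/` segment that the comment omits. A hedged sketch of the same idea in Python:

```python
import requests

# Resolve the newest DeepDanbooru release tag instead of hard-coding one.
# GitHub's REST endpoint is /repos/<owner>/<repo>/releases/latest and the
# JSON payload carries the tag under 'tag_name'.
resp = requests.get('https://api.github.com/repos/KichangKim/DeepDanbooru/releases/latest')
resp.raise_for_status()
tag = resp.json()['tag_name']
print(f'pip install "git+https://github.com/KichangKim/DeepDanbooru.git@{tag}#egg=deepdanbooru[tensorflow]"')
```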
@@ -343,19 +285,12 @@
 "    xformers_whl = download_release('https://raw.githubusercontent.com/Cyberes/xformers-compiled/main/various/xformers-0.0.14.dev0-cp37-cp37m-linux_x86_64.whl')\n",
 "    !pip install --force-reinstall \"{xformers_whl}\"\n",
 "\n",
-"# Make sure your models storage directory exists\n",
+"# Make sure important directories exist\n",
 "!mkdir -p \"{model_storage_dir}/hypernetworks\"\n",
 "!mkdir -p \"{model_storage_dir}/vae\"\n",
 "!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/models/hypernetworks\"\n",
+"!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/models/VAE\"\n",
 "\n",
-"# Link the output folders to /notebooks/outputs\n",
-"!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/log/images\"\n",
-"!mkdir -p \"{repo_storage_dir}/stable-diffusion-webui/outputs\"\n",
-"!ln -s \"{repo_storage_dir}/stable-diffusion-webui/outputs\" /notebooks/\n",
-"!ln -s \"{repo_storage_dir}/stable-diffusion-webui/log\" \"{repo_storage_dir}/stable-diffusion-webui/outputs\"\n",
-"\n",
-"# Link /storage/ to /notebooks/\n",
-"!ln -s /storage/ /notebooks/\n",
 "\n",
 "!echo -e \"\\n===================================\\nDone! If you're seeing this the process has exited successfully.\\n\""
 ]
@@ -432,7 +367,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-2/resolve/main/768-v-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-768-v-ema.ckpt\"\n",
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-2/resolve/main/768-v-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-768-v-ema.ckpt\"\n",
 "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference-v.yaml -O \"{model_storage_dir}/sd-v2-0-768-v-ema.yaml\""
 ]
},
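The same `aria2c` invocation recurs in every download cell below; a sketch consolidating it (the helper name is invented, not from the commit). Note that `-c` is the short form of `--continue`, so the committed command actually passes that flag twice, harmlessly:

```python
import subprocess
from pathlib import Path

def aria2_download(url, dest_dir, filename):
    """Hypothetical helper wrapping the repeated aria2c call: 16 connections
    and 16 splits for speed, -c (--continue) to resume partial files, and
    --console-log-level=warn to quiet the output."""
    Path(dest_dir).mkdir(parents=True, exist_ok=True)
    subprocess.run([
        'aria2c', '--file-allocation=none', '-c', '-x', '16', '-s', '16',
        '--summary-interval=0', '--console-log-level=warn',
        url, '-d', str(dest_dir), '-o', filename,
    ], check=True)
```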
@@ -462,7 +397,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-2-base/resolve/main/512-base-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-base-ema.ckpt\"\n",
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-2-base/resolve/main/512-base-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-base-ema.ckpt\"\n",
 "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-inference.yaml -O \"{model_storage_dir}/sd-v2-0-512-base-ema.yaml\""
 ]
},
@@ -490,7 +425,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-2-depth/resolve/main/512-depth-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-depth-ema.ckpt\"\n",
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-2-depth/resolve/main/512-depth-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-512-depth-ema.ckpt\"\n",
 "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/v2-midas-inference.yaml -O \"{model_storage_dir}/sd-v2-0-512-depth-ema.yaml\""
 ]
},
@@ -518,7 +453,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler/resolve/main/x4-upscaler-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-x4-upscaler-ema.ckpt\"\n",
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/stabilityai/stable-diffusion-x4-upscaler/resolve/main/x4-upscaler-ema.ckpt -d \"{model_storage_dir}\" -o \"sd-v2-0-x4-upscaler-ema.ckpt\"\n",
 "!wget https://raw.githubusercontent.com/Stability-AI/stablediffusion/main/configs/stable-diffusion/x4-upscaling.yaml -O \"{model_storage_dir}/sd-v2-0-x4-upscaler-ema.yaml\""
 ]
},
@@ -736,7 +671,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step60000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step60000.ckpt\""
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step60000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step60000.ckpt\""
 ]
},
{
@@ -763,7 +698,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step95000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step95000.ckpt\""
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step95000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step95000.ckpt\""
 ]
},
{
@@ -790,7 +725,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step115000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step115000.ckpt\""
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_stable_diffusion_v2/resolve/main/trinart2_step115000.ckpt -d \"{model_storage_dir}\" -o \"trinart2_step115000.ckpt\""
 ]
},
{
@@ -819,7 +754,7 @@
 "    sys.exit(1)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 https://huggingface.co/naclbit/trinart_characters_19.2m_stable_diffusion_v1/resolve/main/trinart_characters_it4_v1.ckpt -d \"{model_storage_dir}\" -o \"trinart_characters_it4_v1.ckpt\""
+"!aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue https://huggingface.co/naclbit/trinart_characters_19.2m_stable_diffusion_v1/resolve/main/trinart_characters_it4_v1.ckpt -d \"{model_storage_dir}\" -o \"trinart_characters_it4_v1.ckpt\""
 ]
},
{
@@ -942,10 +877,9 @@
 "!aria2c -d . --bt-metadata-only=true --bt-save-metadata=true --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"magnet:?xt=urn:btih:5bde442da86265b670a3e5ea3163afad2c6f8ecc&dn=novelaileak&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337%2Fannounce&tr=udp%3A%2F%2F9.rarbg.com%3A2810%2Fannounce&tr=udp%3A%2F%2Ftracker.openbittorrent.com%3A6969%2Fannounce&tr=http%3A%2F%2Ftracker.openbittorrent.com%3A80%2Fannounce&tr=udp%3A%2F%2Fopentracker.i2p.rocks%3A6969%2Fannounce\"\n",
 "!aria2c --select-file=76,81,82,83,84,85,86,87,88,89,90,91,92,93 --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"{infohash}.torrent\"\n",
-"# -exec mv doesn't work with python variables so we'll set an environment variable instead\n",
 "import os\n",
-"os.environ[\"MODEL_STORAGE_DIR\"] = model_storage_dir\n",
 "!rm novelaileak/stableckpt/extra-sd-prune/sd-prune/anime700k-64bs-0.1ucg-penultimate-1epoch-clip-ema-continue-76000.pt # aria2 downloads this file even though I told it not to\n",
-"!find novelaileak/ -type f -name '*.pt' -exec mv {} \"$MODEL_STORAGE_DIR/hypernetworks\" \\;"
+"import subprocess\n",
+"s = subprocess.run(f'find novelaileak/ -type f -name \"*.pt\" -exec mv \"{{}}\" \"{Path(model_storage_dir, \"hypernetworks\")}\" \\;', shell=True)"
 ]
},
{
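A pure-Python equivalent of the `find ... -exec mv` step — a sketch that sidesteps shell quoting around paths entirely:

```python
import shutil
from pathlib import Path

# model_storage_dir comes from the notebook's settings cell.
dest = Path(model_storage_dir, 'hypernetworks')
dest.mkdir(parents=True, exist_ok=True)
for pt in Path('novelaileak').rglob('*.pt'):  # recursive, like find -name '*.pt'
    if pt.is_file():
        shutil.move(str(pt), str(dest / pt.name))
```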
@@ -974,7 +908,6 @@
 "    sys.exit(1)\n",
 "\n",
 "# Get some storage back\n",
-"\n",
 "if not pip_cache_dir:\n",
 "    !pip cache purge\n",
 "    !echo \"Purged pip cache\"\n",
@@ -1014,28 +947,28 @@
 "    sys.exit(1)\n",
 "\n",
 "import os\n",
-"import glob\n",
-"import subprocess\n",
-"import re\n",
+"from glob import glob\n",
 "from pathlib import Path\n",
 "import sys\n",
 "\n",
-"if not os.path.isdir(model_storage_dir):\n",
+"model_storage_dir = Path(model_storage_dir)\n",
+"\n",
+"if not model_storage_dir.exists():\n",
 "    print('Your model storage directory does not exist:', model_storage_dir)\n",
 "    sys.exit(1)\n",
 "\n",
-"webui_model_path = Path(repo_storage_dir, 'stable-diffusion-webui/models')\n",
-"webui_sd_models = Path(webui_model_path, 'Stable-diffusion')\n",
-"webui_hypernetworks = Path(webui_model_path, 'hypernetworks')\n",
-"webui_vaes = Path(webui_model_path, 'VAE')\n",
+"webui_root_model_path = Path(repo_storage_dir, 'stable-diffusion-webui/models')\n",
+"webui_sd_model_path = Path(webui_root_model_path, 'Stable-diffusion')\n",
+"webui_hypernetwork_path = Path(webui_root_model_path, 'hypernetworks')\n",
+"webui_vae_path = Path(webui_root_model_path, 'VAE')\n",
 "\n",
 "def delete_broken_symlinks(dir):\n",
 "    deleted = False\n",
-"    for file in os.listdir(dir):\n",
-"        path = f'{dir}/{file}'\n",
-"        if os.path.islink(path) and not os.path.exists(os.readlink(path)):\n",
-"            print(f'Symlink broken, removing: {file}')\n",
-"            os.unlink(path)\n",
+"    dir = Path(dir)\n",
+"    for file in dir.iterdir():\n",
+"        if file.is_symlink() and not file.exists():\n",
+"            print('Symlink broken, removing:', file)\n",
+"            file.unlink()\n",
 "            deleted = True\n",
 "    if deleted:\n",
 "        print('')\n",
@@ -1043,75 +976,73 @@
 "def create_symlink(source, dest):\n",
 "    if os.path.isdir(dest):\n",
 "        dest = Path(dest, os.path.basename(source))\n",
-"    if not os.path.exists(dest):\n",
-"        !ln -s \"{source}\" \"{dest}\"\n",
+"    if not dest.exists():\n",
+"        os.symlink(source, dest)\n",
 "    print(source, '->', Path(dest).absolute())\n",
 "\n",
 "# Check for broken symlinks and remove them\n",
 "print('Removing broken symlinks...')\n",
-"delete_broken_symlinks(webui_sd_models)\n",
-"delete_broken_symlinks(webui_hypernetworks)\n",
-"delete_broken_symlinks(webui_vaes)\n",
+"delete_broken_symlinks(webui_sd_model_path)\n",
+"delete_broken_symlinks(webui_hypernetwork_path)\n",
+"delete_broken_symlinks(webui_vae_path)\n",
 "\n",
-"# Link .ckpt files (recursive)\n",
-"print('\\nLinking .ckpt files...')\n",
-"for file in glob.glob(f'{model_storage_dir}/**/*.ckpt', recursive=True):\n",
-"    if not Path(file).parent.parts[-1] in ['hypernetworks', 'vae'] :\n",
-"        if not os.path.exists(file):\n",
-"            print('New model:', os.path.basename(file))\n",
-"        create_symlink(file, webui_sd_models)\n",
-"\n",
-"print('\\nLinking config .yaml files...')\n",
-"for file in glob.glob(f'{model_storage_dir}/**/*.yaml', recursive=True):\n",
-"    create_symlink(file, webui_sd_models)\n",
+"def link_ckpts(source_path):\n",
+"    # Link .ckpt and .safetensor/.st files (recursive)\n",
+"    print('\\nLinking .ckpt and .safetensor/.safetensors/.st files in', source_path)\n",
+"    source_path = Path(source_path)\n",
+"    for file in [p for p in source_path.rglob('*') if p.suffix in ['.ckpt', '.safetensor', '.safetensors', '.st']]:\n",
+"        if Path(file).parent.parts[-1] not in ['hypernetworks', 'vae'] :\n",
+"            if not (webui_sd_model_path / file.name).exists():\n",
+"                print('New model:', file.name)\n",
+"            create_symlink(file, webui_sd_model_path)\n",
+"    # Link config yaml files\n",
+"    print('\\nLinking config .yaml files in', source_path)\n",
+"    for file in model_storage_dir.glob('*.yaml'):\n",
+"        create_symlink(file, webui_sd_model_path)\n",
 "\n",
+"link_ckpts(model_storage_dir)\n",
+"\n",
 "# Link hypernetworks\n",
 "print('\\nLinking hypernetworks...')\n",
-"source_hypernetworks = Path(model_storage_dir, 'hypernetworks')\n",
-"if os.path.isdir(source_hypernetworks):\n",
-"    for file in os.listdir(source_hypernetworks):\n",
-"        create_symlink(Path(source_hypernetworks, file), webui_hypernetworks)\n",
+"hypernetwork_source_path = Path(model_storage_dir, 'hypernetworks')\n",
+"if hypernetwork_source_path.is_dir():\n",
+"    for file in hypernetwork_source_path.iterdir():\n",
+"        create_symlink(hypernetwork_source_path / file, webui_hypernetwork_path)\n",
 "else:\n",
-"    print('Hypernetwork storage directory not found:', source_hypernetworks)\n",
+"    print('Hypernetwork storage directory not found:', hypernetwork_source_path)\n",
 "\n",
 "# Link VAEs\n",
 "print('\\nLinking VAEs...')\n",
-"source_vaes = Path(model_storage_dir, 'vae')\n",
-"if os.path.isdir(source_vaes):\n",
-"    for file in os.listdir(source_vaes):\n",
-"        create_symlink(Path(source_vaes, file), webui_vaes)\n",
+"vae_source_path = Path(model_storage_dir, 'vae')\n",
+"if vae_source_path.is_dir():\n",
+"    for file in vae_source_path.iterdir():\n",
+"        create_symlink(vae_source_path / file, webui_vae_path)\n",
 "else:\n",
-"    print('VAE storage directory not found:', source_vaes)\n",
+"    print('VAE storage directory not found:', vae_source_path)\n",
 "\n",
 "# Link the NovelAI files for each of the NovelAI models\n",
 "print('\\nLinking NovelAI files for each of the NovelAI models...')\n",
-"for model in glob.glob(f'{model_storage_dir}/novelai-*.ckpt'):\n",
-"    yaml = model.replace('.ckpt', '.yaml')\n",
+"for model in model_storage_dir.glob('novelai-*.ckpt'):\n",
+"    yaml = model.stem + '.yaml'\n",
 "    if os.path.exists(yaml):\n",
+"    # if not os.path.exists(yaml):\n",
-"        print(f'New NovelAI model config: {yaml}')\n",
-"        create_symlink(yaml, webui_sd_models)\n",
+"        print('New NovelAI model config:', yaml)\n",
+"        create_symlink(yaml, webui_sd_model_path)\n",
 "\n",
 "if link_novelai_anime_vae:\n",
 "    print('\\nLinking NovelAI anime VAE...')\n",
-"    for model in glob.glob('novelai-*.ckpt'):\n",
-"        if os.path.exists(Path(model_storage_dir, 'hypernetworks/animevae.pt')):\n",
-"            vae = model.replace('.ckpt', '.vae.pt')\n",
-"            if not os.path.exists(webui_vaes):\n",
+"    for model in model_storage_dir.glob('novelai-*.ckpt'):\n",
+"        if (model_storage_dir / 'hypernetworks' / 'animevae.pt').is_file():\n",
+"            vae = model.stem + '.vae.pt'\n",
+"            if not os.path.exists(webui_vae_path):\n",
 "                print(f'Linking NovelAI {vae} and {model}')\n",
-"                create_symlink(Path(model_storage_dir, 'hypernetworks/animevae.pt'), webui_vaes)\n",
+"                create_symlink(model_storage_dir / 'hypernetworks' / 'animevae.pt', webui_vae_path)\n",
 "        else:\n",
-"            print(f'{model_storage_dir}/hypernetworks/animevae.pt NOT FOUND')\n",
-"            \n",
+"            print(f'{model_storage_dir}/hypernetworks/animevae.pt not found!')\n",
 "\n",
 "if search_paperspace_datasets:\n",
-"    if os.path.isdir('/datasets'):\n",
-"        print('\\nSearching /datasets')\n",
-"        for file in glob.glob(f'/datasets/**/*.ckpt', recursive=True):\n",
-"            create_symlink(file, webui_sd_models)\n",
-"        for file in glob.glob(f'/datasets/**/*.yaml', recursive=True):\n",
-"            create_symlink(file, webui_sd_models)\n",
+"    if Path('/datasets').is_dir():\n",
+"        link_ckpts('/datasets')\n",
 "    else:\n",
 "        print('\\nNo datasets mounted!')"
 ]
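One fix restored in the hunk above: as committed, the new-model check read `if not (webui_sd_model_path / file.name):`, which can never trigger because a `pathlib.Path` is always truthy; the test needs `.exists()`. A minimal demonstration:

```python
from pathlib import Path

p = Path('/nonexistent/file.ckpt')
print(bool(p))      # True -> `not p` is always False, whatever the filesystem says
print(p.exists())   # False -> this is the check the linking code actually wants
```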
@@ -1163,7 +1094,7 @@
 "    import sys\n",
 "    sys.exit(1)\n",
 "\n",
-"%cd \"{repo_storage_dir}/stable-diffusion-webui\"\n",
+"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
 "\n",
 "# Code to set the options you want as defined in the very first block\n",
 "x_arg = '--xformers' if activate_xformers else ''\n",
@@ -1215,19 +1146,22 @@
 "    sys.exit(1)\n",
 "\n",
 "import os\n",
+"from pathlib import Path\n",
+"import subprocess\n",
 "\n",
-"if not os.path.exists(export_storage_dir):\n",
-"    os.makedirs(export_storage_dir)\n",
+"repo_storage_dir = Path(repo_storage_dir)\n",
+"export_storage_dir = Path(export_storage_dir)\n",
+"export_storage_dir.mkdir(exist_ok=True)\n",
 "\n",
 "!if [ $(dpkg-query -W -f='${Status}' p7zip-full 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y p7zip-full; fi # install 7z if it isn't already installed\n",
 "from datetime import datetime\n",
 "datetime_str = datetime.now().strftime('%m-%d-%Y_%H:%M:%S')\n",
 "%cd \"{export_storage_dir}\"\n",
-"!mkdir -p \"{datetime_str}/log\"\n",
-"!cd \"{repo_storage_dir}/stable-diffusion-webui/log/\" && mv * \"{export_storage_dir}/{datetime_str}/log\"\n",
-"!cd \"{repo_storage_dir}/stable-diffusion-webui/outputs/\" && mv * \"{export_storage_dir}/{datetime_str}\"\n",
-"!find \"{export_storage_dir}/{datetime_str}\" -name .ipynb_checkpoints -exec rm -rf {{}} +\n",
-"!7z a -t7z -m0=lzma2 -mx=9 -mfb=64 -md=32m -ms=on \"{datetime_str}.7z\" \"{export_storage_dir}/{datetime_str}/\""
+"!mkdir -p \"{datetime_str}/logs\"\n",
+"!cd \"{repo_storage_dir / 'stable-diffusion-webui' / 'logs'}\" && mv * \"{export_storage_dir / datetime_str / 'logs'}\"\n",
+"!cd \"{repo_storage_dir / 'stable-diffusion-webui' / 'outputs'}\" && mv * \"{export_storage_dir / datetime_str}\"\n",
+"s = subprocess.run(f'find \"{Path(export_storage_dir, datetime_str)}\" -type d -name .ipynb_checkpoints -exec rm -rv {{}} +', shell=True)\n",
+"!7z a -t7z -m0=lzma2 -mx=9 -mfb=64 -md=32m -ms=on \"{datetime_str}.7z\" \"{export_storage_dir / datetime_str}\""
 ]
},
{
@@ -1247,8 +1181,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"# !rm -rf \"{export_storage_dir}/{datetime_str}/\"\n",
-"# !echo Deleted {export_storage_dir}/{datetime_str}/"
+"# !rm -rf \"{export_storage_dir / datetime_str}\"\n",
+"# !echo \"Deleted {export_storage_dir / datetime_str}\""
 ]
},
{
@@ -1327,7 +1261,8 @@
 "    print('Error:', e)\n",
 "    import sys\n",
 "    sys.exit(1)\n",
-"%cd \"{repo_storage_dir}/stable-diffusion-webui\"\n",
+"\n",
+"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
 "!git reset --hard <commit>"
 ]
},
@@ -1346,7 +1281,18 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"!find . -type d -name .ipynb_checkpoints -delete"
+"try:\n",
+"    %store -r model_storage_dir repo_storage_dir\n",
+"    test = [model_storage_dir, repo_storage_dir]\n",
+"except NameError as e:\n",
+"    print(\"There is an issue with your variables.\")\n",
+"    print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
+"    print('Error:', e)\n",
+"    import sys\n",
+"    sys.exit(1)\n",
+"import subprocess\n",
+"!find /notebooks/ -type d -name .ipynb_checkpoints -type d -exec rm -rv {} +\n",
+"s = subprocess.run(f'find \"{repo_storage_dir}\" -type d -name .ipynb_checkpoints -exec rm -rv {{}} +', shell=True)"
 ]
},
{
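A pure-Python version of the same sweep — a sketch using `rglob` plus `shutil.rmtree`, with no dependence on `find(1)`:

```python
import shutil
from pathlib import Path

# repo_storage_dir comes from the notebook's settings cell.
# Materialize the matches first so removing trees doesn't upset the walk.
for ckpt_dir in list(Path(repo_storage_dir).rglob('.ipynb_checkpoints')):
    if ckpt_dir.is_dir():  # may already be gone if nested under a removed tree
        print('Removing', ckpt_dir)
        shutil.rmtree(ckpt_dir)
```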
@@ -1419,7 +1365,7 @@
 "source": [
 "### Automated Model Downloader\n",
 "\n",
-"Here's a tool to download a model from a torrent magnet link, web link, Google Drive, or HuggingFace."
+"Here's a tool to download a model from a torrent magnet link, web link, Google Drive, HuggingFace, or CivitAI."
 ]
},
{
@@ -1442,44 +1388,88 @@
 "import re\n",
 "import requests\n",
 "import gdown\n",
+"import json\n",
+"from bs4 import BeautifulSoup\n",
+"from markdownify import markdownify\n",
+"import urllib.request\n",
+"from pathlib import Path\n",
 "\n",
+"user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36'\n",
+"\n",
 "def dl_web_file(web_dl_file):\n",
 "    %cd \"{model_storage_dir}\"\n",
+"    # We're going to use aria2 to split the download into threads which will allow us to download\n",
+"    # the file very fast even if the site serves the file slow.\n",
 "    !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
-"    !aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 \"{web_dl_file}\" \n",
+"    !aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue --user-agent=\"{user_agent}\" \"{web_dl_file}\" \n",
 "\n",
+"def is_url(url_str):\n",
+"    return re.search(r'https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|www\\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9]+\\.[^\\s]{2,}|www\\.[a-zA-Z0-9]+\\.[^\\s]{2,}', url_str)\n",
+"\n",
 "magnet_match = re.search(r'magnet:\\?xt=urn:btih:[\\-_A-Za-z0-9&=%.]*', model_uri)\n",
-"web_match = re.search(r'(https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|www\\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9]+\\.[^\\s]{2,}|www\\.[a-zA-Z0-9]+\\.[^\\s]{2,})', model_uri)\n",
+"civitai_match = re.search(r'^https?:\\/\\/(?:www\\.|(?!www))civitai\\.com\\/models\\/\\d*\\/.*?$', model_uri)\n",
+"web_match = is_url(model_uri)\n",
 "\n",
 "if magnet_match:\n",
 "    !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
 "    %cd \"{model_storage_dir}\"\n",
 "    bash_var = magnet_match[0]\n",
-"    !aria2c --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --file-allocation=none \"{bash_var}\"\n",
+"    !aria2c --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --console-log-level=warn --file-allocation=none \"{bash_var}\"\n",
 "    # clean exit here\n",
 "elif 'https://huggingface.co/' in model_uri:\n",
-"    response = requests.head(web_match[0], allow_redirects=True)\n",
+"    response = requests.head(model_uri, allow_redirects=True, headers={'User-Agent': user_agent})\n",
 "    if 'octet-stream' not in response.headers['content-type']:\n",
-"        response = requests.head(web_match[0].replace('/blob/', '/resolve/'), allow_redirects=True)\n",
+"        response = requests.head(model_uri.replace('/blob/', '/resolve/'), allow_redirects=True, headers={'User-Agent': user_agent})\n",
 "        if 'octet-stream' not in response.headers['content-type']:\n",
 "            print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n",
+"            # clean exit here\n",
 "        else:\n",
-"            dl_web_file(web_match[0].replace('/blob/', '/resolve/'))\n",
+"            dl_web_file(model_uri.replace('/blob/', '/resolve/'))\n",
+"            # clean exit here\n",
 "    else:\n",
-"        dl_web_file(web_match[0])\n",
+"        dl_web_file(model_uri)\n",
+"        # clean exit here\n",
 "elif 'https://drive.google.com' in model_uri:\n",
-"    gdrive_file_id, _ = gdown.parse_url.parse_url(web_match[0])\n",
+"    gdrive_file_id, _ = gdown.parse_url.parse_url(model_uri)\n",
 "    %cd \"{model_storage_dir}\"\n",
 "    gdown.download(f\"https://drive.google.com/uc?id={gdrive_file_id}&confirm=t\")\n",
 "    # clean exit here\n",
+"elif civitai_match:\n",
+"    if not is_url(civitai_match[0]):\n",
+"        print('URL does not match known civitai.com pattern.')\n",
+"        # clean exit here\n",
+"    else:\n",
+"        soup = BeautifulSoup(requests.get(model_uri, headers={'User-Agent': user_agent}).text, features=\"html.parser\")\n",
+"        data = json.loads(soup.find('script', {'id': '__NEXT_DATA__'}).text)\n",
+"        model_data = data[\"props\"][\"pageProps\"][\"trpcState\"][\"json\"][\"queries\"][0][\"state\"][\"data\"]\n",
+"        latest_model = model_data['modelVersions'][0]\n",
+"        latest_model_url = f\"https://civitai.com/api/download/models/{latest_model['id']}\"\n",
+"        print('Downloading model:', model_data['name'])\n",
+"        \n",
+"        # Download the description to a markdown file next to the checkpoint\n",
+"        desc = markdownify(model_data['description'])\n",
+"        req = urllib.request.Request(latest_model_url, data=None, headers={'User-Agent': user_agent})\n",
+"        content_disp = urllib.request.urlopen(req).getheader('Content-Disposition')\n",
+"        if content_disp:\n",
+"            filename = Path(re.match(r'attachment; filename=\"(.*?)\"', content_disp)[1]).stem\n",
+"            with open(Path(model_storage_dir, f'{filename}.md'), 'w') as file:\n",
+"                file.write(f\"# {model_data['name']} \\n\")\n",
+"                file.write(f'Original CivitAI URL: {model_uri} \\n\\n <br> \\nhttps://civitai.com/models/3950/art-and-eros-aeros-a-tribute-to-beauty\\n')\n",
+"                file.write(desc)\n",
+"        else:\n",
+"            print('Failed to get filename of checkpoint for markdown file')\n",
+"\n",
+"        dl_web_file(latest_model_url)\n",
+"        # clean exit here\n",
 "elif web_match:\n",
-"    response = requests.head(web_match[0], allow_redirects=True)\n",
+"    # Always do the web match last\n",
+"    response = requests.head(model_uri, allow_redirects=True, headers={'User-Agent': user_agent})\n",
 "    if 'octet-stream' not in response.headers['content-type']:\n",
 "        print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n",
 "        # clean exit here\n",
 "    else:\n",
-"        dl_web_file(web_match[0])\n",
+"        dl_web_file(model_uri)\n",
 "        # clean exit here\n",
 "else:\n",
 "    print('Could not parse your URI.')\n",
 "    # clean exit here"
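Scraping the page's `__NEXT_DATA__` blob is brittle against frontend changes. A hedged alternative sketch: CivitAI also exposes a public REST API — the endpoint and field names below are an assumption, not taken from this commit, so verify them against the current CivitAI API docs before relying on this:

```python
import re
import requests

# Assumed API shape: GET /api/v1/models/<id> returns JSON with a
# 'modelVersions' list whose entries carry a 'downloadUrl'.
# model_uri comes from the downloader cell above.
model_id = re.search(r'civitai\.com/models/(\d+)', model_uri)[1]
data = requests.get(f'https://civitai.com/api/v1/models/{model_id}').json()
download_url = data['modelVersions'][0]['downloadUrl']
print('Latest version download:', download_url)
```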