- Downloader: fix HF file names
- Downloader: fix civitai.com content-type
- New xformers builder
- Also symlink model_storage_dir
- Added a notice to use old commits if something is broken
- Remove --force-reinstall from xformers wheel install which was breaking the pytorch installation
- Add option for --gradio_queue
- Re-added custom scripts downloader as a tool
- Reorganize tools section
- Misc. fixes
This commit is contained in:
Drake Panzer 2023-01-26 21:10:42 -07:00
parent 6008841d36
commit abca842b30
No known key found for this signature in database
GPG Key ID: 194A1C358AACFC39
2 changed files with 423 additions and 189 deletions

View File

@ -15,6 +15,12 @@ This notebook is designed to automate pretty much the entire process of getting
Guide: [docs/Paperspace Guide for Idiots.md](https://github.com/Engineer-of-Stuff/stable-diffusion-paperspace/blob/main/docs/Paperspace%20Guide%20for%20Idiots.md)
<br>
Did I break something with a new update? You can download an old version of this notebook here: https://github.com/Engineer-of-Stuff/stable-diffusion-paperspace/commits/master
<br>
### xformers
I've compiled wheels for a bunch of GPUs → https://github.com/Cyberes/xformers-compiled

View File

@ -22,7 +22,11 @@
"- [Artist Name Prompts](https://sgreens.notion.site/sgreens/4ca6f4e229e24da6845b6d49e6b08ae7?v=fdf861d1c65d456e98904fe3f3670bd3)\n",
"- [Stable Diffusion Models](https://cyberes.github.io/stable-diffusion-models)\n",
"- [Textual Inversion Models](https://cyberes.github.io/stable-diffusion-textual-inversion-models/)\n",
"- [Have I Been Trained?](https://haveibeentrained.com/)"
"- [Have I Been Trained?](https://haveibeentrained.com/)\n",
"\n",
"<br>\n",
"\n",
"Did I break something with a new update? You can download an old version of this notebook here: https://github.com/Engineer-of-Stuff/stable-diffusion-paperspace/commits/master"
]
},
{
@ -97,7 +101,7 @@
" # This disables online Gradio app mode and you will only be able to access it on your local network.\n",
"\n",
"gradio_auth = False # Enable gradio_auth and insecure-extension-access option.\n",
" # Set to \"me:password\" to enable.\n",
" # Set to a username:password (for example: \"me:password\") to enable.\n",
"\n",
"search_paperspace_datasets = True # Enable searching for checkpoints in /datasets to link to the webui\n",
"\n",
@ -108,10 +112,12 @@
" # Instead, use gradio_auth which will automatically enable extensions when set.\n",
"\n",
"export_storage_dir = '/notebooks/exports' # Where the generated images will be exported to.\n",
" \n",
"\n",
"gradio_queue = False # Uses gradio queue; experimental option; breaks restart UI button.\n",
"\n",
"# ===================================================================================================\n",
"# Save variables to Jupyter's temp storage so we can access it even if the kernel restarts.\n",
"%store symlink_to_notebooks model_storage_dir repo_storage_dir export_storage_dir activate_xformers link_novelai_anime_vae activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth search_paperspace_datasets ui_theme insecure_extension_access pip_cache_dir"
"%store symlink_to_notebooks model_storage_dir repo_storage_dir export_storage_dir activate_xformers link_novelai_anime_vae activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth search_paperspace_datasets ui_theme insecure_extension_access pip_cache_dir gradio_queue"
]
},
{
@ -174,6 +180,7 @@
" (repo_storage_dir / 'stable-diffusion-webui' / 'outputs', Path('/notebooks/outputs')),\n",
" (repo_storage_dir / 'stable-diffusion-webui' / 'log', repo_storage_dir / 'stable-diffusion-webui' / 'outputs' / 'log'),\n",
" (Path('/storage'), Path('/notebooks/storage')),\n",
" (Path(model_storage_dir), Path('/notebooks/models')),\n",
" ]\n",
"\n",
"if symlink_to_notebooks and repo_storage_dir != '/notebooks':\n",
@ -219,6 +226,7 @@
" import sys\n",
" sys.exit(1)\n",
"\n",
"from pathlib import Path\n",
"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"\n",
"!pip install --upgrade pip\n",
@ -253,37 +261,49 @@
"if activate_xformers:\n",
" print('Installing xformers...')\n",
" import subprocess\n",
" def download_release(url):\n",
" binary = 'xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl' # have to save the binary as a specific name that pip likes\n",
" from glob import glob\n",
" def download_release(url, binary_name='xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl'):\n",
" tmp_dir = subprocess.check_output(['mktemp', '-d']).decode('ascii').strip('\\n')\n",
" !wget \"{url}\" -O \"{tmp_dir}/{binary}\"\n",
" return os.path.join(tmp_dir, binary)\n",
"\n",
" # Set up pip packages\n",
" !pip uninstall -y torch torchvision torchaudio # Remove existing pytorch install.\n",
" !pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu113 # Install pytorch for cuda 11.3\n",
" s = subprocess.getoutput('nvidia-smi')\n",
" if 'A4000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A4000-Oct-28-2022/a4000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'A5000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A5000-Nov-1-2022/a5000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'A6000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A6000-Nov-1-2022/a6000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'P5000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/P5000-Nov-1-2022/p5000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'RTX 4000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/RTX-4000-Nov-1-2022/rtx4000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'RTX 5000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/RTX-5000-Nov-1-2022/rtx5000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'A100' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A100-Nov-1-2022/a100-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'M4000' in s:\n",
" print('xformers for M4000 hasn\\'t been built yet.')\n",
" # xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A100-Nov-1-2022/a100-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" !wget \"{url}\" -O \"{tmp_dir}/{binary_name}\"\n",
" return os.path.join(tmp_dir, binary_name)\n",
" \n",
" xformers_whl = None\n",
" found_xformers_whls = glob('/notebooks/xformers-*')\n",
" if len(found_xformers_whls) == 1:\n",
" xformers_whl = found_xformers_whls[0]\n",
" delete_whl = False\n",
" elif len(found_xformers_whls) > 1:\n",
" print('Found more than one Xformers wheel in /notebooks so not doing anything!')\n",
" else:\n",
" print('GPU not matched to xformers binary so a one-size-fits-all binary was installed. If you have any issues, please build xformers using the Tools block below.')\n",
" xformers_whl = download_release('https://raw.githubusercontent.com/Cyberes/xformers-compiled/main/various/xformers-0.0.14.dev0-cp37-cp37m-linux_x86_64.whl')\n",
" !pip install --force-reinstall \"{xformers_whl}\"\n",
" delete_whl = True\n",
" # Set up pip packages\n",
" # !pip uninstall -y torch torchvision torchaudio # Remove existing pytorch install.\n",
" # !pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu113 # Install pytorch for cuda 11.3\n",
" s = subprocess.getoutput('nvidia-smi')\n",
" if 'A4000' in s:\n",
" xformers_whl = download_release('https://raw.githubusercontent.com/Cyberes/xformers-compiled/main/a4000/xformers-0.0.16%2B6f3c20f.d20230127-cp39-cp39-linux_x86_64.whl')\n",
" elif 'A5000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A5000-Nov-1-2022/a5000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'A6000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A6000-Nov-1-2022/a6000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'P5000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/P5000-Nov-1-2022/p5000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'RTX 4000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/RTX-4000-Nov-1-2022/rtx4000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'RTX 5000' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/RTX-5000-Nov-1-2022/rtx5000-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'A100' in s:\n",
" xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A100-Nov-1-2022/a100-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" elif 'M4000' in s:\n",
" print('xformers for M4000 hasn\\'t been built yet.')\n",
" # xformers_whl = download_release('https://github.com/Cyberes/xformers-compiled/releases/download/A100-Nov-1-2022/a100-xformers-0.0.14.dev0-cp39-cp39-linux_x86_64.whl')\n",
" else:\n",
" print('GPU not matched to xformers binary so a one-size-fits-all binary was installed. If you have any issues, please build xformers using the Tools block below.')\n",
" xformers_whl = download_release('https://raw.githubusercontent.com/Cyberes/xformers-compiled/main/various/xformers-0.0.14.dev0-cp37-cp37m-linux_x86_64.whl')\n",
" if xformers_whl:\n",
" !pip install --force-reinstall \"{xformers_whl}\"\n",
" if delete_whl:\n",
" !rm -rf \"{xformers_whl}\"\n",
"\n",
"# Make sure important directories exists\n",
"!mkdir -p \"{model_storage_dir}/hypernetworks\"\n",
@ -313,7 +333,11 @@
"\n",
"<br>\n",
"\n",
"**There are additional models available here: https://cyberes.github.io/stable-diffusion-models**\n",
"**Want a model that isn't listed? [Use the automated model downloader!](#Automated-Model-Downloader)**\n",
"\n",
"<br>\n",
"\n",
"There are additional models available here: https://cyberes.github.io/stable-diffusion-models\n",
"\n",
"Textual inversion: https://cyberes.github.io/stable-diffusion-textual-inversion-models\n",
"\n",
@ -1085,8 +1109,8 @@
"outputs": [],
"source": [
"try:\n",
" %store -r model_storage_dir repo_storage_dir activate_xformers activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth ui_theme insecure_extension_access\n",
" test = [model_storage_dir, repo_storage_dir, activate_xformers, activate_deepdanbooru, activate_medvram, disable_pickle_check, gradio_port, gradio_auth, ui_theme, insecure_extension_access]\n",
" %store -r model_storage_dir repo_storage_dir activate_xformers activate_deepdanbooru activate_medvram disable_pickle_check gradio_port gradio_auth ui_theme insecure_extension_access gradio_queue\n",
" test = [model_storage_dir, repo_storage_dir, activate_xformers, activate_deepdanbooru, activate_medvram, disable_pickle_check, gradio_port, gradio_auth, ui_theme, insecure_extension_access, gradio_queue]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
@ -1094,6 +1118,7 @@
" import sys\n",
" sys.exit(1)\n",
"\n",
"from pathlib import Path\n",
"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"\n",
"# Code to set the options you want as defined in the very first block\n",
@ -1105,9 +1130,10 @@
"auth = f'--gradio-auth {gradio_auth} --enable-insecure-extension-access' if gradio_auth else ''\n",
"theme = f'--theme {ui_theme}' if ui_theme else ''\n",
"insecure_extension_access = '--enable-insecure-extension-access' if insecure_extension_access else ''\n",
"queue = '--gradio-queue' if gradio_queue else ''\n",
"\n",
"# Launch args go below:\n",
"!python webui.py {x_arg} {dd_arg} {mvram_arg} {pickled} {port} {auth} {theme} --gradio-debug"
"!python webui.py {x_arg} {dd_arg} {mvram_arg} {pickled} {port} {auth} {theme} {queue} # --gradio-debug"
]
},
{
@ -1196,26 +1222,18 @@
},
{
"cell_type": "markdown",
"metadata": {},
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Show graphics card info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!nvidia-smi"
"### Update this notebook from Github"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Download the latest version of this notebook from Github\n",
"\n",
"Run this and refresh the page (press F5). Don't save anything or you will overwrite the downloaded file."
]
},
@ -1233,139 +1251,20 @@
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Reset Repository\n",
"\n",
"Sometimes AUTOMATIC1111 breaks something. Go to https://github.com/AUTOMATIC1111/stable-diffusion-webui/commits/master and choose a commit to revert to.\n",
"\n",
"If you're looking for a specific date, do: `git log --since='Sept 17 2022' --until='Sept 18 2022'`\n",
"\n",
"\n",
"**This shouldn't delete your outputs or any changes you've made to files, but I'd back up anything important just to be safe.**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" %store -r model_storage_dir repo_storage_dir\n",
" test = [model_storage_dir, repo_storage_dir]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
" print('Error:', e)\n",
" import sys\n",
" sys.exit(1)\n",
"\n",
"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"!git reset --hard <commit>"
"### Automated Model Downloader"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Delete .ipynb_checkpoints\n",
"Here's a tool to download a model from a torrent magnet link, web link, Google Drive, HuggingFace, or CivitAI.\n",
"\n",
"Jupyter stores temporary files in folders named `.ipynb_checkpoints`. It gets a little excessive sometimes so if you're running low on storage space or getting weird errors about a directory named `.ipynb_checkpoints`, run this block."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" %store -r model_storage_dir repo_storage_dir\n",
" test = [model_storage_dir, repo_storage_dir]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
" print('Error:', e)\n",
" import sys\n",
" sys.exit(1)\n",
"import subprocess\n",
"!find /notebooks/ -type d -name .ipynb_checkpoints -type d -exec rm -rv {} +\n",
"s = subprocess.run(f'find \"{repo_storage_dir}\" -type d -name .ipynb_checkpoints -exec rm -rv {{}} +', shell=True)"
]
},
{
"cell_type": "markdown",
"metadata": {
"tags": []
},
"source": [
"### Reset storage\n",
"\n",
"This will delete ALL your files in `/notebooks/`, `/storage/`, `model_storage_dir`, and `repo_storage_dir`. Use if you're having issues with zero storage space and you don't want to delete your notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Uncomment the lines below to run this block. You can highlight the lines and do ctrl + /\n",
"# %store -r model_storage_dir repo_storage_dir\n",
"# try:\n",
"# test = [model_storage_dir, repo_storage_dir]\n",
"# except NameError as e:\n",
"# print(\"There is an issue with your variables.\")\n",
"# print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
"# print('Error:', e)\n",
"# import sys\n",
"# sys.exit(1)\n",
"# !rm -rf /storage/*\n",
"# !mv /notebooks/*.ipynb / # move the notebook out of the directory before we nuke it\n",
"# !rm -rf /notebooks/*\n",
"# !mv /*.ipynb /notebooks/ # move it back\n",
"# !rm -rf {model_storage_dir}\n",
"# !rm -rf {repo_storage_dir}"
]
},
{
"cell_type": "markdown",
"metadata": {
"tags": []
},
"source": [
"### Build and Install Xformers\n",
"\n",
"This is an advanced feature that should boost your generation speeds.\n",
"\n",
"1. Run the block below to download the install script to `/notebooks/`\n",
"2. Go to https://developer.nvidia.com/cuda-gpus and find the Cuda arch for your GPU model. It's likely 7.5, but double check.\n",
"3. Once you have read these instructions, uncomment the second line and insert your Cuda arch.\n",
"4. Enable xformers in the settings block above.\n",
"\n",
"If you have any issues, open the Jupyter Lab and run `build-xformers.sh` from the terminal."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!wget https://raw.githubusercontent.com/Engineer-of-Stuff/stable-diffusion-paperspace/main/other/build-xformers.sh -O /notebooks/build-xformers.sh\n",
"# !bash /notebooks/build-xformers.sh [your cuda arch]\n",
"!echo \"COMPLETED!\""
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Automated Model Downloader\n",
"\n",
"Here's a tool to download a model from a torrent magnet link, web link, Google Drive, HuggingFace, or CivitAI."
"Websites may update and this download could go out of date. If you encounter any problems, please [open an issue](https://github.com/Engineer-of-Stuff/stable-diffusion-paperspace/issues/new) on this notebook's repository."
]
},
{
@ -1393,19 +1292,24 @@
"from markdownify import markdownify\n",
"import urllib.request\n",
"from pathlib import Path\n",
"import os\n",
"\n",
"user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36'\n",
"\n",
"def dl_web_file(web_dl_file):\n",
" %cd \"{model_storage_dir}\"\n",
" # We're going to use aria2 to split the download into threads which will allow us to download\n",
" # the file very fast even if the site serves the file slow.\n",
" !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
" !aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue --user-agent=\"{user_agent}\" \"{web_dl_file}\" \n",
"\n",
"def is_url(url_str):\n",
" return re.search(r'https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|www\\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\\.[^\\s]{2,}|https?:\\/\\/(?:www\\.|(?!www))[a-zA-Z0-9]+\\.[^\\s]{2,}|www\\.[a-zA-Z0-9]+\\.[^\\s]{2,}', url_str)\n",
"\n",
"def dl_web_file(web_dl_file, filename=None):\n",
" web_dl_file = is_url(web_dl_file)[0] # clean the URL string\n",
" !if [ $(dpkg-query -W -f='${Status}' aria2 2>/dev/null | grep -c \"ok installed\") = 0 ]; then sudo apt update && sudo apt install -y aria2; fi\n",
" if filename:\n",
"        filename_cmd = f'--out=\"{filename}\"'\n",
" else:\n",
" filename_cmd = ''\n",
" # We're going to use aria2 to split the download into threads which will allow us to download\n",
" # the file very fast even if the site serves the file slow.\n",
" !cd \"{model_storage_dir}\" && aria2c --file-allocation=none -c -x 16 -s 16 --summary-interval=0 --console-log-level=warn --continue --user-agent \"{user_agent}\" {filename_cmd} \"{web_dl_file}\" \n",
"\n",
"magnet_match = re.search(r'magnet:\\?xt=urn:btih:[\\-_A-Za-z0-9&=%.]*', model_uri)\n",
"civitai_match = re.search(r'^https?:\\/\\/(?:www\\.|(?!www))civitai\\.com\\/models\\/\\d*\\/.*?$', model_uri)\n",
"web_match = is_url(model_uri)\n",
@ -1417,6 +1321,8 @@
" !aria2c --seed-time=0 --max-overall-upload-limit=1K --bt-max-peers=120 --summary-interval=0 --console-log-level=warn --file-allocation=none \"{bash_var}\"\n",
" # clean exit here\n",
"elif 'https://huggingface.co/' in model_uri:\n",
" from urllib.parse import urlparse\n",
" filename = os.path.basename(urlparse(model_uri.replace('/blob/', '/resolve/')).path)\n",
" response = requests.head(model_uri, allow_redirects=True, headers={'User-Agent': user_agent})\n",
" if 'octet-stream' not in response.headers['content-type']:\n",
" response = requests.head(model_uri.replace('/blob/', '/resolve/'), allow_redirects=True, headers={'User-Agent': user_agent})\n",
@ -1424,10 +1330,10 @@
" print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n",
" # clean exit here\n",
" else:\n",
" dl_web_file(model_uri.replace('/blob/', '/resolve/'))\n",
" dl_web_file(model_uri.replace('/blob/', '/resolve/'), filename)\n",
" # clean exit here\n",
" else:\n",
" dl_web_file(model_uri)\n",
" dl_web_file(model_uri, filename)\n",
" # clean exit here\n",
"elif 'https://drive.google.com' in model_uri:\n",
" gdrive_file_id, _ = gdown.parse_url.parse_url(model_uri)\n",
@ -1458,22 +1364,344 @@
" file.write(desc)\n",
" else:\n",
" print('Failed to get filename of checkpoint for markdown file')\n",
"\n",
" dl_web_file(latest_model_url)\n",
" # clean exit here\n",
"elif web_match:\n",
" # Always do the web match last\n",
" response = requests.head(model_uri, allow_redirects=True, headers={'User-Agent': user_agent})\n",
" if 'octet-stream' not in response.headers['content-type']:\n",
" print(f'Wrong content-type: {response.headers[\"content-type\"].split(\";\")[0]}')\n",
" # clean exit here\n",
" with requests.get('https://civitai.com/api/download/models/5848', allow_redirects=True, stream=True, headers={'User-Agent': user_agent}) as r:\n",
"        # Using GET since some servers respond differently to HEAD.\n",
" # Using `with` so we can close the connection and not download the entire file.\n",
" response = r\n",
" r.close()\n",
" if response.headers.get('content-type') or response.headers.get('content-disposition'):\n",
" if 'octet-stream' in response.headers.get('content-type', '') or 'attachment' in response.headers.get('content-disposition', ''):\n",
" dl_web_file(model_uri)\n",
" # clean exit here\n",
" else:\n",
" print('Required HTTP headers are incorrect:', end='\\n\\n')\n",
" print('Content-Type:', response.headers['content-type'].split(\";\")[0] if response.headers.get('content-type') else 'None')\n",
" print('Must be \"application/octet-stream\"', end='\\n\\n')\n",
" print('Content-Disposition:', response.headers['content-disposition'] if response.headers.get('content-disposition') else 'None')\n",
" print('Must start with \"attachment;\"')\n",
" # clean exit here\n",
" else:\n",
" dl_web_file(model_uri)\n",
" # clean exit here\n",
"    print('Required HTTP headers are missing. You need at least one of these:', end='\\n\\n')\n",
" print('Content-Type:', response.headers['content-type'].split(\";\")[0] if response.headers.get('content-type') else 'None')\n",
" print('Must be \"application/octet-stream\"', end='\\n\\n')\n",
" print('Content-Disposition:', response.headers['content-disposition'] if response.headers.get('content-disposition') else 'None')\n",
" print('Must start with \"attachment;\"')\n",
"else:\n",
" print('Could not parse your URI.')\n",
" # clean exit here"
]
},
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Custom Scripts Collection"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Custom scripts are an easy way to add simple functionality to the WebUI. The custom script system has been replaced by extensions that provide similar functionality but some people still rely on these scripts. This block will install some of the most popular scripts. Just note that some of these scripts may be outdated and possibly not work.\n",
"\n",
"https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Custom-Scripts"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" %store -r model_storage_dir repo_storage_dir\n",
" test = [model_storage_dir, repo_storage_dir]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
" print('Error:', e)\n",
" import sys\n",
" sys.exit(1)\n",
"\n",
"import shutil\n",
"import requests\n",
"!pip install moviepy==1.0.3\n",
"!apt update\n",
"!apt install -y potrace python3-tk\n",
"\n",
"def update_repo_if_not_exists(path, repo_clone_url, pre=None):\n",
" if pre is not None:\n",
" pre() \n",
" if not os.path.exists(path):\n",
" !git clone \"{repo_clone_url}\" \"{path}\"\n",
" else:\n",
" print(f'{repo_clone_url.split(\"/\")[-1]} already downloaded, updating...')\n",
" !cd \"{path}\" && git pull # no % so we don't interfere with the main process\n",
"\n",
"def download_file_dir(url, output_dir):\n",
" # output_dir must have a trailing slash\n",
" local_filename = url.split('/')[-1]\n",
" with requests.get(url, stream=True) as r:\n",
" r.raise_for_status()\n",
" with open(f'{output_dir}{local_filename}', 'wb') as f:\n",
" for chunk in r.iter_content(chunk_size=8192):\n",
" f.write(chunk)\n",
" return local_filename\n",
"def do_script_download(scripts_list, domain, path):\n",
" for item in scripts_list:\n",
" download_file_dir(f'https://{domain}/{item}', path)\n",
" print(f'{item.split(\"/\")[-1]} downloaded...')\n",
"\n",
"repo_storage_dir = Path(repo_storage_dir)\n",
"webui_dir = repo_storage_dir / 'stable-diffusion-webui'\n",
"scripts_dir = webui_dir / 'scripts'\n",
" \n",
"do_script_download([\n",
" 'GRMrGecko/stable-diffusion-webui-automatic/advanced_matrix/scripts/advanced_prompt_matrix.py',\n",
" 'dfaker/stable-diffusion-webui-cv2-external-masking-script/main/external_masking.py',\n",
" 'memes-forever/Stable-diffusion-webui-video/main/videos.py',\n",
" 'yownas/seed_travel/main/scripts/seed_travel.py',\n",
" 'Animator-Anon/Animator/main/animation.py',\n",
" 'Filarius/stable-diffusion-webui/master/scripts/vid2vid.py',\n",
" 'GeorgLegato/Txt2Vectorgraphics/main/txt2vectorgfx.py',\n",
" 'yownas/shift-attention/main/scripts/shift_attention.py',\n",
" 'DiceOwl/StableDiffusionStuff/main/loopback_superimpose.py',\n",
" 'Engineer-of-Stuff/stable-diffusion-paperspace/main/other/save_steps.py',\n",
" 'Pfaeff/sd-web-ui-scripts/main/moisaic.py'\n",
"], 'raw.githubusercontent.com', scripts_dir)\n",
"\n",
"do_script_download([\n",
" 'dfaker/f88aa62e3a14b559fe4e5f6b345db664/raw/791dabfa0ab26399aa2635bcbc1cf6267aa4ffc2/alternate_sampler_noise_schedules.py',\n",
" 'camenduru/9ec5f8141db9902e375967e93250860f/raw/c1a03eb447548adbef1858c0e69d3567a390d2f4/run_n_times.py'\n",
"], 'gist.githubusercontent.com', scripts_dir)\n",
"\n",
"# Download and set up txt2img2img\n",
"update_repo_if_not_exists(webui_dir / 'txt2img2img_root', 'https://github.com/ThereforeGames/txt2img2img.git')\n",
"!cp -r \"{webui_dir}/txt2img2img_root/scripts\" \"{webui_dir}\"\n",
"!cp -r \"{webui_dir}/txt2img2img_root/txt2img2img\" \"{webui_dir}\"\n",
"!cp -r \"{webui_dir}/txt2img2img_root/venv\" \"{webui_dir}\"\n",
"\n",
"# Download and set up txt2mask\n",
"update_repo_if_not_exists(webui_dir / 'txt2mask', 'https://github.com/ThereforeGames/txt2mask.git')\n",
"!echo \"Copying txt2mask...\"\n",
"!cp -r \"{webui_dir}/txt2mask/repositories/clipseg\" \"{webui_dir}/repositories\"\n",
"!cp -r \"{webui_dir}/txt2mask/scripts/\" \"{webui_dir}/\"\n",
"!echo \"Done!\"\n",
"# Install the dynamic-prompts/wildcard script\n",
"# !git clone https://github.com/adieyal/sd-dynamic-prompting/ extensions/dynamic-prompts"
]
},
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Show graphics card info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!nvidia-smi"
]
},
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Build and Install Xformers"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**After running either of these two blocks you need to re-run the installer block above because you've installed a new Python version.**\n",
"\n",
"First, try installing Xformers through pip:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!pip install xformers"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"If you're still encountering issues then try building it yourself using the next block. You can also try [my old script](https://github.com/Engineer-of-Stuff/stable-diffusion-paperspace/blob/master/other/build-xformers.sh).\n",
"\n",
"This will take over 25 minutes but you should only have to do this once. Leave the Xformers `.whl` in `/notebooks/` and it will automatically be installed by the notebook's installer."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%bash\n",
"apt update && apt install jq\n",
"\n",
"TMP=$(mktemp -d)\n",
"\n",
"cd \"$TMP\"\n",
"git clone --no-checkout https://github.com/facebookresearch/xformers.git\n",
"cd xformers\n",
"LATEST_TAG=$(git describe --tags `git rev-list --tags --max-count=1`)\n",
"echo \"Building version: $LATEST_TAG\"\n",
"\n",
"XFORMERS_DISABLE_FLASH_ATTN=1 NVCC_FLAGS=\"--use_fast_math -DXFORMERS_MEM_EFF_ATTENTION_DISABLE_BACKWARD\" MAX_JOBS=$(nproc) pip wheel --wheel-dir=\"$TMP\" \"git+https://github.com/facebookresearch/xformers.git@$LATEST_TAG#egg=xformers\"\n",
"if [[ $? -eq 0 ]]; then\n",
" echo -e \"Finished!\\nMoving .whl to /notebooks/\"\n",
" cp \"$TMP\"/xformers-* /notebooks/\n",
" echo \"Here is your wheel file:\"\n",
" find /notebooks -name xformers-*.whl\n",
" echo \"Installing your new Xformers wheel...\"\n",
" pip install \"$TMP\"/xformers-*\n",
"fi\n",
"rm -rf \"$TMP\""
]
},
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Reset Repository"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Sometimes AUTOMATIC1111 breaks something. Go to https://github.com/AUTOMATIC1111/stable-diffusion-webui/commits/master and choose a commit to revert to.\n",
"\n",
"If you're looking for a specific date, do: `git log --since='Sept 17 2022' --until='Sept 18 2022'`\n",
"\n",
"**This shouldn't delete your outputs or any changes you've made to files, but I'd back up anything important just to be safe.**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" %store -r model_storage_dir repo_storage_dir\n",
" test = [model_storage_dir, repo_storage_dir]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
" print('Error:', e)\n",
" import sys\n",
" sys.exit(1)\n",
"\n",
"from pathlib import Path\n",
"%cd \"{Path(repo_storage_dir, 'stable-diffusion-webui')}\"\n",
"!git reset --hard <commit>"
]
},
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Delete .ipynb_checkpoints"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Jupyter stores temporary files in folders named `.ipynb_checkpoints`. It gets a little excessive sometimes so if you're running low on storage space or getting weird errors about a directory named `.ipynb_checkpoints`, run this block."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" %store -r model_storage_dir repo_storage_dir\n",
" test = [model_storage_dir, repo_storage_dir]\n",
"except NameError as e:\n",
" print(\"There is an issue with your variables.\")\n",
" print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
" print('Error:', e)\n",
" import sys\n",
" sys.exit(1)\n",
"import subprocess\n",
"!find /notebooks/ -type d -name .ipynb_checkpoints -type d -exec rm -rv {} +\n",
"s = subprocess.run(f'find \"{repo_storage_dir}\" -type d -name .ipynb_checkpoints -exec rm -rv {{}} +', shell=True)"
]
},
{
"cell_type": "markdown",
"metadata": {
"jp-MarkdownHeadingCollapsed": true,
"tags": []
},
"source": [
"### Reset storage"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This will delete ALL your files in `/notebooks/`, `/storage/`, `model_storage_dir`, and `repo_storage_dir`. Use if you're having issues with zero storage space and you don't want to delete your notebook."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Uncomment the lines below to run this block. You can highlight the lines and do ctrl + /\n",
"# %store -r model_storage_dir repo_storage_dir\n",
"# try:\n",
"# test = [model_storage_dir, repo_storage_dir]\n",
"# except NameError as e:\n",
"# print(\"There is an issue with your variables.\")\n",
"# print(\"Please go back to the first block and make sure your settings are correct, then run the cell.\")\n",
"# print('Error:', e)\n",
"# import sys\n",
"# sys.exit(1)\n",
"# !rm -rf /storage/*\n",
"# !mv /notebooks/*.ipynb / # move the notebook out of the directory before we nuke it\n",
"# !rm -rf /notebooks/*\n",
"# !mv /*.ipynb /notebooks/ # move it back\n",
"# !rm -rf {model_storage_dir}\n",
"# !rm -rf {repo_storage_dir}"
]
}
],
"metadata": {