From 2b717bb195a3034853ed45a52c5752f010e1302b Mon Sep 17 00:00:00 2001
From: w-e-w <40751091+w-e-w@users.noreply.github.com>
Date: Tue, 23 Apr 2024 02:35:25 +0900
Subject: [PATCH 01/78] fix initial corrupt model loop
If for some reason the model loaded during webui's initial loading phase is corrupted,
after entering this state the user will not be able to load a model even if a good one is selected, because the unload_model_weights > send_model_to_cpu > m.lowvram attribute check will fail since m is None.
webui will be stuck in this loop, unable to recover without manual intervention.
---
modules/sd_models.py | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/modules/sd_models.py b/modules/sd_models.py
index ff245b7a6..1747ca621 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -659,10 +659,11 @@ def get_empty_cond(sd_model):
def send_model_to_cpu(m):
- if m.lowvram:
- lowvram.send_everything_to_cpu()
- else:
- m.to(devices.cpu)
+ if m is not None:
+ if m.lowvram:
+ lowvram.send_everything_to_cpu()
+ else:
+ m.to(devices.cpu)
devices.torch_gc()
From 4bc39d234d6535e3d8f8531d0c0f4e049261c922 Mon Sep 17 00:00:00 2001
From: w-e-w <40751091+w-e-w@users.noreply.github.com>
Date: Tue, 23 Apr 2024 02:39:45 +0900
Subject: [PATCH 02/78] Show LoRA if model is None
---
.../Lora/ui_extra_networks_lora.py | 23 ++++++++++---------
1 file changed, 12 insertions(+), 11 deletions(-)
diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index b627f7dc2..e35d90c6e 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -60,18 +60,19 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
else:
sd_version = lora_on_disk.sd_version
- if shared.opts.lora_show_all or not enable_filter:
- pass
- elif sd_version == network.SdVersion.Unknown:
- model_version = network.SdVersion.SDXL if shared.sd_model.is_sdxl else network.SdVersion.SD2 if shared.sd_model.is_sd2 else network.SdVersion.SD1
- if model_version.name in shared.opts.lora_hide_unknown_for_versions:
+ if shared.sd_model is not None: # still show LoRA in case an error occurs during initial model loading
+ if shared.opts.lora_show_all or not enable_filter:
+ pass
+ elif sd_version == network.SdVersion.Unknown:
+ model_version = network.SdVersion.SDXL if shared.sd_model.is_sdxl else network.SdVersion.SD2 if shared.sd_model.is_sd2 else network.SdVersion.SD1
+ if model_version.name in shared.opts.lora_hide_unknown_for_versions:
+ return None
+ elif shared.sd_model.is_sdxl and sd_version != network.SdVersion.SDXL:
+ return None
+ elif shared.sd_model.is_sd2 and sd_version != network.SdVersion.SD2:
+ return None
+ elif shared.sd_model.is_sd1 and sd_version != network.SdVersion.SD1:
return None
- elif shared.sd_model.is_sdxl and sd_version != network.SdVersion.SDXL:
- return None
- elif shared.sd_model.is_sd2 and sd_version != network.SdVersion.SD2:
- return None
- elif shared.sd_model.is_sd1 and sd_version != network.SdVersion.SD1:
- return None
return item
From 246c269af87757998f57bb27ddda59fdc7cff976 Mon Sep 17 00:00:00 2001
From: w-e-w <40751091+w-e-w@users.noreply.github.com>
Date: Tue, 23 Apr 2024 03:08:09 +0900
Subject: [PATCH 03/78] add option to check file hash after download
if the sha256 hash does not match, the downloaded file will be automatically deleted
---
modules/modelloader.py | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/modules/modelloader.py b/modules/modelloader.py
index 115415c8e..5421e59b0 100644
--- a/modules/modelloader.py
+++ b/modules/modelloader.py
@@ -23,6 +23,7 @@ def load_file_from_url(
model_dir: str,
progress: bool = True,
file_name: str | None = None,
+ hash_prefix: str | None = None,
) -> str:
"""Download a file from `url` into `model_dir`, using the file present if possible.
@@ -36,11 +37,11 @@ def load_file_from_url(
if not os.path.exists(cached_file):
print(f'Downloading: "{url}" to {cached_file}\n')
from torch.hub import download_url_to_file
- download_url_to_file(url, cached_file, progress=progress)
+ download_url_to_file(url, cached_file, progress=progress, hash_prefix=hash_prefix)
return cached_file
-def load_models(model_path: str, model_url: str = None, command_path: str = None, ext_filter=None, download_name=None, ext_blacklist=None) -> list:
+def load_models(model_path: str, model_url: str = None, command_path: str = None, ext_filter=None, download_name=None, ext_blacklist=None, hash_prefix=None) -> list:
"""
A one-and done loader to try finding the desired models in specified directories.
@@ -49,6 +50,7 @@ def load_models(model_path: str, model_url: str = None, command_path: str = None
@param model_path: The location to store/find models in.
@param command_path: A command-line argument to search for models in first.
@param ext_filter: An optional list of filename extensions to filter by
+ @param hash_prefix: the expected sha256 of the model_url
@return: A list of paths containing the desired model(s)
"""
output = []
@@ -78,7 +80,7 @@ def load_models(model_path: str, model_url: str = None, command_path: str = None
if model_url is not None and len(output) == 0:
if download_name is not None:
- output.append(load_file_from_url(model_url, model_dir=places[0], file_name=download_name))
+ output.append(load_file_from_url(model_url, model_dir=places[0], file_name=download_name, hash_prefix=hash_prefix))
else:
output.append(model_url)
From c69773d7e8f23f8b6c46a8e177b50386e1f1b8e8 Mon Sep 17 00:00:00 2001
From: w-e-w <40751091+w-e-w@users.noreply.github.com>
Date: Tue, 23 Apr 2024 03:08:57 +0900
Subject: [PATCH 04/78] ensure integrity for initial sd model download
---
modules/sd_models.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/modules/sd_models.py b/modules/sd_models.py
index ff245b7a6..35d5952af 100644
--- a/modules/sd_models.py
+++ b/modules/sd_models.py
@@ -149,10 +149,12 @@ def list_models():
cmd_ckpt = shared.cmd_opts.ckpt
if shared.cmd_opts.no_download_sd_model or cmd_ckpt != shared.sd_model_file or os.path.exists(cmd_ckpt):
model_url = None
+ expected_sha256 = None
else:
model_url = f"{shared.hf_endpoint}/runwayml/stable-diffusion-v1-5/resolve/main/v1-5-pruned-emaonly.safetensors"
+ expected_sha256 = '6ce0161689b3853acaa03779ec93eafe75a02f4ced659bee03f50797806fa2fa'
- model_list = modelloader.load_models(model_path=model_path, model_url=model_url, command_path=shared.cmd_opts.ckpt_dir, ext_filter=[".ckpt", ".safetensors"], download_name="v1-5-pruned-emaonly.safetensors", ext_blacklist=[".vae.ckpt", ".vae.safetensors"])
+ model_list = modelloader.load_models(model_path=model_path, model_url=model_url, command_path=shared.cmd_opts.ckpt_dir, ext_filter=[".ckpt", ".safetensors"], download_name="v1-5-pruned-emaonly.safetensors", ext_blacklist=[".vae.ckpt", ".vae.safetensors"], hash_prefix=expected_sha256)
if os.path.exists(cmd_ckpt):
checkpoint_info = CheckpointInfo(cmd_ckpt)
From a1aa0af8a45f4c30f1d3fce5635c090d64d4e55b Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Mon, 22 Apr 2024 23:38:44 -0400
Subject: [PATCH 05/78] add code for skipping CFG on early steps
---
modules/sd_samplers_cfg_denoiser.py | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/modules/sd_samplers_cfg_denoiser.py b/modules/sd_samplers_cfg_denoiser.py
index 93581c9ac..8ccc837aa 100644
--- a/modules/sd_samplers_cfg_denoiser.py
+++ b/modules/sd_samplers_cfg_denoiser.py
@@ -212,6 +212,11 @@ class CFGDenoiser(torch.nn.Module):
uncond = denoiser_params.text_uncond
skip_uncond = False
+ if self.step < shared.opts.skip_cond_steps:
+ skip_uncond = True
+ x_in = x_in[:-batch_size]
+ sigma_in = sigma_in[:-batch_size]
+
# alternating uncond allows for higher thresholds without the quality loss normally expected from raising it
if self.step % 2 and s_min_uncond > 0 and sigma[0] < s_min_uncond and not is_edit_model:
skip_uncond = True
From 8016d78a4b9c8bdd02b0031694ad56553f89161e Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Mon, 22 Apr 2024 23:42:24 -0400
Subject: [PATCH 06/78] add option for early cfg skip
---
modules/shared_options.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/modules/shared_options.py b/modules/shared_options.py
index 326a317e0..2f70ef65a 100644
--- a/modules/shared_options.py
+++ b/modules/shared_options.py
@@ -380,7 +380,8 @@ options_templates.update(options_section(('sampler-params', "Sampler parameters"
'uni_pc_skip_type': OptionInfo("time_uniform", "UniPC skip type", gr.Radio, {"choices": ["time_uniform", "time_quadratic", "logSNR"]}, infotext='UniPC skip type'),
'uni_pc_order': OptionInfo(3, "UniPC order", gr.Slider, {"minimum": 1, "maximum": 50, "step": 1}, infotext='UniPC order').info("must be < sampling steps"),
'uni_pc_lower_order_final': OptionInfo(True, "UniPC lower order final", infotext='UniPC lower order final'),
- 'sd_noise_schedule': OptionInfo("Default", "Noise schedule for sampling", gr.Radio, {"choices": ["Default", "Zero Terminal SNR"]}, infotext="Noise Schedule").info("for use with zero terminal SNR trained models")
+ 'sd_noise_schedule': OptionInfo("Default", "Noise schedule for sampling", gr.Radio, {"choices": ["Default", "Zero Terminal SNR"]}, infotext="Noise Schedule").info("for use with zero terminal SNR trained models"),
+ 'skip_cond_steps': OptionInfo(0, "Skip CFG on first N steps of sampling", gr.Slider, {"minimum": 0, "maximum": 50, "step": 1}, infotext="Skip CFG first steps"),
}))
options_templates.update(options_section(('postprocessing', "Postprocessing", "postprocessing"), {
From 83266205d0b55ddbff34ea36b47f69c5ea11cc28 Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Tue, 23 Apr 2024 00:09:43 -0400
Subject: [PATCH 07/78] Add KL Optimal scheduler
---
modules/sd_schedulers.py | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/modules/sd_schedulers.py b/modules/sd_schedulers.py
index 75eb3ac03..10ae4e081 100644
--- a/modules/sd_schedulers.py
+++ b/modules/sd_schedulers.py
@@ -31,6 +31,15 @@ def sgm_uniform(n, sigma_min, sigma_max, inner_model, device):
return torch.FloatTensor(sigs).to(device)
+def kl_optimal(n, sigma_min, sigma_max, device):
+ alpha_min = torch.arctan(torch.tensor(sigma_min, device=device))
+ alpha_max = torch.arctan(torch.tensor(sigma_max, device=device))
+ sigmas = torch.empty((n+1,), device=device)
+ for i in range(n+1):
+ sigmas[i] = torch.tan((i/n) * alpha_min + (1.0-i/n) * alpha_max)
+ return sigmas
+
+
schedulers = [
Scheduler('automatic', 'Automatic', None),
Scheduler('uniform', 'Uniform', uniform, need_inner_model=True),
@@ -38,6 +47,7 @@ schedulers = [
Scheduler('exponential', 'Exponential', k_diffusion.sampling.get_sigmas_exponential),
Scheduler('polyexponential', 'Polyexponential', k_diffusion.sampling.get_sigmas_polyexponential, default_rho=1.0),
Scheduler('sgm_uniform', 'SGM Uniform', sgm_uniform, need_inner_model=True, aliases=["SGMUniform"]),
+ Scheduler('kl_optimal', 'KL Optimal', kl_optimal),
]
schedulers_map = {**{x.name: x for x in schedulers}, **{x.label: x for x in schedulers}}
From 83182d2799f12ee2b5e5425d750db062ad67eb90 Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Tue, 23 Apr 2024 03:07:25 -0400
Subject: [PATCH 08/78] change skip early cond option name and to float
---
modules/shared_options.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/shared_options.py b/modules/shared_options.py
index 2f70ef65a..91ba72b5e 100644
--- a/modules/shared_options.py
+++ b/modules/shared_options.py
@@ -381,7 +381,7 @@ options_templates.update(options_section(('sampler-params', "Sampler parameters"
'uni_pc_order': OptionInfo(3, "UniPC order", gr.Slider, {"minimum": 1, "maximum": 50, "step": 1}, infotext='UniPC order').info("must be < sampling steps"),
'uni_pc_lower_order_final': OptionInfo(True, "UniPC lower order final", infotext='UniPC lower order final'),
'sd_noise_schedule': OptionInfo("Default", "Noise schedule for sampling", gr.Radio, {"choices": ["Default", "Zero Terminal SNR"]}, infotext="Noise Schedule").info("for use with zero terminal SNR trained models"),
- 'skip_cond_steps': OptionInfo(0, "Skip CFG on first N steps of sampling", gr.Slider, {"minimum": 0, "maximum": 50, "step": 1}, infotext="Skip CFG first steps"),
+ 'skip_early_cond': OptionInfo(0, "Skip CFG during early sampling", gr.Slider, {"minimum": 0.0, "maximum": 1.0, "step": 0.01}, infotext="Skip Early CFG").info("CFG will be disabled (set to 1) on early steps, can both improve sample diversity/quality and speed up sampling"),
}))
options_templates.update(options_section(('postprocessing', "Postprocessing", "postprocessing"), {
From 6e9b69a33853e1bcee81cea6f01cf13de612fef7 Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Tue, 23 Apr 2024 03:08:28 -0400
Subject: [PATCH 09/78] change skip_early_cond code to use float
---
modules/sd_samplers_cfg_denoiser.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_samplers_cfg_denoiser.py b/modules/sd_samplers_cfg_denoiser.py
index 8ccc837aa..fba5c48c0 100644
--- a/modules/sd_samplers_cfg_denoiser.py
+++ b/modules/sd_samplers_cfg_denoiser.py
@@ -212,7 +212,7 @@ class CFGDenoiser(torch.nn.Module):
uncond = denoiser_params.text_uncond
skip_uncond = False
- if self.step < shared.opts.skip_cond_steps:
+ if self.step / self.total_steps <= shared.opts.skip_early_cond:
skip_uncond = True
x_in = x_in[:-batch_size]
sigma_in = sigma_in[:-batch_size]
From 33cbbf9f8b46666a2325c98b723b6cb2ec192ef7 Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Tue, 23 Apr 2024 03:15:00 -0400
Subject: [PATCH 10/78] add s_min_uncond_all option
---
modules/shared_options.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/modules/shared_options.py b/modules/shared_options.py
index 91ba72b5e..c711fa5f6 100644
--- a/modules/shared_options.py
+++ b/modules/shared_options.py
@@ -210,6 +210,7 @@ options_templates.update(options_section(('img2img', "img2img", "sd"), {
options_templates.update(options_section(('optimizations', "Optimizations", "sd"), {
"cross_attention_optimization": OptionInfo("Automatic", "Cross attention optimization", gr.Dropdown, lambda: {"choices": shared_items.cross_attention_optimizations()}),
"s_min_uncond": OptionInfo(0.0, "Negative Guidance minimum sigma", gr.Slider, {"minimum": 0.0, "maximum": 15.0, "step": 0.01}).link("PR", "https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/9177").info("skip negative prompt for some steps when the image is almost ready; 0=disable, higher=faster"),
+ "s_min_uncond_all": OptionInfo(False, "NGMS: Skip every step").info("makes Negative Guidance minimum sigma skip negative guidance on every step instead of only half"),
"token_merging_ratio": OptionInfo(0.0, "Token merging ratio", gr.Slider, {"minimum": 0.0, "maximum": 0.9, "step": 0.1}, infotext='Token merging ratio').link("PR", "https://github.com/AUTOMATIC1111/stable-diffusion-webui/pull/9256").info("0=disable, higher=faster"),
"token_merging_ratio_img2img": OptionInfo(0.0, "Token merging ratio for img2img", gr.Slider, {"minimum": 0.0, "maximum": 0.9, "step": 0.1}).info("only applies if non-zero and overrides above"),
"token_merging_ratio_hr": OptionInfo(0.0, "Token merging ratio for high-res pass", gr.Slider, {"minimum": 0.0, "maximum": 0.9, "step": 0.1}, infotext='Token merging ratio hr').info("only applies if non-zero and overrides above"),
From 029adbe5318b57c04dbc0d92273cce38e1ecf457 Mon Sep 17 00:00:00 2001
From: drhead <1313496+drhead@users.noreply.github.com>
Date: Tue, 23 Apr 2024 03:15:56 -0400
Subject: [PATCH 11/78] implement option to skip uncond on all steps below ngms
---
modules/sd_samplers_cfg_denoiser.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/sd_samplers_cfg_denoiser.py b/modules/sd_samplers_cfg_denoiser.py
index fba5c48c0..082a4f63c 100644
--- a/modules/sd_samplers_cfg_denoiser.py
+++ b/modules/sd_samplers_cfg_denoiser.py
@@ -218,7 +218,7 @@ class CFGDenoiser(torch.nn.Module):
sigma_in = sigma_in[:-batch_size]
# alternating uncond allows for higher thresholds without the quality loss normally expected from raising it
- if self.step % 2 and s_min_uncond > 0 and sigma[0] < s_min_uncond and not is_edit_model:
+ if (self.step % 2 or shared.opts.s_min_uncond_all) and s_min_uncond > 0 and sigma[0] < s_min_uncond and not is_edit_model:
skip_uncond = True
x_in = x_in[:-batch_size]
sigma_in = sigma_in[:-batch_size]
From 9d964d3fc3285b3df877479081968ebf6dbccce4 Mon Sep 17 00:00:00 2001
From: w-e-w <40751091+w-e-w@users.noreply.github.com>
Date: Sat, 27 Apr 2024 19:21:34 +0900
Subject: [PATCH 12/78] no-referrer
---
modules/ui_gradio_extensions.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/ui_gradio_extensions.py b/modules/ui_gradio_extensions.py
index f5278d22f..18fbd6777 100644
--- a/modules/ui_gradio_extensions.py
+++ b/modules/ui_gradio_extensions.py
@@ -50,7 +50,7 @@ def reload_javascript():
def template_response(*args, **kwargs):
res = shared.GradioTemplateResponseOriginal(*args, **kwargs)
- res.body = res.body.replace(b'', f'{js}'.encode("utf8"))
+ res.body = res.body.replace(b'', f'{js}'.encode("utf8"))
res.body = res.body.replace(b'