stable-diffusion-webui/webui.py

import os
import threading
import time
import importlib
import signal

from fastapi import FastAPI
from fastapi.middleware.gzip import GZipMiddleware

from modules.paths import script_path
from modules import devices, sd_samplers
import modules.codeformer_model as codeformer
import modules.extras
import modules.face_restoration
import modules.gfpgan_model as gfpgan
import modules.img2img
import modules.lowvram
import modules.paths
import modules.scripts
import modules.sd_hijack
import modules.sd_models
import modules.shared as shared
import modules.txt2img
import modules.ui
from modules import modelloader
from modules.shared import cmd_opts
import modules.hypernetworks.hypernetwork


queue_lock = threading.Lock()


def wrap_queued_call(func):
    def f(*args, **kwargs):
        with queue_lock:
            res = func(*args, **kwargs)

        return res

    return f


def wrap_gradio_gpu_call(func, extra_outputs=None):
    def f(*args, **kwargs):
        devices.torch_gc()

        # reset shared progress/preview state before starting a new job
        shared.state.sampling_step = 0
        shared.state.job_count = -1
        shared.state.job_no = 0
        shared.state.job_timestamp = shared.state.get_job_timestamp()
        shared.state.current_latent = None
        shared.state.current_image = None
        shared.state.current_image_sampling_step = 0
        shared.state.skipped = False
        shared.state.interrupted = False
        shared.state.textinfo = None

        with queue_lock:
            res = func(*args, **kwargs)

        shared.state.job = ""
        shared.state.job_count = 0

        devices.torch_gc()

        return res

    return modules.ui.wrap_gradio_call(f, extra_outputs=extra_outputs)
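
# Note: in modules/ui.py the generation callbacks are wrapped with this helper, roughly
# fn=wrap_gradio_gpu_call(modules.txt2img.txt2img), so each job resets the shared progress
# state and runs its GPU work while holding queue_lock.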


def initialize():
    modelloader.cleanup_models()
    modules.sd_models.setup_model()
    codeformer.setup_model(cmd_opts.codeformer_models_path)
    gfpgan.setup_model(cmd_opts.gfpgan_models_path)
    shared.face_restorers.append(modules.face_restoration.FaceRestoration())
    modelloader.load_upscalers()
    modules.scripts.load_scripts()

    modules.sd_models.load_model()
    shared.opts.onchange("sd_model_checkpoint", wrap_queued_call(lambda: modules.sd_models.reload_model_weights(shared.sd_model)))
    shared.opts.onchange("sd_hypernetwork", wrap_queued_call(lambda: modules.hypernetworks.hypernetwork.load_hypernetwork(shared.opts.sd_hypernetwork)))
    shared.opts.onchange("sd_hypernetwork_strength", modules.hypernetworks.hypernetwork.apply_strength)

    # make the program just exit at ctrl+c without waiting for anything
    def sigint_handler(sig, frame):
        print(f'Interrupted with signal {sig} in {frame}')
        os._exit(0)

    signal.signal(signal.SIGINT, sigint_handler)


def create_api(app):
    from modules.api.api import Api
    api = Api(app, queue_lock)
    return api
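
# Api (modules/api/api.py) mounts the JSON generation endpoints on the given FastAPI app and
# reuses queue_lock, so API requests are serialized with jobs started from the Gradio UI.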


def wait_on_server(demo=None):
    while 1:
        time.sleep(0.5)
        if demo and getattr(demo, 'do_restart', False):
            time.sleep(0.5)
            demo.close()
            time.sleep(0.5)
            break
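
# Note: do_restart is set on the Gradio app object by the UI (e.g. a restart action in the
# settings tab); once it appears, the server is closed and webui()'s loop rebuilds the interface.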


def api_only():
    initialize()

    app = FastAPI()
    app.add_middleware(GZipMiddleware, minimum_size=1000)
    api = create_api(app)

    api.launch(server_name="0.0.0.0" if cmd_opts.listen else "127.0.0.1", port=cmd_opts.port if cmd_opts.port else 7861)


def webui():
    launch_api = cmd_opts.api
    initialize()

    while 1:
        demo = modules.ui.create_ui(wrap_gradio_gpu_call=wrap_gradio_gpu_call)

        app, local_url, share_url = demo.launch(
            share=cmd_opts.share,
            server_name="0.0.0.0" if cmd_opts.listen else None,
            server_port=cmd_opts.port,
            debug=cmd_opts.gradio_debug,
            auth=[tuple(cred.split(':')) for cred in cmd_opts.gradio_auth.strip('"').split(',')] if cmd_opts.gradio_auth else None,
            inbrowser=cmd_opts.autolaunch,
            prevent_thread_lock=True
        )
        # after initial launch, disable --autolaunch for subsequent restarts
        cmd_opts.autolaunch = False

        app.add_middleware(GZipMiddleware, minimum_size=1000)

        if launch_api:
            create_api(app)

        wait_on_server(demo)

        sd_samplers.set_samplers()

        print('Reloading Custom Scripts')
        modules.scripts.reload_scripts()
        print('Reloading modules: modules.ui')
        importlib.reload(modules.ui)
        print('Refreshing Model List')
        modules.sd_models.list_models()
        print('Restarting Gradio')


task = []


if __name__ == "__main__":
    if cmd_opts.nowebui:
        api_only()
    else:
        webui()
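
# Typical direct invocations (flag names assumed to mirror the cmd_opts attributes used above,
# defined in modules/shared.py):
#   python webui.py                        # full Gradio UI
#   python webui.py --listen --port 7860   # UI reachable from other machines
#   python webui.py --nowebui              # API-only server (defaults to port 7861)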