Merge pull request #15287 from AUTOMATIC1111/diskcache

use diskcache library for caching
commit f1b090e9e0
AUTOMATIC1111 2024-03-17 23:20:00 +03:00, committed by GitHub
4 changed files with 40 additions and 45 deletions
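
The change replaces the hand-rolled JSON cache in modules/cache.py (a global dict flushed to cache.json by a debounced writer thread) with one diskcache.Cache per subsection, stored under <data_path>/cache. The sketch below is not part of the commit; it only illustrates the diskcache behavior the new code relies on, assuming diskcache==5.6.3: assignments are persisted to disk immediately, which is why dump_cache() can become a no-op.

    import diskcache

    c = diskcache.Cache("cache/hashes")  # one SQLite-backed directory per subsection
    c["some-key"] = {"sha256": "..."}    # write-through: stored on disk at assignment,
                                         # so no background cache-writer thread is needed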

.gitignore

@@ -38,3 +38,4 @@ notification.mp3
 /package-lock.json
 /.coverage*
 /test/test_outputs
+/cache

modules/cache.py

@@ -2,48 +2,47 @@ import json
 import os
 import os.path
 import threading
-import time
+
+import diskcache
+import tqdm
 
 from modules.paths import data_path, script_path
 
 cache_filename = os.environ.get('SD_WEBUI_CACHE_FILE', os.path.join(data_path, "cache.json"))
-cache_data = None
+cache_dir = os.environ.get('SD_WEBUI_CACHE_DIR', os.path.join(data_path, "cache"))
+caches = {}
 cache_lock = threading.Lock()
 
-dump_cache_after = None
-dump_cache_thread = None
-
 
 def dump_cache():
-    """
-    Marks cache for writing to disk. 5 seconds after no one else flags the cache for writing, it is written.
-    """
+    """old function for dumping cache to disk; does nothing since diskcache."""
 
-    global dump_cache_after
-    global dump_cache_thread
-
-    def thread_func():
-        global dump_cache_after
-        global dump_cache_thread
-
-        while dump_cache_after is not None and time.time() < dump_cache_after:
-            time.sleep(1)
-
-        with cache_lock:
-            cache_filename_tmp = cache_filename + "-"
-            with open(cache_filename_tmp, "w", encoding="utf8") as file:
-                json.dump(cache_data, file, indent=4, ensure_ascii=False)
-
-            os.replace(cache_filename_tmp, cache_filename)
-
-            dump_cache_after = None
-            dump_cache_thread = None
-
-    with cache_lock:
-        dump_cache_after = time.time() + 5
-        if dump_cache_thread is None:
-            dump_cache_thread = threading.Thread(name='cache-writer', target=thread_func)
-            dump_cache_thread.start()
+    pass
+
+
+def convert_old_cached_data():
+    try:
+        with open(cache_filename, "r", encoding="utf8") as file:
+            data = json.load(file)
+    except FileNotFoundError:
+        return
+    except Exception:
+        os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
+        print('[ERROR] issue occurred while trying to read cache.json; old cache has been moved to tmp/cache.json')
+        return
+
+    total_count = sum(len(keyvalues) for keyvalues in data.values())
+
+    with tqdm.tqdm(total=total_count, desc="converting cache") as progress:
+        for subsection, keyvalues in data.items():
+            cache_obj = caches.get(subsection)
+            if cache_obj is None:
+                cache_obj = diskcache.Cache(os.path.join(cache_dir, subsection))
+                caches[subsection] = cache_obj
+
+            for key, value in keyvalues.items():
+                cache_obj[key] = value
+                progress.update(1)
 
 
 def cache(subsection):
@@ -54,28 +53,21 @@ def cache(subsection):
         subsection (str): The subsection identifier for the cache.
 
     Returns:
-        dict: The cache data for the specified subsection.
+        diskcache.Cache: The cache data for the specified subsection.
     """
 
-    global cache_data
-
-    if cache_data is None:
+    cache_obj = caches.get(subsection)
+    if not cache_obj:
         with cache_lock:
-            if cache_data is None:
-                try:
-                    with open(cache_filename, "r", encoding="utf8") as file:
-                        cache_data = json.load(file)
-                except FileNotFoundError:
-                    cache_data = {}
-                except Exception:
-                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
-                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
-                    cache_data = {}
+            if not os.path.exists(cache_dir) and os.path.isfile(cache_filename):
+                convert_old_cached_data()
 
-    s = cache_data.get(subsection, {})
-    cache_data[subsection] = s
+            cache_obj = caches.get(subsection)
+            if not cache_obj:
+                cache_obj = diskcache.Cache(os.path.join(cache_dir, subsection))
+                caches[subsection] = cache_obj
 
-    return s
+    return cache_obj
 
 
 def cached_data_for_file(subsection, title, filename, func):
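
After this change, cache("hashes") hands callers a diskcache.Cache rather than a plain dict; diskcache.Cache supports the same dict-style indexing, so existing callers keep working. A hypothetical caller (the subsection name and key below are made up for illustration):

    # Sketch of the new contract, not code from the commit.
    from modules.cache import cache, dump_cache

    hashes = cache("hashes")   # diskcache.Cache under <data_path>/cache/hashes; on first
                               # use, a leftover cache.json is converted by convert_old_cached_data()
    hashes["checkpoint/model.ckpt"] = {"mtime": 0.0}  # persisted on assignment
    print(hashes.get("checkpoint/model.ckpt"))
    dump_cache()               # kept for API compatibility; now a no-op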

requirements.txt

@@ -4,6 +4,7 @@ accelerate
 blendmodes
 clean-fid
+diskcache
 einops
 facexlib
 fastapi>=0.90.1

requirements_versions.txt

@@ -3,6 +3,7 @@ Pillow==9.5.0
 accelerate==0.21.0
 blendmodes==2022
 clean-fid==0.1.35
+diskcache==5.6.3
 einops==0.4.1
 facexlib==0.3.0
 fastapi==0.94.0
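
diskcache stores each cache as a SQLite-backed directory, so unlike the old single cache.json it is safe to use from multiple threads and processes, and contents survive reopening. A quick smoke test of that property (the path is illustrative, assuming diskcache==5.6.3):

    # Not part of the commit; demonstrates persistence across reopen.
    import diskcache

    with diskcache.Cache("/tmp/sd-webui-cache-demo") as c:
        c["key"] = {"value": 1}  # pickled and written to SQLite on disk

    with diskcache.Cache("/tmp/sd-webui-cache-demo") as c:  # reopen, e.g. from another process
        assert c.get("key") == {"value": 1}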