sqlite cache
This commit is contained in:
parent
04b90328c0
commit
df7aa28b98
|
@ -37,3 +37,4 @@ notification.mp3
|
|||
/node_modules
|
||||
/package-lock.json
|
||||
/.coverage*
|
||||
cache_database.db
|
||||
|
|
|
@ -3,10 +3,13 @@ import os
|
|||
import os.path
|
||||
import threading
|
||||
import time
|
||||
import sqlite3
|
||||
|
||||
from modules.paths import data_path, script_path
|
||||
from modules import shared
|
||||
|
||||
cache_filename = os.environ.get('SD_WEBUI_CACHE_FILE', os.path.join(data_path, "cache.json"))
|
||||
cache_db_path = os.environ.get('SD_WEBUI_CACHE_DATABASE', os.path.join(data_path, "cache_database.db"))
|
||||
cache_data = None
|
||||
cache_lock = threading.Lock()
|
||||
|
||||
|
@ -14,6 +17,20 @@ dump_cache_after = None
|
|||
dump_cache_thread = None
|
||||
|
||||
|
||||
def cache_db_to_dict(db_path):
    """Load the entire sqlite cache database into a plain dict.

    Returns a mapping of table name -> {path: {"mtime": ..., "value": ...}},
    mirroring the structure of the JSON file cache. Each row is assumed to be
    (path, mtime, json_value) as created by cache(); the stored value column
    is json-decoded. On any database error, the error is printed and an empty
    dict is returned so callers fall back to an empty cache.
    """
    try:
        # NOTE: sqlite3.connect used as a context manager only commits/rolls
        # back the transaction - it does NOT close the connection. Close it
        # explicitly so the database file handle is released.
        conn = sqlite3.connect(db_path)
        try:
            database_dict = {}
            tables = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
            for (table_name,) in tables:
                table_data = conn.execute(f"SELECT * FROM `{table_name}`").fetchall()
                database_dict[table_name] = {row[0]: {"mtime": row[1], "value": json.loads(row[2])} for row in table_data}
            return database_dict
        finally:
            conn.close()
    except Exception as e:
        print(e)
        return {}
|
||||
|
||||
|
||||
def dump_cache():
|
||||
"""
|
||||
Marks cache for writing to disk. 5 seconds after no one else flags the cache for writing, it is written.
|
||||
|
@ -59,6 +76,21 @@ def cache(subsection):
|
|||
|
||||
global cache_data
|
||||
|
||||
if shared.opts.experimental_sqlite_cache:
|
||||
if cache_data is None:
|
||||
with cache_lock:
|
||||
if cache_data is None:
|
||||
cache_data = cache_db_to_dict(cache_db_path)
|
||||
s = cache_data.get(subsection, {})
|
||||
if not s:
|
||||
try:
|
||||
with sqlite3.connect(cache_db_path) as conn:
|
||||
conn.execute(f'CREATE TABLE IF NOT EXISTS `{subsection}` (path TEXT PRIMARY KEY, mtime REAL, value TEXT)')
|
||||
except Exception as e:
|
||||
print(e)
|
||||
cache_data[subsection] = s
|
||||
return s
|
||||
|
||||
if cache_data is None:
|
||||
with cache_lock:
|
||||
if cache_data is None:
|
||||
|
@ -116,9 +148,22 @@ def cached_data_for_file(subsection, title, filename, func):
|
|||
if value is None:
|
||||
return None
|
||||
|
||||
if shared.opts.experimental_sqlite_cache:
|
||||
try:
|
||||
with cache_lock:
|
||||
with sqlite3.connect(cache_db_path) as conn:
|
||||
insert_or_replace = f"INSERT OR REPLACE INTO `{subsection}` (path, mtime, value) VALUES (?, ?, ?)"
|
||||
conn.execute(insert_or_replace, (title, ondisk_mtime, json.dumps(value)))
|
||||
existing_cache = cache(subsection)
|
||||
existing_cache[title] = {'mtime': ondisk_mtime, 'value': value}
|
||||
return value
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return None
|
||||
|
||||
entry = {'mtime': ondisk_mtime, 'value': value}
|
||||
existing_cache[title] = entry
|
||||
|
||||
dump_cache()
|
||||
if not shared.opts.experimental_sqlite_cache:
|
||||
dump_cache()
|
||||
|
||||
return entry['value']
|
||||
|
|
|
@ -26,7 +26,7 @@ def sha256_from_cache(filename, title, use_addnet_hash=False):
|
|||
if title not in hashes:
|
||||
return None
|
||||
|
||||
cached_sha256 = hashes[title].get("sha256", None)
|
||||
cached_sha256 = hashes[title].get("value" if shared.opts.experimental_sqlite_cache else "sha256", None)
|
||||
cached_mtime = hashes[title].get("mtime", 0)
|
||||
|
||||
if ondisk_mtime > cached_mtime or cached_sha256 is None:
|
||||
|
@ -36,6 +36,22 @@ def sha256_from_cache(filename, title, use_addnet_hash=False):
|
|||
|
||||
|
||||
def sha256(filename, title, use_addnet_hash=False):
|
||||
if shared.opts.experimental_sqlite_cache:
|
||||
if use_addnet_hash:
|
||||
def calculate_addnet_hash_sqlite3():
    """Compute the addnet-style safetensors hash for `filename` (closure var).

    Returns the hash value itself, NOT a one-element set: the result is
    json-serialised by cached_data_for_file, and a set is not JSON-encodable
    (and would also break sha256_from_cache, which expects a plain value).
    """
    with open(filename, "rb") as file:
        return addnet_hash_safetensors(file)
|
||||
return modules.cache.cached_data_for_file("hashes-addnet", title, filename, calculate_addnet_hash_sqlite3)
|
||||
else:
|
||||
def calculate_sha256_sqlite3():
    """Return the hex sha256 digest of `filename` (closure var), read in chunks."""
    digest = hashlib.sha256()
    chunk_size = 1024 * 1024  # 1 MiB chunks so large checkpoint files are streamed
    with open(filename, "rb") as stream:
        while chunk := stream.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()
|
||||
return modules.cache.cached_data_for_file("hashes", title, filename, calculate_sha256_sqlite3)
|
||||
|
||||
hashes = cache("hashes-addnet") if use_addnet_hash else cache("hashes")
|
||||
|
||||
sha256_value = sha256_from_cache(filename, title, use_addnet_hash)
|
||||
|
|
|
@ -109,6 +109,7 @@ options_templates.update(options_section(('system', "System"), {
|
|||
"list_hidden_files": OptionInfo(True, "Load models/files in hidden directories").info("directory is hidden if its name starts with \".\""),
|
||||
"disable_mmap_load_safetensors": OptionInfo(False, "Disable memmapping for loading .safetensors files.").info("fixes very slow loading speed in some cases"),
|
||||
"hide_ldm_prints": OptionInfo(True, "Prevent Stability-AI's ldm/sgm modules from printing noise to console."),
|
||||
"experimental_sqlite_cache": OptionInfo(False, "Use sqlite for cache").needs_restart(),
|
||||
}))
|
||||
|
||||
options_templates.update(options_section(('API', "API"), {
|
||||
|
|
Loading…
Reference in New Issue