# NOTE(review): the following header is residue from a Gitea web-view scrape,
# commented out so this file is valid Python.
# This repository has been archived on 2024-10-27. You can view files and clone it, but cannot push or open issues or pull requests.
# local-llm-server/llm_server/workers/threader.py
# 57 lines, 1.5 KiB, Python
# Raw Normal View History
# (blame: 2023-09-29 00:09:44 -06:00)
import time
from threading import Thread
from llm_server import opts
from llm_server.cluster.stores import redis_running_models
from llm_server.cluster.worker import cluster_worker
# (blame: 2023-10-01 00:20:00 -06:00)
from llm_server.routes.v1.generate_stats import generate_stats
# (blame: 2023-09-29 00:09:44 -06:00)
from llm_server.workers.inferencer import start_workers
from llm_server.workers.logger import db_logger
# (blame: 2023-09-29 00:09:44 -06:00)
from llm_server.workers.mainer import main_background_thread
from llm_server.workers.moderator import start_moderation_workers
from llm_server.workers.printer import console_printer
from llm_server.workers.recenter import recent_prompters_thread
def cache_stats():
    """Regenerate the cached stats snapshot every 5 seconds, forever.

    Runs as a daemon thread (see ``start_background``). ``regen=True`` forces
    ``generate_stats`` to rebuild its cache rather than serve a stale copy.
    """
    while True:
        try:
            generate_stats(regen=True)
        except Exception as e:
            # An unhandled exception would silently kill this daemon thread
            # and stats would never refresh again; report and keep looping.
            print(f'Stats cache worker failed: {e!r}')
        time.sleep(5)
def _start_daemon_thread(target, started_msg):
    """Spawn *target* in a daemon thread and announce it on stdout."""
    t = Thread(target=target)
    t.daemon = True  # don't block interpreter shutdown
    t.start()
    print(started_msg)


def start_background():
    """Launch every background worker the server needs.

    Starts the inference and moderation worker pools, then one daemon thread
    each for the main background loop, the stats cacher, the recent-prompters
    tracker, the console printer, the cluster worker, and the DB logger.
    ``redis_running_models`` is flushed before the cluster worker starts so it
    begins from a clean model registry.
    """
    start_workers(opts.cluster_workers)

    _start_daemon_thread(main_background_thread, 'Started the main background thread.')

    start_moderation_workers(opts.cluster_workers * 3)

    _start_daemon_thread(cache_stats, 'Started the stats cacher.')
    _start_daemon_thread(recent_prompters_thread, 'Started the recent proompters thread.')
    _start_daemon_thread(console_printer, 'Started the console printer.')

    # Clear stale model registrations before the cluster worker repopulates them.
    redis_running_models.flush()
    _start_daemon_thread(cluster_worker, 'Started the cluster worker.')

    _start_daemon_thread(db_logger, 'Started background logger.')