reorganize to api v2
parent 114f36e709
commit e0f86d053a
@@ -10,7 +10,7 @@ from llm_server.config.load import load_config, parse_backends
 from llm_server.custom_redis import redis
 from llm_server.database.create import create_db
 from llm_server.routes.queue import priority_queue
-from llm_server.routes.v1.generate_stats import generate_stats
+from llm_server.routes.v2.generate_stats import generate_stats
 from llm_server.workers.threader import start_background
 
 script_path = os.path.dirname(os.path.realpath(__file__))

@@ -4,7 +4,7 @@ from threading import Thread
 from llm_server import opts
 from llm_server.cluster.stores import redis_running_models
 from llm_server.cluster.worker import cluster_worker
-from llm_server.routes.v1.generate_stats import generate_stats
+from llm_server.routes.v2.generate_stats import generate_stats
 from llm_server.workers.inferencer import start_workers
 from llm_server.workers.mainer import main_background_thread
 from llm_server.workers.moderator import start_moderation_workers

@@ -21,7 +21,7 @@ from llm_server.database.create import create_db
 from llm_server.pre_fork import server_startup
 from llm_server.routes.openai import openai_bp
 from llm_server.routes.server_error import handle_server_error
-from llm_server.routes.v1 import bp
+from llm_server.routes.v2 import bp
 from llm_server.sock import init_socketio
 
 # TODO: per-backend workers

@@ -65,7 +65,7 @@ from llm_server.helpers import auto_set_base_client_api
 from llm_server.llm.vllm.info import vllm_info
 from llm_server.custom_redis import flask_cache
 from llm_server.llm import redis
-from llm_server.routes.v1.generate_stats import generate_stats
+from llm_server.routes.v2.generate_stats import generate_stats
 
 app = Flask(__name__)
 init_socketio(app)

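For reference, below is a minimal sketch (not part of this commit) of how the renamed v2 blueprint from the third hunk might be wired into the Flask app shown in the last hunk. The url_prefix value is an assumption for illustration only; this diff does not specify where the blueprint is mounted.

    from flask import Flask

    from llm_server.routes.v2 import bp          # renamed from llm_server.routes.v1 in this commit
    from llm_server.sock import init_socketio

    app = Flask(__name__)
    init_socketio(app)

    # Assumed mount point: a v2 blueprint would plausibly live under /api/v2,
    # but the actual prefix is defined elsewhere in the project, not in this diff.
    app.register_blueprint(bp, url_prefix='/api/v2')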