From 64d7a9edbb4e108579fae9da74c1e395b4b1919f Mon Sep 17 00:00:00 2001
From: Cyberes
Date: Thu, 5 Oct 2023 18:09:24 -0600
Subject: [PATCH] fix

---
 server.py | 14 ++++++--------
 1 file changed, 6 insertions(+), 8 deletions(-)

diff --git a/server.py b/server.py
index 5966bfb..37c254b 100644
--- a/server.py
+++ b/server.py
@@ -12,16 +12,22 @@ from pathlib import Path
 import simplejson as json
 from flask import Flask, jsonify, render_template, request
 
+import config
+from llm_server import opts
 from llm_server.cluster.backend import get_model_choices
 from llm_server.cluster.cluster_config import cluster_config
 from llm_server.config.config import mode_ui_names
 from llm_server.config.load import load_config
+from llm_server.custom_redis import flask_cache, redis
 from llm_server.database.conn import database
 from llm_server.database.create import create_db
+from llm_server.helpers import auto_set_base_client_api
+from llm_server.llm.vllm.info import vllm_info
 from llm_server.pre_fork import server_startup
 from llm_server.routes.openai import openai_bp
 from llm_server.routes.server_error import handle_server_error
 from llm_server.routes.v1 import bp
+from llm_server.routes.v1.generate_stats import generate_stats
 from llm_server.sock import init_socketio
 
 # TODO: return an `error: True`, error code, and error message rather than just a formatted message
@@ -60,14 +66,6 @@ except ModuleNotFoundError as e:
     print('Please see README.md for install instructions.')
     sys.exit(1)
 
-import config
-from llm_server import opts
-from llm_server.helpers import auto_set_base_client_api
-from llm_server.llm.vllm.info import vllm_info
-from llm_server.custom_redis import flask_cache
-from llm_server.llm import redis
-from llm_server.routes.v1.generate_stats import generate_stats
-
 app = Flask(__name__)
 app.register_blueprint(bp, url_prefix='/api/')
 app.register_blueprint(openai_bp, url_prefix='/api/openai/v1/')
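
Note: this patch consolidates imports that previously sat below a dependency check into the top-level import block, and it merges two redis-related imports (`from llm_server.llm import redis` plus `from llm_server.custom_redis import flask_cache`) into a single `from llm_server.custom_redis import flask_cache, redis`. The second hunk's context (`except ModuleNotFoundError as e:`) implies a guard of roughly the following shape; the actual guarded imports are not visible in this patch, so the `vllm` name below is an illustrative stand-in, not the repository's code:

```python
# Minimal sketch of the dependency-check pattern implied by the second
# hunk's context. Only the README message and sys.exit(1) are taken from
# the patch; the guarded import is a hypothetical example.
import sys

try:
    import vllm  # hypothetical: any third-party dependency being verified
except ModuleNotFoundError as e:
    print(f'Missing dependency: {e.name}')
    print('Please see README.md for install instructions.')
    sys.exit(1)
```

With the patch applied, the application's own modules (`config`, `llm_server.opts`, `generate_stats`, and so on) are imported in the top-level block rather than after this guard, which matches PEP 8's convention of keeping all imports at the top of the module.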