Merge cluster to master #3

Merged
cyberes merged 163 commits from cluster into master 2023-10-27 19:19:22 -06:00
1 changed file with 6 additions and 8 deletions
Showing only changes of commit 64d7a9edbb


@@ -12,16 +12,22 @@ from pathlib import Path
 import simplejson as json
 from flask import Flask, jsonify, render_template, request
+import config
+from llm_server import opts
 from llm_server.cluster.backend import get_model_choices
 from llm_server.cluster.cluster_config import cluster_config
 from llm_server.config.config import mode_ui_names
 from llm_server.config.load import load_config
+from llm_server.custom_redis import flask_cache, redis
 from llm_server.database.conn import database
 from llm_server.database.create import create_db
+from llm_server.helpers import auto_set_base_client_api
+from llm_server.llm.vllm.info import vllm_info
 from llm_server.pre_fork import server_startup
 from llm_server.routes.openai import openai_bp
 from llm_server.routes.server_error import handle_server_error
 from llm_server.routes.v1 import bp
+from llm_server.routes.v1.generate_stats import generate_stats
 from llm_server.sock import init_socketio
 # TODO: return an `error: True`, error code, and error message rather than just a formatted message
@@ -60,14 +66,6 @@ except ModuleNotFoundError as e:
     print('Please see README.md for install instructions.')
     sys.exit(1)
-import config
-from llm_server import opts
-from llm_server.helpers import auto_set_base_client_api
-from llm_server.llm.vllm.info import vllm_info
-from llm_server.custom_redis import flask_cache
-from llm_server.llm import redis
-from llm_server.routes.v1.generate_stats import generate_stats
 app = Flask(__name__)
 app.register_blueprint(bp, url_prefix='/api/')
 app.register_blueprint(openai_bp, url_prefix='/api/openai/v1/')
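
Taken together, the two hunks consolidate the imports that previously sat after the dependency-check try/except into the import block at the top of the module, and switch redis to come from llm_server.custom_redis instead of llm_server.llm. A rough sketch of how the affected portions of the file read after this commit, based only on the lines visible in the hunks above (everything between them, including the try/except that prints the install instructions, is elided):

# Top of the file after this commit (sketch; only lines shown in the diff are grounded)
from pathlib import Path

import simplejson as json
from flask import Flask, jsonify, render_template, request

import config
from llm_server import opts
from llm_server.cluster.backend import get_model_choices
from llm_server.cluster.cluster_config import cluster_config
from llm_server.config.config import mode_ui_names
from llm_server.config.load import load_config
from llm_server.custom_redis import flask_cache, redis  # redis now imported here, not from llm_server.llm
from llm_server.database.conn import database
from llm_server.database.create import create_db
from llm_server.helpers import auto_set_base_client_api
from llm_server.llm.vllm.info import vllm_info
from llm_server.pre_fork import server_startup
from llm_server.routes.openai import openai_bp
from llm_server.routes.server_error import handle_server_error
from llm_server.routes.v1 import bp
from llm_server.routes.v1.generate_stats import generate_stats
from llm_server.sock import init_socketio

# ... dependency-check try/except and other lines between the hunks omitted ...

app = Flask(__name__)
app.register_blueprint(bp, url_prefix='/api/')
app.register_blueprint(openai_bp, url_prefix='/api/openai/v1/')

With the prefixes shown, routes defined on bp are served under /api/ while the OpenAI-compatible routes on openai_bp are served under /api/openai/v1/.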