local-llm-server/llm_server/routes/openai/__init__.py

from flask import Blueprint, request
from ..helpers.client import format_sillytavern_err
from ..helpers.http import require_api_key
from ..openai_request_handler import build_openai_response
from ..server_error import handle_server_error
from ... import opts

openai_bp = Blueprint('openai/v1/', __name__)


@openai_bp.before_request
def before_request():
    # Capture the Host header from the first request so other parts of the
    # server can build absolute URLs.
    if not opts.http_host:
        opts.http_host = request.headers.get("Host")
    if not opts.enable_openi_compatible_backend:
        return build_openai_response('', format_sillytavern_err('The OpenAI-compatible backend is disabled.', 'Access Denied')), 401
    if not opts.base_client_api:
        opts.base_client_api = f'{request.headers.get("Host")}/{opts.frontend_api_client.strip("/")}'
    # Every endpoint except the stats endpoint requires a valid API key.
    if request.endpoint != 'v1.get_stats':
        response = require_api_key()
        if response is not None:
            return response


@openai_bp.errorhandler(500)
def handle_error(e):
    return handle_server_error(e)


# Imported at the bottom so the route modules can register themselves on
# openai_bp without creating a circular import.
from .models import openai_list_models
from .chat_completions import openai_chat_completions
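
# A minimal usage sketch showing how a blueprint like this one is typically
# mounted on a Flask app. This is an assumption for illustration only: the
# real app setup lives elsewhere in this repo and may use a different URL
# prefix; Flask's register_blueprint() is the only API relied on here.
#
#     from flask import Flask
#     from llm_server.routes.openai import openai_bp
#
#     app = Flask(__name__)
#     app.register_blueprint(openai_bp, url_prefix='/api/openai/v1')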