This repository has been archived on 2024-10-27. You can view files and clone it, but cannot push or open issues or pull requests.
local-llm-server/llm_server/routes/openai/__init__.py

38 lines
998 B
Python

from flask import Blueprint
from ..request_handler import before_request
from ...config.global_config import GlobalConfig
from ...llm.openai.oai_to_vllm import return_oai_internal_server_error
from ...logging import create_logger
# Module-level logger for the OpenAI-compatible route handlers.
_logger = create_logger('OpenAI')
# Two separate blueprints so routes can be registered under different URL
# prefixes (presumably /v1/-style endpoints vs. model-scoped endpoints — the
# actual prefixes are assigned where these blueprints are registered on the app).
openai_bp = Blueprint('openai/v1/', __name__)
openai_model_bp = Blueprint('openai/', __name__)
@openai_bp.before_request
@openai_model_bp.before_request
def before_oai_request():
    """Run before every request on both OpenAI blueprints.

    Rejects the request outright when the OpenAI-compatible backend is
    disabled in the global config; otherwise defers to the shared
    ``before_request`` handler.

    NOTE(review): a disabled feature answering 401 (Unauthorized) is unusual —
    403 or 404 may be more appropriate; confirm clients don't rely on 401.
    (``enable_openi_compatible_backend`` spelling matches the config attribute
    defined elsewhere — do not "fix" it here alone.)
    """
    if GlobalConfig.get().enable_openi_compatible_backend:
        return before_request()
    return 'The OpenAI-compatible backend is disabled.', 401
@openai_bp.errorhandler(500)
@openai_model_bp.errorhandler(500)
def handle_error(e):
    """Convert unhandled 500 errors on the OpenAI blueprints into an
    OpenAI-style internal-server-error response.

    Error codes observed in responses:
        "auth_subrequest_error"
    """
    oai_error_response = return_oai_internal_server_error(e)
    return oai_error_response
from .models import openai_list_models
from .chat_completions import openai_chat_completions
from .info import get_openai_info
from .simulated import *
from .completions import openai_completions