local-llm-server/llm_server/routes/openai/__init__.py

from flask import Blueprint

from ..request_handler import before_request
from ..server_error import handle_server_error
from ... import opts

openai_bp = Blueprint('openai/v1/', __name__)


@openai_bp.before_request
def before_oai_request():
    # Gate every OpenAI-compatible request behind the config flag.
    if not opts.enable_openi_compatible_backend:
        return 'The OpenAI-compatible backend is disabled.', 401
    return before_request()


@openai_bp.errorhandler(500)
def handle_error(e):
    return handle_server_error(e)


# Route modules are imported after the blueprint is created so that their
# @openai_bp.route decorators attach to it (and to avoid circular imports).
from .models import openai_list_models
from .chat_completions import openai_chat_completions
from .info import get_openai_info
from .simulated import *
from .completions import openai_completions
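
For context, this blueprint only takes effect once it is registered on the Flask app. Below is a minimal sketch of what that registration might look like under a typical application-factory layout; the create_app() function and the '/api/openai' url_prefix are illustrative assumptions, not code from this repository.

# Hypothetical registration sketch -- create_app() and url_prefix are assumptions.
from flask import Flask

from llm_server.routes.openai import openai_bp


def create_app() -> Flask:
    app = Flask(__name__)
    # Mount the OpenAI-compatible blueprint; routes defined in models.py,
    # chat_completions.py, completions.py, etc. become reachable under this prefix.
    app.register_blueprint(openai_bp, url_prefix='/api/openai')
    return app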