local-llm-server/llm_server/routes/openai/chat_completions.py
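
"""OpenAI-compatible /chat/completions route for the local-llm-server Flask app."""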

from flask import jsonify, request
from . import openai_bp
from ..helpers.http import validate_json
from ..openai_request_handler import OpenAIRequestHandler


class FakeFlaskRequest:
    def __init__(self, *args, **kwargs):
        self.data = kwargs.get('data')
        self.headers = kwargs.get('headers')
        self.json = kwargs.get('json')
        self.remote_addr = kwargs.get('remote_addr')
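
# NOTE (assumption; not referenced in this module): FakeFlaskRequest mirrors the subset of
# flask.Request attributes (data, headers, json, remote_addr) that the handler reads, which
# suggests it exists so callers elsewhere can drive OpenAIRequestHandler without a live
# Flask request context, e.g.:
#
#   fake = FakeFlaskRequest(json={'messages': [...]}, headers={}, remote_addr='127.0.0.1')
#   OpenAIRequestHandler(fake).handle_request()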


@openai_bp.route('/chat/completions', methods=['POST'])
def openai_chat_completions():
    # TODO: make this work with oobabooga
    # Reject both malformed JSON and request bodies that lack a non-empty 'messages' field.
    request_valid_json, request_json_body = validate_json(request)
    if not request_valid_json or not request_json_body.get('messages'):
        return jsonify({'code': 400, 'msg': 'invalid JSON'}), 400
    handler = OpenAIRequestHandler(request)
    return handler.handle_request()
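
# A minimal request this endpoint accepts (standard OpenAI chat-completions shape; the URL
# prefix is an assumption and depends on where openai_bp is registered on the app):
#
#   POST /v1/chat/completions
#   {
#       "model": "some-model",
#       "messages": [{"role": "user", "content": "Hello"}]
#   }
#
# Only the presence of 'messages' is checked here; everything else in the body is passed
# through to OpenAIRequestHandler.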