local-llm-server/llm_server/routes/openai/chat_completions.py

import traceback
from flask import jsonify, request
from . import openai_bp
from ..helpers.client import format_sillytavern_err
from ..helpers.http import validate_json
from ..openai_request_handler import OpenAIRequestHandler, build_openai_response
@openai_bp.route('/chat/completions', methods=['POST'])
def openai_chat_completions():
    # Reject requests whose body is not valid JSON or lacks a 'messages' key.
    request_valid_json, request_json_body = validate_json(request)
    if not request_valid_json or not request_json_body.get('messages'):
        return jsonify({'code': 400, 'msg': 'invalid JSON'}), 400
    else:
        try:
            return OpenAIRequestHandler(request).handle_request()
        except Exception as e:
            # Log the exception and the raw request body, then return a
            # SillyTavern-formatted error with HTTP 200 so the client displays it.
            print(f'EXCEPTION on {request.url}!!!', f'{e.__class__.__name__}: {e}')
            print(traceback.format_exc())
            print(request.data)
            return build_openai_response('', format_sillytavern_err('Server encountered exception.', 'error')), 200
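
A minimal usage sketch for the route above, assuming the server listens on localhost:5000 and that openai_bp is registered under an /api/openai prefix (both are assumptions; this file does not show where the blueprint is mounted). The request body must be valid JSON containing a 'messages' key, or the route returns 400.

# Hypothetical client call; host, port, and URL prefix are assumptions,
# not confirmed by this file.
import requests

resp = requests.post(
    'http://localhost:5000/api/openai/chat/completions',  # assumed mount point for openai_bp
    json={'messages': [{'role': 'user', 'content': 'Hello!'}]},
)
print(resp.status_code, resp.json())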