This commit is contained in:
Cyberes 2023-10-04 10:26:39 -06:00
parent b76e77a66a
commit 4634e36eeb
1 changed file with 2 additions and 2 deletions

View File

@@ -57,7 +57,7 @@ class OpenAIRequestHandler(RequestHandler):
             self.prompt = transform_messages_to_prompt(self.request.json['messages'])
         except Exception as e:
             print(f'OpenAI moderation endpoint failed:', f'{e.__class__.__name__}: {e}')
-            print(traceback.format_exc())
+            traceback.print_exc()
         # TODO: support Ooba
         print('converting to vllm')
@@ -73,7 +73,7 @@ class OpenAIRequestHandler(RequestHandler):
             print('sent success response')
             return self.build_openai_response(self.prompt, backend_response.json['results'][0]['text'], model=model), backend_response_status_code
         else:
-            print(backend_response)
+            print(backend_response_status_code, backend_response.data)
         return backend_response, backend_response_status_code

     def handle_ratelimited(self, do_log: bool = True):