This commit is contained in:
Cyberes 2023-09-14 15:14:59 -06:00
parent a89295193f
commit 1cf4c95ba2
1 changed file with 5 additions and 6 deletions

@@ -33,12 +33,6 @@ class OpenAIRequestHandler(RequestHandler):
         self.prompt = self.transform_messages_to_prompt()
-        # Reconstruct the request JSON with the validated parameters and prompt.
-        self.parameters['stop'].extend(['\n### INSTRUCTION', '\n### USER', '\n### ASSISTANT', '\n### RESPONSE'])
-        llm_request = {**self.parameters, 'prompt': self.prompt}
-        _, (backend_response, backend_response_status_code) = self.generate_response(llm_request)
         if opts.openai_api_key:
             try:
                 flagged = check_moderation_endpoint(self.request.json['messages'][-1]['content'])
@@ -51,6 +45,11 @@ class OpenAIRequestHandler(RequestHandler):
                 print(f'OpenAI moderation endpoint failed:', f'{e.__class__.__name__}: {e}')
                 print(traceback.format_exc())
+        # Reconstruct the request JSON with the validated parameters and prompt.
+        self.parameters['stop'].extend(['\n### INSTRUCTION', '\n### USER', '\n### ASSISTANT', '\n### RESPONSE'])
+        llm_request = {**self.parameters, 'prompt': self.prompt}
+        _, (backend_response, backend_response_status_code) = self.generate_response(llm_request)
         return build_openai_response(self.prompt, backend_response.json['results'][0]['text']), backend_response_status_code

     def handle_ratelimited(self):
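
The change is purely a reordering: the backend request is now assembled and dispatched after the OpenAI moderation check instead of before it, presumably so the moderation outcome can affect the request before the backend call is made, and so a request that fails at moderation no longer sits behind an already-issued generation call. Below is a minimal runnable sketch of the reordered flow; every helper in it (check_moderation_endpoint, transform_messages_to_prompt, generate_response) is a hypothetical stand-in for the project's real implementation, not the actual code.

import traceback


def check_moderation_endpoint(text):
    # Stand-in: the real function calls OpenAI's moderation API and
    # returns the categories the text was flagged for.
    return []


class OpenAIRequestHandlerSketch:
    def __init__(self, messages, parameters):
        self.messages = messages        # chat messages from the client
        self.parameters = parameters    # validated sampling parameters
        self.prompt = None

    def transform_messages_to_prompt(self):
        # Stand-in: the real method flattens the chat messages into a
        # single instruction-style prompt string.
        return '\n'.join(f"### {m['role'].upper()}\n{m['content']}" for m in self.messages)

    def generate_response(self, llm_request):
        # Stand-in: the real method forwards the request to the LLM backend.
        return None, ({'results': [{'text': 'backend output'}]}, 200)

    def handle_request(self, openai_api_key=None):
        self.prompt = self.transform_messages_to_prompt()

        # Moderation runs first now, so its outcome is known before any
        # backend generation call is made.
        if openai_api_key:
            try:
                flagged = check_moderation_endpoint(self.messages[-1]['content'])
                # ... the real handler reacts to `flagged` here ...
            except Exception as e:
                print('OpenAI moderation endpoint failed:', f'{e.__class__.__name__}: {e}')
                print(traceback.format_exc())

        # Only after moderation is the backend request assembled and sent.
        self.parameters['stop'].extend(['\n### INSTRUCTION', '\n### USER', '\n### ASSISTANT', '\n### RESPONSE'])
        llm_request = {**self.parameters, 'prompt': self.prompt}
        _, (backend_response, status_code) = self.generate_response(llm_request)
        return backend_response['results'][0]['text'], status_code


handler = OpenAIRequestHandlerSketch(
    [{'role': 'user', 'content': 'hello'}],
    {'temperature': 0.7, 'stop': []},
)
print(handler.handle_request())  # ('backend output', 200)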