Fix issue with null data on OpenAI

This commit is contained in:
Cyberes 2023-10-08 19:36:12 -06:00
parent 3e5feb9c97
commit 467e1893ea
3 changed files with 10 additions and 1 deletion

View File

@ -87,8 +87,9 @@ def transform_messages_to_prompt(oai_messages):
try:
prompt = f'### INSTRUCTION: {opts.openai_system_prompt}'
for msg in oai_messages:
if not msg.get('content') or not msg.get('role'):
if 'content' not in msg.keys() or 'role' not in msg.keys():
return False
msg['content'] = str(msg['content']) # Prevent any weird issues.
if msg['role'] == 'system':
prompt += f'### INSTRUCTION: {msg["content"]}\n\n'
elif msg['role'] == 'user':

View File

@ -57,6 +57,10 @@ def openai_chat_completions():
else:
handler.prompt = transform_messages_to_prompt(handler.request.json['messages'])
if not handler.prompt:
# Prevent issues on the backend.
return 'Invalid prompt', 400
event_id = None
response_status_code = 0
start_time = time.time()

View File

@ -61,6 +61,10 @@ class OpenAIRequestHandler(RequestHandler):
# TODO: support Ooba
self.parameters = oai_to_vllm(self.parameters, stop_hashes=('instruct' not in self.request_json_body['model'].lower()), mode=self.cluster_backend_info['mode'])
if not self.prompt:
# TODO: format this as an openai error message
return 'Invalid prompt', 400
llm_request = {**self.parameters, 'prompt': self.prompt}
(success, _, _, _), (backend_response, backend_response_status_code) = self.generate_response(llm_request)