fix issue with null data on openai
parent 3e5feb9c97
commit 467e1893ea

@@ -87,8 +87,9 @@ def transform_messages_to_prompt(oai_messages):
     try:
         prompt = f'### INSTRUCTION: {opts.openai_system_prompt}'
         for msg in oai_messages:
-            if 'content' not in msg.keys() or 'role' not in msg.keys():
+            if not msg.get('content') or not msg.get('role'):
                 return False
+            msg['content'] = str(msg['content']) # Prevent any weird issues.
             if msg['role'] == 'system':
                 prompt += f'### INSTRUCTION: {msg["content"]}\n\n'
             elif msg['role'] == 'user':

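For context, a minimal standalone sketch (not from the repo) of what the changed check does differently: the old key-presence test let a null content value through, while the new .get() test rejects it.

# Hypothetical OpenAI-style chat message whose content is null.
msg = {'role': 'user', 'content': None}

# Old check: only verifies the keys exist, so null content slips through.
old_rejects = 'content' not in msg.keys() or 'role' not in msg.keys()
assert old_rejects is False

# New check: .get() is falsy for missing keys and for null/empty values,
# so the message is rejected and transform_messages_to_prompt() returns False.
new_rejects = not msg.get('content') or not msg.get('role')
assert new_rejects is True
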
@@ -57,6 +57,10 @@ def openai_chat_completions():
     else:
         handler.prompt = transform_messages_to_prompt(handler.request.json['messages'])
 
+    if not handler.prompt:
+        # Prevent issues on the backend.
+        return 'Invalid prompt', 400
+
     event_id = None
     response_status_code = 0
     start_time = time.time()

@@ -61,6 +61,10 @@ class OpenAIRequestHandler(RequestHandler):
         # TODO: support Ooba
         self.parameters = oai_to_vllm(self.parameters, stop_hashes=('instruct' not in self.request_json_body['model'].lower()), mode=self.cluster_backend_info['mode'])
 
+        if not self.prompt:
+            # TODO: format this as an openai error message
+            return 'Invalid prompt', 400
+
         llm_request = {**self.parameters, 'prompt': self.prompt}
         (success, _, _, _), (backend_response, backend_response_status_code) = self.generate_response(llm_request)
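
Both call sites gain the same guard. Below is a simplified, hypothetical sketch of the resulting flow; only the falsy-prompt check and the Flask-style 'Invalid prompt', 400 return are taken from the diff, and the surrounding wiring is condensed for illustration.

def transform_messages_to_prompt(oai_messages):
    # Stub standing in for the real function: returns False when any
    # message is missing a role or has null/empty content.
    for m in oai_messages:
        if not m.get('content') or not m.get('role'):
            return False
    return '\n'.join(str(m['content']) for m in oai_messages)

def openai_chat_completions(request_json):
    # Hypothetical, condensed call site mirroring the two hunks above:
    # a falsy prompt becomes a 400 before anything is sent to the backend.
    prompt = transform_messages_to_prompt(request_json['messages'])
    if not prompt:
        return 'Invalid prompt', 400
    return {'prompt': prompt}, 200

print(openai_chat_completions({'messages': [{'role': 'user', 'content': None}]}))
# -> ('Invalid prompt', 400)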