test
parent 70cf6843e5
commit c3c053e071
@@ -157,8 +157,8 @@ def openai_chat_completions(model_name=None):
             traceback.print_exc()
             yield 'data: [DONE]\n\n'
         finally:
-            if event:
-                redis.lpush(f'notifications:{event.event_id}', 'canceled')
+            # if event:
+            #     redis.lpush(f'notifications:{event.event_id}', 'canceled')
             stream_redis.delete(stream_name)

     return Response(generate(), mimetype='text/event-stream')
@@ -185,8 +185,8 @@ def do_stream(ws, model_name):
                 backend_url=handler.backend_url
             )
     finally:
-        if event_id:
-            redis.lpush(f'notifications:{event_id}', 'canceled')
+        # if event_id:
+        #     redis.lpush(f'notifications:{event_id}', 'canceled')
         try:
             # Must close the connection or greenlets will complain.
             ws.close()
@@ -33,11 +33,11 @@ def inference_do_stream(stream_name: str, msg_to_backend: dict, backend_url: str
             # If there is no more data, break the loop
             if not chunk:
                 break
-            message = redis.lpop(f'notifications:{event_id}')
-            if message and message.decode('utf-8') == 'canceled':
-                print('Client canceled generation')
-                response.close()
-                return
+            # message = redis.lpop(f'notifications:{event_id}')
+            # if message and message.decode('utf-8') == 'canceled':
+            #     print('Client canceled generation')
+            #     response.close()
+            #     return

             partial_response += chunk
             if partial_response.endswith(b'\x00'):
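All three hunks touch the same Redis-based cancellation handshake: the web routes push a 'canceled' marker onto the notifications:{event_id} list when a client's stream is torn down, and the backend streaming loop pops that list between chunks and aborts when it sees the marker. Below is a minimal sketch of that handshake, not the repository's code: the function names and the chunk_source iterable are illustrative stand-ins, and only the lpush/lpop list usage mirrors the diff.

# Illustrative sketch only: notify_canceled, stream_until_canceled and
# chunk_source are made up; the notifications:{event_id} list protocol
# follows the diff above. Assumes redis-py and a reachable Redis server.
import redis as redis_lib

redis = redis_lib.Redis()


def notify_canceled(event_id: str) -> None:
    # Web-route side: called from a finally block when the SSE/websocket
    # stream is torn down, so the inference worker knows to stop.
    redis.lpush(f'notifications:{event_id}', 'canceled')


def stream_until_canceled(event_id: str, chunk_source) -> bytes:
    # Worker side: consume chunks, polling the notification list between
    # chunks and bailing out as soon as a 'canceled' marker appears.
    partial_response = b''
    for chunk in chunk_source:
        if not chunk:
            break
        message = redis.lpop(f'notifications:{event_id}')
        if message and message.decode('utf-8') == 'canceled':
            print('Client canceled generation')
            break
        partial_response += chunk
    return partial_response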