Cyberes 2023-10-16 23:29:17 -06:00
parent 70cf6843e5
commit c3c053e071
3 changed files with 9 additions and 9 deletions

@@ -157,8 +157,8 @@ def openai_chat_completions(model_name=None):
             traceback.print_exc()
             yield 'data: [DONE]\n\n'
         finally:
-            if event:
-                redis.lpush(f'notifications:{event.event_id}', 'canceled')
+            # if event:
+            #     redis.lpush(f'notifications:{event.event_id}', 'canceled')
             stream_redis.delete(stream_name)
     return Response(generate(), mimetype='text/event-stream')

@@ -185,8 +185,8 @@ def do_stream(ws, model_name):
                 backend_url=handler.backend_url
             )
         finally:
-            if event_id:
-                redis.lpush(f'notifications:{event_id}', 'canceled')
+            # if event_id:
+            #     redis.lpush(f'notifications:{event_id}', 'canceled')
             try:
                 # Must close the connection or greenlets will complain.
                 ws.close()

@@ -33,11 +33,11 @@ def inference_do_stream(stream_name: str, msg_to_backend: dict, backend_url: str
             # If there is no more data, break the loop
             if not chunk:
                 break
-            message = redis.lpop(f'notifications:{event_id}')
-            if message and message.decode('utf-8') == 'canceled':
-                print('Client canceled generation')
-                response.close()
-                return
+            # message = redis.lpop(f'notifications:{event_id}')
+            # if message and message.decode('utf-8') == 'canceled':
+            #     print('Client canceled generation')
+            #     response.close()
+            #     return
             partial_response += chunk
             if partial_response.endswith(b'\x00'):
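Taken together, the three hunks disable the Redis list based cancellation signal on both ends: the HTTP and websocket streaming handlers stop pushing a 'canceled' marker to notifications:{event_id} in their finally: blocks, and inference_do_stream stops popping that list between chunks to abort the backend response early. Below is a minimal sketch of the pattern being commented out; the Redis connection setup and the stand-in chunk loop are assumptions for illustration, and only the lpush/lpop key and payload come from the diff itself.

# Sketch of the notifications:{event_id} cancellation signal this commit
# turns off. Connection details and the fake chunk source are assumptions;
# the real code streams chunks from the inference backend.
import redis as redis_lib

redis = redis_lib.Redis(host='localhost', port=6379)


def signal_cancel(event_id: str) -> None:
    # Producer side (the finally: blocks in the web handlers): tell the
    # worker that the client went away.
    redis.lpush(f'notifications:{event_id}', 'canceled')


def stream_chunks(event_id: str, chunks):
    # Consumer side (inference_do_stream): check for a cancel marker
    # between chunks and stop generating early if one arrived.
    for chunk in chunks:
        message = redis.lpop(f'notifications:{event_id}')
        if message and message.decode('utf-8') == 'canceled':
            print('Client canceled generation')
            return
        yield chunk

With the commit applied, these code paths no longer read or write the notifications:* lists; in the first hunk only the existing stream_redis.delete(stream_name) cleanup still runs in the finally: block.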