Cyberes 2023-10-05 18:59:19 -06:00
parent 96dd62478f
commit 50992116f5
2 changed files with 1 addition and 8 deletions


@@ -3,13 +3,7 @@ from llm_server.llm import oobabooga, vllm
def get_token_count(prompt: str, backend_url: str):
    assert isinstance(backend_url, str)
    if not prompt:
        # The tokenizers have issues when the prompt is None.
        return 0
    assert isinstance(prompt, str)
    backend_url = cluster_config.validate_backend(backend_url)
    backend_mode = cluster_config.get_backend(backend_url)['mode']
    if backend_mode == 'vllm':
        return vllm.tokenize(prompt, backend_url)
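
The guard at the top of get_token_count exists because the tokenizers misbehave when handed None, so an empty prompt short-circuits to a count of 0 before the per-backend dispatch. The project's actual vllm.tokenize helper is not part of this diff; below is a minimal sketch of what such a helper could look like, assuming (hypothetically) that the vLLM backend exposes an HTTP /tokenize endpoint returning a length field:

import requests

def tokenize(prompt: str, backend_url: str, timeout: int = 10) -> int:
    """Count the tokens in `prompt` by asking the vLLM backend itself."""
    try:
        # Hypothetical endpoint and payload shape, for illustration only.
        r = requests.post(f'{backend_url}/tokenize', json={'input': prompt}, timeout=timeout)
        r.raise_for_status()
        return int(r.json()['length'])
    except requests.RequestException:
        # Crude fallback (~4 characters per token) so callers always get an int.
        return len(prompt) // 4

Counting on the backend keeps the result consistent with the model's own tokenizer instead of approximating it client-side.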


@@ -150,7 +150,6 @@ def openai_chat_completions():
            # The worker incremented it, we'll decrement it.
            decrement_ip_count(handler.client_ip, 'processing_ips')
            decr_active_workers(handler.selected_model, handler.backend_url)
            print(len(generated_text))
            return Response(generate(), mimetype='text/event-stream')
        except Exception:
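
The deleted print(len(generated_text)) was leftover debug output in the streaming path. The surrounding lines show the bookkeeping contract: a worker increments the per-IP and per-backend counters when it picks the request up, and the route decrements them around the streamed response. Below is a minimal sketch of that cleanup pattern; decrement_ip_count and decr_active_workers mirror the helpers in the hunk above (stubbed here so the example is self-contained), and tying the decrements to a finally inside the generator is one design choice, not necessarily the project's exact placement:

from flask import Response

# Stubs standing in for the project's real counter helpers.
def decrement_ip_count(client_ip: str, key: str) -> None:
    pass

def decr_active_workers(model: str, backend_url: str) -> None:
    pass

def stream_response(handler, backend_stream):
    def generate():
        try:
            for chunk in backend_stream:
                yield f'data: {chunk}\n\n'
            yield 'data: [DONE]\n\n'
        finally:
            # The worker incremented these; decrement exactly once when the
            # stream finishes or the client disconnects mid-stream.
            decrement_ip_count(handler.client_ip, 'processing_ips')
            decr_active_workers(handler.selected_model, handler.backend_url)

    return Response(generate(), mimetype='text/event-stream')

Putting the decrements in a finally block guarantees the counters are released even when the client drops the connection, which a plain return path would miss.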