From 25e3255c9bf6a1ea6a736a51994d066ae7011aea Mon Sep 17 00:00:00 2001
From: Cyberes
Date: Thu, 24 Aug 2023 23:13:07 -0600
Subject: [PATCH] fix issue with tokenizer

---
 llm_server/routes/v1/generate.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llm_server/routes/v1/generate.py b/llm_server/routes/v1/generate.py
index 2abbd4d..923c3e7 100644
--- a/llm_server/routes/v1/generate.py
+++ b/llm_server/routes/v1/generate.py
@@ -96,7 +96,7 @@ def generate():
     else:
         raise Exception
 
-    log_prompt(client_ip, token, request_json_body['prompt'], backend_response if not backend_err else None, elapsed_time, parameters, dict(request.headers), response.status_code)
+    log_prompt(client_ip, token, request_json_body['prompt'], backend_response if not backend_err else '', elapsed_time, parameters, dict(request.headers), response.status_code)
     return jsonify({
         **response_json_body
     }), 200
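
Note (reviewer sketch, not part of the patch): the likely failure mode behind "fix issue with tokenizer" is that log_prompt tokenizes the response text to record a token count, and passing None instead of a string breaks that step; an empty string still tokenizes cleanly to zero tokens. The snippet below is a minimal, hypothetical illustration of that difference. count_tokens and the whitespace split are stand-ins invented for this note; the project's real tokenizer and the body of log_prompt are not shown in this diff.

    # Hypothetical stand-in for the token counting assumed to happen inside log_prompt();
    # the project's actual tokenizer is not part of this patch.
    def count_tokens(text: str) -> int:
        # None has no string methods, so tokenizing it raises instead of logging.
        return len(text.split())

    print(count_tokens(''))        # 0 -- an empty string logs cleanly as "no response"
    try:
        print(count_tokens(None))  # raises AttributeError on NoneType
    except AttributeError as exc:
        print(f'tokenizer failure: {exc}')

Under that assumption, substituting '' keeps the error path loggable without special-casing None downstream.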