From b3f0c4b28f2893ce30f405586df77ee6bb6ba3e5 Mon Sep 17 00:00:00 2001
From: Cyberes
Date: Sun, 15 Oct 2023 15:14:32 -0600
Subject: [PATCH] remove debug print

---
 llm_server/routes/openai_request_handler.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/llm_server/routes/openai_request_handler.py b/llm_server/routes/openai_request_handler.py
index 246c3b6..549cc93 100644
--- a/llm_server/routes/openai_request_handler.py
+++ b/llm_server/routes/openai_request_handler.py
@@ -85,7 +85,6 @@ class OpenAIRequestHandler(RequestHandler):
         return backend_response, backend_response_status_code
 
     def handle_ratelimited(self, do_log: bool = True):
-        print('OAI ratelimited:', self.client_ip)
         model_choices, default_model = get_model_choices()
         default_model_info = model_choices[default_model]
         w = int(default_model_info['estimated_wait']) if default_model_info['estimated_wait'] > 0 else 2