diff --git a/llm_server/routes/openai/chat_completions.py b/llm_server/routes/openai/chat_completions.py
index 940b043..0be4c5d 100644
--- a/llm_server/routes/openai/chat_completions.py
+++ b/llm_server/routes/openai/chat_completions.py
@@ -65,7 +65,6 @@ def openai_chat_completions():
             try:
                 json_obj = json.loads(json_str.decode())
                 new = json_obj['text'][0].split(handler.prompt + generated_text)[1]
-                print(new)
                 generated_text = generated_text + new
             except IndexError:
                 # ????
@@ -111,5 +110,4 @@ def openai_chat_completions():
     except Exception as e:
         print(f'EXCEPTION on {request.url}!!!', f'{e.__class__.__name__}: {e}')
         traceback.print_exc()
-        print(request.data)
         return build_openai_response('', format_sillytavern_err(f'Server encountered exception.', 'error')), 500
diff --git a/llm_server/routes/v1/generate.py b/llm_server/routes/v1/generate.py
index b2d52a1..aa0309e 100644
--- a/llm_server/routes/v1/generate.py
+++ b/llm_server/routes/v1/generate.py
@@ -19,5 +19,4 @@ def generate():
     except Exception as e:
         print(f'EXCEPTION on {request.url}!!!', f'{e.__class__.__name__}: {e}')
         print(traceback.format_exc())
-        print(request.data)
         return format_sillytavern_err(f'Server encountered exception.', 'error'), 500