fix two exceptions

Cyberes 2023-09-23 20:55:49 -06:00
parent 0530fa9870
commit 41e622d19c
3 changed files with 15 additions and 4 deletions


@@ -1,5 +1,6 @@
import json
import time
import traceback
import llm_server
from llm_server import opts
@@ -113,9 +114,13 @@ def average_column_for_model(table_name, column_name, model_name):
def weighted_average_column_for_model(table_name, column_name, model_name, backend_name, backend_url, exclude_zeros: bool = False):
    conn = db_pool.connection()
    cursor = conn.cursor()
    try:
        try:
            cursor.execute(f"SELECT {column_name}, id FROM {table_name} WHERE model = %s AND backend_mode = %s AND backend_url = %s ORDER BY id DESC", (model_name, backend_name, backend_url,))
            results = cursor.fetchall()
        except Exception:
            traceback.print_exc()
            return -1
        total_weight = 0
        weighted_sum = 0
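The hunk cuts off where the averaging begins, but the names suggest a recency-weighted mean over the fetched rows. A minimal sketch of that pattern, assuming newer rows (the query orders by id descending) receive larger weights and that exclude_zeros skips zero-valued rows; the actual weighting in the repository may differ:

def recency_weighted_average(results, exclude_zeros: bool = False):
    """Sketch only: weight rows by recency, newest rows counting the most (assumption)."""
    total_weight = 0
    weighted_sum = 0
    # Rows come back newest-first (ORDER BY id DESC), so iterate oldest-first
    # and let the weight grow with recency.
    for weight, (value, _row_id) in enumerate(reversed(results), start=1):
        if exclude_zeros and not value:
            continue  # old error rows stored 0 in this column; skip them (assumption)
        weighted_sum += value * weight
        total_weight += weight
    return weighted_sum / total_weight if total_weight else 0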


@@ -70,6 +70,10 @@ def stream(ws):
    if partial_response.endswith(b'\x00'):
        json_str = partial_response[:-1].decode()  # Remove the null character and decode the byte string to a string
        json_obj = json.loads(json_str)
        if not len(json_obj['text'][0].split(input_prompt + generated_text)):
            # ????
            continue
        new = json_obj['text'][0].split(input_prompt + generated_text)[1]
        ws.send(json.dumps({
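The added guard checks the length of the split result, but str.split with a non-empty separator always returns at least one element, so that branch never fires; the IndexError it presumably targets occurs when the split yields fewer than two parts. A hedged sketch of a stricter check, factored into a hypothetical helper (extract_new_text is not a name from the repository):

def extract_new_text(full_text: str, prompt_so_far: str):
    """Return whatever the backend generated beyond prompt_so_far, or None if nothing new yet."""
    parts = full_text.split(prompt_so_far)
    if len(parts) < 2:
        # The accumulated prompt + generated text was not found as a separator,
        # so there is no new token to forward; the caller can skip this chunk.
        return None
    return parts[1]

Inside the loop this would replace the split-and-index pair with something like new = extract_new_text(json_obj['text'][0], input_prompt + generated_text), followed by continue when it returns None.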


@@ -47,12 +47,14 @@ class MainBackgroundThread(Thread):
        # exclude_zeros=True filters out rows where an error message was returned. Previously, if there was an error, 0
        # was entered into the column. The new code enters null instead but we need to be backwards compatible for now.
        average_generation_elapsed_sec = weighted_average_column_for_model('prompts', 'generation_time', opts.running_model, opts.mode, opts.backend_url, exclude_zeros=True) or 0
        if average_generation_elapsed_sec > -1:
            redis.set('average_generation_elapsed_sec', average_generation_elapsed_sec)
        # overall = average_column_for_model('prompts', 'generation_time', opts.running_model)
        # print(f'Weighted: {average_generation_elapsed_sec}, overall: {overall}')
        average_output_tokens = weighted_average_column_for_model('prompts', 'response_tokens', opts.running_model, opts.mode, opts.backend_url, exclude_zeros=True) or 0
        if average_generation_elapsed_sec > -1:
            redis.set('average_output_tokens', average_output_tokens)
        # overall = average_column_for_model('prompts', 'response_tokens', opts.running_model)
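Both call sites follow the same pattern: take the weighted average, fall back to 0 when there is no data, and only cache the value in Redis when it is not the -1 error sentinel (note the second guard re-checks average_generation_elapsed_sec rather than average_output_tokens). A small sketch of that pattern with a per-metric check, assuming a standard redis-py client; the helper name is invented for illustration:

def cache_if_valid(redis_client, key: str, value) -> None:
    # weighted_average_column_for_model returns -1 when the query itself failed;
    # fall back to 0 for "no rows", but never overwrite the cached value with the sentinel.
    value = value or 0
    if value > -1:
        redis_client.set(key, value)

# Usage in the background thread (sketch):
# cache_if_valid(redis, 'average_generation_elapsed_sec', average_generation_elapsed_sec)
# cache_if_valid(redis, 'average_output_tokens', average_output_tokens)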