calculate time stats based on backend url

Cyberes 2023-09-13 12:34:14 -06:00
parent 3d40ed4cfb
commit 320c3fc710
2 changed files with 26 additions and 29 deletions

llm_server/database.py

@@ -132,39 +132,39 @@ def average_column_for_model(table_name, column_name, model_name):
     return result[0]
 
 
-def weighted_average_column_for_model(table_name, column_name, model_name, backend_name, exclude_zeros: bool = False):
+def weighted_average_column_for_model(table_name, column_name, model_name, backend_name, backend_url, exclude_zeros: bool = False):
     conn = sqlite3.connect(opts.database_path)
     cursor = conn.cursor()
 
-    cursor.execute(f"SELECT DISTINCT model, backend_mode FROM {table_name}")
-    models_backends = [(row[0], row[1]) for row in cursor.fetchall()]
-
-    model_averages = {}
-    for model, backend in models_backends:
-        if backend != backend_name:
-            continue
-
-        cursor.execute(f"SELECT {column_name}, ROWID FROM {table_name} WHERE model = ? AND backend_mode = ? ORDER BY ROWID DESC", (model, backend))
-        results = cursor.fetchall()
-
-        if not results:
-            continue
-
-        total_weight = 0
-        weighted_sum = 0
-        for i, (value, rowid) in enumerate(results):
-            if value is None or (exclude_zeros and value == 0):
-                continue
-            weight = i + 1
-            total_weight += weight
-            weighted_sum += weight * value
-
-        if total_weight == 0:
-            continue
-
-        model_averages[(model, backend)] = weighted_sum / total_weight
-
+    # cursor.execute(f"SELECT DISTINCT model, backend_mode FROM {table_name}")
+    # models_backends = [(row[0], row[1]) for row in cursor.fetchall()]
+    #
+    # model_averages = {}
+    # for model, backend in models_backends:
+    #     if backend != backend_name:
+    #         continue
+
+    cursor.execute(f"SELECT {column_name}, ROWID FROM {table_name} WHERE model = ? AND backend_mode = ? AND backend_url = ? ORDER BY ROWID DESC", (model_name, backend_name, backend_url))
+    results = cursor.fetchall()
+
+    # if not results:
+    #     continue
+
+    total_weight = 0
+    weighted_sum = 0
+    for i, (value, rowid) in enumerate(results):
+        if value is None or (exclude_zeros and value == 0):
+            continue
+        weight = i + 1
+        total_weight += weight
+        weighted_sum += weight * value
+
+    # if total_weight == 0:
+    #     continue
+
+    calculated_avg = weighted_sum / total_weight
+
     conn.close()
-    return model_averages.get((model_name, backend_name))
+    return calculated_avg
 
 
 def sum_column(table_name, column_name):
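
For reference, the weighting above is positional: the query returns rows newest-first (ORDER BY ROWID DESC) and row i gets weight i + 1, so rows later in the result set count more heavily. A minimal standalone sketch of that calculation, assuming plain Python values rather than database rows (the helper name weighted_average and the None guard for an empty or fully excluded sample are illustrative additions, not part of the commit):

def weighted_average(values, exclude_zeros=False):
    # Mirrors the loop above: skip nulls (and optionally zeros), weight by position.
    total_weight = 0
    weighted_sum = 0
    for i, value in enumerate(values):
        if value is None or (exclude_zeros and value == 0):
            continue
        weight = i + 1
        total_weight += weight
        weighted_sum += weight * value
    if total_weight == 0:
        return None  # guard kept from the old code; the new code assumes at least one usable row
    return weighted_sum / total_weight

print(weighted_average([4.0, 2.0, 2.0]))  # (1*4.0 + 2*2.0 + 3*2.0) / (1+2+3) = 14.0/6 ≈ 2.33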

View File

@@ -1,9 +1,6 @@
import time
from datetime import datetime
from threading import Thread
import requests
from llm_server import opts
from llm_server.database import weighted_average_column_for_model
from llm_server.llm.info import get_running_model
@@ -56,13 +53,13 @@ class MainBackgroundThread(Thread):
             # exclude_zeros=True filters out rows where an error message was returned. Previously, if there was an error, 0
             # was entered into the column. The new code enters null instead but we need to be backwards compatible for now
-            average_generation_elapsed_sec = weighted_average_column_for_model('prompts', 'generation_time', opts.running_model, opts.mode, exclude_zeros=True) or 0
+            average_generation_elapsed_sec = weighted_average_column_for_model('prompts', 'generation_time', opts.running_model, opts.mode, opts.backend_url, exclude_zeros=True) or 0
             redis.set('average_generation_elapsed_sec', average_generation_elapsed_sec)
 
             # overall = average_column_for_model('prompts', 'generation_time', opts.running_model)
             # print(f'Weighted: {average_generation_elapsed_sec}, overall: {overall}')
 
-            average_output_tokens = weighted_average_column_for_model('prompts', 'response_tokens', opts.running_model, opts.mode, exclude_zeros=True) or 0
+            average_output_tokens = weighted_average_column_for_model('prompts', 'response_tokens', opts.running_model, opts.mode, opts.backend_url, exclude_zeros=True) or 0
             redis.set('average_output_tokens', average_output_tokens)
 
             # overall = average_column_for_model('prompts', 'response_tokens', opts.running_model)
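
To see what the new backend_url filter changes, here is a hypothetical smoke test against an in-memory SQLite database; the prompts schema, model name, backend_mode value, and URLs are assumptions inferred from the queries in this commit, not taken from the real database:

import sqlite3

# Assumed schema: only the columns referenced by the queries in this commit.
conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE prompts (model TEXT, backend_mode TEXT, backend_url TEXT, generation_time REAL, response_tokens INTEGER)")
rows = [
    ('llama-13b', 'hf-textgen', 'http://10.0.0.1:7000', 3.2, 180),
    ('llama-13b', 'hf-textgen', 'http://10.0.0.1:7000', 2.8, 150),
    ('llama-13b', 'hf-textgen', 'http://10.0.0.2:7000', 9.1, 400),  # different backend_url, should be excluded
]
conn.executemany("INSERT INTO prompts VALUES (?, ?, ?, ?, ?)", rows)

# Same shape as the new query: filter by model, backend_mode and backend_url, newest first.
cursor = conn.execute(
    "SELECT generation_time, ROWID FROM prompts "
    "WHERE model = ? AND backend_mode = ? AND backend_url = ? "
    "ORDER BY ROWID DESC",
    ('llama-13b', 'hf-textgen', 'http://10.0.0.1:7000'),
)
print(cursor.fetchall())  # [(2.8, 2), (3.2, 1)]
conn.close()

Only the rows recorded for the requested backend_url come back, which is what lets the time stats be computed per backend instead of per mode.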