# local-llm-server/llm_server/routes/v1/proxy.py
import time
from datetime import datetime
from flask import jsonify
from llm_server import opts
from llm_server.routes.v1.generate import concurrent_semaphore
from . import bp
from .. import stats
from ..cache import cache
from ..helpers.http import cache_control
from ..stats import proompters_1_min
@bp.route('/stats', methods=['GET'])
@cache.cached(timeout=60, query_string=True)
@cache_control(60)
def get_stats():
    """Return public service statistics as a JSON response.

    Cached server-side for 60 seconds (Flask-Caching) and served with a
    matching ``Cache-Control`` header, so clients and the cache expire
    together.

    Returns:
        tuple: ``(flask.Response, 200)`` with a JSON body containing:
            - proompters_now: requests currently being processed, derived
              from how many semaphore slots are taken
              (``opts.concurrent_generates`` minus the free slots).
            - proompters_1_min: requesters seen in the last minute.
            - total_proompts: lifetime request counter.
            - uptime: whole seconds since ``stats.start_time``.
            - timestamp: current Unix time, whole seconds.
    """
    # NOTE(review): ``_value`` is a private attribute of
    # threading.Semaphore — it works on CPython but is not a public API.
    # Consider tracking active requests with an explicit counter instead.
    return jsonify({
        'proompters_now': opts.concurrent_generates - concurrent_semaphore._value,
        'proompters_1_min': proompters_1_min,
        'total_proompts': stats.proompts.value,
        # stats.start_time is presumably a naive local datetime; datetime.now()
        # matches that. TODO confirm both use the same timezone convention.
        'uptime': int((datetime.now() - stats.start_time).total_seconds()),
        'timestamp': int(time.time())
    }), 200