cache again

This commit is contained in:
Cyberes 2023-08-22 23:14:56 -06:00
parent 36b793e8a2
commit 61b9e313d2
1 changed file with 4 additions and 2 deletions

View File

@ -7,13 +7,15 @@ from llm_server import opts
from llm_server.routes.v1.generate import concurrent_semaphore
from . import bp
from .. import stats
from ..cache import cache
from ..helpers.http import cache_control
from ..stats import SemaphoreCheckerThread
from ...llm.info import get_running_model
@bp.route('/stats', methods=['GET'])
# @cache.cached(timeout=5, query_string=True)
# @cache_control(5)
@cache.cached(timeout=5, query_string=True)
@cache_control(5)
def get_stats():
model_list = get_running_model() # will return False when the fetch fails
if isinstance(model_list, bool):