cache again
parent 36b793e8a2
commit 61b9e313d2
@@ -7,13 +7,15 @@ from llm_server import opts
 from llm_server.routes.v1.generate import concurrent_semaphore
 from . import bp
 from .. import stats
+from ..cache import cache
+from ..helpers.http import cache_control
 from ..stats import SemaphoreCheckerThread
 from ...llm.info import get_running_model
 
 
 @bp.route('/stats', methods=['GET'])
-# @cache.cached(timeout=5, query_string=True)
-# @cache_control(5)
+@cache.cached(timeout=5, query_string=True)
+@cache_control(5)
 def get_stats():
     model_list = get_running_model() # will return False when the fetch fails
     if isinstance(model_list, bool):
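
The two new imports refer to helpers whose bodies are not part of this hunk. As context only, here is a minimal Python sketch of what they are assumed to look like: cache as a Flask-Caching instance (the real llm_server/routes/cache.py is not shown here) and cache_control as a small decorator that only sets a Cache-Control header on the view's response (a hypothetical reconstruction of the helper in llm_server/routes/helpers/http.py). Names and parameters beyond those visible in the diff are assumptions.

# Minimal sketch, assuming Flask-Caching; not the actual llm_server/routes/cache.py.
from flask_caching import Cache

# SimpleCache-backed instance; cache.init_app(app) is assumed to run at app setup.
cache = Cache(config={'CACHE_TYPE': 'SimpleCache'})


# Hypothetical cache_control(max_age) decorator: wraps a Flask view and stamps
# a Cache-Control: max-age=<n> header on its response, matching the usage
# @cache_control(5) in the diff above.
from functools import wraps

from flask import make_response


def cache_control(max_age: int):
    def decorator(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            response = make_response(view(*args, **kwargs))
            response.headers['Cache-Control'] = f'max-age={max_age}, public'
            return response
        return wrapper
    return decorator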