47 lines
1.2 KiB
Python
47 lines
1.2 KiB
Python
import time
|
|
|
|
from flask import jsonify, request
|
|
|
|
from . import bp
|
|
from ..helpers.http import cache_control
|
|
from ... import opts
|
|
from ...llm.info import get_running_model
|
|
from ..cache import cache
|
|
|
|
|
|
# cache = Cache(bp, config={'CACHE_TYPE': 'simple'})
|
|
|
|
|
|
# @bp.route('/info', methods=['GET'])
|
|
# # @cache.cached(timeout=3600, query_string=True)
|
|
# def get_info():
|
|
# # requests.get()
|
|
# return 'yes'
|
|
|
|
|
|
@bp.route('/model', methods=['GET'])
def get_model():
    """Return the currently running backend model as a JSON payload.

    Caching is managed manually here (rather than with a flask-caching
    decorator) so that failures to reach the backend are NOT cached —
    Cloudflare also won't cache 500 errors. On success the (response, 200)
    tuple is cached for 60 seconds, keyed by the full request URL.

    Returns:
        (flask.Response, int): JSON body and HTTP status code.
            Success: {'result': <model>, 'timestamp': <unix seconds>}, 200.
            Failure: {'code': 502, 'error': ..., 'type': <exception name>}, 500.
    """
    cache_key = 'model_cache::' + request.url
    cached_response = cache.get(cache_key)
    if cached_response:
        return cached_response

    model, error = get_running_model()
    if not model:
        # Do NOT cache failures: return immediately so the next request
        # retries the backend instead of being served a stale error.
        return jsonify({
            'code': 502,
            'error': 'failed to reach backend',
            'type': error.__class__.__name__
        }), 500  # return 500 so Cloudflare doesn't intercept us

    response = jsonify({
        'result': model,
        'timestamp': int(time.time())
    }), 200
    # Only successful responses are cached.
    cache.set(cache_key, response, timeout=60)
    return response
|