2023-09-25 09:32:23 -06:00
|
|
|
import traceback
|
|
|
|
|
|
|
|
import requests
|
|
|
|
from flask import jsonify
|
2023-09-12 16:40:09 -06:00
|
|
|
|
|
|
|
from . import openai_bp
|
2023-09-26 22:09:11 -06:00
|
|
|
from ..cache import ONE_MONTH_SECONDS, flask_cache, redis
|
2023-09-12 16:40:09 -06:00
|
|
|
from ..stats import server_start_time
|
|
|
|
from ... import opts
|
2023-09-25 09:32:23 -06:00
|
|
|
from ...helpers import jsonify_pretty
|
2023-09-12 16:40:09 -06:00
|
|
|
from ...llm.info import get_running_model
|
|
|
|
|
|
|
|
|
|
|
|
@openai_bp.route('/models', methods=['GET'])
@flask_cache.cached(timeout=60, query_string=True)
def openai_list_models():
    """List available models, OpenAI-style (`GET /models`).

    Optionally prepends this middleware's own running model (when
    ``opts.openai_expose_our_model`` is set) to the list of models fetched
    from the upstream OpenAI API, and returns the combined result as
    pretty-printed JSON. Responses are cached for 60 seconds per query
    string via Flask-Caching.

    Returns:
        A ``(response, status)`` tuple: JSON body with HTTP 200 on success,
        or an error JSON body with HTTP 500 when the backend is unreachable.
    """
    # get_running_model() returns (model, error); a falsy model means the
    # backend could not be reached and `error` holds the exception.
    model, error = get_running_model()
    if not model:
        response = jsonify({
            'code': 502,
            'msg': 'failed to reach backend',
            'type': error.__class__.__name__
        }), 500  # return 500 so Cloudflare doesn't intercept us
    else:
        # Project redis wrapper: redis.get(key, cast_type, default).
        # Falls back to the literal string 'ERROR' if the key is missing.
        running_model = redis.get('running_model', str, 'ERROR')
        # Cached upstream fetch (memoized for a month); [] on failure or
        # when no OpenAI API key is configured.
        oai = fetch_openai_models()
        r = []
        if opts.openai_expose_our_model:
            # OpenAI-compatible model entry describing our own backend model.
            # NOTE(review): this wraps the entry in an {"object": "list",
            # "data": [...]} envelope and then concatenates it with raw model
            # dicts from upstream (`r + oai`), so the first list element has a
            # different shape than the rest — presumably intentional for
            # existing clients; confirm before changing.
            r = [{
                "object": "list",
                "data": [
                    {
                        "id": running_model,
                        "object": "model",
                        "created": int(server_start_time.timestamp()),
                        "owned_by": opts.llm_middleware_name,
                        "permission": [
                            {
                                "id": running_model,
                                "object": "model_permission",
                                "created": int(server_start_time.timestamp()),
                                "allow_create_engine": False,
                                "allow_sampling": False,
                                "allow_logprobs": False,
                                "allow_search_indices": False,
                                "allow_view": True,
                                "allow_fine_tuning": False,
                                "organization": "*",
                                "group": None,
                                "is_blocking": False
                            }
                        ],
                        "root": None,
                        "parent": None
                    }
                ]
            }]
        response = jsonify_pretty(r + oai), 200
    return response
|
2023-09-24 21:45:30 -06:00
|
|
|
|
|
|
|
|
2023-09-26 22:09:11 -06:00
|
|
|
@flask_cache.memoize(timeout=ONE_MONTH_SECONDS)
def fetch_openai_models():
    """Fetch the model list from the upstream OpenAI API.

    Best-effort: any failure (network error, bad JSON, missing 'data' key)
    is logged and an empty list is returned so callers can always
    concatenate the result. Memoized for one month via Flask-Caching.

    Returns:
        list: the upstream ``data`` array of model dicts, or ``[]`` when no
        API key is configured or the request fails.
    """
    if not opts.openai_api_key:
        return []
    try:
        response = requests.get(
            'https://api.openai.com/v1/models',
            headers={'Authorization': f"Bearer {opts.openai_api_key}"},
            timeout=10,
        )
        data = response.json()['data']
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are no
    # longer swallowed; everything else stays best-effort.
    except Exception:
        traceback.print_exc()
        return []
    else:
        return data
|