local-llm-server/llm_server/routes/openai/info.py

from flask import Response
from llm_server.custom_redis import flask_cache
from . import openai_bp
from ...config.global_config import GlobalConfig

# Cache the rendered response for ~31 days (2,678,000 seconds); the cache key varies on the query string.
@openai_bp.route('/prompt', methods=['GET'])
@flask_cache.cached(timeout=2678000, query_string=True)
def get_openai_info():
    # Only return the system prompt if the operator has opted in via the config.
    if GlobalConfig.get().expose_openai_system_prompt:
        resp = Response(GlobalConfig.get().openai_system_prompt)
        resp.headers['Content-Type'] = 'text/plain'
        return resp, 200
    else:
        return '', 403
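
For reference, a client can read the exposed prompt with a plain GET request. The sketch below is a minimal example, assuming the openai_bp blueprint is mounted under an /api/openai prefix on a locally running instance; both the base URL and the prefix are assumptions, not something this file confirms.

import requests  # third-party HTTP client, used here purely for illustration

# Hypothetical base URL; adjust to wherever the blueprint is actually mounted.
BASE_URL = 'http://localhost:5000/api/openai'

resp = requests.get(f'{BASE_URL}/prompt')
if resp.status_code == 200:
    print(resp.text)  # the configured OpenAI system prompt, served as text/plain
else:
    print(f'System prompt is not exposed (HTTP {resp.status_code}).')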