local-llm-server/llm_server/routes/openai/info.py

17 lines
449 B
Python

from flask import Response
from . import openai_bp
from llm_server.custom_redis import flask_cache
from ... import opts
@openai_bp.route('/prompt', methods=['GET'])
@flask_cache.cached(timeout=2678000, query_string=True)
def get_openai_info():
    """Serve the configured OpenAI system prompt as plain text.

    Returns ``opts.openai_system_prompt`` with status 200 when
    ``opts.expose_openai_system_prompt`` is enabled; otherwise an empty
    body with status 403. Responses are cached (timeout ~31 days),
    keyed by query string.
    """
    if opts.expose_openai_system_prompt:
        # mimetype= sets the Content-Type header directly, avoiding the
        # two-step construct-then-mutate on the Response object.
        return Response(opts.openai_system_prompt, mimetype='text/plain'), 200
    # Exposure disabled: reveal nothing about the prompt.
    return '', 403