local-llm-server/llm_server/routes/openai/info.py

from flask import Response

from . import openai_bp
from ... import opts


@openai_bp.route('/prompt', methods=['GET'])
def get_openai_info():
    """Return the configured OpenAI system prompt as plain text, if exposure is enabled."""
    if opts.expose_openai_system_prompt:
        resp = Response(opts.openai_system_prompt)
        resp.headers['Content-Type'] = 'text/plain'
        return resp, 200
    else:
        # Exposure disabled: reveal nothing about the prompt.
        return '', 403
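
# Usage sketch (assumptions: the openai_bp blueprint is mounted under an
# /api/openai prefix and the server listens on localhost:5000; adjust both
# to match the actual deployment):
#
#   curl -i http://localhost:5000/api/openai/prompt
#   # -> 200 with the prompt text when expose_openai_system_prompt is enabled,
#   #    403 with an empty body otherwise.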