local-llm-server/llm_server/routes/openai/info.py

17 lines
433 B
Python
Raw Normal View History

2023-09-13 20:25:56 -06:00
from flask import Response
from . import openai_bp
2023-09-26 22:09:11 -06:00
from ..cache import flask_cache
2023-09-13 20:25:56 -06:00
from ... import opts
@openai_bp.route('/prompt', methods=['GET'])
@flask_cache.cached(timeout=2678000, query_string=True)
def get_openai_info():
    """Serve the configured OpenAI system prompt as plain text.

    When ``opts.expose_openai_system_prompt`` is disabled, responds with an
    empty body and HTTP 403 instead of revealing the prompt. Responses are
    cached (~31 days) per query string via ``flask_cache``.
    """
    # Guard clause: refuse early when the operator has not opted in.
    if not opts.expose_openai_system_prompt:
        return '', 403
    prompt_response = Response(opts.openai_system_prompt)
    prompt_response.headers['Content-Type'] = 'text/plain'
    return prompt_response, 200