show the openai system prompt

Cyberes 2023-09-13 20:25:56 -06:00
parent 320c3fc710
commit 12e894032e
8 changed files with 29 additions and 5 deletions

@@ -18,7 +18,8 @@ config_default_vars = {
'max_new_tokens': 500,
'manual_model_name': False,
'enable_openi_compatible_backend': True,
'openai_system_prompt': """You are an AI assistant chatbot. Your main function is to provide accurate and helpful responses to the user's queries. You should always be polite, respectful, and patient. You should not provide any personal opinions or advice unless specifically asked by the user. You should not make any assumptions about the user's knowledge or abilities. You should always strive to provide clear and concise answers. If you do not understand a user's query, ask for clarification. If you cannot provide an answer, apologize and suggest the user seek help elsewhere.\nYou are the assistant and answer to the `### RESPONSE` prompt. Lines that start with `### ASSISTANT` were messages you sent previously.\nLines that start with `### USER` were messages sent by the user you are chatting with.\nYou will respond to the "### RESPONSE:" prompts and follow the instructions given by the user.\n\n""",
'expose_openai_system_prompt': True,
'openai_system_prompt': """You are an assistant chatbot. Your main function is to provide accurate and helpful responses to the user's queries. You should always be polite, respectful, and patient. You should not provide any personal opinions or advice unless specifically asked by the user. You should not make any assumptions about the user's knowledge or abilities. You should always strive to provide clear and concise answers. If you do not understand a user's query, ask for clarification. If you cannot provide an answer, apologize and suggest the user seek help elsewhere.\nLines that start with "### ASSISTANT" were messages you sent previously.\nLines that start with "### USER" were messages sent by the user you are chatting with.\nYou will respond to the "### RESPONSE:" prompt as the assistant and follow the instructions given by the user.\n\n""",
}
config_required_vars = ['token_limit', 'concurrent_gens', 'mode', 'llm_middleware_name']
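
The prompt above describes a flat transcript format. A minimal sketch of how OpenAI-style chat messages could be flattened into that format, purely for illustration (the helper name is hypothetical; the middleware's real conversion happens in its chat_completions handler and may differ):

def flatten_chat(system_prompt, messages):
    # Illustrative only: prepend the system prompt, then replay the
    # conversation using the markers the prompt text refers to.
    parts = [system_prompt]
    for msg in messages:
        if msg['role'] == 'user':
            parts.append('### USER: ' + msg['content'])
        elif msg['role'] == 'assistant':
            parts.append('### ASSISTANT: ' + msg['content'])
    parts.append('### RESPONSE: ')  # the backend model completes from here
    return '\n'.join(parts)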

@@ -25,4 +25,5 @@ show_backend_info = True
manual_model_name = None
llm_middleware_name = ''
enable_openi_compatible_backend = True
openai_system_prompt = """You are an AI assistant chatbot. Your main function is to provide accurate and helpful responses to the user's queries. You should always be polite, respectful, and patient. You should not provide any personal opinions or advice unless specifically asked by the user. You should not make any assumptions about the user's knowledge or abilities. You should always strive to provide clear and concise answers. If you do not understand a user's query, ask for clarification. If you cannot provide an answer, apologize and suggest the user seek help elsewhere.\nYou are the assistant and answer to the `### RESPONSE` prompt. Lines that start with `### ASSISTANT` were messages you sent previously.\nLines that start with `### USER` were messages sent by the user you are chatting with.\nYou will respond to the "### RESPONSE:" prompts and follow the instructions given by the user.\n\n"""
openai_system_prompt = """You are an assistant chatbot. Your main function is to provide accurate and helpful responses to the user's queries. You should always be polite, respectful, and patient. You should not provide any personal opinions or advice unless specifically asked by the user. You should not make any assumptions about the user's knowledge or abilities. You should always strive to provide clear and concise answers. If you do not understand a user's query, ask for clarification. If you cannot provide an answer, apologize and suggest the user seek help elsewhere.\nLines that start with "### ASSISTANT" were messages you sent previously.\nLines that start with "### USER" were messages sent by the user you are chatting with.\nYou will respond to the "### RESPONSE:" prompt as the assistant and follow the instructions given by the user.\n\n"""
expose_openai_system_prompt = True

@@ -30,3 +30,4 @@ def handle_error(e):
from .models import openai_list_models
from .chat_completions import openai_chat_completions
from .info import get_openai_info

@@ -0,0 +1,14 @@
from flask import Response

from . import openai_bp
from ... import opts


@openai_bp.route('/prompt', methods=['GET'])
def get_openai_info():
    if opts.expose_openai_system_prompt:
        resp = Response(opts.openai_system_prompt)
        resp.headers['Content-Type'] = 'text/plain'
        return resp, 200
    else:
        return '', 403
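
With expose_openai_system_prompt enabled, the prompt can be fetched as plain text. A minimal client-side sketch, assuming the blueprint ends up reachable at the /api/openai/v1 path linked from the home page template below and using the third-party requests library (example.com stands in for the real host):

import requests

resp = requests.get('https://example.com/api/openai/v1/prompt')  # hypothetical host
if resp.status_code == 200:
    print(resp.text)  # the plain-text system prompt
else:
    # the endpoint returns 403 when expose_openai_system_prompt is False
    print('prompt not exposed:', resp.status_code)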

@@ -8,9 +8,6 @@ from ... import opts
from ...llm.info import get_running_model
# cache = Cache(bp, config={'CACHE_TYPE': 'simple'})
# @bp.route('/info', methods=['GET'])
# # @cache.cached(timeout=3600, query_string=True)
# def get_info():

other/vllm/vllm_api_server.py Executable file → Normal file

@@ -72,6 +72,7 @@ opts.manual_model_name = config['manual_model_name']
opts.llm_middleware_name = config['llm_middleware_name']
opts.enable_openi_compatible_backend = config['enable_openi_compatible_backend']
opts.openai_system_prompt = config['openai_system_prompt']
opts.expose_openai_system_prompt = config['expose_openai_system_prompt']
opts.verify_ssl = config['verify_ssl']
if not opts.verify_ssl:
@@ -164,6 +165,7 @@ def home():
stats_json=json.dumps(stats, indent=4, ensure_ascii=False),
extra_info=mode_info,
openai_client_api=f'https://{opts.base_client_api}/openai/v1' if opts.enable_openi_compatible_backend else 'disabled',
expose_openai_system_prompt=opts.expose_openai_system_prompt
)

@@ -101,6 +101,14 @@
</li>
</ol>
</div>
{% if openai_client_api != 'disabled' and expose_openai_system_prompt %}
<br>
<div id="openai">
<strong>OpenAI-Compatible API</strong>
<p>The OpenAI-Compatible API adds a system prompt to set the AI's behavior to a "helpful assistant". You can view this prompt <a href="/api/openai/v1/prompt">here</a>.</p>
</div>
{% endif %}
<br>
<div id="extra-info">{{ extra_info|safe }}</div>
</div>