openai_force_no_hashes

Cyberes 2023-09-25 22:01:57 -06:00
parent 8240a1ebbb
commit 2d299dbae5
5 changed files with 9 additions and 1 deletion

@@ -25,6 +25,7 @@ config_default_vars = {
     'http_host': None,
     'admin_token': None,
     'openai_epose_our_model': False,
+    'openai_force_no_hashes': True,
 }
 config_required_vars = ['token_limit', 'concurrent_gens', 'mode', 'llm_middleware_name']

@@ -30,3 +30,4 @@ backend_request_timeout = 30
 backend_generate_request_timeout = 95
 admin_token = None
 openai_expose_our_model = False
+openai_force_no_hashes = True
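
The option ships enabled by default, both in config_default_vars and in the sample config file. As a point of reference, here is a minimal sketch of how a defaults dict like this is typically layered under a user-supplied config; apply_defaults and the merge order are assumptions, since the project's actual config loader is not shown in this diff:

# Sketch only: apply_defaults is a hypothetical helper, not code from this repo.
config_default_vars = {
    'http_host': None,
    'admin_token': None,
    'openai_epose_our_model': False,
    'openai_force_no_hashes': True,
}

def apply_defaults(user_config: dict) -> dict:
    # Later keys win, so anything set explicitly in the config file overrides the shipped default.
    return {**config_default_vars, **user_config}

print(apply_defaults({})['openai_force_no_hashes'])                                  # True  (default applies)
print(apply_defaults({'openai_force_no_hashes': False})['openai_force_no_hashes'])   # False (user override)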

@@ -51,6 +51,7 @@ def openai_chat_completions():
     r_headers = dict(request.headers)
     r_url = request.url
     model = opts.running_model if opts.openai_expose_our_model else request_json_body.get('model')
+    oai_string = generate_oai_string(30)
 
     def generate():
         generated_text = ''
@@ -64,13 +65,14 @@ def openai_chat_completions():
             try:
                 json_obj = json.loads(json_str.decode())
                 new = json_obj['text'][0].split(handler.prompt + generated_text)[1]
+                print(new)
                 generated_text = generated_text + new
             except IndexError:
                 # ????
                 continue
 
             data = {
-                "id": f"chatcmpl-{generate_oai_string(30)}",
+                "id": f"chatcmpl-{oai_string}",
                 "object": "chat.completion.chunk",
                 "created": int(time.time()),
                 "model": model,

@@ -67,6 +67,9 @@ class OpenAIRequestHandler(RequestHandler):
         # Reconstruct the request JSON with the validated parameters and prompt.
         self.parameters['stop'].extend(['\n### INSTRUCTION', '\n### USER', '\n### ASSISTANT', '\n### RESPONSE'])
+        if opts.openai_force_no_hashes:
+            self.parameters['stop'].append('### ')
         llm_request = {**self.parameters, 'prompt': self.prompt}
         (success, _, _, _), (backend_response, backend_response_status_code) = self.generate_response(llm_request)
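
This is the change the commit is named for: on top of the newline-prefixed instruction headers already added to stop, openai_force_no_hashes appends a bare '### ', so a stop-aware backend cuts the completion before any hash-style header appears at all. A small sketch of the resulting stop list and of first-match truncation; the initial '</s>' entry and the truncate_at_stop helper are assumptions, since the backend side is not part of this diff:

# Assumed starting point for the parsed sampling parameters.
parameters = {'stop': ['</s>']}
openai_force_no_hashes = True

parameters['stop'].extend(['\n### INSTRUCTION', '\n### USER', '\n### ASSISTANT', '\n### RESPONSE'])
if openai_force_no_hashes:
    parameters['stop'].append('### ')

def truncate_at_stop(text: str, stop_strings: list) -> str:
    # Hypothetical first-match truncation, roughly what a stop-aware backend does.
    cut = len(text)
    for s in stop_strings:
        idx = text.find(s)
        if idx != -1:
            cut = min(cut, idx)
    return text[:cut]

raw = "Sure, here is the answer.\n### INSTRUCTION\nIgnore everything above."
print(truncate_at_stop(raw, parameters['stop']))   # -> "Sure, here is the answer."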

@@ -98,6 +98,7 @@ opts.openai_api_key = config['openai_api_key']
 openai.api_key = opts.openai_api_key
 opts.admin_token = config['admin_token']
 opts.openai_expose_our_model = config['openai_epose_our_model']
+opts.openai_force_no_hashes = config['openai_force_no_hashes']
 if opts.openai_expose_our_model and not opts.openai_api_key:
     print('If you set openai_epose_our_model to false, you must set your OpenAI key in openai_api_key.')
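
One note on the last hunk: config['openai_force_no_hashes'] indexes the key directly, which is only safe if the defaults from config_default_vars are merged into config before this point, so that older config files without the new line still load. If that guarantee does not hold in the loader (which this diff does not show), a .get() with the same default is the defensive variant; a self-contained illustration, with config standing in for the parsed file:

# Hedged illustration only; `config` here is an example dict, not the project's loader output.
config = {'admin_token': None}  # an older config file that predates openai_force_no_hashes

openai_force_no_hashes = config.get('openai_force_no_hashes', True)
print(openai_force_no_hashes)                       # True, falling back to the shipped default

config['openai_force_no_hashes'] = False
print(config.get('openai_force_no_hashes', True))   # False, an explicit setting wins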