fix exception

This commit is contained in:
Cyberes 2023-10-09 10:31:35 -06:00
parent 467e1893ea
commit ae4d4e5ca9
2 changed files with 3 additions and 0 deletions

View File

@ -27,6 +27,8 @@ def oai_to_vllm(request_json_body, stop_hashes: bool, mode):
    if mode == 'vllm' and request_json_body.get('top_p') == 0:
        request_json_body['top_p'] = 0.01
    request_json_body['max_tokens'] = min(max(request_json_body.get('max_new_tokens', 0), request_json_body.get('max_tokens', 0)), opts.max_new_tokens)
    return request_json_body

View File

@ -37,6 +37,7 @@ class RequestHandler:
        self.parameters = None
        self.used = False
        self.selected_model = selected_model
        self.backend_url = get_a_cluster_backend(selected_model)
        self.cluster_backend_info = cluster_config.get_backend(self.backend_url)