Cyberes 2023-10-04 13:21:43 -06:00
parent 5e90fa54d4
commit d78ef652fc
2 changed files with 8 additions and 11 deletions


@@ -18,7 +18,7 @@ class OobaRequestHandler(RequestHandler):
         if self.offline:
             msg = f'{self.selected_model} is not a valid model choice.'
             print(msg)
-            return jsonify({'results': [{'text': format_sillytavern_err(msg)}]}), 200
+            self.handle_error(msg)
         request_valid, invalid_response = self.validate_request()
         if not request_valid:
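
The change above replaces the inline SillyTavern-formatted JSON response with a call to the shared RequestHandler.handle_error(). Below is a minimal sketch of what such a helper could look like, assuming it simply reproduces the response shape of the removed line; the format_sillytavern_err stand-in is illustrative, since the real helper lives in llm_server.routes.helpers.client. Note that the new call site does not return the helper's result, so the actual implementation may raise or abort instead of handing back a response.

```python
# Minimal sketch, not the repository's actual implementation: a centralized
# handle_error() that rebuilds the response the removed line produced inline.
from flask import Flask, jsonify


def format_sillytavern_err(msg: str) -> str:
    # Stand-in for the project's helper in llm_server.routes.helpers.client.
    return f'ERROR: {msg}'


class RequestHandler:
    def handle_error(self, msg: str):
        # Same shape as the removed inline response: HTTP 200 with the
        # formatted error text as the first entry in 'results'.
        return jsonify({'results': [{'text': format_sillytavern_err(msg)}]}), 200


if __name__ == '__main__':
    # jsonify() needs an application context, so wrap the demo call in one.
    app = Flask(__name__)
    with app.app_context():
        response, status = RequestHandler().handle_error('foo is not a valid model choice.')
        print(status, response.get_json())
```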


@@ -13,7 +13,6 @@ from llm_server.helpers import auto_set_base_client_api
 from llm_server.llm.oobabooga.ooba_backend import OobaboogaBackend
 from llm_server.llm.vllm.vllm_backend import VLLMBackend
 from llm_server.routes.auth import parse_token
-from llm_server.routes.helpers.client import format_sillytavern_err
 from llm_server.routes.helpers.http import require_api_key, validate_json
 from llm_server.routes.queue import priority_queue
@@ -47,15 +46,13 @@ class RequestHandler:
             self.offline = True
         else:
             self.offline = False
-        self.selected_model = self.cluster_backend_info['model']
-        self.backend = get_backend_handler(self.cluster_backend_info['mode'], self.backend_url)
-        self.parameters = None
-        self.used = False
-        if self.token and not self.token.startswith('SYSTEM__'):
-            # "recent_prompters" is only used for stats.
-            redis.zadd('recent_prompters', {self.client_ip: time.time()})
+            self.selected_model = self.cluster_backend_info['model']
+            self.backend = get_backend_handler(self.cluster_backend_info['mode'], self.backend_url)
+            self.parameters = None
+            self.used = False
+            if self.token and not self.token.startswith('SYSTEM__'):
+                # "recent_prompters" is only used for stats.
+                redis.zadd('recent_prompters', {self.client_ip: time.time()})

     def get_auth_token(self):
         if self.request_json_body.get('X-API-KEY'):
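
The recent_prompters bookkeeping shown above stores each non-system client in a Redis sorted set with the prompt time as the score, which is what makes it usable "only for stats". A small self-contained sketch of that pattern follows; the key name and the zadd() call match the diff, while the five-minute window and the counting helper are illustrative assumptions rather than the project's actual stats code.

```python
# Sketch of the 'recent_prompters' pattern: a sorted set keyed by client IP
# with the last prompt timestamp as the score. Window size and helper names
# are assumptions for illustration.
import time

import redis

r = redis.Redis()

RECENT_WINDOW_SEC = 300  # assumed definition of "recent"


def record_prompter(client_ip: str) -> None:
    # Same call shape as in the diff: member = client IP, score = now.
    r.zadd('recent_prompters', {client_ip: time.time()})


def count_recent_prompters() -> int:
    # Trim entries older than the window, then count what is left.
    r.zremrangebyscore('recent_prompters', '-inf', time.time() - RECENT_WINDOW_SEC)
    return r.zcard('recent_prompters')
```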