Merge cluster to master #3

Merged
cyberes merged 163 commits from cluster into master 2023-10-27 19:19:22 -06:00
3 changed files with 5 additions and 2 deletions
Showing only changes of commit 4deb32bf1c

View File

@@ -10,7 +10,7 @@ def check_moderation_endpoint(prompt: str):
     }
     response = requests.post('https://api.openai.com/v1/moderations', headers=headers, json={"input": prompt}, timeout=10)
     if response.status_code != 200:
-        print(response)
+        print('moderation failed:', response)
         response.raise_for_status()
     response = response.json()
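For reference, a minimal sketch of how the rest of check_moderation_endpoint might consume the decoded moderation response. Only the request and error-handling lines come from the hunk above; the api_key parameter, the header construction, and the flagged-category return value are assumptions for illustration, not the repository's code.

import requests

def check_moderation_endpoint(prompt: str, api_key: str):
    headers = {
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {api_key}',  # assumed; the real headers are built elsewhere
    }
    response = requests.post('https://api.openai.com/v1/moderations', headers=headers, json={"input": prompt}, timeout=10)
    if response.status_code != 200:
        print('moderation failed:', response)
        response.raise_for_status()
    response = response.json()
    # The moderation endpoint returns a "results" list whose first entry carries a
    # "flagged" boolean plus a per-category dict of booleans.
    result = response['results'][0]
    flagged_categories = [name for name, flagged in result['categories'].items() if flagged]
    return result['flagged'], flagged_categories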

View File

@@ -73,7 +73,6 @@ class OpenAIRequestHandler(RequestHandler):
             print('sent success response')
             return self.build_openai_response(self.prompt, backend_response.json['results'][0]['text'], model=model), backend_response_status_code
         else:
-            print(backend_response_status_code, backend_response.data)
             return backend_response, backend_response_status_code

     def handle_ratelimited(self, do_log: bool = True):
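The success branch wraps the backend text in an OpenAI-compatible body via self.build_openai_response. A hypothetical stand-alone illustration of that wrapping, assuming the handler mimics the public chat-completions schema; the helper below is not the repository's implementation, and the token counts are a rough stand-in for a real tokenizer.

import time
import uuid

def build_openai_style_response(prompt: str, response_text: str, model: str) -> dict:
    # Hypothetical helper: shape raw backend text like an OpenAI chat completion
    # so that standard OpenAI clients can parse the proxy's reply.
    return {
        'id': f'chatcmpl-{uuid.uuid4().hex}',
        'object': 'chat.completion',
        'created': int(time.time()),
        'model': model,
        'choices': [{
            'index': 0,
            'message': {'role': 'assistant', 'content': response_text},
            'finish_reason': 'stop',
        }],
        'usage': {
            'prompt_tokens': len(prompt.split()),        # whitespace split as a rough proxy
            'completion_tokens': len(response_text.split()),
            'total_tokens': len(prompt.split()) + len(response_text.split()),
        },
    }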

View File

@@ -1,3 +1,5 @@
+from llm_server.routes.queue import priority_queue
+
 try:
     import gevent.monkey
@@ -95,6 +97,8 @@ create_db()
 @app.route('/api/openai')
 @flask_cache.cached(timeout=10)
 def home():
+    print(len(priority_queue))
+
     base_client_api = redis.get('base_client_api', dtype=str)
     stats = generate_stats()
     model_choices, default_model = get_model_choices()
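Because home() sits behind @flask_cache.cached(timeout=10), the new print(len(priority_queue)) only fires when the cached page is regenerated, so the queue length is logged at most once per 10-second window rather than on every request. A self-contained sketch of that pattern, with a plain list standing in for the real priority_queue and SimpleCache standing in for whatever cache backend the app actually configures:

from flask import Flask
from flask_caching import Cache

app = Flask(__name__)
flask_cache = Cache(app, config={'CACHE_TYPE': 'SimpleCache'})  # backend chosen for this sketch only

priority_queue = []  # stand-in; the real object comes from llm_server.routes.queue

@app.route('/api/openai')
@flask_cache.cached(timeout=10)
def home():
    # Runs only when the cached response expires, not on every request.
    print(len(priority_queue))
    return {'queue_length': len(priority_queue)}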