local-llm-server/llm_server/llm/oobabooga/generate.py

18 lines
459 B
Python

"""
This file is used by the worker that processes requests.
"""
import requests
from llm_server import opts
def generate(json_data: dict):
    """Forward a generation request to the oobabooga backend.

    Sends ``json_data`` as the JSON body of a POST to the backend's
    ``/api/v1/generate`` endpoint and reports the outcome as a 3-tuple
    ``(success, response, error)``:

    - ``(True, response, None)`` on an HTTP 200 reply.
    - ``(False, response, message)`` when the backend answers with any
      other status code.
    - ``(False, None, message)`` when the request itself fails (connection
      error, SSL failure, etc.); the message names the exception class.

    NOTE(review): ``requests.post`` is called without a ``timeout``, so a
    hung backend blocks the worker indefinitely — confirm whether ``opts``
    exposes a timeout setting that should be threaded through here.
    """
    try:
        # Broad catch is deliberate: any transport-level failure is folded
        # into the error string rather than propagating to the worker.
        r = requests.post(f'{opts.backend_url}/api/v1/generate', json=json_data, verify=opts.verify_ssl)
    except Exception as e:
        return False, None, f'{e.__class__.__name__}: {e}'
    if r.status_code == 200:
        return True, r, None
    return False, r, f'Backend returned {r.status_code}'