local-llm-server/llm_server/llm/oobabooga/generate.py

import requests

from llm_server import opts


def generate(json_data: dict):
    # Forward the request body to the oobabooga backend's generate endpoint.
    try:
        r = requests.post(f'{opts.backend_url}/api/v1/generate', json=json_data)
    except Exception as e:
        # Network-level failure: there is no response object to return.
        return False, None, f'{e.__class__.__name__}: {e}'
    if r.status_code != 200:
        return False, r, f'Backend returned {r.status_code}'
    # Success: the caller gets the response object and no error message.
    return True, r, None
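

# A hypothetical call site (not part of this file), sketching how the
# (success, response, error) tuple returned by generate() might be consumed.
# The payload keys below ('prompt', 'max_new_tokens') are assumptions about
# the backend's expected request body, not something defined in this module.
if __name__ == '__main__':
    ok, resp, err = generate({'prompt': 'Hello, world', 'max_new_tokens': 16})
    if not ok:
        print(f'generation failed: {err}')
    else:
        print(resp.json())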