local-llm-server/llm_server/llm/generator.py

from llm_server import opts


def generator(request_json_body):
    # Dispatch the request to the inference backend selected by opts.mode.
    if opts.mode == 'oobabooga':
        from .oobabooga.generate import generate
        return generate(request_json_body)
    elif opts.mode == 'hf-textgen':
        from .hf_textgen.generate import generate
        return generate(request_json_body)
    else:
        raise Exception(f'Unknown mode: {opts.mode}')
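For context, a minimal usage sketch of this dispatcher. The opts.mode value and the payload keys below are illustrative assumptions, not values defined in this file; the actual request shape depends on the selected backend's generate() implementation.

    # Hypothetical usage sketch -- opts.mode and the payload keys are assumptions
    # for illustration, not taken from generator.py.
    from llm_server import opts
    from llm_server.llm.generator import generator

    opts.mode = 'oobabooga'  # the other supported value is 'hf-textgen'

    # The request JSON body is passed through unchanged to the backend's generate().
    result = generator({'prompt': 'Hello, world.', 'max_new_tokens': 64})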