local-llm-server/llm_server/opts.py

# Global settings that never change after startup
running_model = 'none'  # name of the model currently being served; placeholder until one is loaded
concurrent_generates = 3  # maximum number of generation requests handled at once
mode = 'oobabooga'  # which backend API the proxy speaks
backend_url = None  # URL of the backend inference server; must be set from the config
token_limit = 5555  # maximum token count the proxy will accept
database_path = './proxy-server.db'  # path to the proxy's local database file
auth_required = False  # whether clients must authenticate their requests
log_prompts = False  # whether incoming prompts are logged
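
Because opts is a plain module whose attributes serve as process-wide settings, other parts of the server would typically import the module itself and assign to its attributes, so every importer sees the same values. The sketch below illustrates that pattern only; the load_config helper, the config.yml path, and the use of YAML are assumptions for illustration and are not part of the file shown above.

# Hypothetical startup sketch -- not part of opts.py.
import yaml

from llm_server import opts


def load_config(path: str = './config.yml') -> None:
    """Read a config file (assumed YAML here) and copy its values onto the opts module."""
    with open(path) as f:
        config = yaml.safe_load(f)

    # Assign onto the module itself rather than doing
    # `from llm_server.opts import backend_url`, so every other
    # importer of llm_server.opts observes the updated values.
    opts.backend_url = config['backend_url']
    opts.mode = config.get('mode', opts.mode)
    opts.token_limit = config.get('token_limit', opts.token_limit)
    opts.auth_required = config.get('auth_required', opts.auth_required)
    opts.log_prompts = config.get('log_prompts', opts.log_prompts)

After load_config runs, any module that does `from llm_server import opts` and reads, for example, opts.backend_url will see the configured value, which is the usual reason for keeping settings like these as module-level attributes.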