local-llm-server/llm_server/opts.py

13 lines
277 B
Python

# Global settings that never change after startup/init.
#
# NOTE(review): despite the header, several values here (backend_url,
# http_host) are None placeholders — presumably assigned once during
# startup/init by another module and read-only afterwards. Confirm
# against the initialization code before treating these as constants.
running_model = 'none'  # name of the currently active model; 'none' until one is set — TODO confirm who updates this
concurrent_gens = 3  # presumably the max number of simultaneous generation requests — verify against the worker/queue code
mode = 'oobabooga'  # backend mode selector; 'oobabooga' is the default backend
backend_url = None  # URL of the backend inference server; set at startup (None until then)
context_size = 5555  # default context window size — assumes tokens; the odd value suggests a placeholder, confirm
database_path = './proxy-server.db'  # path to the proxy's database file (SQLite, judging by the .db extension — verify)
auth_required = False  # whether incoming API requests must authenticate
log_prompts = False  # whether incoming prompts are logged (off by default)
frontend_api_client = ''  # identifier/URL of the frontend API client — empty by default, TODO confirm expected format
http_host = None  # host/interface for the HTTP server; set at startup (None until then)