# local-llm-server/llm_server/opts.py
# Read-only global variables
# TODO: rewrite the config system so I don't have to add every single config default here
# NOTE(review): these module-level defaults are read as `opts.<name>` elsewhere;
# names and defaults are the public interface — do not rename.
# Identifier of the model currently being served; 'none' sentinel until set — TODO confirm where this is updated.
running_model: str = 'none'
# Maximum number of simultaneous generation requests (presumably enforced by the proxy — verify).
concurrent_gens: int = 3
# Backend API flavor; 'oobabooga' is the shipped default — confirm what other modes exist.
mode: str = 'oobabooga'
# URL of the backing inference server; None until loaded from config (no annotation: str-or-None).
backend_url = None
# Model context window size in tokens; 5555 looks like a placeholder default — confirm.
context_size: int = 5555
# Default cap on tokens generated per request.
max_new_tokens: int = 500
# Path to the proxy's database file; '.db' extension suggests SQLite — verify against the DB layer.
database_path: str = './proxy-server.db'
# Whether API requests must present authentication.
auth_required: bool = False
# Whether to record incoming prompts (privacy-sensitive toggle).
log_prompts: bool = False
# Frontend API client identifier string; empty by default — TODO confirm consumer.
frontend_api_client: str = ''
# Base URL of this proxy's own client-facing API; None until configured at startup.
base_client_api = None
# Host/interface the HTTP server binds or reports; None until configured.
http_host = None
# Verify TLS certificates on outbound requests to the backend.
verify_ssl: bool = True
# UI/stats toggle: expose the number of prompts served.
show_num_prompts: bool = True
# UI/stats toggle: expose server uptime.
show_uptime: bool = True
# How average generation time is computed; 'database' presumably means "derive from stored rows" — confirm alternatives.
average_generation_time_mode: str = 'database'
# UI/stats toggle: expose total output-token count.
show_total_output_tokens: bool = True
# Root URL of a netdata instance for system stats; None presumably disables it — verify.
netdata_root = None
# Maximum queued requests allowed per client IP.
ip_in_queue_max: int = 3
# UI/stats toggle: expose backend information.
show_backend_info: bool = True