local-llm-server/llm_server/opts.py

18 lines
377 B
Python
Raw Normal View History

2023-08-23 23:11:12 -06:00
# Read-only global variables.
#
# Module-level configuration state shared across the proxy server.
# These are defaults; presumably overwritten at startup by a config
# loader — TODO confirm against the caller that populates them.

# Backend / generation settings
running_model = 'none'
mode = 'oobabooga'
backend_url = None
context_size = 5555
concurrent_gens = 3

# Storage and access control
database_path = './proxy-server.db'
auth_required = False
log_prompts = False

# Client-facing endpoints
frontend_api_client = ''
full_client_api = None
http_host = None
verify_ssl = True

# Status-page display options
show_num_prompts = True
show_uptime = True
average_generation_time_mode = 'database'