local-llm-server/config/config.yml

# TODO: add this file to .gitignore and add a .sample.yml
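# Log received prompts to the database.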
log_prompts: true
verify_ssl: false # Python's requests library has issues with self-signed certs
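# Backend type to proxy requests to (oobabooga refers to text-generation-webui's API).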
mode: oobabooga
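# Require clients to authenticate before they can use the proxy.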
auth_required: false
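# Maximum number of generations the backend is allowed to run concurrently.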
concurrent_gens: 3
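# Maximum number of tokens permitted for a single request.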
token_limit: 7777
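
# URL of the backend inference server (see verify_ssl above for self-signed HTTPS).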
backend_url: https://10.0.0.86:8083
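
# Name this middleware reports to clients (here, the proxy's public hostname).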
llm_middleware_name: proxy.chub-archive.evulid.cc
## STATS ##
# Display the total_proompts item on the stats screen.
show_num_prompts: true
# Display the uptime item on the stats screen.
show_uptime: true
# Load the number of prompts from the database to display on the stats page.
load_num_prompts: true
# Path that is shown to users as the endpoint they should connect to
frontend_api_client: /api
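
# Path to the local database file where prompts and stats are stored.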
database_path: ./proxy-server.db