local-llm-server/llm_server/config/load.py
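
"""
Configuration loading for local-llm-server: parse the config file with bison,
validate it against config_scheme, and publish derived settings to Redis.
"""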

import re
import sys
from pathlib import Path

import openai
from bison import bison, Option, ListOption, Scheme

import llm_server
from llm_server.config.global_config import GlobalConfig
from llm_server.config.model import ConfigModel
from llm_server.config.scheme import config_scheme
from llm_server.custom_redis import redis
from llm_server.logging import create_logger
from llm_server.routes.queue import PriorityQueue

_logger = create_logger('config')


def validate_config(config: bison.Bison):
    def do(v, scheme: Scheme = None):
        if isinstance(v, Option) and v.choices is None:
            # Plain option: the parsed value must match the declared type.
            if not isinstance(config.config[v.name], v.type):
                raise ValueError(f'"{v.name}" must be type {v.type}. Current value: "{config.config[v.name]}"')
        elif isinstance(v, Option) and v.choices is not None:
            # Option with choices: the parsed value must be one of them.
            if config.config[v.name] not in v.choices:
                raise ValueError(f'"{v.name}" must be one of {v.choices}. Current value: "{config.config[v.name]}"')
        elif isinstance(v, ListOption):
            if isinstance(config.config[v.name], list):
                # Validate each list member against the member scheme.
                for item in config.config[v.name]:
                    do(item, v.member_scheme)
            elif isinstance(config.config[v.name], dict):
                # Validate each key of a dict-valued option against the flattened member scheme.
                for kk, vv in config.config[v.name].items():
                    scheme_dict = v.member_scheme.flatten()
                    if not isinstance(vv, scheme_dict[kk].type):
                        raise ValueError(f'"{kk}" must be type {scheme_dict[kk].type}. Current value: "{vv}"')
                    elif isinstance(scheme_dict[kk], Option) and scheme_dict[kk].choices is not None:
                        if vv not in scheme_dict[kk].choices:
                            raise ValueError(f'"{kk}" must be one of {scheme_dict[kk].choices}. Current value: "{vv}"')
        elif isinstance(v, dict) and scheme is not None:
            # Recursive case: a list member (a dict) checked against its member scheme.
            scheme_dict = scheme.flatten()
            for kk, vv in v.items():
                if not isinstance(vv, scheme_dict[kk].type):
                    raise ValueError(f'"{kk}" must be type {scheme_dict[kk].type}. Current value: "{vv}"')
                elif isinstance(scheme_dict[kk], Option) and scheme_dict[kk].choices is not None:
                    if vv not in scheme_dict[kk].choices:
                        raise ValueError(f'"{kk}" must be one of {scheme_dict[kk].choices}. Current value: "{vv}"')

    for v in config_scheme.flatten().values():
        do(v)
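
# For illustration, validate_config() recursively checks parsed values against a
# scheme shaped roughly like this hypothetical sketch (the real scheme is defined
# in llm_server/config/scheme.py and will differ):
#
#   config_scheme = bison.Scheme(
#       bison.Option('http_host', field_type=str),
#       bison.ListOption('cluster', member_scheme=bison.Scheme(
#           bison.Option('backend_url', field_type=str),
#       )),
#   )
#
# A mismatch (e.g. a cluster entry whose backend_url is not a str) raises
# ValueError, which load_config() below turns into a (False, message) return.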

def load_config(config_path: Path):
    config = bison.Bison(scheme=config_scheme)
    config.config_name = 'config'
    config.add_config_paths(str(config_path.parent))
    config.parse()

    try:
        validate_config(config)
    except ValueError as e:
        return False, str(e)

    config_model = ConfigModel(**config.config)
    GlobalConfig.initalize(config_model)

    if GlobalConfig.get().postgresql.maxconn < 1:
        return False, f'"maxconn" must be greater than 0. Current value: "{GlobalConfig.get().postgresql.maxconn}"'

    openai.api_key = GlobalConfig.get().openai_api_key

    llm_server.routes.queue.priority_queue = PriorityQueue({x.backend_url for x in config_model.cluster})

    if GlobalConfig.get().openai_expose_our_model and not GlobalConfig.get().openai_api_key:
        _logger.error('If you set openai_expose_our_model to true, you must set your OpenAI key in openai_api_key.')
        sys.exit(1)

    if not GlobalConfig.get().verify_ssl:
        import urllib3
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    if GlobalConfig.get().http_host:
        # Strip any protocol prefix so only the bare host is stored.
        http_host = re.sub(r'https?://', '', GlobalConfig.get().http_host)
        redis.set('http_host', http_host)
        redis.set('base_client_api', f'{http_host}/{GlobalConfig.get().frontend_api_client.strip("/")}')

    return True, None
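
# Example usage (a sketch -- the path below is hypothetical; callers supply their own):
#
#   success, msg = load_config(Path('/srv/local-llm-server/config.yml'))
#   if not success:
#       _logger.error(f'Failed to load config: {msg}')
#       sys.exit(1)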