From 9fc674878dd17f57920688aa653f7bab9467fba1 Mon Sep 17 00:00:00 2001
From: Cyberes
Date: Wed, 23 Aug 2023 16:11:32 -0600
Subject: [PATCH] allow disabling ssl verification

---
 config/config.yml                     | 1 +
 llm_server/llm/hf_textgen/generate.py | 2 +-
 llm_server/llm/info.py                | 4 ++--
 llm_server/llm/oobabooga/generate.py  | 2 +-
 llm_server/opts.py                    | 1 +
 server.py                             | 3 ++-
 6 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/config/config.yml b/config/config.yml
index 96ff5f9..e1a4a13 100644
--- a/config/config.yml
+++ b/config/config.yml
@@ -1,6 +1,7 @@
 # TODO: add this file to gitignore and add a .sample.yml
 log_prompts: true
 
+verify_ssl: false # Python request has issues with self-signed certs
 mode: oobabooga
 auth_required: false
 
diff --git a/llm_server/llm/hf_textgen/generate.py b/llm_server/llm/hf_textgen/generate.py
index a864580..385263e 100644
--- a/llm_server/llm/hf_textgen/generate.py
+++ b/llm_server/llm/hf_textgen/generate.py
@@ -35,7 +35,7 @@ def prepare_json(json_data: dict):
 def generate(json_data: dict):
     print(json.dumps(prepare_json(json_data)))
     # try:
-    r = requests.post(f'{opts.backend_url}/generate', json=prepare_json(json_data))
+    r = requests.post(f'{opts.backend_url}/generate', json=prepare_json(json_data), verify=opts.verify_ssl)
     print(r.text)
     # except Exception as e:
     #     return False, None, f'{e.__class__.__name__}: {e}'
diff --git a/llm_server/llm/info.py b/llm_server/llm/info.py
index 0f4ed9b..47104ec 100644
--- a/llm_server/llm/info.py
+++ b/llm_server/llm/info.py
@@ -6,7 +6,7 @@ from llm_server import opts
 def get_running_model():
     if opts.mode == 'oobabooga':
         try:
-            backend_response = requests.get(f'{opts.backend_url}/api/v1/model', timeout=3)
+            backend_response = requests.get(f'{opts.backend_url}/api/v1/model', timeout=3, verify=opts.verify_ssl)
         except Exception as e:
             return False, e
         try:
@@ -16,7 +16,7 @@
             return False, e
     elif opts.mode == 'hf-textgen':
         try:
-            backend_response = requests.get(f'{opts.backend_url}/info')
+            backend_response = requests.get(f'{opts.backend_url}/info', verify=opts.verify_ssl)
         except Exception as e:
             return False, e
         try:
diff --git a/llm_server/llm/oobabooga/generate.py b/llm_server/llm/oobabooga/generate.py
index ace030e..99260d6 100644
--- a/llm_server/llm/oobabooga/generate.py
+++ b/llm_server/llm/oobabooga/generate.py
@@ -5,7 +5,7 @@ from llm_server import opts
 
 def generate(json_data: dict):
     try:
-        r = requests.post(f'{opts.backend_url}/api/v1/generate', json=json_data)
+        r = requests.post(f'{opts.backend_url}/api/v1/generate', json=json_data, verify=opts.verify_ssl)
     except Exception as e:
         return False, None, f'{e.__class__.__name__}: {e}'
     if r.status_code != 200:
diff --git a/llm_server/opts.py b/llm_server/opts.py
index 9e961cb..b4893dc 100644
--- a/llm_server/opts.py
+++ b/llm_server/opts.py
@@ -10,3 +10,4 @@ auth_required = False
 log_prompts = False
 frontend_api_client = ''
 http_host = None
+verify_ssl = True
diff --git a/server.py b/server.py
index 28db475..87cc4e8 100644
--- a/server.py
+++ b/server.py
@@ -21,7 +21,7 @@ if config_path_environ:
 else:
     config_path = Path(script_path, 'config', 'config.yml')
 
-default_vars = {'mode': 'oobabooga', 'log_prompts': False, 'database_path': './proxy-server.db', 'auth_required': False, 'concurrent_gens': 3, 'frontend_api_client': ''}
+default_vars = {'mode': 'oobabooga', 'log_prompts': False, 'database_path': './proxy-server.db', 'auth_required': False, 'concurrent_gens': 3, 'frontend_api_client': '', 'verify_ssl': True}
 required_vars = ['token_limit']
 config_loader = ConfigLoader(config_path, default_vars, required_vars)
 success, config, msg = config_loader.load_config()
@@ -46,6 +46,7 @@ opts.log_prompts = config['log_prompts']
 opts.concurrent_gens = config['concurrent_gens']
 opts.frontend_api_client = config['frontend_api_client']
 opts.context_size = config['token_limit']
+opts.verify_ssl = config['verify_ssl']
 
 SemaphoreCheckerThread(concurrent_semaphore).start()
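
Usage note (illustration only, not part of the patch): each call site now passes opts.verify_ssl straight through to requests' verify parameter, so setting verify_ssl: false in config.yml disables certificate checking for every backend request. Below is a minimal sketch of the same pattern with a placeholder backend URL; the urllib3 warning suppression is an optional extra that the patch itself does not add.

    # Sketch: mirrors how the proxy passes the flag into requests.
    import requests
    import urllib3
    from urllib3.exceptions import InsecureRequestWarning

    verify_ssl = False  # in the proxy this comes from config['verify_ssl'] via llm_server.opts

    if not verify_ssl:
        # requests emits InsecureRequestWarning on every unverified HTTPS call; silence it once.
        urllib3.disable_warnings(InsecureRequestWarning)

    # Placeholder URL standing in for opts.backend_url.
    r = requests.get('https://localhost:7860/api/v1/model', timeout=3, verify=verify_ssl)
    print(r.status_code, r.text)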