local-llm-server/llm_server/routes/v1/__init__.py

from flask import Blueprint, request

from ..helpers.http import require_api_key
from ... import opts

bp = Blueprint('v1', __name__)
# openai_bp = Blueprint('/v1', __name__)


@bp.before_request
def before_request():
    # Record the public-facing host the first time a request comes in.
    if not opts.http_host:
        opts.http_host = request.headers.get("Host")

    # Build the full client-facing API URL once, from the request's Host header
    # and the configured frontend API path.
    if not opts.full_client_api:
        opts.full_client_api = f'https://{request.headers.get("Host")}/{opts.frontend_api_client.strip("/")}'

    # Every endpoint except the public stats endpoint requires a valid API key.
    if request.endpoint != 'v1.get_stats':
        response = require_api_key()
        if response is not None:
            return response


# Imported at the bottom so the route modules can import `bp` without a circular import.
from . import generate, info, proxy
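
For context, a minimal sketch of how this blueprint might be wired into the Flask app. The create_app factory and the '/api/v1' URL prefix are assumptions for illustration and are not defined in this file; only the import path follows from the repository layout shown above.

from flask import Flask

from llm_server.routes.v1 import bp


def create_app() -> Flask:
    app = Flask(__name__)
    # Routes defined in generate, info, and proxy attach to `bp`, so registering
    # the blueprint once exposes the whole v1 API under the chosen prefix.
    app.register_blueprint(bp, url_prefix='/api/v1')  # prefix assumed, not confirmed by this file
    return app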