local-llm-server/llm_server/routes/v1/__init__.py

22 lines
478 B
Python

from flask import Blueprint, request
from ..helpers.http import require_api_key
from ... import opts
# Blueprint for every /v1 API route; view functions attach to it via the
# deferred `from . import ...` at the bottom of this module.
bp = Blueprint('v1', __name__)
@bp.before_request
def before_request():
    """Gatekeeper run before every v1 request.

    Records the externally visible ``Host`` header the first time it is
    seen (only when ``opts.http_host`` is still unset), then enforces the
    API key on every endpoint except the public stats endpoint.

    Returns ``None`` to let the request proceed, or the error response
    produced by ``require_api_key`` to abort it.
    """
    # Lazily capture the host the server is being addressed as.
    if not opts.http_host:
        opts.http_host = request.headers.get("Host")
    # The stats endpoint is public — skip the key check for it.
    if request.endpoint == 'v1.get_stats':
        return None
    # require_api_key() yields None on success, or an error response;
    # returning None from a before_request hook lets Flask continue.
    return require_api_key()
from . import generate, info, proxy