From c51a0ef94df41241ffa54094fe2733cb27b462c0 Mon Sep 17 00:00:00 2001
From: nai-degen <44111-khanon@users.noreply.gitgud.io>
Date: Sat, 8 Apr 2023 18:32:49 -0500
Subject: [PATCH] improves config handling

---
 .env.example      | 11 +++++----
 src/config.ts     | 59 ++++++++++++++++++++++++++++++++++++++++++++++++++
 src/info-page.ts  |  7 +++---
 src/keys.ts       |  5 ++--
 src/logger.ts     |  5 +++-
 src/proxy/auth.ts |  3 ++-
 src/server.ts     |  5 ++--
 7 files changed, 81 insertions(+), 14 deletions(-)
 create mode 100644 src/config.ts

diff --git a/.env.example b/.env.example
index 29de5fa..74e59fb 100644
--- a/.env.example
+++ b/.env.example
@@ -1,9 +1,5 @@
 # Copy this file to .env and fill in the values.
 
-# Uncomment the following line and replace the value with your own secret key
-# to control access to the proxy server
-# PROXY_KEY=your-secret-key
-
 # Set your OpenAI API key below.
 OPENAI_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 
@@ -18,3 +14,10 @@ OPENAI_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 # Encoded in base-64, this would look like:
 # OPENAI_KEYS=WwogeyAia2V5IjogInlvdXItb3BlbmFpLWtleS0xIiwgImlzVHJpYWwiOiB0cnVlLCAiaXNHcHQ0IjogZmFsc2UgfSwKIHsgImtleSI6ICJ5b3VyLW9wZW5haS1rZXktMiIsICJpc1RyaWFsIjogZmFsc2UsICJpc0dwdDQiOiBmYWxzZSB9LAogeyAia2V5IjogInlvdXItb3BlbmFpLWtleS0zIiwgImlzVHJpYWwiOiBmYWxzZSwgImlzR3B0NCI6IHRydWUgfQpd
 
+# Optional settings (please see config.ts for more details)
+# PORT=7860
+# PROXY_KEY=your-secret-key
+# MODEL_RATE_LIMIT=2
+# MAX_OUTPUT_TOKENS=256
+# LOG_LEVEL=info
+# LOG_PROMPTS=false
diff --git a/src/config.ts b/src/config.ts
new file mode 100644
index 0000000..f2dcea4
--- /dev/null
+++ b/src/config.ts
@@ -0,0 +1,59 @@
+import dotenv from "dotenv";
+dotenv.config();
+
+type Config = {
+  /** The port the proxy server will listen on. */
+  port: number;
+  /** OpenAI API key, either a single key or a base64-encoded JSON array of key configs. */
+  openaiKey?: string;
+  /** Proxy key. If set, requests must provide this key in the Authorization header to use the proxy. */
+  proxyKey?: string;
+  /** Per-IP limit for requests per minute to OpenAI's completions endpoint. */
+  modelRateLimit: number; // TODO
+  /** Max number of tokens to generate. Requests which specify a higher value will be rewritten to use this value. */
+  maxOutputTokens: number; // TODO
+  /** Logging threshold. */
+  logLevel?: "debug" | "info" | "warn" | "error";
+  /** Whether prompts and responses should be logged. */
+  logPrompts?: boolean; // TODO
+};
+
+export const config: Config = {
+  port: getEnvWithDefault("PORT", 7860),
+  openaiKey: getEnvWithDefault("OPENAI_KEY", ""),
+  proxyKey: getEnvWithDefault("PROXY_KEY", ""),
+  modelRateLimit: getEnvWithDefault("MODEL_RATE_LIMIT", 2),
+  maxOutputTokens: getEnvWithDefault("MAX_OUTPUT_TOKENS", 256),
+  logLevel: getEnvWithDefault("LOG_LEVEL", "info"),
+  logPrompts: getEnvWithDefault("LOG_PROMPTS", false),
+} as const;
+
+export const SENSITIVE_KEYS: (keyof Config)[] = ["proxyKey", "openaiKey"];
+const getKeys = Object.keys as <T extends object>(obj: T) => Array<keyof T>;
+export function listConfig(): Record<string, string> {
+  const result: Record<string, string> = {};
+  for (const key of getKeys(config)) {
+    const value = config[key]?.toString() || "";
+    if (value && SENSITIVE_KEYS.includes(key)) {
+      result[key] = "********";
+    } else {
+      result[key] = value;
+    }
+  }
+  return result;
+}
+
+function getEnvWithDefault<T>(name: string, defaultValue: T): T {
+  const value = process.env[name];
+  if (value === undefined) {
+    return defaultValue;
+  }
+  try {
+    if (name === "OPENAI_KEY") {
+      return value as unknown as T;
+    }
+    return JSON.parse(value) as T;
+  } catch (err) {
+    return value as unknown as T;
+  }
+}
diff --git a/src/info-page.ts b/src/info-page.ts
index 20f9a4f..97d3db7 100644
--- a/src/info-page.ts
+++ b/src/info-page.ts
@@ -1,5 +1,6 @@
 import { Request, Response } from "express";
 import showdown from "showdown";
+import { listConfig } from "./config";
 import { keys } from "./keys";
 
 export const handleInfoPage = (req: Request, res: Response) => {
@@ -13,23 +14,23 @@ export const handleInfoPage = (req: Request, res: Response) => {
 function getInfoPageHtml(host: string) {
   const keylist = keys.list();
   const info = {
-    message: "OpenAI Reverse Proxy",
     uptime: process.uptime(),
     timestamp: Date.now(),
     baseUrl: host,
     kobold: host + "/proxy/kobold" + " (not yet implemented)",
     openai: host + "/proxy/openai",
+    proompts: keylist.reduce((acc, k) => acc + k.promptCount, 0),
     keys: {
       all: keylist.length,
       active: keylist.filter((k) => !k.isDisabled).length,
       trial: keylist.filter((k) => k.isTrial).length,
       gpt4: keylist.filter((k) => k.isGpt4).length,
-      proompts: keylist.reduce((acc, k) => acc + k.promptCount, 0),
     },
+    config: listConfig(),
   };
 
   const readme = require("fs").readFileSync("README.md", "utf8");
-  const readmeBody = readme.split("---")[2];
+  const readmeBody = readme.split("---")[2] || readme;
   const converter = new showdown.Converter();
   const html = converter.makeHtml(readmeBody);
 
diff --git a/src/keys.ts b/src/keys.ts
index 2ccbc9f..41cbb28 100644
--- a/src/keys.ts
+++ b/src/keys.ts
@@ -1,8 +1,9 @@
 /* Manages OpenAI API keys. Tracks usage, disables expired keys, and provides
 round-robin access to keys. Keys are stored in the OPENAI_KEY environment
 variable, either as a single key, or a base64-encoded JSON array of keys.*/
-import { logger } from "./logger";
 import crypto from "crypto";
+import { config } from "./config";
+import { logger } from "./logger";
 
 /** Represents a key stored in the OPENAI_KEY environment variable. */
 type KeySchema = {
@@ -37,7 +38,7 @@ export type Key = KeySchema & {
 const keyPool: Key[] = [];
 
 function init() {
-  const keyString = process.env.OPENAI_KEY;
+  const keyString = config.openaiKey;
   if (!keyString?.trim()) {
     throw new Error("OPENAI_KEY environment variable is not set");
   }
diff --git a/src/logger.ts b/src/logger.ts
index ed26420..aa2fd3b 100644
--- a/src/logger.ts
+++ b/src/logger.ts
@@ -1,3 +1,6 @@
 import pino from "pino";
+import { config } from "./config";
 
-export const logger = pino();
+export const logger = pino({
+  level: config.logLevel,
+});
diff --git a/src/proxy/auth.ts b/src/proxy/auth.ts
index f746241..9064f7a 100644
--- a/src/proxy/auth.ts
+++ b/src/proxy/auth.ts
@@ -1,6 +1,7 @@
 import type { Request, Response, NextFunction } from "express";
+import { config } from "../config";
 
-const PROXY_KEY = process.env.PROXY_KEY;
+const PROXY_KEY = config.proxyKey;
 
 export const auth = (req: Request, res: Response, next: NextFunction) => {
   if (!PROXY_KEY) {
diff --git a/src/server.ts b/src/server.ts
index 89d2784..012a0df 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -1,5 +1,4 @@
-import dotenv from "dotenv";
-dotenv.config();
+import { config } from "./config";
 import express from "express";
 import cors from "cors";
 import pinoHttp from "pino-http";
@@ -8,7 +7,7 @@ import { keys } from "./keys";
 import { proxyRouter } from "./proxy/routes";
 import { handleInfoPage } from "./info-page";
 
-const PORT = process.env.PORT || 7860;
+const PORT = config.port;
 
 const app = express();
 
 // middleware
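
Note on the new config loader: getEnvWithDefault() runs every environment
variable except OPENAI_KEY through JSON.parse, so numeric and boolean settings
such as MODEL_RATE_LIMIT and LOG_PROMPTS coerce from strings, while anything
that fails to parse falls back to the raw string. A minimal standalone sketch of
that behavior (a simplified re-implementation for illustration; coerceEnv is a
hypothetical name, not part of the patch):

// Mirrors the parsing logic of getEnvWithDefault in src/config.ts.
function coerceEnv<T>(name: string, raw: string | undefined, defaultValue: T): T {
  if (raw === undefined) return defaultValue;
  // OPENAI_KEY is always treated as an opaque string and never parsed.
  if (name === "OPENAI_KEY") return raw as unknown as T;
  try {
    return JSON.parse(raw) as T; // "2" -> 2, "false" -> false
  } catch {
    return raw as unknown as T; // "info" is not valid JSON, so it stays a string
  }
}

console.log(coerceEnv("MODEL_RATE_LIMIT", "2", 2)); // 2 (number)
console.log(coerceEnv("LOG_PROMPTS", "false", false)); // false (boolean)
console.log(coerceEnv("LOG_LEVEL", "info", "info")); // "info" (raw string fallback)
console.log(coerceEnv("OPENAI_KEY", "sk-xxxx", "")); // "sk-xxxx" (never parsed)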
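
The multi-key example in .env.example is a base64-encoded JSON array of key
configs ({ key, isTrial, isGpt4 }). One way to produce such a value is a one-off
Node script along these lines (the key strings are placeholders and this helper
is not part of the repo):

// Prints a base64-encoded JSON array matching the multi-key format
// documented in .env.example.
const keyConfigs = [
  { key: "your-openai-key-1", isTrial: true, isGpt4: false },
  { key: "your-openai-key-2", isTrial: false, isGpt4: false },
  { key: "your-openai-key-3", isTrial: false, isGpt4: true },
];
console.log(Buffer.from(JSON.stringify(keyConfigs)).toString("base64"));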
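
For the info page, listConfig() stringifies every config value and masks the
entries named in SENSITIVE_KEYS. Assuming PROXY_KEY and OPENAI_KEY are both set,
the new config field rendered by info-page.ts would look roughly like this
(values shown are illustrative, not actual output):

import { listConfig } from "./config";

console.log(listConfig());
// => {
//   port: "7860",
//   openaiKey: "********",
//   proxyKey: "********",
//   modelRateLimit: "2",
//   maxOutputTokens: "256",
//   logLevel: "info",
//   logPrompts: "false"
// }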