improves config handling

This commit is contained in:
nai-degen 2023-04-08 18:32:49 -05:00 committed by nai-degen
parent 11308b2baa
commit c51a0ef94d
7 changed files with 81 additions and 14 deletions

View File

@ -1,9 +1,5 @@
# Copy this file to .env and fill in the values. # Copy this file to .env and fill in the values.
# Uncomment the following line and replace the value with your own secret key
# to control access to the proxy server
# PROXY_KEY=your-secret-key
# Set your OpenAI API key below. # Set your OpenAI API key below.
OPENAI_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx OPENAI_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
@ -18,3 +14,10 @@ OPENAI_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# Encoded in base-64, this would look like: # Encoded in base-64, this would look like:
# OPENAI_KEYS=WwogeyAia2V5IjogInlvdXItb3BlbmFpLWtleS0xIiwgImlzVHJpYWwiOiB0cnVlLCAiaXNHcHQ0IjogZmFsc2UgfSwKIHsgImtleSI6ICJ5b3VyLW9wZW5haS1rZXktMiIsICJpc1RyaWFsIjogZmFsc2UsICJpc0dwdDQiOiBmYWxzZSB9LAogeyAia2V5IjogInlvdXItb3BlbmFpLWtleS0zIiwgImlzVHJpYWwiOiBmYWxzZSwgImlzR3B0NCI6IHRydWUgfQpd # OPENAI_KEYS=WwogeyAia2V5IjogInlvdXItb3BlbmFpLWtleS0xIiwgImlzVHJpYWwiOiB0cnVlLCAiaXNHcHQ0IjogZmFsc2UgfSwKIHsgImtleSI6ICJ5b3VyLW9wZW5haS1rZXktMiIsICJpc1RyaWFsIjogZmFsc2UsICJpc0dwdDQiOiBmYWxzZSB9LAogeyAia2V5IjogInlvdXItb3BlbmFpLWtleS0zIiwgImlzVHJpYWwiOiBmYWxzZSwgImlzR3B0NCI6IHRydWUgfQpd
# Optional settings (please see config.ts for more details)
# PORT=7860
# PROXY_KEY=your-secret-key
# MODEL_RATE_LIMIT=2
# MAX_OUTPUT_TOKENS=256
# LOG_LEVEL=info
# LOG_PROMPTS=false

59
src/config.ts Normal file
View File

@ -0,0 +1,59 @@
import dotenv from "dotenv";
// Load .env into process.env immediately, before any config values below are read.
dotenv.config();
/**
 * Shape of the proxy's runtime configuration. Values are read once from
 * environment variables at startup (see `getEnvWithDefault` for parsing).
 * Fields marked TODO appear to be declared ahead of their enforcement being
 * wired up elsewhere — confirm before relying on them.
 */
type Config = {
  /** The port the proxy server will listen on. */
  port: number;
  /** OpenAI API key, either a single key or a base64-encoded JSON array of key configs. */
  openaiKey?: string;
  /** Proxy key. If set, requests must provide this key in the Authorization header to use the proxy. */
  proxyKey?: string;
  /** Per-IP limit for requests per minute to OpenAI's completions endpoint. */
  modelRateLimit: number; // TODO
  /** Max number of tokens to generate. Requests which specify a higher value will be rewritten to use this value. */
  maxOutputTokens: number; // TODO
  /** Logging threshold. */
  logLevel?: "debug" | "info" | "warn" | "error";
  /** Whether prompts and responses should be logged. */
  logPrompts?: boolean; // TODO
};
/**
 * Runtime configuration, populated once from the environment at module load.
 * Each entry falls back to the documented default when its variable is unset.
 * (The previous `as const` was a no-op: the explicit `Config` annotation
 * widens the literal anyway, so it has been removed.)
 */
export const config: Config = {
  port: getEnvWithDefault("PORT", 7860),
  openaiKey: getEnvWithDefault("OPENAI_KEY", ""),
  proxyKey: getEnvWithDefault("PROXY_KEY", ""),
  modelRateLimit: getEnvWithDefault("MODEL_RATE_LIMIT", 2),
  maxOutputTokens: getEnvWithDefault("MAX_OUTPUT_TOKENS", 256),
  logLevel: getEnvWithDefault("LOG_LEVEL", "info"),
  logPrompts: getEnvWithDefault("LOG_PROMPTS", false),
};
/** Config fields whose values are secrets and are masked (when non-empty) by `listConfig`. */
export const SENSITIVE_KEYS: (keyof Config)[] = ["proxyKey", "openaiKey"];

// Object.keys typed to return `(keyof T)[]` instead of `string[]`. This cast
// is only sound when the object has no runtime keys beyond its declared type,
// which holds for `config` as constructed above.
const getKeys = Object.keys as <T extends object>(obj: T) => Array<keyof T>;
/**
 * Returns the current configuration as a display-ready string map.
 * Secret fields listed in SENSITIVE_KEYS are replaced with a mask, but only
 * when they hold a non-empty value (an unset secret is shown as "").
 */
export function listConfig(): Record<string, string> {
  const MASK = "********";
  return getKeys(config).reduce<Record<string, string>>((listed, key) => {
    const displayed = config[key]?.toString() || "";
    listed[key] = displayed && SENSITIVE_KEYS.includes(key) ? MASK : displayed;
    return listed;
  }, {});
}
/**
 * Reads an environment variable and coerces it toward the type of
 * `defaultValue` by JSON-parsing it ("2" -> 2, "false" -> false); values that
 * are not valid JSON (e.g. LOG_LEVEL=info) are returned as plain strings.
 *
 * Fix: an env var explicitly set to the empty string now falls back to the
 * default as well. Previously `PORT=""` slipped past the undefined check,
 * failed JSON.parse, and came back as "" — a wrong-typed value.
 *
 * @param name - Environment variable to read.
 * @param defaultValue - Returned when the variable is unset or empty.
 */
function getEnvWithDefault<T>(name: string, defaultValue: T): T {
  const value = process.env[name];
  if (value === undefined || value === "") {
    return defaultValue;
  }
  try {
    // API keys are opaque strings and must never be JSON-parsed; without this
    // guard a key list would be decoded instead of passed through verbatim.
    if (name === "OPENAI_KEY") {
      return value as unknown as T;
    }
    return JSON.parse(value) as T;
  } catch (err) {
    // Not valid JSON — treat it as a plain string value.
    return value as unknown as T;
  }
}

View File

@ -1,5 +1,6 @@
import { Request, Response } from "express"; import { Request, Response } from "express";
import showdown from "showdown"; import showdown from "showdown";
import { listConfig } from "./config";
import { keys } from "./keys"; import { keys } from "./keys";
export const handleInfoPage = (req: Request, res: Response) => { export const handleInfoPage = (req: Request, res: Response) => {
@ -13,23 +14,23 @@ export const handleInfoPage = (req: Request, res: Response) => {
function getInfoPageHtml(host: string) { function getInfoPageHtml(host: string) {
const keylist = keys.list(); const keylist = keys.list();
const info = { const info = {
message: "OpenAI Reverse Proxy",
uptime: process.uptime(), uptime: process.uptime(),
timestamp: Date.now(), timestamp: Date.now(),
baseUrl: host, baseUrl: host,
kobold: host + "/proxy/kobold" + " (not yet implemented)", kobold: host + "/proxy/kobold" + " (not yet implemented)",
openai: host + "/proxy/openai", openai: host + "/proxy/openai",
proompts: keylist.reduce((acc, k) => acc + k.promptCount, 0),
keys: { keys: {
all: keylist.length, all: keylist.length,
active: keylist.filter((k) => !k.isDisabled).length, active: keylist.filter((k) => !k.isDisabled).length,
trial: keylist.filter((k) => k.isTrial).length, trial: keylist.filter((k) => k.isTrial).length,
gpt4: keylist.filter((k) => k.isGpt4).length, gpt4: keylist.filter((k) => k.isGpt4).length,
proompts: keylist.reduce((acc, k) => acc + k.promptCount, 0),
}, },
config: listConfig(),
}; };
const readme = require("fs").readFileSync("README.md", "utf8"); const readme = require("fs").readFileSync("README.md", "utf8");
const readmeBody = readme.split("---")[2]; const readmeBody = readme.split("---")[2] || readme;
const converter = new showdown.Converter(); const converter = new showdown.Converter();
const html = converter.makeHtml(readmeBody); const html = converter.makeHtml(readmeBody);

View File

@ -1,8 +1,9 @@
/* Manages OpenAI API keys. Tracks usage, disables expired keys, and provides /* Manages OpenAI API keys. Tracks usage, disables expired keys, and provides
round-robin access to keys. Keys are stored in the OPENAI_KEY environment round-robin access to keys. Keys are stored in the OPENAI_KEY environment
variable, either as a single key, or a base64-encoded JSON array of keys.*/ variable, either as a single key, or a base64-encoded JSON array of keys.*/
import { logger } from "./logger";
import crypto from "crypto"; import crypto from "crypto";
import { config } from "./config";
import { logger } from "./logger";
/** Represents a key stored in the OPENAI_KEY environment variable. */ /** Represents a key stored in the OPENAI_KEY environment variable. */
type KeySchema = { type KeySchema = {
@ -37,7 +38,7 @@ export type Key = KeySchema & {
const keyPool: Key[] = []; const keyPool: Key[] = [];
function init() { function init() {
const keyString = process.env.OPENAI_KEY; const keyString = config.openaiKey;
if (!keyString?.trim()) { if (!keyString?.trim()) {
throw new Error("OPENAI_KEY environment variable is not set"); throw new Error("OPENAI_KEY environment variable is not set");
} }

View File

@ -1,3 +1,6 @@
import pino from "pino"; import pino from "pino";
import { config } from "./config";
export const logger = pino(); export const logger = pino({
level: config.logLevel,
});

View File

@ -1,6 +1,7 @@
import type { Request, Response, NextFunction } from "express"; import type { Request, Response, NextFunction } from "express";
import { config } from "../config";
const PROXY_KEY = process.env.PROXY_KEY; const PROXY_KEY = config.proxyKey;
export const auth = (req: Request, res: Response, next: NextFunction) => { export const auth = (req: Request, res: Response, next: NextFunction) => {
if (!PROXY_KEY) { if (!PROXY_KEY) {

View File

@ -1,5 +1,4 @@
import dotenv from "dotenv"; import { config } from "./config";
dotenv.config();
import express from "express"; import express from "express";
import cors from "cors"; import cors from "cors";
import pinoHttp from "pino-http"; import pinoHttp from "pino-http";
@ -8,7 +7,7 @@ import { keys } from "./keys";
import { proxyRouter } from "./proxy/routes"; import { proxyRouter } from "./proxy/routes";
import { handleInfoPage } from "./info-page"; import { handleInfoPage } from "./info-page";
const PORT = process.env.PORT || 7860; const PORT = config.port;
const app = express(); const app = express();
// middleware // middleware