implements preliminary openai proxy
commit 5ed37bf035 (parent ce09e4c0be)

@@ -8,6 +8,7 @@ export function auth(req: Request, res: Response, next: NextFunction) {
     return;
   }
   if (req.headers.authorization === `Bearer ${PROXY_KEY}`) {
+    delete req.headers.authorization;
     next();
   } else {
     res.status(401).json({ error: "Unauthorized" });

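For reference, a hypothetical client call against the gatekeeper above might look like the sketch below. Only the Authorization: Bearer ${PROXY_KEY} check comes from the diff; the base URL, port, and /proxy/openai mount prefix are assumptions (the prefix mirrors the pathRewrite further down).

// Hypothetical client call (not part of the commit); base URL, port, and
// mount prefix are assumptions. The token must equal the PROXY_KEY checked by auth().
const res = await fetch("http://localhost:3000/proxy/openai/v1/chat/completions", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${process.env.PROXY_KEY}`,
  },
  body: JSON.stringify({
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "Hello through the proxy" }],
  }),
});
console.log(await res.json());
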
src/keys.ts (12 lines changed)

@@ -32,7 +32,7 @@ type Key = KeySchema & {
   hash: string;
 };
 
-const keys: Key[] = [];
+const keyPool: Key[] = [];
 
 function init() {
   const keyString = process.env.OPENAI_KEY;
@@ -47,7 +47,7 @@ function init() {
     keyList = [{ key: keyString, isTrial: false, isGpt4: true }];
   }
   for (const key of keyList) {
-    keys.push({
+    keyPool.push({
       ...key,
       isDisabled: false,
       softLimit: 0,
@@ -65,15 +65,15 @@ function init() {
 }
 
 function list() {
-  return keys.map((key) => ({
+  return keyPool.map((key) => ({
     ...key,
     key: undefined,
   }));
 }
 
-function getKey(model: string) {
+function get(model: string) {
   const needsGpt4Key = model.startsWith("gpt-4");
-  const availableKeys = keys.filter(
+  const availableKeys = keyPool.filter(
     (key) => !key.isDisabled && (!needsGpt4Key || key.isGpt4)
   );
   if (availableKeys.length === 0) {
@@ -99,4 +99,4 @@ function getKey(model: string) {
   return oldestKey;
 }
 
-export { init, list, getKey };
+export const keys = { init, list, get };

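As a quick illustration of the renamed exports in src/keys.ts, a hypothetical consumer of the new keys facade might look like the following sketch. Only init, list, get, and the OPENAI_KEY environment variable come from the diff; the call sites themselves are assumptions.

import { keys } from "./keys";

// Illustrative usage only (not part of the commit).
keys.init();                   // reads OPENAI_KEY and fills keyPool
const key = keys.get("gpt-4"); // a non-disabled key able to serve gpt-4 models
console.log(keys.list());      // listing omits the raw key material (key: undefined)
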
@@ -0,0 +1,6 @@
+import { Request, Response, NextFunction } from "express";
+
+export const kobold = (req: Request, res: Response, next: NextFunction) => {
+  // TODO: Implement kobold
+  res.status(501).json({ error: "Not implemented" });
+};

@@ -0,0 +1,62 @@
+import { Request, Response, NextFunction, Router } from "express";
+import * as http from "http";
+import { createProxyMiddleware } from "http-proxy-middleware";
+import { logger } from "./logger";
+import { keys } from "./keys";
+
+/**
+ * Modifies the request body to add a randomly selected API key.
+ */
+const rewriteRequest = (proxyReq: http.ClientRequest, req: Request) => {
+  const key = keys.get(req.body?.model || "gpt-3.5")!;
+
+  proxyReq.setHeader("Authorization", `Bearer ${key}`);
+  if (req.body?.stream) {
+    req.body.stream = false;
+    const updatedBody = JSON.stringify(req.body);
+    proxyReq.setHeader("Content-Length", Buffer.byteLength(updatedBody));
+    proxyReq.write(updatedBody);
+    proxyReq.end();
+  }
+};
+
+const handleResponse = (
+  proxyRes: http.IncomingMessage,
+  req: Request,
+  res: Response
+) => {
+  const { method, path } = req;
+  const statusCode = proxyRes.statusCode || 500;
+
+  if (statusCode === 429) {
+    // TODO: Handle rate limit by temporarily removing that key from the pool
+    logger.warn(`OpenAI rate limit exceeded: ${method} ${path}`);
+  } else if (statusCode >= 400) {
+    logger.warn(`OpenAI error: ${method} ${path} ${statusCode}`);
+  } else {
+    logger.info(`OpenAI request: ${method} ${path} ${statusCode}`);
+  }
+
+  proxyRes.pipe(res);
+};
+
+const openaiProxy = createProxyMiddleware({
+  target: "https://api.openai.com",
+  changeOrigin: true,
+  onProxyReq: rewriteRequest,
+  onProxyRes: handleResponse,
+  selfHandleResponse: true,
+  pathRewrite: {
+    "^/proxy/openai": "",
+  },
+});
+
+export const openaiRouter = Router();
+openaiRouter.post("/v1/chat/completions", openaiProxy);
+// openaiRouter.post("/v1/completions", openaiProxy);
+// openaiRouter.get("/v1/models", handleModels);
+// openaiRouter.get("/dashboard/billing/usage, handleUsage);
+openaiRouter.use((req, res) => {
+  logger.warn(`Blocked openai proxy request: ${req.method} ${req.path}`);
+  res.status(404).json({ error: "Not found" });
+});
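
To show how the pieces touched by this commit could fit together, here is a hedged sketch of an Express entry point. The file layout, import paths, port, and mount prefixes are assumptions (the /proxy/openai prefix mirrors the pathRewrite above); auth, keys, kobold, and openaiRouter are the exports added or changed in this commit.

import express from "express";
import { auth } from "./auth";           // assumed path for the auth middleware
import { keys } from "./keys";
import { kobold } from "./kobold";       // assumed paths for the new handlers
import { openaiRouter } from "./openai";

const app = express();
app.use(express.json());

keys.init(); // load OPENAI_KEY into the key pool before serving traffic

// Every proxied route sits behind the shared-secret check in auth().
app.use("/proxy", auth);
app.use("/proxy/openai", openaiRouter); // prefix stripped by pathRewrite before forwarding
app.use("/proxy/kobold", kobold);

app.listen(3000, () => console.log("proxy listening on :3000"));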