From 7b5e612921a92e7b3292057cac98efee31ede64a Mon Sep 17 00:00:00 2001
From: khanon
Date: Mon, 5 Jun 2023 21:55:59 +0000
Subject: [PATCH] Workaround for Anthropic requiring some prompts to start
 with `Human:` (khanon/oai-reverse-proxy!20)

---
 .../request/transform-outbound-payload.ts | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/src/proxy/middleware/request/transform-outbound-payload.ts b/src/proxy/middleware/request/transform-outbound-payload.ts
index 336c429..c97aa62 100644
--- a/src/proxy/middleware/request/transform-outbound-payload.ts
+++ b/src/proxy/middleware/request/transform-outbound-payload.ts
@@ -140,14 +140,6 @@ function openaiToAnthropic(body: any, req: Request) {
   // This currently uses _character count_, not token count.
   const model = prompt.length > 25000 ? CLAUDE_BIG : CLAUDE_SMALL;
 
-  // wip
-  // const tokens = countTokens({
-  //   prompt,
-  //   req,
-  //   service: "anthropic",
-  // });
-  // req.log.info({ tokens }, "Token count");
-
   let stops = rest.stop
     ? Array.isArray(rest.stop)
       ? rest.stop
@@ -161,10 +153,19 @@ function openaiToAnthropic(body: any, req: Request) {
   // Remove duplicates
   stops = [...new Set(stops)];
 
+  // TEMP: More shitty anthropic API hacks
+  // If you receive a 400 Bad Request error from Anthropic complaining about
+  // "prompt must start with a '\n\nHuman: ' turn", enable this setting.
+  // I will try to fix this when I can identify why it only happens sometimes.
+  let preamble = "";
+  if (process.env.CLAUDE_ADD_HUMAN_PREAMBLE) {
+    preamble = "\n\nHuman: Hello Claude.";
+  }
+
   return {
     ...rest,
     model,
-    prompt,
+    prompt: preamble + prompt,
     max_tokens_to_sample: rest.max_tokens,
     stop_sequences: stops,
   };
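
Note (not part of the patch): the sketch below is a minimal, standalone illustration of
the preamble gate this patch adds. The prompt-flattening helper (flattenToClaudePrompt)
is hypothetical and greatly simplified; the real conversion lives in openaiToAnthropic()
in transform-outbound-payload.ts. Only the CLAUDE_ADD_HUMAN_PREAMBLE check and the
"\n\nHuman: Hello Claude." preamble string come from the patch itself.

    // Runs under ts-node with @types/node installed.
    type OpenAIChatMessage = {
      role: "system" | "user" | "assistant";
      content: string;
    };

    // Hypothetical stand-in for the proxy's OpenAI-to-Anthropic conversion:
    // every non-assistant message becomes a "Human:" turn, assistant messages
    // become "Assistant:" turns, and a trailing "Assistant:" invites a reply.
    function flattenToClaudePrompt(messages: OpenAIChatMessage[]): string {
      const turns = messages.map((m) =>
        m.role === "assistant"
          ? `\n\nAssistant: ${m.content}`
          : `\n\nHuman: ${m.content}`
      );
      return turns.join("") + "\n\nAssistant:";
    }

    function buildAnthropicPrompt(messages: OpenAIChatMessage[]): string {
      const prompt = flattenToClaudePrompt(messages);

      // Same gate as the patch: when CLAUDE_ADD_HUMAN_PREAMBLE is set to any
      // non-empty value, a dummy Human turn is prepended so the prompt always
      // begins with "\n\nHuman: ", which Anthropic sometimes rejects otherwise
      // with a 400 Bad Request.
      let preamble = "";
      if (process.env.CLAUDE_ADD_HUMAN_PREAMBLE) {
        preamble = "\n\nHuman: Hello Claude.";
      }
      return preamble + prompt;
    }

    // Example: with CLAUDE_ADD_HUMAN_PREAMBLE=1 in the environment, the
    // resulting prompt starts with the dummy Human turn, then the flattened
    // conversation, then the trailing "Assistant:".
    process.env.CLAUDE_ADD_HUMAN_PREAMBLE = "1";
    console.log(
      buildAnthropicPrompt([
        { role: "system", content: "You are a helpful assistant." },
        { role: "user", content: "Hi!" },
      ])
    );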