From ca4321b4cb7c3040c54929269d23c58fb0e93639 Mon Sep 17 00:00:00 2001
From: nai-degen
Date: Fri, 7 Jun 2024 14:29:18 -0500
Subject: [PATCH] adjusts openai schema validation to allow null stop sequence

---
 src/shared/api-schemas/anthropic.ts | 3 ++-
 src/shared/api-schemas/openai.ts    | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/shared/api-schemas/anthropic.ts b/src/shared/api-schemas/anthropic.ts
index fedb052..bb279d2 100644
--- a/src/shared/api-schemas/anthropic.ts
+++ b/src/shared/api-schemas/anthropic.ts
@@ -119,7 +119,8 @@ export const transformOpenAIToAnthropicChat: APIFormatTransformer<
     stream: rest.stream,
     temperature: rest.temperature,
     top_p: rest.top_p,
-    stop_sequences: typeof rest.stop === "string" ? [rest.stop] : rest.stop,
+    stop_sequences:
+      typeof rest.stop === "string" ? [rest.stop] : rest.stop || undefined,
     ...(rest.user ? { metadata: { user_id: rest.user } } : {}),
     // Anthropic supports top_k, but OpenAI does not
     // OpenAI supports frequency_penalty, presence_penalty, logit_bias, n, seed,
diff --git a/src/shared/api-schemas/openai.ts b/src/shared/api-schemas/openai.ts
index ca2edee..0f0e420 100644
--- a/src/shared/api-schemas/openai.ts
+++ b/src/shared/api-schemas/openai.ts
@@ -47,7 +47,7 @@ export const OpenAIV1ChatCompletionSchema = z
     stream: z.boolean().optional().default(false),
     stop: z
       .union([z.string().max(500), z.array(z.string().max(500))])
-      .optional(),
+      .nullish(),
     max_tokens: z.coerce
      .number()
      .int()
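
Note (not part of the patch): a minimal TypeScript sketch of the Zod behavior this change relies on. The schemas below mirror only the stop field and are assumptions for illustration; .optional() accepts undefined but rejects null, while .nullish() accepts both, so a client sending "stop": null now passes validation. The || undefined fallback in the Anthropic transform then keeps that null from being forwarded as stop_sequences.

    // Sketch under the assumption that zod is available as in the repo.
    import { z } from "zod";

    // Pre-patch shape of the stop field: undefined is allowed, null is not.
    const optionalStop = z.object({
      stop: z.union([z.string().max(500), z.array(z.string().max(500))]).optional(),
    });

    // Post-patch shape: null and undefined are both allowed.
    const nullishStop = z.object({
      stop: z.union([z.string().max(500), z.array(z.string().max(500))]).nullish(),
    });

    console.log(optionalStop.safeParse({ stop: null }).success); // false -- .optional() rejects null
    console.log(nullishStop.safeParse({ stop: null }).success);  // true  -- .nullish() allows null

    // The Anthropic transform collapses a null/undefined stop to undefined,
    // so stop_sequences is omitted rather than sent as null.
    const stop: string | string[] | null | undefined = null;
    const stop_sequences = typeof stop === "string" ? [stop] : stop || undefined;
    console.log(stop_sequences); // undefined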