always applies Mistral prompt fixes on messages input

nai-degen 2024-08-14 10:44:22 -05:00
parent f531272b00
commit b58e7cb830
4 changed files with 17 additions and 33 deletions


@@ -59,15 +59,12 @@ function applyMistralPromptFixes(req: Request): void {
   // don't properly handle the differences. We will try to validate the
   // mistral prompt and try to fix it if it fails. It will be re-validated
   // after this function returns.
-  const result = API_REQUEST_VALIDATORS["mistral-ai"].safeParse(req.body);
-  if (!result.success) {
-    const messages = req.body.messages;
-    req.body.messages = fixMistralPrompt(messages);
-    req.log.info(
-      { old: messages.length, new: req.body.messages.length },
-      "Applied Mistral chat prompt fixes."
-    );
-  }
+  const result = API_REQUEST_VALIDATORS["mistral-ai"].parse(req.body);
+  req.body.messages = fixMistralPrompt(result.messages);
+  req.log.info(
+    { n: req.body.messages.length, prev: result.messages.length },
+    "Applied Mistral chat prompt fixes."
+  );
 
   // If the prompt relies on `prefix: true` for the last message, we need to
   // convert it to a text completions request because Mistral support for
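For context on this hunk: zod's safeParse returns a result object ({ success, data } or { success, error }) instead of throwing, so the old code only ran the fixes on the failure branch. parse throws a ZodError on invalid input and returns the typed data otherwise, so the fixes now run unconditionally on every body that survives validation. A minimal sketch of that difference, using a stand-in schema rather than the project's real API_REQUEST_VALIDATORS["mistral-ai"] validator:

    import { z } from "zod";

    // Stand-in schema for illustration; the real validator is far larger.
    const BodySchema = z.object({ messages: z.array(z.string()).optional() });

    // Old approach: safeParse never throws; the caller branches on `success`,
    // and the prompt fixes only ran on the failure path.
    const result = BodySchema.safeParse({ messages: 123 });
    if (!result.success) {
      console.log(result.error.issues); // fix-up path ran here
    }

    // New approach: parse throws a ZodError on invalid input, so code after
    // the call can rely on a validated shape -- and the fixes are applied
    // to it unconditionally.
    const body = BodySchema.parse({ messages: ["hello"] });
    console.log(body.messages);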


@@ -1,17 +1,5 @@
 import { OpenAIChatCompletionStreamEvent } from "../index";
 
-/*
-  "choices": [
-    {
-      "index": 0,
-      "message": {
-        "role": "assistant",
-        "content": "Genshin Impact is an action role-play"
-      },
-      "stop_reason": "length"
-    }
-  ],
-*/
 export type MistralChatCompletionResponse = {
   choices: {
     index: number;


@@ -55,9 +55,7 @@ export class EventAggregator {
     let openAIEvent: OpenAIChatCompletionStreamEvent | undefined;
     switch (this.requestFormat) {
       case "anthropic-text":
-        if (!eventIsAnthropicV2Event(event)) {
-          throw new Error(`Bad event for Anthropic V2 SSE aggregation`);
-        }
+        assertIsAnthropicV2Event(event);
         openAIEvent = anthropicV2ToOpenAI({
           data: `event: completion\ndata: ${JSON.stringify(event)}\n\n`,
           lastPosition: -1,
@@ -67,9 +65,7 @@
         })?.event;
         break;
       case "mistral-ai":
-        if (!eventIsMistralChatEvent(event)) {
-          throw new Error(`Bad event for Mistral SSE aggregation`);
-        }
+        assertIsMistralChatEvent(event);
         openAIEvent = mistralAIToOpenAI({
           data: `data: ${JSON.stringify(event)}\n\n`,
           lastPosition: -1,
@@ -120,12 +116,16 @@ function eventIsOpenAIEvent(
   return event?.object === "chat.completion.chunk";
 }
 
-function eventIsAnthropicV2Event(event: any): event is AnthropicV2StreamEvent {
-  return event?.completion;
+function assertIsAnthropicV2Event(event: any): asserts event is AnthropicV2StreamEvent {
+  if (!event?.completion) {
+    throw new Error(`Bad event for Anthropic V2 SSE aggregation`);
+  }
 }
 
-function eventIsMistralChatEvent(
+function assertIsMistralChatEvent(
   event: any
-): event is MistralChatCompletionEvent {
-  return event?.choices;
+): asserts event is MistralChatCompletionEvent {
+  if (!event?.choices) {
+    throw new Error(`Bad event for Mistral SSE aggregation`);
+  }
 }
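These hunks replace boolean type guards with TypeScript assertion functions. A function typed `asserts event is T` either returns normally, after which the compiler narrows the argument to T for the rest of the scope, or throws; that lets each call site drop its own if/throw block, as the hunks above show. A rough sketch with a hypothetical event type (the real ones are AnthropicV2StreamEvent and MistralChatCompletionEvent):

    // Hypothetical event type for illustration only.
    type CompletionEvent = { completion: string };

    // Before: a boolean type guard; every caller had to branch and throw.
    function isCompletionEvent(event: any): event is CompletionEvent {
      return typeof event?.completion === "string";
    }

    // After: an assertion function; if it returns at all, TypeScript treats
    // `event` as CompletionEvent for the remainder of the enclosing scope.
    function assertIsCompletionEvent(event: any): asserts event is CompletionEvent {
      if (typeof event?.completion !== "string") {
        throw new Error("Bad event");
      }
    }

    function handle(event: unknown) {
      assertIsCompletionEvent(event);
      return event.completion.length; // narrowed: no cast or extra check needed
    }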


@@ -27,7 +27,6 @@ const MistralMessagesSchema = z.array(MistralChatMessageSchema).refine(
 // https://docs.mistral.ai/api#operation/createChatCompletion
 const BaseMistralAIV1CompletionsSchema = z.object({
   model: z.string(),
-  // One must be provided, checked in a refinement
   messages: MistralMessagesSchema.optional(),
   prompt: z.string().optional(),
   temperature: z.number().optional().default(0.7),
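The deleted comment referred to a refinement elsewhere in this schema that enforces the messages/prompt requirement. A minimal sketch of that pattern with zod's .refine, using simplified fields (an assumed shape; the project's actual refinement and error message may differ):

    import { z } from "zod";

    // Simplified stand-in for BaseMistralAIV1CompletionsSchema.
    const CompletionsSchema = z
      .object({
        model: z.string(),
        messages: z
          .array(z.object({ role: z.string(), content: z.string() }))
          .optional(),
        prompt: z.string().optional(),
      })
      // Both fields are optional on their own; the refinement requires that
      // at least one of them is actually present.
      .refine((val) => val.messages !== undefined || val.prompt !== undefined, {
        message: "Either `messages` or `prompt` must be provided.",
      });

    CompletionsSchema.parse({ model: "mistral-large-latest", prompt: "Hi" }); // ok
    CompletionsSchema.parse({ model: "mistral-large-latest" }); // throws ZodError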