From 8f4a65bc79a02d68f5686325a556380830072305 Mon Sep 17 00:00:00 2001
From: Saoud Rizwan <7799382+saoudrizwan@users.noreply.github.com>
Date: Sun, 6 Oct 2024 08:50:04 -0400
Subject: [PATCH] Fixes

---
 src/api/providers/openrouter.ts    |  1 +
 src/api/transform/openai-format.ts | 19 ++++++++++---------
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/src/api/providers/openrouter.ts b/src/api/providers/openrouter.ts
index 8601d59..1213e5d 100644
--- a/src/api/providers/openrouter.ts
+++ b/src/api/providers/openrouter.ts
@@ -46,6 +46,7 @@ export class OpenRouterHandler implements ApiHandler {
 			],
 		}
 		// Add cache_control to the last two user messages
+		// (note: this works because we only ever add one user message at a time, but if we added multiple we'd need to mark the user message before the last assistant message)
 		const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2)
 		lastTwoUserMessages.forEach((msg) => {
 			if (typeof msg.content === "string") {
diff --git a/src/api/transform/openai-format.ts b/src/api/transform/openai-format.ts
index 1bed7dc..499e30a 100644
--- a/src/api/transform/openai-format.ts
+++ b/src/api/transform/openai-format.ts
@@ -66,15 +66,16 @@ export function convertToOpenAiMessages(
 				// "Messages following `tool_use` blocks must begin with a matching number of `tool_result` blocks."
 				// Therefore we need to send these images after the tool result messages
 				// NOTE: it's actually okay to have multiple user messages in a row, the model will treat them as a continuation of the same input (this way works better than combining them into one message, since the tool result specifically mentions (see following user message for image)
-				if (toolResultImages.length > 0) {
-					openAiMessages.push({
-						role: "user",
-						content: toolResultImages.map((part) => ({
-							type: "image_url",
-							image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
-						})),
-					})
-				}
+				// UPDATE v2.0: we don't use tools anymore, but if we did it's important to note that the openrouter prompt caching mechanism requires one user message at a time, so we would need to add these images to the user content array instead.
+				// if (toolResultImages.length > 0) {
+				// 	openAiMessages.push({
+				// 		role: "user",
+				// 		content: toolResultImages.map((part) => ({
+				// 			type: "image_url",
+				// 			image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
+				// 		})),
+				// 	})
+				// }

 		// Process non-tool messages
 		if (nonToolMessages.length > 0) {
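
Editor's note: the cache_control marking that the new comment in openrouter.ts describes can be illustrated in isolation. The TypeScript sketch below is not the repository's implementation; the TextPart and ChatMessage shapes are simplified stand-ins for the OpenAI-format message types the handler actually uses, and only the breakpoint-placement logic from the hunk above is shown.

type TextPart = {
	type: "text"
	text: string
	// Anthropic-style prompt-caching breakpoint, forwarded by OpenRouter
	cache_control?: { type: "ephemeral" }
}

type ChatMessage = {
	role: "system" | "user" | "assistant"
	content: string | TextPart[]
}

// Mark the last two user messages with a cache breakpoint. Because the
// conversation only ever grows by one user message per request (as the new
// comment in openrouter.ts notes), the older of the two breakpoints lines up
// with a prefix the provider has already cached, giving a cache hit on the
// next turn.
function addCacheBreakpoints(messages: ChatMessage[]): void {
	const lastTwoUserMessages = messages.filter((m) => m.role === "user").slice(-2)
	for (const msg of lastTwoUserMessages) {
		// Normalize string content into a parts array so there is a text part
		// to attach cache_control to.
		const parts: TextPart[] = typeof msg.content === "string" ? [{ type: "text", text: msg.content }] : msg.content
		msg.content = parts
		// Attach the breakpoint to the last part of the message.
		const lastPart = parts[parts.length - 1]
		if (lastPart) {
			lastPart.cache_control = { type: "ephemeral" }
		}
	}
}

This also shows why the second file's commented-out block matters: pushing tool-result images as an extra user message would mean two user messages per turn, and the breakpoints placed by a helper like the one above would no longer fall on a stable, previously cached prefix.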