Saoud Rizwan
2024-10-06 08:50:04 -04:00
parent d0bf4f3711
commit 8f4a65bc79
2 changed files with 11 additions and 9 deletions


@@ -46,6 +46,7 @@ export class OpenRouterHandler implements ApiHandler {
 	],
 }
 // Add cache_control to the last two user messages
+// (note: this works because we only ever add one user message at a time, but if we added multiple we'd need to mark the user message before the last assistant message)
 const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2)
 lastTwoUserMessages.forEach((msg) => {
 	if (typeof msg.content === "string") {
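The note added in this hunk refers to OpenRouter's pass-through of Anthropic prompt-caching breakpoints: a `cache_control` marker on the last two user messages lets the provider reuse the cached prefix on the next request, which only works cleanly if exactly one new user message is appended per turn. Below is a minimal sketch of that marking step, assuming OpenAI-format messages and the `cache_control: { type: "ephemeral" }` extension that OpenRouter forwards to Claude models; the helper name and exact shapes are illustrative, not the file's verbatim code.

```typescript
import OpenAI from "openai"

// Sketch: mark the last two user messages with a prompt-caching breakpoint so
// OpenRouter/Anthropic can reuse the cached prefix on the next request.
// `cache_control` is a provider extension, not part of the OpenAI types.
function addCacheBreakpoints(openAiMessages: OpenAI.Chat.Completions.ChatCompletionMessageParam[]) {
	const lastTwoUserMessages = openAiMessages
		.filter((msg): msg is OpenAI.Chat.Completions.ChatCompletionUserMessageParam => msg.role === "user")
		.slice(-2)
	lastTwoUserMessages.forEach((msg) => {
		// Normalize string content into a parts array so a breakpoint can be attached.
		if (typeof msg.content === "string") {
			msg.content = [{ type: "text", text: msg.content }]
		}
		if (Array.isArray(msg.content)) {
			// Attach the breakpoint to the last text part of the message, adding one if needed.
			let lastTextPart = msg.content.filter((part) => part.type === "text").pop()
			if (!lastTextPart) {
				lastTextPart = { type: "text", text: "..." }
				msg.content.push(lastTextPart)
			}
			;(lastTextPart as any).cache_control = { type: "ephemeral" }
		}
	})
}
```

Attaching the breakpoint to an existing text part, rather than appending a new part, keeps the message content byte-identical across requests, which is what allows the cached prefix to match.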


@@ -66,15 +66,16 @@ export function convertToOpenAiMessages(
// "Messages following `tool_use` blocks must begin with a matching number of `tool_result` blocks." // "Messages following `tool_use` blocks must begin with a matching number of `tool_result` blocks."
// Therefore we need to send these images after the tool result messages // Therefore we need to send these images after the tool result messages
// NOTE: it's actually okay to have multiple user messages in a row, the model will treat them as a continuation of the same input (this way works better than combining them into one message, since the tool result specifically mentions (see following user message for image) // NOTE: it's actually okay to have multiple user messages in a row, the model will treat them as a continuation of the same input (this way works better than combining them into one message, since the tool result specifically mentions (see following user message for image)
if (toolResultImages.length > 0) { // UPDATE v2.0: we don't use tools anymore, but if we did it's important to note that the openrouter prompt caching mechanism requires one user message at a time, so we would need to add these images to the user content array instead.
openAiMessages.push({ // if (toolResultImages.length > 0) {
role: "user", // openAiMessages.push({
content: toolResultImages.map((part) => ({ // role: "user",
type: "image_url", // content: toolResultImages.map((part) => ({
image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` }, // type: "image_url",
})), // image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
}) // })),
} // })
// }
// Process non-tool messages // Process non-tool messages
if (nonToolMessages.length > 0) { if (nonToolMessages.length > 0) {
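This hunk comments out the old behavior of pushing tool-result images as an extra trailing user message, because the caching breakpoints from the first file assume one new user message per turn. A rough sketch of the alternative the UPDATE v2.0 comment describes, appending the images to the user content array that is already being built; names like `appendToolResultImages` and `userContent` are hypothetical, and the Anthropic/OpenAI types are only assumed to match what the converter uses.

```typescript
import OpenAI from "openai"
import { Anthropic } from "@anthropic-ai/sdk"

// Sketch of the alternative the UPDATE v2.0 comment describes: instead of pushing the
// tool-result images as an extra trailing user message (which would break the
// one-user-message-per-turn assumption the cache_control marking relies on),
// append them to the content array of the user message that is already being built.
// `userContent` and `toolResultImages` are illustrative names, not verbatim from the file.
function appendToolResultImages(
	userContent: OpenAI.Chat.Completions.ChatCompletionContentPart[],
	toolResultImages: Anthropic.ImageBlockParam[],
): void {
	for (const image of toolResultImages) {
		// Guard for base64 sources, which is what the original mapping assumed
		// (it read source.media_type and source.data directly).
		if (image.source.type === "base64") {
			userContent.push({
				type: "image_url",
				image_url: { url: `data:${image.source.media_type};base64,${image.source.data}` },
			})
		}
	}
}
```

The commented-out path of pushing a separate user message remains valid for the Anthropic API itself, since consecutive user messages are treated as a continuation of the same input, which is presumably why it was preserved as a comment rather than deleted outright.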