mirror of https://github.com/pacnpal/Roo-Code.git (synced 2025-12-20 12:21:13 -05:00)
Fixes
@@ -46,6 +46,7 @@ export class OpenRouterHandler implements ApiHandler {
 					],
 				}
 				// Add cache_control to the last two user messages
+				// (note: this works because we only ever add one user message at a time, but if we added multiple we'd need to mark the user message before the last assistant message)
 				const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2)
 				lastTwoUserMessages.forEach((msg) => {
 					if (typeof msg.content === "string") {
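
The hunk above marks the last two user messages with prompt-cache breakpoints before the request goes to OpenRouter. Below is a minimal sketch, not part of the commit, of how the string-content branch typically continues, assuming Anthropic-style ephemeral cache_control markers on OpenAI-shaped content parts; the type names and the addCacheBreakpoints helper are illustrative, not taken from the diff.

// Sketch only, not part of the commit: one way the string-content branch above
// typically continues. Names below are illustrative assumptions.
type TextPart = { type: "text"; text: string; cache_control?: { type: "ephemeral" } }
type ImagePart = { type: "image_url"; image_url: { url: string } }
type ContentPart = TextPart | ImagePart
type ChatMessage = { role: "system" | "user" | "assistant"; content: string | ContentPart[] }

function addCacheBreakpoints(openAiMessages: ChatMessage[]): void {
	// Mark the last two user messages so the provider can reuse the cached prefix.
	const lastTwoUserMessages = openAiMessages.filter((msg) => msg.role === "user").slice(-2)
	lastTwoUserMessages.forEach((msg) => {
		if (typeof msg.content === "string") {
			// Promote the plain string to a parts array so a marker can be attached.
			msg.content = [{ type: "text", text: msg.content, cache_control: { type: "ephemeral" } }]
		} else {
			// Otherwise mark the last text part of the existing array.
			const lastTextPart = [...msg.content].reverse().find((part): part is TextPart => part.type === "text")
			if (lastTextPart) {
				lastTextPart.cache_control = { type: "ephemeral" }
			}
		}
	})
}
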
@@ -66,15 +66,16 @@ export function convertToOpenAiMessages(
 				// "Messages following `tool_use` blocks must begin with a matching number of `tool_result` blocks."
 				// Therefore we need to send these images after the tool result messages
 				// NOTE: it's actually okay to have multiple user messages in a row, the model will treat them as a continuation of the same input (this way works better than combining them into one message, since the tool result specifically mentions (see following user message for image)
-				if (toolResultImages.length > 0) {
-					openAiMessages.push({
-						role: "user",
-						content: toolResultImages.map((part) => ({
-							type: "image_url",
-							image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
-						})),
-					})
-				}
+				// UPDATE v2.0: we don't use tools anymore, but if we did it's important to note that the openrouter prompt caching mechanism requires one user message at a time, so we would need to add these images to the user content array instead.
+				// if (toolResultImages.length > 0) {
+				// 	openAiMessages.push({
+				// 		role: "user",
+				// 		content: toolResultImages.map((part) => ({
+				// 			type: "image_url",
+				// 			image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
+				// 		})),
+				// 	})
+				// }

 				// Process non-tool messages
 				if (nonToolMessages.length > 0) {
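
The UPDATE v2.0 comment in the hunk above notes that, if tool use returned, the tool-result images would have to be appended to the current user message's content array rather than pushed as a separate user message, since the prompt-caching flow expects one user message at a time. A minimal sketch of that approach follows; userContentParts and the helper name are illustrative assumptions, not code from the repository.

// Sketch only, not part of the commit: folding tool-result images into the
// user content array instead of pushing a separate { role: "user" } message.
// The image block shape mirrors Anthropic's base64 source blocks.
type AnthropicImageBlock = { source: { media_type: string; data: string } }
type OpenAiTextPart = { type: "text"; text: string }
type OpenAiImagePart = { type: "image_url"; image_url: { url: string } }
type OpenAiContentPart = OpenAiTextPart | OpenAiImagePart

function appendToolResultImages(
	userContentParts: OpenAiContentPart[],
	toolResultImages: AnthropicImageBlock[],
): void {
	if (toolResultImages.length > 0) {
		// Keep everything inside the single user message the caching flow expects.
		userContentParts.push(
			...toolResultImages.map((part) => ({
				type: "image_url" as const,
				image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
			})),
		)
	}
}
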