import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"

type ContentPartText = OpenAI.Chat.ChatCompletionContentPartText
type ContentPartImage = OpenAI.Chat.ChatCompletionContentPartImage
type UserMessage = OpenAI.Chat.ChatCompletionUserMessageParam
type AssistantMessage = OpenAI.Chat.ChatCompletionAssistantMessageParam
type Message = OpenAI.Chat.ChatCompletionMessageParam
type AnthropicMessage = Anthropic.Messages.MessageParam

/**
 * Converts Anthropic messages to OpenAI format while merging consecutive messages with the same role.
 * This is required for DeepSeek Reasoner, which does not support successive messages with the same role.
 *
 * @param messages Array of Anthropic messages
 * @returns Array of OpenAI messages where consecutive messages with the same role are combined
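 *
 * @example
 * // Illustrative: two consecutive user messages with string content are
 * // merged into a single message, joined with a newline.
 * convertToR1Format([
 * 	{ role: "user", content: "Hello" },
 * 	{ role: "user", content: "World" },
 * ])
 * // => [{ role: "user", content: "Hello\nWorld" }]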
 */
export function convertToR1Format(messages: AnthropicMessage[]): Message[] {
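	// Walk the messages with reduce: merge the current message into the previous
	// one when the roles match, otherwise push it as a new, correctly typed message.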
	return messages.reduce<Message[]>((merged, message) => {
		const lastMessage = merged[merged.length - 1]
		let messageContent: string | (ContentPartText | ContentPartImage)[] = ""
		let hasImages = false

		// Convert content to appropriate format
		if (Array.isArray(message.content)) {
			const textParts: string[] = []
			const imageParts: ContentPartImage[] = []
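
			// Split the content blocks into text and image parts; base64 image
			// blocks are re-encoded as OpenAI image_url parts using data: URLs.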
			message.content.forEach((part) => {
				if (part.type === "text") {
					textParts.push(part.text)
				}
				if (part.type === "image") {
					hasImages = true
					imageParts.push({
						type: "image_url",
						image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
					})
				}
			})

			if (hasImages) {
				const parts: (ContentPartText | ContentPartImage)[] = []
				if (textParts.length > 0) {
					parts.push({ type: "text", text: textParts.join("\n") })
				}
				parts.push(...imageParts)
				messageContent = parts
			} else {
				messageContent = textParts.join("\n")
			}
		} else {
			messageContent = message.content
		}

		// If last message has same role, merge the content
		if (lastMessage?.role === message.role) {
			if (typeof lastMessage.content === "string" && typeof messageContent === "string") {
				lastMessage.content += `\n${messageContent}`
			}
			// If either has image content, convert both to array format
			else {
				const lastContent = Array.isArray(lastMessage.content)
					? lastMessage.content
					: [{ type: "text" as const, text: lastMessage.content || "" }]

				const newContent = Array.isArray(messageContent)
					? messageContent
					: [{ type: "text" as const, text: messageContent }]
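
				// Cast to the role-specific content type: the OpenAI SDK types user and
				// assistant message content differently, so the merged parts need a cast.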
				if (message.role === "assistant") {
					const mergedContent = [...lastContent, ...newContent] as AssistantMessage["content"]
					lastMessage.content = mergedContent
				} else {
					const mergedContent = [...lastContent, ...newContent] as UserMessage["content"]
					lastMessage.content = mergedContent
				}
			}
		} else {
			// Add as new message with the correct type based on role
			if (message.role === "assistant") {
				const newMessage: AssistantMessage = {
					role: "assistant",
					content: messageContent as AssistantMessage["content"],
				}
				merged.push(newMessage)
			} else {
				const newMessage: UserMessage = {
					role: "user",
					content: messageContent as UserMessage["content"],
				}
				merged.push(newMessage)
			}
		}

		return merged
	}, [])
}