Mirror of https://github.com/pacnpal/Roo-Code.git
Add ability to attach images to messages
src/utils/export-markdown.ts (Normal file, 79 lines)
@@ -0,0 +1,79 @@
import { Anthropic } from "@anthropic-ai/sdk"
import os from "os"
import * as path from "path"
import * as vscode from "vscode"

export async function downloadTask(conversationHistory: Anthropic.MessageParam[]) {
    // File name
    const date = new Date()
    const month = date.toLocaleString("en-US", { month: "short" }).toLowerCase()
    const day = date.getDate()
    const year = date.getFullYear()
    let hours = date.getHours()
    const minutes = date.getMinutes().toString().padStart(2, "0")
    const ampm = hours >= 12 ? "pm" : "am"
    hours = hours % 12
    hours = hours ? hours : 12 // the hour '0' should be '12'
    const fileName = `claude_dev_task_${month}-${day}-${year}_${hours}-${minutes}-${ampm}.md`

    // Generate markdown
    const markdownContent = conversationHistory
        .map((message) => {
            const role = message.role === "user" ? "**User:**" : "**Assistant:**"
            const content = Array.isArray(message.content)
                ? message.content.map(formatContentBlockToMarkdown).join("\n")
                : message.content

            return `${role}\n\n${content}\n\n`
        })
        .join("---\n\n")

    // Prompt user for save location
    const saveUri = await vscode.window.showSaveDialog({
        filters: { Markdown: ["md"] },
        defaultUri: vscode.Uri.file(path.join(os.homedir(), "Downloads", fileName)),
    })

    if (saveUri) {
        // Write content to the selected location
        await vscode.workspace.fs.writeFile(saveUri, Buffer.from(markdownContent))
        vscode.window.showTextDocument(saveUri, { preview: true })
    }
}

function formatContentBlockToMarkdown(
    block:
        | Anthropic.TextBlockParam
        | Anthropic.ImageBlockParam
        | Anthropic.ToolUseBlockParam
        | Anthropic.ToolResultBlockParam
): string {
    switch (block.type) {
        case "text":
            return block.text
        case "image":
            return `[Image]`
        case "tool_use":
            let input: string
            if (typeof block.input === "object" && block.input !== null) {
                input = Object.entries(block.input)
                    .map(([key, value]) => `${key.charAt(0).toUpperCase() + key.slice(1)}: ${value}`)
                    .join("\n")
            } else {
                input = String(block.input)
            }
            return `[Tool Use: ${block.name}]\n${input}`
        case "tool_result":
            if (typeof block.content === "string") {
                return `[Tool Result${block.is_error ? " (Error)" : ""}]\n${block.content}`
            } else if (Array.isArray(block.content)) {
                return `[Tool Result${block.is_error ? " (Error)" : ""}]\n${block.content
                    .map(formatContentBlockToMarkdown)
                    .join("\n")}`
            } else {
                return `[Tool Result${block.is_error ? " (Error)" : ""}]`
            }
        default:
            return "[Unexpected content type]"
    }
}
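For context, a minimal sketch of how downloadTask might be invoked. The conversation history below is hypothetical, assembled only to show the block shapes the formatter handles (text, tool_use, tool_result); in the extension the real history is accumulated as a task runs:

import { Anthropic } from "@anthropic-ai/sdk"
import { downloadTask } from "./utils/export-markdown"

// Hypothetical history for illustration; ids and tool names are made up.
const history: Anthropic.MessageParam[] = [
    { role: "user", content: "List the files in src/utils/" },
    {
        role: "assistant",
        content: [{ type: "tool_use", id: "toolu_01", name: "list_files", input: { path: "src/utils/" } }],
    },
    {
        role: "user",
        content: [{ type: "tool_result", tool_use_id: "toolu_01", content: "export-markdown.ts\nprocess-images.ts" }],
    },
]

// Prompts for a save location (defaulting to ~/Downloads), writes the
// markdown, and opens the exported file in a preview tab.
await downloadTask(history)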
src/utils/process-images.ts (Normal file, 64 lines)
@@ -0,0 +1,64 @@
import * as vscode from "vscode"
import fs from "fs/promises"
import sharp from "sharp"

export async function selectAndProcessImages(): Promise<string[]> {
    const options: vscode.OpenDialogOptions = {
        canSelectMany: true,
        openLabel: "Select",
        filters: {
            // sharp can convert all of these to WebP, which both Anthropic and OpenRouter support
            Images: ["png", "jpg", "jpeg", "gif", "webp", "tiff", "avif", "svg"],
        },
    }

    const fileUris = await vscode.window.showOpenDialog(options)

    if (!fileUris || fileUris.length === 0) {
        return []
    }

    return await Promise.all(
        fileUris.map(async (uri) => {
            const imagePath = uri.fsPath
            const originalBuffer = await fs.readFile(imagePath)
            return convertToWebpBase64(originalBuffer)
        })
    )
}

export async function processPastedImages(base64Strings: string[]): Promise<string[]> {
    return await Promise.all(
        base64Strings.map(async (base64) => {
            const buffer = Buffer.from(base64, "base64")
            return convertToWebpBase64(buffer)
        })
    )
}

async function convertToWebpBase64(buffer: Buffer): Promise<string> {
    const processedBuffer = await sharp(buffer)
        /*
        Anthropic docs recommendations:
        - To improve time-to-first-token, resize images to no more than 1.15 megapixels (and within 1568 pixels in both dimensions).
        - WebP is a newer image format that's more efficient than PNG and JPEG, so it's ideal for keeping token usage low. (I've seen this compression decrease size by 10x.)
        */
        .resize(1568, 1568, {
            fit: "inside", // maintain aspect ratio
            withoutEnlargement: true, // don't enlarge smaller images
        })
        .webp({
            // NOTE: consider increasing effort from 4 (the default) to 6 (the max); this may increase processing time by up to ~500ms
            quality: 80,
        })
        .toBuffer()

    const base64 = processedBuffer.toString("base64")

    // console.log({
    //     originalSize: buffer.length,
    //     processedSize: processedBuffer.length,
    //     base64,
    // })

    return base64
}
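A hypothetical sketch of how the processed images could then be attached to a message, per the commit's purpose. The wiring below is illustrative, not code from this commit; since convertToWebpBase64 always re-encodes to WebP, the media type can be fixed to image/webp:

import { Anthropic } from "@anthropic-ai/sdk"
import { selectAndProcessImages } from "./utils/process-images"

// Returns base64-encoded WebP strings, or [] if the user cancels the dialog.
const images = await selectAndProcessImages()

const message: Anthropic.MessageParam = {
    role: "user",
    content: [
        { type: "text", text: "Here are the screenshots:" },
        ...images.map((data) => ({
            type: "image" as const,
            source: { type: "base64" as const, media_type: "image/webp" as const, data },
        })),
    ],
}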