mirror of
https://github.com/pacnpal/Roo-Code.git
synced 2025-12-22 13:21:07 -05:00
Add gemini support
This commit is contained in:
57
src/api/gemini.ts
Normal file
57
src/api/gemini.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { Anthropic } from "@anthropic-ai/sdk"
|
||||
import { FunctionCallingMode, GoogleGenerativeAI } from "@google/generative-ai"
|
||||
import { ApiHandler, ApiHandlerMessageResponse } from "."
|
||||
import { ApiHandlerOptions, geminiDefaultModelId, GeminiModelId, geminiModels, ModelInfo } from "../shared/api"
|
||||
import {
|
||||
convertAnthropicMessageToGemini,
|
||||
convertAnthropicToolToGemini,
|
||||
convertGeminiResponseToAnthropic,
|
||||
} from "../utils/gemini-format"
|
||||
|
||||
export class GeminiHandler implements ApiHandler {
|
||||
private options: ApiHandlerOptions
|
||||
private client: GoogleGenerativeAI
|
||||
|
||||
constructor(options: ApiHandlerOptions) {
|
||||
if (!options.geminiApiKey) {
|
||||
throw new Error("API key is required for Google Gemini")
|
||||
}
|
||||
this.options = options
|
||||
this.client = new GoogleGenerativeAI(options.geminiApiKey)
|
||||
}
|
||||
|
||||
async createMessage(
|
||||
systemPrompt: string,
|
||||
messages: Anthropic.Messages.MessageParam[],
|
||||
tools: Anthropic.Messages.Tool[]
|
||||
): Promise<ApiHandlerMessageResponse> {
|
||||
const model = this.client.getGenerativeModel({
|
||||
model: this.getModel().id,
|
||||
systemInstruction: systemPrompt,
|
||||
tools: [{ functionDeclarations: tools.map(convertAnthropicToolToGemini) }],
|
||||
toolConfig: {
|
||||
functionCallingConfig: {
|
||||
mode: FunctionCallingMode.AUTO,
|
||||
},
|
||||
},
|
||||
})
|
||||
const result = await model.generateContent({
|
||||
contents: messages.map(convertAnthropicMessageToGemini),
|
||||
generationConfig: {
|
||||
maxOutputTokens: this.getModel().info.maxTokens,
|
||||
},
|
||||
})
|
||||
const message = convertGeminiResponseToAnthropic(result.response)
|
||||
|
||||
return { message }
|
||||
}
|
||||
|
||||
getModel(): { id: GeminiModelId; info: ModelInfo } {
|
||||
const modelId = this.options.apiModelId
|
||||
if (modelId && modelId in geminiModels) {
|
||||
const id = modelId as GeminiModelId
|
||||
return { id, info: geminiModels[id] }
|
||||
}
|
||||
return { id: geminiDefaultModelId, info: geminiModels[geminiDefaultModelId] }
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import { OpenRouterHandler } from "./openrouter"
|
||||
import { VertexHandler } from "./vertex"
|
||||
import { OpenAiHandler } from "./openai"
|
||||
import { OllamaHandler } from "./ollama"
|
||||
import { GeminiHandler } from "./gemini"
|
||||
|
||||
export interface ApiHandlerMessageResponse {
|
||||
message: Anthropic.Messages.Message
|
||||
@@ -37,6 +38,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
|
||||
return new OpenAiHandler(options)
|
||||
case "ollama":
|
||||
return new OllamaHandler(options)
|
||||
case "gemini":
|
||||
return new GeminiHandler(options)
|
||||
default:
|
||||
return new AnthropicHandler(options)
|
||||
}
|
||||
|
||||
@@ -18,7 +18,14 @@ https://github.com/microsoft/vscode-webview-ui-toolkit-samples/blob/main/default
|
||||
https://github.com/KumarVariable/vscode-extension-sidebar-html/blob/master/src/customSidebarViewProvider.ts
|
||||
*/
|
||||
|
||||
type SecretKey = "apiKey" | "openRouterApiKey" | "awsAccessKey" | "awsSecretKey" | "awsSessionToken" | "openAiApiKey"
|
||||
type SecretKey =
|
||||
| "apiKey"
|
||||
| "openRouterApiKey"
|
||||
| "awsAccessKey"
|
||||
| "awsSecretKey"
|
||||
| "awsSessionToken"
|
||||
| "openAiApiKey"
|
||||
| "geminiApiKey"
|
||||
type GlobalStateKey =
|
||||
| "apiProvider"
|
||||
| "apiModelId"
|
||||
@@ -329,6 +336,7 @@ export class ClaudeDevProvider implements vscode.WebviewViewProvider {
|
||||
ollamaModelId,
|
||||
ollamaBaseUrl,
|
||||
anthropicBaseUrl,
|
||||
geminiApiKey,
|
||||
} = message.apiConfiguration
|
||||
await this.updateGlobalState("apiProvider", apiProvider)
|
||||
await this.updateGlobalState("apiModelId", apiModelId)
|
||||
@@ -346,6 +354,7 @@ export class ClaudeDevProvider implements vscode.WebviewViewProvider {
|
||||
await this.updateGlobalState("ollamaModelId", ollamaModelId)
|
||||
await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
|
||||
await this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl)
|
||||
await this.storeSecret("geminiApiKey", geminiApiKey)
|
||||
this.claudeDev?.updateApi(message.apiConfiguration)
|
||||
}
|
||||
await this.postStateToWebview()
|
||||
@@ -667,6 +676,7 @@ export class ClaudeDevProvider implements vscode.WebviewViewProvider {
|
||||
ollamaModelId,
|
||||
ollamaBaseUrl,
|
||||
anthropicBaseUrl,
|
||||
geminiApiKey,
|
||||
lastShownAnnouncementId,
|
||||
customInstructions,
|
||||
alwaysAllowReadOnly,
|
||||
@@ -688,6 +698,7 @@ export class ClaudeDevProvider implements vscode.WebviewViewProvider {
|
||||
this.getGlobalState("ollamaModelId") as Promise<string | undefined>,
|
||||
this.getGlobalState("ollamaBaseUrl") as Promise<string | undefined>,
|
||||
this.getGlobalState("anthropicBaseUrl") as Promise<string | undefined>,
|
||||
this.getSecret("geminiApiKey") as Promise<string | undefined>,
|
||||
this.getGlobalState("lastShownAnnouncementId") as Promise<string | undefined>,
|
||||
this.getGlobalState("customInstructions") as Promise<string | undefined>,
|
||||
this.getGlobalState("alwaysAllowReadOnly") as Promise<boolean | undefined>,
|
||||
@@ -726,6 +737,7 @@ export class ClaudeDevProvider implements vscode.WebviewViewProvider {
|
||||
ollamaModelId,
|
||||
ollamaBaseUrl,
|
||||
anthropicBaseUrl,
|
||||
geminiApiKey,
|
||||
},
|
||||
lastShownAnnouncementId,
|
||||
customInstructions,
|
||||
@@ -804,6 +816,7 @@ export class ClaudeDevProvider implements vscode.WebviewViewProvider {
|
||||
"awsSecretKey",
|
||||
"awsSessionToken",
|
||||
"openAiApiKey",
|
||||
"geminiApiKey",
|
||||
]
|
||||
for (const key of secretKeys) {
|
||||
await this.storeSecret(key, undefined)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export type ApiProvider = "anthropic" | "openrouter" | "bedrock" | "vertex" | "openai" | "ollama"
|
||||
export type ApiProvider = "anthropic" | "openrouter" | "bedrock" | "vertex" | "openai" | "ollama" | "gemini"
|
||||
|
||||
export interface ApiHandlerOptions {
|
||||
apiModelId?: string
|
||||
@@ -16,6 +16,7 @@ export interface ApiHandlerOptions {
|
||||
openAiModelId?: string
|
||||
ollamaModelId?: string
|
||||
ollamaBaseUrl?: string
|
||||
geminiApiKey?: string
|
||||
}
|
||||
|
||||
export type ApiConfiguration = ApiHandlerOptions & {
|
||||
@@ -305,3 +306,26 @@ export const openAiModelInfoSaneDefaults: ModelInfo = {
|
||||
inputPrice: 0,
|
||||
outputPrice: 0,
|
||||
}
|
||||
|
||||
// Gemini
// https://ai.google.dev/gemini-api/docs/models/gemini
// Union of the model ids declared in `geminiModels` below.
export type GeminiModelId = keyof typeof geminiModels
// Model used when the configured id is missing or unrecognized.
export const geminiDefaultModelId: GeminiModelId = "gemini-1.5-flash-latest"
// Static capability/pricing table for the supported Gemini models.
// NOTE(review): input/output prices are recorded as 0 here — presumably the
// free tier; confirm before using these values for cost accounting.
export const geminiModels = {
	"gemini-1.5-flash-latest": {
		maxTokens: 8192,
		contextWindow: 1_048_576,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 0,
		outputPrice: 0,
	},
	"gemini-1.5-pro-latest": {
		maxTokens: 8192,
		contextWindow: 2_097_152,
		supportsImages: true,
		supportsPromptCache: false,
		inputPrice: 0,
		outputPrice: 0,
	},
	// `satisfies` keeps the literal key types (for GeminiModelId) while still
	// checking every entry against the ModelInfo shape.
} as const satisfies Record<string, ModelInfo>
|
||||
|
||||
137
src/utils/gemini-format.ts
Normal file
137
src/utils/gemini-format.ts
Normal file
@@ -0,0 +1,137 @@
|
||||
import { Anthropic } from "@anthropic-ai/sdk"
|
||||
import { Content, EnhancedGenerateContentResponse, FunctionDeclaration, Part, SchemaType } from "@google/generative-ai"
|
||||
|
||||
export function convertAnthropicContentToGemini(
|
||||
content:
|
||||
| string
|
||||
| Array<
|
||||
| Anthropic.Messages.TextBlockParam
|
||||
| Anthropic.Messages.ImageBlockParam
|
||||
| Anthropic.Messages.ToolUseBlockParam
|
||||
| Anthropic.Messages.ToolResultBlockParam
|
||||
>
|
||||
): Part[] {
|
||||
if (typeof content === "string") {
|
||||
return [{ text: content }]
|
||||
}
|
||||
return content.map((block) => {
|
||||
switch (block.type) {
|
||||
case "text":
|
||||
return { text: block.text }
|
||||
case "image":
|
||||
if (block.source.type !== "base64") {
|
||||
throw new Error("Unsupported image source type")
|
||||
}
|
||||
return {
|
||||
inlineData: {
|
||||
data: block.source.data,
|
||||
mimeType: block.source.media_type,
|
||||
},
|
||||
}
|
||||
case "tool_use":
|
||||
return {
|
||||
functionCall: {
|
||||
name: block.name,
|
||||
args: block.input,
|
||||
},
|
||||
} as Part
|
||||
case "tool_result":
|
||||
return {
|
||||
functionResponse: {
|
||||
name: block.tool_use_id,
|
||||
response: {
|
||||
content: block.content,
|
||||
},
|
||||
},
|
||||
}
|
||||
default:
|
||||
throw new Error(`Unsupported content block type: ${(block as any).type}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function convertAnthropicMessageToGemini(message: Anthropic.Messages.MessageParam): Content {
|
||||
return {
|
||||
role: message.role === "assistant" ? "model" : message.role,
|
||||
parts: convertAnthropicContentToGemini(message.content),
|
||||
}
|
||||
}
|
||||
|
||||
export function convertAnthropicToolToGemini(tool: Anthropic.Messages.Tool): FunctionDeclaration {
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description || "",
|
||||
parameters: {
|
||||
type: SchemaType.OBJECT,
|
||||
properties: Object.fromEntries(
|
||||
Object.entries(tool.input_schema.properties || {}).map(([key, value]) => [
|
||||
key,
|
||||
{
|
||||
type: (value as any).type.toUpperCase(),
|
||||
description: (value as any).description || "",
|
||||
},
|
||||
])
|
||||
),
|
||||
required: (tool.input_schema.required as string[]) || [],
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export function convertGeminiResponseToAnthropic(
|
||||
response: EnhancedGenerateContentResponse
|
||||
): Anthropic.Messages.Message {
|
||||
const content: Anthropic.Messages.ContentBlock[] = []
|
||||
|
||||
// Add the main text response
|
||||
const text = response.text()
|
||||
if (text) {
|
||||
content.push({ type: "text", text })
|
||||
}
|
||||
|
||||
// Add function calls as tool_use blocks
|
||||
const functionCalls = response.functionCalls()
|
||||
if (functionCalls) {
|
||||
functionCalls.forEach((call, index) => {
|
||||
content.push({
|
||||
type: "tool_use",
|
||||
id: `tool_${index}`,
|
||||
name: call.name,
|
||||
input: call.args,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Determine stop reason
|
||||
let stop_reason: Anthropic.Messages.Message["stop_reason"] = null
|
||||
const finishReason = response.candidates?.[0]?.finishReason
|
||||
if (finishReason) {
|
||||
switch (finishReason) {
|
||||
case "STOP":
|
||||
stop_reason = "end_turn"
|
||||
break
|
||||
case "MAX_TOKENS":
|
||||
stop_reason = "max_tokens"
|
||||
break
|
||||
case "SAFETY":
|
||||
case "RECITATION":
|
||||
case "OTHER":
|
||||
stop_reason = "stop_sequence"
|
||||
break
|
||||
// Add more cases if needed
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: `msg_${Date.now()}`, // Generate a unique ID
|
||||
type: "message",
|
||||
role: "assistant",
|
||||
content,
|
||||
model: "",
|
||||
stop_reason,
|
||||
stop_sequence: null, // Gemini doesn't provide this information
|
||||
usage: {
|
||||
input_tokens: response.usageMetadata?.promptTokenCount ?? 0,
|
||||
output_tokens: response.usageMetadata?.candidatesTokenCount ?? 0,
|
||||
},
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user