Mirror of https://github.com/pacnpal/Roo-Code.git (synced 2025-12-20 04:11:10 -05:00)
merge(upstream): merge upstream changes keeping VSCode LM provider and adding Glama support
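At a glance, the diff below wires Glama into ClineProvider the same way OpenRouter already is: a new "glamaApiKey" secret, new "glamaModelId" / "glamaModelInfo" global-state keys, a cached glama_models.json file, and a refreshGlamaModels() call that fetches the model list from the Glama API. As a minimal sketch of the state being threaded through (the GlamaState grouping and this trimmed ModelInfo are illustrative only; the diff stores each key individually via storeSecret and updateGlobalState):

// Sketch only, not part of the diff: a trimmed ModelInfo with just the fields
// that refreshGlamaModels() below fills in.
interface ModelInfo {
	maxTokens?: number
	contextWindow?: number
	supportsImages?: boolean
	supportsComputerUse?: boolean
	supportsPromptCache?: boolean
	inputPrice?: number
	outputPrice?: number
	cacheWritesPrice?: number
	cacheReadsPrice?: number
	description?: string
}

// Hypothetical grouping of the new per-provider state; the diff persists these
// as separate keys rather than as one object.
interface GlamaState {
	glamaApiKey?: string // secret, via storeSecret("glamaApiKey", ...)
	glamaModelId?: string // global state, via updateGlobalState("glamaModelId", ...)
	glamaModelInfo?: ModelInfo // global state, via updateGlobalState("glamaModelInfo", ...)
}
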
@@ -24,6 +24,7 @@ import { getNonce } from "./getNonce"
import { getUri } from "./getUri"
import { playSound, setSoundEnabled, setSoundVolume } from "../../utils/sound"
import { enhancePrompt } from "../../utils/enhance-prompt"
import { getCommitInfo, searchCommits, getWorkingState } from "../../utils/git"

/*
https://github.com/microsoft/vscode-webview-ui-toolkit-samples/blob/main/default/weather-webview/src/providers/WeatherViewProvider.ts
@@ -33,6 +34,7 @@ https://github.com/KumarVariable/vscode-extension-sidebar-html/blob/master/src/c

type SecretKey =
	| "apiKey"
	| "glamaApiKey"
	| "openRouterApiKey"
	| "awsAccessKey"
	| "awsSecretKey"
@@ -45,6 +47,8 @@ type SecretKey =
type GlobalStateKey =
	| "apiProvider"
	| "apiModelId"
	| "glamaModelId"
	| "glamaModelInfo"
	| "awsRegion"
	| "awsUseCrossRegionInference"
	| "vertexProjectId"
@@ -64,7 +68,7 @@ type GlobalStateKey =
	| "lmStudioBaseUrl"
	| "anthropicBaseUrl"
	| "azureApiVersion"
	| "includeStreamOptions"
	| "openAiStreamingEnabled"
	| "openRouterModelId"
	| "openRouterModelInfo"
	| "openRouterUseMiddleOutTransform"
@@ -85,6 +89,7 @@ type GlobalStateKey =
export const GlobalFileNames = {
	apiConversationHistory: "api_conversation_history.json",
	uiMessages: "ui_messages.json",
	glamaModels: "glama_models.json",
	openRouterModels: "openrouter_models.json",
	mcpSettings: "cline_mcp_settings.json",
}
@@ -388,6 +393,24 @@ export class ClineProvider implements vscode.WebviewViewProvider {
								}
							}
						})
						this.readGlamaModels().then((glamaModels) => {
							if (glamaModels) {
								this.postMessageToWebview({ type: "glamaModels", glamaModels })
							}
						})
						this.refreshGlamaModels().then(async (glamaModels) => {
							if (glamaModels) {
								// update model info in state (this needs to be done here since we don't want to update state while settings is open, and we may refresh models there)
								const { apiConfiguration } = await this.getState()
								if (apiConfiguration.glamaModelId) {
									await this.updateGlobalState(
										"glamaModelInfo",
										glamaModels[apiConfiguration.glamaModelId],
									)
									await this.postStateToWebview()
								}
							}
						})
						break
					case "newTask":
						// Code that should run in response to the hello message command
@@ -406,6 +429,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
								apiProvider,
								apiModelId,
								apiKey,
								glamaModelId,
								glamaModelInfo,
								glamaApiKey,
								openRouterApiKey,
								awsAccessKey,
								awsSecretKey,
@@ -425,7 +451,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
								geminiApiKey,
								openAiNativeApiKey,
								azureApiVersion,
								includeStreamOptions,
								openAiStreamingEnabled,
								openRouterModelId,
								openRouterModelInfo,
								openRouterUseMiddleOutTransform,
@@ -434,6 +460,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
							await this.updateGlobalState("apiProvider", apiProvider)
							await this.updateGlobalState("apiModelId", apiModelId)
							await this.storeSecret("apiKey", apiKey)
							await this.updateGlobalState("glamaModelId", glamaModelId)
							await this.updateGlobalState("glamaModelInfo", glamaModelInfo)
							await this.storeSecret("glamaApiKey", glamaApiKey)
							await this.storeSecret("openRouterApiKey", openRouterApiKey)
							await this.storeSecret("awsAccessKey", awsAccessKey)
							await this.storeSecret("awsSecretKey", awsSecretKey)
@@ -454,7 +483,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
							await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
							await this.storeSecret("deepSeekApiKey", message.apiConfiguration.deepSeekApiKey)
							await this.updateGlobalState("azureApiVersion", azureApiVersion)
							await this.updateGlobalState("includeStreamOptions", includeStreamOptions)
							await this.updateGlobalState("openAiStreamingEnabled", openAiStreamingEnabled)
							await this.updateGlobalState("openRouterModelId", openRouterModelId)
							await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
							await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform)
@@ -534,6 +563,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
						const vsCodeLmModels = await this.getVsCodeLmModels()
						this.postMessageToWebview({ type: "vsCodeLmModels", vsCodeLmModels })
						break
					case "refreshGlamaModels":
						await this.refreshGlamaModels()
						break
					case "refreshOpenRouterModels":
						await this.refreshOpenRouterModels()
						break
@@ -710,6 +742,24 @@ export class ClineProvider implements vscode.WebviewViewProvider {
							}
						}
						break

					case "searchCommits": {
						const cwd = vscode.workspace.workspaceFolders?.map((folder) => folder.uri.fsPath).at(0)
						if (cwd) {
							try {
								const commits = await searchCommits(message.query || "", cwd)
								await this.postMessageToWebview({
									type: "commitSearchResults",
									commits
								})
							} catch (error) {
								console.error("Error searching commits:", error)
								vscode.window.showErrorMessage("Failed to search commits")
							}
						}
						break
					}
				}
			},
			null,
@@ -851,6 +901,94 @@ export class ClineProvider implements vscode.WebviewViewProvider {
		return cacheDir
	}

	async readGlamaModels(): Promise<Record<string, ModelInfo> | undefined> {
		const glamaModelsFilePath = path.join(
			await this.ensureCacheDirectoryExists(),
			GlobalFileNames.glamaModels,
		)
		const fileExists = await fileExistsAtPath(glamaModelsFilePath)
		if (fileExists) {
			const fileContents = await fs.readFile(glamaModelsFilePath, "utf8")
			return JSON.parse(fileContents)
		}
		return undefined
	}

	async refreshGlamaModels() {
		const glamaModelsFilePath = path.join(
			await this.ensureCacheDirectoryExists(),
			GlobalFileNames.glamaModels,
		)

		let models: Record<string, ModelInfo> = {}
		try {
			const response = await axios.get("https://glama.ai/api/gateway/v1/models")
			/*
				{
					"added": "2024-12-24T15:12:49.324Z",
					"capabilities": [
						"adjustable_safety_settings",
						"caching",
						"code_execution",
						"function_calling",
						"json_mode",
						"json_schema",
						"system_instructions",
						"tuning",
						"input:audio",
						"input:image",
						"input:text",
						"input:video",
						"output:text"
					],
					"id": "google-vertex/gemini-1.5-flash-002",
					"maxTokensInput": 1048576,
					"maxTokensOutput": 8192,
					"pricePerToken": {
						"cacheRead": null,
						"cacheWrite": null,
						"input": "0.000000075",
						"output": "0.0000003"
					}
				}
			*/
			if (response.data) {
				const rawModels = response.data
				const parsePrice = (price: any) => {
					if (price) {
						return parseFloat(price) * 1_000_000
					}
					return undefined
				}
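				// Glama quotes pricePerToken as a per-token decimal string, so parsePrice
				// converts it to a price per million tokens: "0.000000075" becomes 0.075.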
				for (const rawModel of rawModels) {
					const modelInfo: ModelInfo = {
						maxTokens: rawModel.maxTokensOutput,
						contextWindow: rawModel.maxTokensInput,
						supportsImages: rawModel.capabilities?.includes("input:image"),
						supportsComputerUse: rawModel.capabilities?.includes("computer_use"),
						supportsPromptCache: rawModel.capabilities?.includes("caching"),
						inputPrice: parsePrice(rawModel.pricePerToken?.input),
						outputPrice: parsePrice(rawModel.pricePerToken?.output),
						description: undefined,
						cacheWritesPrice: parsePrice(rawModel.pricePerToken?.cacheWrite),
						cacheReadsPrice: parsePrice(rawModel.pricePerToken?.cacheRead),
					}

					models[rawModel.id] = modelInfo
				}
			} else {
				console.error("Invalid response from Glama API")
			}
			await fs.writeFile(glamaModelsFilePath, JSON.stringify(models))
			console.log("Glama models fetched and saved", models)
		} catch (error) {
			console.error("Error fetching Glama models:", error)
		}

		await this.postMessageToWebview({ type: "glamaModels", glamaModels: models })
		return models
	}

	async readOpenRouterModels(): Promise<Record<string, ModelInfo> | undefined> {
		const openRouterModelsFilePath = path.join(
			await this.ensureCacheDirectoryExists(),
@@ -1174,6 +1312,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			storedApiProvider,
			apiModelId,
			apiKey,
			glamaApiKey,
			glamaModelId,
			glamaModelInfo,
			openRouterApiKey,
			awsAccessKey,
			awsSecretKey,
@@ -1194,7 +1335,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			openAiNativeApiKey,
			deepSeekApiKey,
			azureApiVersion,
			includeStreamOptions,
			openAiStreamingEnabled,
			openRouterModelId,
			openRouterModelInfo,
			openRouterUseMiddleOutTransform,
@@ -1222,6 +1363,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			this.getGlobalState("apiProvider") as Promise<ApiProvider | undefined>,
			this.getGlobalState("apiModelId") as Promise<string | undefined>,
			this.getSecret("apiKey") as Promise<string | undefined>,
			this.getSecret("glamaApiKey") as Promise<string | undefined>,
			this.getGlobalState("glamaModelId") as Promise<string | undefined>,
			this.getGlobalState("glamaModelInfo") as Promise<ModelInfo | undefined>,
			this.getSecret("openRouterApiKey") as Promise<string | undefined>,
			this.getSecret("awsAccessKey") as Promise<string | undefined>,
			this.getSecret("awsSecretKey") as Promise<string | undefined>,
@@ -1242,7 +1386,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
			this.getSecret("deepSeekApiKey") as Promise<string | undefined>,
			this.getGlobalState("azureApiVersion") as Promise<string | undefined>,
			this.getGlobalState("includeStreamOptions") as Promise<boolean | undefined>,
			this.getGlobalState("openAiStreamingEnabled") as Promise<boolean | undefined>,
			this.getGlobalState("openRouterModelId") as Promise<string | undefined>,
			this.getGlobalState("openRouterModelInfo") as Promise<ModelInfo | undefined>,
			this.getGlobalState("openRouterUseMiddleOutTransform") as Promise<boolean | undefined>,
@@ -1287,6 +1431,9 @@ export class ClineProvider implements vscode.WebviewViewProvider {
				apiProvider,
				apiModelId,
				apiKey,
				glamaApiKey,
				glamaModelId,
				glamaModelInfo,
				openRouterApiKey,
				awsAccessKey,
				awsSecretKey,
@@ -1307,7 +1454,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
				openAiNativeApiKey,
				deepSeekApiKey,
				azureApiVersion,
				includeStreamOptions,
				openAiStreamingEnabled,
				openRouterModelId,
				openRouterModelInfo,
				openRouterUseMiddleOutTransform,
@@ -1426,6 +1573,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
		}
		const secretKeys: SecretKey[] = [
			"apiKey",
			"glamaApiKey",
			"openRouterApiKey",
			"awsAccessKey",
			"awsSecretKey",