feat(vscode-lm): implement VS Code Language Models provider

RaySinner
2025-01-07 01:23:22 +03:00
parent 5e099e2960
commit 9d62a7bb77
14 changed files with 2473 additions and 153 deletions

View File

@@ -6,32 +6,36 @@ import { McpServer } from "./mcp"
// webview will hold state
export interface ExtensionMessage {
type:
| "action"
| "state"
| "selectedImages"
| "ollamaModels"
| "lmStudioModels"
| "vsCodeLmModels"
| "vsCodeLmApiAvailable"
| "requestVsCodeLmModels"
| "theme"
| "workspaceUpdated"
| "invoke"
| "partialMessage"
| "openRouterModels"
| "openAiModels"
| "mcpServers"
| "enhancedPrompt"
text?: string
action?:
| "chatButtonClicked"
| "mcpButtonClicked"
| "settingsButtonClicked"
| "historyButtonClicked"
| "didBecomeVisible"
| "chatButtonClicked"
| "mcpButtonClicked"
| "settingsButtonClicked"
| "historyButtonClicked"
| "didBecomeVisible"
invoke?: "sendMessage" | "primaryButtonClick" | "secondaryButtonClick"
state?: ExtensionState
images?: string[]
ollamaModels?: string[]
lmStudioModels?: string[]
vsCodeLmModels?: { vendor?: string; family?: string; version?: string; id?: string }[]
filePaths?: string[]
partialMessage?: ClineMessage
openRouterModels?: Record<string, ModelInfo>
@@ -109,14 +113,14 @@ export type ClineSay =
export interface ClineSayTool {
tool:
| "editedExistingFile"
| "appliedDiff"
| "newFileCreated"
| "readFile"
| "listFilesTopLevel"
| "listFilesRecursive"
| "listCodeDefinitionNames"
| "searchFiles"
| "editedExistingFile"
| "appliedDiff"
| "newFileCreated"
| "readFile"
| "listFilesTopLevel"
| "listFilesRecursive"
| "listCodeDefinitionNames"
| "searchFiles"
path?: string
diff?: string
content?: string
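
The new `requestVsCodeLmModels` / `vsCodeLmModels` message pair implies a round trip between the webview and the extension host. Below is a minimal sketch of the extension-host side, assuming the stable `vscode.lm.selectChatModels` API; the helper name and webview handle are illustrative, not part of this commit.

```typescript
import * as vscode from "vscode"

// Hypothetical helper (not part of this commit): enumerate the chat models that
// VS Code currently exposes and post them back to the webview in the shape the
// "vsCodeLmModels" message expects.
async function sendVsCodeLmModels(webview: vscode.Webview): Promise<void> {
	try {
		// An empty selector matches every registered language model.
		const models = await vscode.lm.selectChatModels({})
		await webview.postMessage({
			type: "vsCodeLmModels",
			vsCodeLmModels: models.map((m) => ({
				vendor: m.vendor,
				family: m.family,
				version: m.version,
				id: m.id,
			})),
		})
	} catch {
		// vscode.lm is missing on older VS Code builds; report an empty list.
		await webview.postMessage({ type: "vsCodeLmModels", vsCodeLmModels: [] })
	}
}
```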

View File

@@ -4,52 +4,53 @@ export type AudioType = "notification" | "celebration" | "progress_loop"
export interface WebviewMessage {
type:
| "apiConfiguration"
| "customInstructions"
| "allowedCommands"
| "alwaysAllowReadOnly"
| "alwaysAllowWrite"
| "alwaysAllowExecute"
| "webviewDidLaunch"
| "newTask"
| "askResponse"
| "clearTask"
| "didShowAnnouncement"
| "selectImages"
| "exportCurrentTask"
| "showTaskWithId"
| "deleteTaskWithId"
| "exportTaskWithId"
| "resetState"
| "requestOllamaModels"
| "requestLmStudioModels"
| "openImage"
| "openFile"
| "openMention"
| "cancelTask"
| "refreshOpenRouterModels"
| "refreshOpenAiModels"
| "alwaysAllowBrowser"
| "alwaysAllowMcp"
| "playSound"
| "soundEnabled"
| "soundVolume"
| "diffEnabled"
| "browserViewportSize"
| "screenshotQuality"
| "openMcpSettings"
| "restartMcpServer"
| "toggleToolAlwaysAllow"
| "toggleMcpServer"
| "fuzzyMatchThreshold"
| "preferredLanguage"
| "writeDelayMs"
| "enhancePrompt"
| "enhancedPrompt"
| "draggedImages"
| "deleteMessage"
| "terminalOutputLineLimit"
| "mcpEnabled"
| "apiConfiguration"
| "customInstructions"
| "allowedCommands"
| "alwaysAllowReadOnly"
| "alwaysAllowWrite"
| "alwaysAllowExecute"
| "webviewDidLaunch"
| "newTask"
| "askResponse"
| "clearTask"
| "didShowAnnouncement"
| "selectImages"
| "exportCurrentTask"
| "showTaskWithId"
| "deleteTaskWithId"
| "exportTaskWithId"
| "resetState"
| "requestOllamaModels"
| "requestLmStudioModels"
| "requestVsCodeLmModels"
| "openImage"
| "openFile"
| "openMention"
| "cancelTask"
| "refreshOpenRouterModels"
| "refreshOpenAiModels"
| "alwaysAllowBrowser"
| "alwaysAllowMcp"
| "playSound"
| "soundEnabled"
| "soundVolume"
| "diffEnabled"
| "browserViewportSize"
| "screenshotQuality"
| "openMcpSettings"
| "restartMcpServer"
| "toggleToolAlwaysAllow"
| "toggleMcpServer"
| "fuzzyMatchThreshold"
| "preferredLanguage"
| "writeDelayMs"
| "enhancePrompt"
| "enhancedPrompt"
| "draggedImages"
| "deleteMessage"
| "terminalOutputLineLimit"
| "mcpEnabled"
text?: string
disabled?: boolean
askResponse?: ClineAskResponse
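
On the webview side, the same message types would be used roughly as follows; this is a sketch only, with `acquireVsCodeApi` being the standard webview API handle and everything else illustrative.

```typescript
// Webview-side sketch (illustrative only): request the model list once and
// react to the extension host's reply.
declare function acquireVsCodeApi(): { postMessage(message: unknown): void }

const vscodeApi = acquireVsCodeApi()
vscodeApi.postMessage({ type: "requestVsCodeLmModels" })

window.addEventListener("message", (event: MessageEvent) => {
	const message = event.data
	if (message?.type === "vsCodeLmModels") {
		// message.vsCodeLmModels carries the { vendor, family, version, id }[] payload
		console.log("VS Code LM models:", message.vsCodeLmModels)
	}
})
```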

View File

@@ -1,3 +1,5 @@
import * as vscode from 'vscode';
export type ApiProvider =
| "anthropic"
| "openrouter"
@@ -9,11 +11,13 @@ export type ApiProvider =
| "gemini"
| "openai-native"
| "deepseek"
| "vscode-lm"
export interface ApiHandlerOptions {
apiModelId?: string
apiKey?: string // anthropic
anthropicBaseUrl?: string
vsCodeLmModelSelector?: vscode.LanguageModelChatSelector
openRouterApiKey?: string
openRouterModelId?: string
openRouterModelInfo?: ModelInfo
@@ -47,16 +51,17 @@ export interface ApiHandlerOptions {
export type ApiConfiguration = ApiHandlerOptions & {
apiProvider?: ApiProvider
vsCodeLmModelSelector?: vscode.LanguageModelChatSelector;
}
// Models
export interface ModelInfo {
maxTokens?: number
contextWindow: number
supportsImages?: boolean
supportsComputerUse?: boolean
supportsPromptCache: boolean
inputPrice?: number
outputPrice?: number
cacheWritesPrice?: number
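
With `contextWindow` now required on `ModelInfo`, a vscode-lm handler has to synthesize an entry from what the selected chat model exposes; the VS Code LM API only reports an input-token budget via `maxInputTokens`. A hedged sketch follows (the function name and import path are assumptions):

```typescript
import * as vscode from "vscode"
import type { ModelInfo } from "../shared/api" // path assumed for illustration

// Sketch: map a VS Code chat model onto ModelInfo with conservative defaults,
// since the LM API exposes no pricing, image, or prompt-cache metadata.
function vsCodeLmModelInfo(model: vscode.LanguageModelChat): ModelInfo {
	return {
		contextWindow: model.maxInputTokens,
		supportsImages: false,
		supportsPromptCache: false,
		inputPrice: 0,
		outputPrice: 0,
	}
}
```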
@@ -115,24 +120,24 @@ export const anthropicModels = {
// AWS Bedrock
// https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html
export interface MessageContent {
type: 'text' | 'image' | 'video' | 'tool_use' | 'tool_result';
text?: string;
source?: {
type: 'base64';
data: string | Uint8Array; // string for Anthropic, Uint8Array for Bedrock
media_type: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp';
};
// Video specific fields
format?: string;
s3Location?: {
uri: string;
bucketOwner?: string;
};
// Tool use and result fields
toolUseId?: string;
name?: string;
input?: any;
output?: any; // Used for tool_result type
}
export type BedrockModelId = keyof typeof bedrockModels
@@ -226,7 +231,7 @@ export const bedrockModels = {
inputPrice: 0.25,
outputPrice: 1.25,
},
"meta.llama3-2-90b-instruct-v1:0" : {
"meta.llama3-2-90b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: true,
@@ -235,7 +240,7 @@ export const bedrockModels = {
inputPrice: 0.72,
outputPrice: 0.72,
},
"meta.llama3-2-11b-instruct-v1:0" : {
"meta.llama3-2-11b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: true,
@@ -244,7 +249,7 @@ export const bedrockModels = {
inputPrice: 0.16,
outputPrice: 0.16,
},
"meta.llama3-2-3b-instruct-v1:0" : {
"meta.llama3-2-3b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
@@ -253,7 +258,7 @@ export const bedrockModels = {
inputPrice: 0.15,
outputPrice: 0.15,
},
"meta.llama3-2-1b-instruct-v1:0" : {
"meta.llama3-2-1b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
@@ -262,7 +267,7 @@ export const bedrockModels = {
inputPrice: 0.1,
outputPrice: 0.1,
},
"meta.llama3-1-405b-instruct-v1:0" : {
"meta.llama3-1-405b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
@@ -271,7 +276,7 @@ export const bedrockModels = {
inputPrice: 2.4,
outputPrice: 2.4,
},
"meta.llama3-1-70b-instruct-v1:0" : {
"meta.llama3-1-70b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
@@ -280,7 +285,7 @@ export const bedrockModels = {
inputPrice: 0.72,
outputPrice: 0.72,
},
"meta.llama3-1-8b-instruct-v1:0" : {
"meta.llama3-1-8b-instruct-v1:0": {
maxTokens: 8192,
contextWindow: 8_000,
supportsImages: false,
@@ -289,8 +294,8 @@ export const bedrockModels = {
inputPrice: 0.22,
outputPrice: 0.22,
},
"meta.llama3-70b-instruct-v1:0" : {
maxTokens: 2048 ,
"meta.llama3-70b-instruct-v1:0": {
maxTokens: 2048,
contextWindow: 8_000,
supportsImages: false,
supportsComputerUse: false,
@@ -298,8 +303,8 @@ export const bedrockModels = {
inputPrice: 2.65,
outputPrice: 3.5,
},
"meta.llama3-8b-instruct-v1:0" : {
maxTokens: 2048 ,
"meta.llama3-8b-instruct-v1:0": {
maxTokens: 2048,
contextWindow: 4_000,
supportsImages: false,
supportsComputerUse: false,
@@ -514,4 +519,3 @@ export const deepSeekModels = {
// https://learn.microsoft.com/en-us/azure/ai-services/openai/api-version-deprecation
// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#api-specs
export const azureOpenAiDefaultApiVersion = "2024-08-01-preview"
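
None of these hunks show the provider's request path itself. As a rough sketch, a handler built on the configured `vsCodeLmModelSelector` could resolve a model and stream text like this; the class and method names are hypothetical, only the `vscode.lm` calls are real API.

```typescript
import * as vscode from "vscode"

// Hypothetical handler skeleton (names are not from this commit): resolve a chat
// model from the configured selector, send a single user message, and stream the
// text fragments back to the caller.
class VsCodeLmChat {
	constructor(private readonly selector: vscode.LanguageModelChatSelector) {}

	async *complete(prompt: string): AsyncGenerator<string> {
		const [model] = await vscode.lm.selectChatModels(this.selector)
		if (!model) {
			throw new Error("No VS Code language model matches the configured selector")
		}
		const response = await model.sendRequest(
			[vscode.LanguageModelChatMessage.User(prompt)],
			{},
			new vscode.CancellationTokenSource().token,
		)
		for await (const fragment of response.text) {
			yield fragment
		}
	}
}
```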

View File

@@ -0,0 +1,14 @@
import { LanguageModelChatSelector } from 'vscode';
export const SELECTOR_SEPARATOR = '/';
export function stringifyVsCodeLmModelSelector(selector: LanguageModelChatSelector): string {
return [
selector.vendor,
selector.family,
selector.version,
selector.id
]
.filter(Boolean)
.join(SELECTOR_SEPARATOR);
}
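
For reference, the helper flattens a partially specified selector into a single id by joining whichever fields are present. A usage sketch (the import path and the vendor/family values are only illustrative):

```typescript
import { stringifyVsCodeLmModelSelector } from "./vsCodeSelectorUtils" // path assumed

// { vendor: "copilot", family: "gpt-4o" } -> "copilot/gpt-4o"
// { family: "gpt-4o" }                    -> "gpt-4o"
const modelId = stringifyVsCodeLmModelSelector({ vendor: "copilot", family: "gpt-4o" })
console.log(modelId)
```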