Spacing adjustments

Matt Rubens
2025-01-08 17:21:23 -05:00
parent d5fd2bbb82
commit 7137f8c528
8 changed files with 787 additions and 860 deletions

View File

@@ -15,43 +15,40 @@ import { VsCodeLmHandler } from "./providers/vscode-lm"
import { ApiStream } from "./transform/stream"

export interface SingleCompletionHandler {
    completePrompt(prompt: string): Promise<string>
}

export interface ApiHandler {
    createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
    getModel(): { id: string; info: ModelInfo }
}

export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
    const { apiProvider, ...options } = configuration
    switch (apiProvider) {
        case "anthropic":
            return new AnthropicHandler(options)
        case "glama":
            return new GlamaHandler(options)
        case "openrouter":
            return new OpenRouterHandler(options)
        case "bedrock":
            return new AwsBedrockHandler(options)
        case "vertex":
            return new VertexHandler(options)
        case "openai":
            return new OpenAiHandler(options)
        case "ollama":
            return new OllamaHandler(options)
        case "lmstudio":
            return new LmStudioHandler(options)
        case "gemini":
            return new GeminiHandler(options)
        case "openai-native":
            return new OpenAiNativeHandler(options)
        case "deepseek":
            return new DeepSeekHandler(options)
-       case "vscode-lm":
-           return new VsCodeLmHandler(options)
        default:
            return new AnthropicHandler(options)
    }
}
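For reference, a minimal usage sketch of the factory above. The import path, the ApiConfiguration fields used here (apiProvider, apiKey), and the assumption that ApiStream is async-iterable are illustrative rather than taken from this diff:

import { buildApiHandler } from "../api" // assumed path to the module above

async function runPrompt() {
    const handler = buildApiHandler({
        apiProvider: "anthropic",
        apiKey: "sk-ant-...", // placeholder credential
    })

    console.log("Using model:", handler.getModel().id)

    // createMessage returns an ApiStream; iterate it to consume the response chunks.
    for await (const chunk of handler.createMessage("You are a helpful assistant.", [
        { role: "user", content: "Hello!" },
    ])) {
        console.log(chunk)
    }
}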

File diff suppressed because it is too large

View File

@@ -5,205 +5,205 @@ import * as vscode from 'vscode';
 * Safely converts a value into a plain object.
 */
function asObjectSafe(value: any): object {
    // Handle null/undefined
    if (!value) {
        return {};
    }

    try {
        // Handle strings that might be JSON
        if (typeof value === 'string') {
            return JSON.parse(value);
        }

        // Handle pre-existing objects
        if (typeof value === 'object') {
            return Object.assign({}, value);
        }

        return {};
    }
    catch (error) {
        console.warn('Cline <Language Model API>: Failed to parse object:', error);
        return {};
    }
}

export function convertToVsCodeLmMessages(anthropicMessages: Anthropic.Messages.MessageParam[]): vscode.LanguageModelChatMessage[] {
    const vsCodeLmMessages: vscode.LanguageModelChatMessage[] = [];

    for (const anthropicMessage of anthropicMessages) {
        // Handle simple string messages
        if (typeof anthropicMessage.content === "string") {
            vsCodeLmMessages.push(
                anthropicMessage.role === "assistant"
                    ? vscode.LanguageModelChatMessage.Assistant(anthropicMessage.content)
                    : vscode.LanguageModelChatMessage.User(anthropicMessage.content)
            );
            continue;
        }

        // Handle complex message structures
        switch (anthropicMessage.role) {
            case "user": {
                const { nonToolMessages, toolMessages } = anthropicMessage.content.reduce<{
                    nonToolMessages: (Anthropic.TextBlockParam | Anthropic.ImageBlockParam)[];
                    toolMessages: Anthropic.ToolResultBlockParam[];
                }>(
                    (acc, part) => {
                        if (part.type === "tool_result") {
                            acc.toolMessages.push(part);
                        }
                        else if (part.type === "text" || part.type === "image") {
                            acc.nonToolMessages.push(part);
                        }
                        return acc;
                    },
                    { nonToolMessages: [], toolMessages: [] },
                );

                // Process tool messages first then non-tool messages
                const contentParts = [
                    // Convert tool messages to ToolResultParts
                    ...toolMessages.map((toolMessage) => {
                        // Process tool result content into TextParts
                        const toolContentParts: vscode.LanguageModelTextPart[] = (
                            typeof toolMessage.content === "string"
                                ? [new vscode.LanguageModelTextPart(toolMessage.content)]
                                : (
                                    toolMessage.content?.map((part) => {
                                        if (part.type === "image") {
                                            return new vscode.LanguageModelTextPart(
                                                `[Image (${part.source?.type || 'Unknown source-type'}): ${part.source?.media_type || 'unknown media-type'} not supported by VSCode LM API]`
                                            );
                                        }
                                        return new vscode.LanguageModelTextPart(part.text);
                                    })
                                    ?? [new vscode.LanguageModelTextPart("")]
                                )
                        );

                        return new vscode.LanguageModelToolResultPart(
                            toolMessage.tool_use_id,
                            toolContentParts
                        );
                    }),

                    // Convert non-tool messages to TextParts after tool messages
                    ...nonToolMessages.map((part) => {
                        if (part.type === "image") {
                            return new vscode.LanguageModelTextPart(
                                `[Image (${part.source?.type || 'Unknown source-type'}): ${part.source?.media_type || 'unknown media-type'} not supported by VSCode LM API]`
                            );
                        }
                        return new vscode.LanguageModelTextPart(part.text);
                    })
                ];

                // Add single user message with all content parts
                vsCodeLmMessages.push(vscode.LanguageModelChatMessage.User(contentParts));
                break;
            }

            case "assistant": {
                const { nonToolMessages, toolMessages } = anthropicMessage.content.reduce<{
                    nonToolMessages: (Anthropic.TextBlockParam | Anthropic.ImageBlockParam)[];
                    toolMessages: Anthropic.ToolUseBlockParam[];
                }>(
                    (acc, part) => {
                        if (part.type === "tool_use") {
                            acc.toolMessages.push(part);
                        }
                        else if (part.type === "text" || part.type === "image") {
                            acc.nonToolMessages.push(part);
                        }
                        return acc;
                    },
                    { nonToolMessages: [], toolMessages: [] },
                );

                // Process tool messages first then non-tool messages
                const contentParts = [
                    // Convert tool messages to ToolCallParts first
                    ...toolMessages.map((toolMessage) =>
                        new vscode.LanguageModelToolCallPart(
                            toolMessage.id,
                            toolMessage.name,
                            asObjectSafe(toolMessage.input)
                        )
                    ),

                    // Convert non-tool messages to TextParts after tool messages
                    ...nonToolMessages.map((part) => {
                        if (part.type === "image") {
                            return new vscode.LanguageModelTextPart("[Image generation not supported by VSCode LM API]");
                        }
                        return new vscode.LanguageModelTextPart(part.text);
                    })
                ];

                // Add the assistant message to the list of messages
                vsCodeLmMessages.push(vscode.LanguageModelChatMessage.Assistant(contentParts));
                break;
            }
        }
    }

    return vsCodeLmMessages;
}

export function convertToAnthropicRole(vsCodeLmMessageRole: vscode.LanguageModelChatMessageRole): string | null {
    switch (vsCodeLmMessageRole) {
        case vscode.LanguageModelChatMessageRole.Assistant:
            return "assistant";
        case vscode.LanguageModelChatMessageRole.User:
            return "user";
        default:
            return null;
    }
}

export async function convertToAnthropicMessage(vsCodeLmMessage: vscode.LanguageModelChatMessage): Promise<Anthropic.Messages.Message> {
    const anthropicRole: string | null = convertToAnthropicRole(vsCodeLmMessage.role);
    if (anthropicRole !== "assistant") {
        throw new Error("Cline <Language Model API>: Only assistant messages are supported.");
    }

    return {
        id: crypto.randomUUID(),
        type: "message",
        model: "vscode-lm",
        role: anthropicRole,
        content: (
            vsCodeLmMessage.content
                .map((part): Anthropic.ContentBlock | null => {
                    if (part instanceof vscode.LanguageModelTextPart) {
                        return {
                            type: "text",
                            text: part.value
                        };
                    }

                    if (part instanceof vscode.LanguageModelToolCallPart) {
                        return {
                            type: "tool_use",
                            id: part.callId || crypto.randomUUID(),
                            name: part.name,
                            input: asObjectSafe(part.input)
                        };
                    }

                    return null;
                })
                .filter(
                    (part): part is Anthropic.ContentBlock => part !== null
                )
        ),
        stop_reason: null,
        stop_sequence: null,
        usage: {
            input_tokens: 0,
            output_tokens: 0,
        }
    };
}
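To make the conversion concrete, a small sketch of convertToVsCodeLmMessages on an Anthropic-style exchange; the import paths and the tool id/name are placeholders:

import { Anthropic } from "@anthropic-ai/sdk";
import { convertToVsCodeLmMessages } from "./vscode-lm-format"; // assumed path

const anthropicMessages: Anthropic.Messages.MessageParam[] = [
    { role: "user", content: "List the files in src/" },
    {
        role: "assistant",
        content: [{ type: "tool_use", id: "toolu_01", name: "list_files", input: { path: "src" } }],
    },
    {
        role: "user",
        content: [{ type: "tool_result", tool_use_id: "toolu_01", content: "api/\nshared/\nextension.ts" }],
    },
];

// Produces: User (text), Assistant (ToolCallPart), User (ToolResultPart)
const lmMessages = convertToVsCodeLmMessages(anthropicMessages);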

View File

@@ -45,7 +45,6 @@ type SecretKey =
    | "geminiApiKey"
    | "openAiNativeApiKey"
    | "deepSeekApiKey"
type GlobalStateKey =
    | "apiProvider"
    | "apiModelId"
@@ -482,72 +481,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
                break
            case "apiConfiguration":
                if (message.apiConfiguration) {
-                   const {
-                       apiProvider,
-                       apiModelId,
-                       apiKey,
-                       glamaModelId,
-                       glamaModelInfo,
-                       glamaApiKey,
-                       openRouterApiKey,
-                       awsAccessKey,
-                       awsSecretKey,
-                       awsSessionToken,
-                       awsRegion,
-                       awsUseCrossRegionInference,
-                       vertexProjectId,
-                       vertexRegion,
-                       openAiBaseUrl,
-                       openAiApiKey,
-                       openAiModelId,
-                       ollamaModelId,
-                       ollamaBaseUrl,
-                       lmStudioModelId,
-                       lmStudioBaseUrl,
-                       anthropicBaseUrl,
-                       geminiApiKey,
-                       openAiNativeApiKey,
-                       azureApiVersion,
-                       openAiStreamingEnabled,
-                       openRouterModelId,
-                       openRouterModelInfo,
-                       openRouterUseMiddleOutTransform,
-                       vsCodeLmModelSelector,
-                   } = message.apiConfiguration
-                   await this.updateGlobalState("apiProvider", apiProvider)
-                   await this.updateGlobalState("apiModelId", apiModelId)
-                   await this.storeSecret("apiKey", apiKey)
-                   await this.updateGlobalState("glamaModelId", glamaModelId)
-                   await this.updateGlobalState("glamaModelInfo", glamaModelInfo)
-                   await this.storeSecret("glamaApiKey", glamaApiKey)
-                   await this.storeSecret("openRouterApiKey", openRouterApiKey)
-                   await this.storeSecret("awsAccessKey", awsAccessKey)
-                   await this.storeSecret("awsSecretKey", awsSecretKey)
-                   await this.storeSecret("awsSessionToken", awsSessionToken)
-                   await this.updateGlobalState("awsRegion", awsRegion)
-                   await this.updateGlobalState("awsUseCrossRegionInference", awsUseCrossRegionInference)
-                   await this.updateGlobalState("vertexProjectId", vertexProjectId)
-                   await this.updateGlobalState("vertexRegion", vertexRegion)
-                   await this.updateGlobalState("openAiBaseUrl", openAiBaseUrl)
-                   await this.storeSecret("openAiApiKey", openAiApiKey)
-                   await this.updateGlobalState("openAiModelId", openAiModelId)
-                   await this.updateGlobalState("ollamaModelId", ollamaModelId)
-                   await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
-                   await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
-                   await this.updateGlobalState("lmStudioBaseUrl", lmStudioBaseUrl)
-                   await this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl)
-                   await this.storeSecret("geminiApiKey", geminiApiKey)
-                   await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
-                   await this.storeSecret("deepSeekApiKey", message.apiConfiguration.deepSeekApiKey)
-                   await this.updateGlobalState("azureApiVersion", azureApiVersion)
-                   await this.updateGlobalState("openAiStreamingEnabled", openAiStreamingEnabled)
-                   await this.updateGlobalState("openRouterModelId", openRouterModelId)
-                   await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
-                   await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform)
-                   await this.updateGlobalState("vsCodeLmModelSelector", vsCodeLmModelSelector)
-                   if (this.cline) {
-                       this.cline.api = buildApiHandler(message.apiConfiguration)
-                   }
                    await this.updateApiConfiguration(message.apiConfiguration)
                }
                await this.postStateToWebview()
@@ -1016,6 +949,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
            openRouterModelId,
            openRouterModelInfo,
            openRouterUseMiddleOutTransform,
+           vsCodeLmModelSelector,
        } = apiConfiguration
        await this.updateGlobalState("apiProvider", apiProvider)
        await this.updateGlobalState("apiModelId", apiModelId)
@@ -1047,6 +981,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
        await this.updateGlobalState("openRouterModelId", openRouterModelId)
        await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
        await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform)
+       await this.updateGlobalState("vsCodeLmModelSelector", vsCodeLmModelSelector)
        if (this.cline) {
            this.cline.api = buildApiHandler(apiConfiguration)
        }
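For context, the webview message handled above looks roughly like the sketch below; the provider and selector values are placeholders, not taken from this diff:

// Hypothetical payload posted by the settings webview. updateApiConfiguration
// persists each field to global state or secret storage as shown above.
const message = {
    type: "apiConfiguration",
    apiConfiguration: {
        apiProvider: "vscode-lm",
        vsCodeLmModelSelector: { vendor: "copilot", family: "gpt-4o" }, // placeholder selector
    },
}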

View File

@@ -36,7 +36,7 @@ export function activate(context: vscode.ExtensionContext) {
        context.globalState.update('allowedCommands', defaultCommands);
    }

-   const sidebarProvider = new ClineProvider(context, outputChannel);
+   const sidebarProvider = new ClineProvider(context, outputChannel)
    context.subscriptions.push(
        vscode.window.registerWebviewViewProvider(ClineProvider.sideBarId, sidebarProvider, {

View File

@@ -7,7 +7,6 @@ import { GitCommit } from "../utils/git"
// webview will hold state
export interface ExtensionMessage {
    type:
        | "action"
        | "state"
@@ -26,15 +25,15 @@ export interface ExtensionMessage {
        | "commitSearchResults"
        | "listApiConfig"
        | "vsCodeLmModels"
        | "vsCodeLmApiAvailable"
        | "requestVsCodeLmModels"
    text?: string
    action?:
        | "chatButtonClicked"
        | "mcpButtonClicked"
        | "settingsButtonClicked"
        | "historyButtonClicked"
        | "didBecomeVisible"
    invoke?: "sendMessage" | "primaryButtonClick" | "secondaryButtonClick"
    state?: ExtensionState
    images?: string[]
@@ -131,14 +130,14 @@ export type ClineSay =
export interface ClineSayTool {
    tool:
        | "editedExistingFile"
        | "appliedDiff"
        | "newFileCreated"
        | "readFile"
        | "listFilesTopLevel"
        | "listFilesRecursive"
        | "listCodeDefinitionNames"
        | "searchFiles"
    path?: string
    diff?: string
    content?: string

View File

@@ -65,7 +65,7 @@ export interface ModelInfo
    contextWindow: number
    supportsImages?: boolean
    supportsComputerUse?: boolean
-   supportsPromptCache: boolean
+   supportsPromptCache: boolean // this value is hardcoded for now
    inputPrice?: number
    outputPrice?: number
    cacheWritesPrice?: number
@@ -124,24 +124,24 @@ export const anthropicModels = {
// AWS Bedrock
// https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html
export interface MessageContent {
    type: 'text' | 'image' | 'video' | 'tool_use' | 'tool_result';
    text?: string;
    source?: {
        type: 'base64';
        data: string | Uint8Array; // string for Anthropic, Uint8Array for Bedrock
        media_type: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp';
    };
    // Video specific fields
    format?: string;
    s3Location?: {
        uri: string;
        bucketOwner?: string;
    };
    // Tool use and result fields
    toolUseId?: string;
    name?: string;
    input?: any;
    output?: any; // Used for tool_result type
}

export type BedrockModelId = keyof typeof bedrockModels
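As a quick illustration of the MessageContent union above, two hypothetical blocks (the byte payload and tool id are placeholders):

const imageBlock: MessageContent = {
    type: 'image',
    source: {
        type: 'base64',
        data: new Uint8Array([0x89, 0x50, 0x4e, 0x47]), // Uint8Array for Bedrock; a base64 string for Anthropic
        media_type: 'image/png',
    },
};

const toolResultBlock: MessageContent = {
    type: 'tool_result',
    toolUseId: 'toolu_01', // placeholder id
    output: { files: ['extension.ts'] },
};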
@@ -235,7 +235,7 @@ export const bedrockModels = {
        inputPrice: 0.25,
        outputPrice: 1.25,
    },
    "meta.llama3-2-90b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: true,
@@ -244,7 +244,7 @@ export const bedrockModels = {
        inputPrice: 0.72,
        outputPrice: 0.72,
    },
    "meta.llama3-2-11b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: true,
@@ -253,7 +253,7 @@ export const bedrockModels = {
        inputPrice: 0.16,
        outputPrice: 0.16,
    },
    "meta.llama3-2-3b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -262,7 +262,7 @@ export const bedrockModels = {
        inputPrice: 0.15,
        outputPrice: 0.15,
    },
    "meta.llama3-2-1b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -271,7 +271,7 @@ export const bedrockModels = {
        inputPrice: 0.1,
        outputPrice: 0.1,
    },
    "meta.llama3-1-405b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -280,7 +280,7 @@ export const bedrockModels = {
        inputPrice: 2.4,
        outputPrice: 2.4,
    },
    "meta.llama3-1-70b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -289,7 +289,7 @@ export const bedrockModels = {
        inputPrice: 0.72,
        outputPrice: 0.72,
    },
    "meta.llama3-1-8b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 8_000,
        supportsImages: false,
@@ -298,8 +298,8 @@ export const bedrockModels = {
        inputPrice: 0.22,
        outputPrice: 0.22,
    },
    "meta.llama3-70b-instruct-v1:0": {
        maxTokens: 2048,
        contextWindow: 8_000,
        supportsImages: false,
        supportsComputerUse: false,
@@ -307,8 +307,8 @@ export const bedrockModels = {
        inputPrice: 2.65,
        outputPrice: 3.5,
    },
    "meta.llama3-8b-instruct-v1:0": {
        maxTokens: 2048,
        contextWindow: 4_000,
        supportsImages: false,
        supportsComputerUse: false,
@@ -548,3 +548,4 @@ export const deepSeekModels = {
// https://learn.microsoft.com/en-us/azure/ai-services/openai/api-version-deprecation
// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#api-specs
export const azureOpenAiDefaultApiVersion = "2024-08-01-preview"

View File

@@ -3,12 +3,12 @@ import { LanguageModelChatSelector } from 'vscode';
export const SELECTOR_SEPARATOR = '/';

export function stringifyVsCodeLmModelSelector(selector: LanguageModelChatSelector): string {
    return [
        selector.vendor,
        selector.family,
        selector.version,
        selector.id
    ]
        .filter(Boolean)
        .join(SELECTOR_SEPARATOR);
}
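A quick usage sketch of the helper above; the selector values are placeholders:

// Undefined fields are dropped by .filter(Boolean), so only vendor and family end up in the id.
const modelId = stringifyVsCodeLmModelSelector({ vendor: 'copilot', family: 'gpt-4o' });
// => 'copilot/gpt-4o'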