Spacing adjustments

Matt Rubens
2025-01-08 17:21:23 -05:00
parent d5fd2bbb82
commit 7137f8c528
8 changed files with 787 additions and 860 deletions

View File

@@ -25,7 +25,6 @@ export interface ApiHandler {
export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
    const { apiProvider, ...options } = configuration
    switch (apiProvider) {
        case "anthropic":
            return new AnthropicHandler(options)
@@ -49,8 +48,6 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
            return new OpenAiNativeHandler(options)
        case "deepseek":
            return new DeepSeekHandler(options)
-        case "vscode-lm":
-            return new VsCodeLmHandler(options)
        default:
            return new AnthropicHandler(options)
    }
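The hunk above appears to drop the "vscode-lm" branch from the provider factory. As a minimal sketch of the dispatch pattern buildApiHandler follows (the Sketch-prefixed names, handler classes, and option fields below are illustrative stand-ins, not the repository's real definitions):

// Minimal sketch of the factory pattern in buildApiHandler above. The handler
// classes and configuration fields here are placeholders, not the extension's
// real implementations.
interface SketchApiHandler {
    createMessage(prompt: string): Promise<string>
}

interface SketchApiConfiguration {
    apiProvider?: string
    apiKey?: string
    apiModelId?: string
}

class SketchAnthropicHandler implements SketchApiHandler {
    constructor(private readonly options: Omit<SketchApiConfiguration, "apiProvider">) {}
    async createMessage(prompt: string): Promise<string> {
        return `anthropic(${this.options.apiModelId ?? "default"}): ${prompt}`
    }
}

class SketchDeepSeekHandler implements SketchApiHandler {
    constructor(private readonly options: Omit<SketchApiConfiguration, "apiProvider">) {}
    async createMessage(prompt: string): Promise<string> {
        return `deepseek(${this.options.apiModelId ?? "default"}): ${prompt}`
    }
}

function sketchBuildApiHandler(configuration: SketchApiConfiguration): SketchApiHandler {
    // Split the provider id from the remaining options, then dispatch on it.
    const { apiProvider, ...options } = configuration
    switch (apiProvider) {
        case "deepseek":
            return new SketchDeepSeekHandler(options)
        case "anthropic":
        default:
            // Unknown providers fall back to the default handler, as in the diff.
            return new SketchAnthropicHandler(options)
    }
}

A caller passes the whole configuration object and receives a handler, e.g. sketchBuildApiHandler({ apiProvider: "deepseek" }).createMessage("hello").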

View File

@@ -50,23 +50,18 @@ export class VsCodeLmHandler implements ApiHandler, SingleCompletionHandler {
        try {
            // Listen for model changes and reset client
            this.disposable = vscode.workspace.onDidChangeConfiguration(event => {
                if (event.affectsConfiguration('lm')) {
                    try {
                        this.client = null;
                        this.ensureCleanState();
                    }
                    catch (error) {
                        console.error('Error during configuration change cleanup:', error);
                    }
                }
            });
        }
        catch (error) {
            // Ensure cleanup if constructor fails
            this.dispose();
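The constructor above wires a configuration listener and guards against a partially constructed instance. A reduced sketch of that pattern, using only documented VS Code API calls (onDidChangeConfiguration, affectsConfiguration, Disposable); the class shape and field names are placeholders rather than the handler's actual implementation:

import * as vscode from "vscode"

// Sketch of the listen-and-reset pattern in the constructor above: watch the
// "lm" configuration section, drop the cached client so it is rebuilt lazily,
// and dispose the listener if construction fails. The client type and class
// name are placeholders.
class SketchLmClientCache {
    private client: unknown | null = null
    private disposable?: vscode.Disposable

    constructor() {
        try {
            this.disposable = vscode.workspace.onDidChangeConfiguration((event) => {
                if (event.affectsConfiguration("lm")) {
                    try {
                        this.client = null // force re-selection on next use
                    } catch (error) {
                        console.error("Error during configuration change cleanup:", error)
                    }
                }
            })
        } catch (error) {
            // Ensure cleanup if the constructor fails, as the diff's comment notes.
            this.dispose()
            throw error
        }
    }

    dispose(): void {
        this.disposable?.dispose()
        this.disposable = undefined
        this.client = null
    }
}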

View File

@@ -45,7 +45,6 @@ type SecretKey =
| "geminiApiKey" | "geminiApiKey"
| "openAiNativeApiKey" | "openAiNativeApiKey"
| "deepSeekApiKey" | "deepSeekApiKey"
type GlobalStateKey = type GlobalStateKey =
| "apiProvider" | "apiProvider"
| "apiModelId" | "apiModelId"
@@ -482,72 +481,6 @@ export class ClineProvider implements vscode.WebviewViewProvider {
                    break
                case "apiConfiguration":
                    if (message.apiConfiguration) {
-                        const {
-                            apiProvider,
-                            apiModelId,
-                            apiKey,
-                            glamaModelId,
-                            glamaModelInfo,
-                            glamaApiKey,
-                            openRouterApiKey,
-                            awsAccessKey,
-                            awsSecretKey,
-                            awsSessionToken,
-                            awsRegion,
-                            awsUseCrossRegionInference,
-                            vertexProjectId,
-                            vertexRegion,
-                            openAiBaseUrl,
-                            openAiApiKey,
-                            openAiModelId,
-                            ollamaModelId,
-                            ollamaBaseUrl,
-                            lmStudioModelId,
-                            lmStudioBaseUrl,
-                            anthropicBaseUrl,
-                            geminiApiKey,
-                            openAiNativeApiKey,
-                            azureApiVersion,
-                            openAiStreamingEnabled,
-                            openRouterModelId,
-                            openRouterModelInfo,
-                            openRouterUseMiddleOutTransform,
-                            vsCodeLmModelSelector,
-                        } = message.apiConfiguration
-                        await this.updateGlobalState("apiProvider", apiProvider)
-                        await this.updateGlobalState("apiModelId", apiModelId)
-                        await this.storeSecret("apiKey", apiKey)
-                        await this.updateGlobalState("glamaModelId", glamaModelId)
-                        await this.updateGlobalState("glamaModelInfo", glamaModelInfo)
-                        await this.storeSecret("glamaApiKey", glamaApiKey)
-                        await this.storeSecret("openRouterApiKey", openRouterApiKey)
-                        await this.storeSecret("awsAccessKey", awsAccessKey)
-                        await this.storeSecret("awsSecretKey", awsSecretKey)
-                        await this.storeSecret("awsSessionToken", awsSessionToken)
-                        await this.updateGlobalState("awsRegion", awsRegion)
-                        await this.updateGlobalState("awsUseCrossRegionInference", awsUseCrossRegionInference)
-                        await this.updateGlobalState("vertexProjectId", vertexProjectId)
-                        await this.updateGlobalState("vertexRegion", vertexRegion)
-                        await this.updateGlobalState("openAiBaseUrl", openAiBaseUrl)
-                        await this.storeSecret("openAiApiKey", openAiApiKey)
-                        await this.updateGlobalState("openAiModelId", openAiModelId)
-                        await this.updateGlobalState("ollamaModelId", ollamaModelId)
-                        await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
-                        await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
-                        await this.updateGlobalState("lmStudioBaseUrl", lmStudioBaseUrl)
-                        await this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl)
-                        await this.storeSecret("geminiApiKey", geminiApiKey)
-                        await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
-                        await this.storeSecret("deepSeekApiKey", message.apiConfiguration.deepSeekApiKey)
-                        await this.updateGlobalState("azureApiVersion", azureApiVersion)
-                        await this.updateGlobalState("openAiStreamingEnabled", openAiStreamingEnabled)
-                        await this.updateGlobalState("openRouterModelId", openRouterModelId)
-                        await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
-                        await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform)
-                        await this.updateGlobalState("vsCodeLmModelSelector", vsCodeLmModelSelector)
-                        if (this.cline) {
-                            this.cline.api = buildApiHandler(message.apiConfiguration)
-                        }
                        await this.updateApiConfiguration(message.apiConfiguration)
                    }
                    await this.postStateToWebview()
@@ -1016,6 +949,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
            openRouterModelId,
            openRouterModelInfo,
            openRouterUseMiddleOutTransform,
+            vsCodeLmModelSelector,
        } = apiConfiguration
        await this.updateGlobalState("apiProvider", apiProvider)
        await this.updateGlobalState("apiModelId", apiModelId)
@@ -1047,6 +981,7 @@ export class ClineProvider implements vscode.WebviewViewProvider {
await this.updateGlobalState("openRouterModelId", openRouterModelId) await this.updateGlobalState("openRouterModelId", openRouterModelId)
await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo) await this.updateGlobalState("openRouterModelInfo", openRouterModelInfo)
await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform) await this.updateGlobalState("openRouterUseMiddleOutTransform", openRouterUseMiddleOutTransform)
await this.updateGlobalState("vsCodeLmModelSelector", vsCodeLmModelSelector)
if (this.cline) { if (this.cline) {
this.cline.api = buildApiHandler(apiConfiguration) this.cline.api = buildApiHandler(apiConfiguration)
} }
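Taken together, the ClineProvider hunks replace the inline destructure-and-persist block in the "apiConfiguration" message case with a single call to updateApiConfiguration, which now also persists vsCodeLmModelSelector. A rough sketch of the resulting shape (updateGlobalState, storeSecret, and updateApiConfiguration are names taken from the diff; the types and storage backends are placeholders):

// Illustrative sketch of the consolidation: the webview message handler no
// longer persists every field inline and instead defers to a single
// updateApiConfiguration method. Types and storage backends are placeholders.
type SketchProviderConfig = {
    apiProvider?: string
    apiModelId?: string
    apiKey?: string
    vsCodeLmModelSelector?: unknown
}

class SketchProvider {
    private readonly globalState = new Map<string, unknown>()
    private readonly secrets = new Map<string, string | undefined>()

    async handleWebviewMessage(message: { type: string; apiConfiguration?: SketchProviderConfig }) {
        if (message.type === "apiConfiguration" && message.apiConfiguration) {
            // One call replaces the long run of updateGlobalState/storeSecret calls.
            await this.updateApiConfiguration(message.apiConfiguration)
        }
    }

    private async updateApiConfiguration(config: SketchProviderConfig) {
        const { apiProvider, apiModelId, apiKey, vsCodeLmModelSelector } = config
        await this.updateGlobalState("apiProvider", apiProvider)
        await this.updateGlobalState("apiModelId", apiModelId)
        await this.storeSecret("apiKey", apiKey)
        // Per the hunks above, the VS Code LM model selector is persisted here too.
        await this.updateGlobalState("vsCodeLmModelSelector", vsCodeLmModelSelector)
    }

    private async updateGlobalState(key: string, value: unknown) {
        this.globalState.set(key, value)
    }

    private async storeSecret(key: string, value?: string) {
        this.secrets.set(key, value)
    }
}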

View File

@@ -36,7 +36,7 @@ export function activate(context: vscode.ExtensionContext) {
        context.globalState.update('allowedCommands', defaultCommands);
    }
-    const sidebarProvider = new ClineProvider(context, outputChannel);
+    const sidebarProvider = new ClineProvider(context, outputChannel)
    context.subscriptions.push(
        vscode.window.registerWebviewViewProvider(ClineProvider.sideBarId, sidebarProvider, {
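For context, the registration call above follows the standard VS Code webview-view pattern. A self-contained sketch; the view id, the provider stub, and the retainContextWhenHidden option are assumptions for illustration, not details read from this commit:

import * as vscode from "vscode"

// Sketch of the registration pattern in the hunk above. The provider is
// reduced to a stub and the view id is a placeholder.
class SketchSidebarProvider implements vscode.WebviewViewProvider {
    static readonly sideBarId = "sketch.sidebarView" // placeholder view id

    resolveWebviewView(view: vscode.WebviewView): void {
        view.webview.options = { enableScripts: true }
        view.webview.html = "<html><body>sidebar placeholder</body></html>"
    }
}

export function activate(context: vscode.ExtensionContext) {
    const sidebarProvider = new SketchSidebarProvider()
    context.subscriptions.push(
        vscode.window.registerWebviewViewProvider(SketchSidebarProvider.sideBarId, sidebarProvider, {
            // Assumption for illustration: keep the webview's state when hidden.
            webviewOptions: { retainContextWhenHidden: true },
        }),
    )
}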

View File

@@ -7,7 +7,6 @@ import { GitCommit } from "../utils/git"
// webview will hold state
export interface ExtensionMessage {
    type:
        | "action"
        | "state"

View File

@@ -65,7 +65,7 @@ export interface ModelInfo {
    contextWindow: number
    supportsImages?: boolean
    supportsComputerUse?: boolean
-    supportsPromptCache: boolean
+    supportsPromptCache: boolean // this value is hardcoded for now
    inputPrice?: number
    outputPrice?: number
    cacheWritesPrice?: number
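ModelInfo is the per-model metadata shape used by the provider tables below. As a sketch of how its price fields are typically consumed, assuming (as the figures in this file suggest, but the diff does not state) that prices are USD per million tokens:

// Reduced copy of the ModelInfo shape above plus an illustrative cost helper.
// The per-million-token unit is an assumption based on the price figures.
interface SketchModelInfo {
    maxTokens?: number
    contextWindow: number
    supportsImages?: boolean
    supportsComputerUse?: boolean
    supportsPromptCache: boolean // hardcoded per model, per the comment above
    inputPrice?: number
    outputPrice?: number
    cacheWritesPrice?: number
}

function sketchEstimateCostUsd(info: SketchModelInfo, inputTokens: number, outputTokens: number): number {
    const inputCost = ((info.inputPrice ?? 0) * inputTokens) / 1_000_000
    const outputCost = ((info.outputPrice ?? 0) * outputTokens) / 1_000_000
    return inputCost + outputCost
}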
@@ -235,7 +235,7 @@ export const bedrockModels = {
        inputPrice: 0.25,
        outputPrice: 1.25,
    },
    "meta.llama3-2-90b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: true,
@@ -244,7 +244,7 @@ export const bedrockModels = {
        inputPrice: 0.72,
        outputPrice: 0.72,
    },
    "meta.llama3-2-11b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: true,
@@ -253,7 +253,7 @@ export const bedrockModels = {
        inputPrice: 0.16,
        outputPrice: 0.16,
    },
    "meta.llama3-2-3b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -262,7 +262,7 @@ export const bedrockModels = {
        inputPrice: 0.15,
        outputPrice: 0.15,
    },
    "meta.llama3-2-1b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -271,7 +271,7 @@ export const bedrockModels = {
        inputPrice: 0.1,
        outputPrice: 0.1,
    },
    "meta.llama3-1-405b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -280,7 +280,7 @@ export const bedrockModels = {
        inputPrice: 2.4,
        outputPrice: 2.4,
    },
    "meta.llama3-1-70b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 128_000,
        supportsImages: false,
@@ -289,7 +289,7 @@ export const bedrockModels = {
        inputPrice: 0.72,
        outputPrice: 0.72,
    },
    "meta.llama3-1-8b-instruct-v1:0": {
        maxTokens: 8192,
        contextWindow: 8_000,
        supportsImages: false,
@@ -298,8 +298,8 @@ export const bedrockModels = {
        inputPrice: 0.22,
        outputPrice: 0.22,
    },
    "meta.llama3-70b-instruct-v1:0": {
        maxTokens: 2048,
        contextWindow: 8_000,
        supportsImages: false,
        supportsComputerUse: false,
@@ -307,8 +307,8 @@ export const bedrockModels = {
        inputPrice: 2.65,
        outputPrice: 3.5,
    },
    "meta.llama3-8b-instruct-v1:0": {
        maxTokens: 2048,
        contextWindow: 4_000,
        supportsImages: false,
        supportsComputerUse: false,
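The bedrockModels hunks above appear to touch only spacing around these entries; each key is a Bedrock model id mapped to ModelInfo-style metadata. A small illustrative helper, not code from this repository, showing what the two limit fields in those entries are usually taken to mean:

// Illustrative helper: maxTokens caps a single completion, and contextWindow
// typically bounds prompt plus completion together. Not code from this
// repository.
interface SketchTokenLimits {
    maxTokens?: number
    contextWindow: number
}

function sketchClampCompletionTokens(limits: SketchTokenLimits, promptTokens: number, requested: number): number {
    const remainingWindow = Math.max(0, limits.contextWindow - promptTokens)
    const ceiling = limits.maxTokens ?? remainingWindow
    return Math.max(0, Math.min(requested, ceiling, remainingWindow))
}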
@@ -548,3 +548,4 @@ export const deepSeekModels = {
// https://learn.microsoft.com/en-us/azure/ai-services/openai/api-version-deprecation
// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#api-specs
export const azureOpenAiDefaultApiVersion = "2024-08-01-preview"