diff --git a/src/api/providers/openai.ts b/src/api/providers/openai.ts index 13922a4..d71a51f 100644 --- a/src/api/providers/openai.ts +++ b/src/api/providers/openai.ts @@ -108,7 +108,7 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler { getModel(): { id: string; info: ModelInfo } { return { id: this.options.openAiModelId ?? "", - info: openAiModelInfoSaneDefaults, + info: this.options.openAiCustomModelInfo ?? openAiModelInfoSaneDefaults, } } diff --git a/src/core/webview/ClineProvider.ts b/src/core/webview/ClineProvider.ts index cce08e2..0fd2dd5 100644 --- a/src/core/webview/ClineProvider.ts +++ b/src/core/webview/ClineProvider.ts @@ -69,6 +69,7 @@ type GlobalStateKey = | "taskHistory" | "openAiBaseUrl" | "openAiModelId" + | "openAiCustomModelInfo" | "ollamaModelId" | "ollamaBaseUrl" | "lmStudioModelId" @@ -1208,6 +1209,7 @@ export class ClineProvider implements vscode.WebviewViewProvider { openAiBaseUrl, openAiApiKey, openAiModelId, + openAiCustomModelInfo, ollamaModelId, ollamaBaseUrl, lmStudioModelId, @@ -1241,6 +1243,7 @@ export class ClineProvider implements vscode.WebviewViewProvider { await this.updateGlobalState("openAiBaseUrl", openAiBaseUrl) await this.storeSecret("openAiApiKey", openAiApiKey) await this.updateGlobalState("openAiModelId", openAiModelId) + await this.updateGlobalState("openAiCustomModelInfo", openAiCustomModelInfo) await this.updateGlobalState("ollamaModelId", ollamaModelId) await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl) await this.updateGlobalState("lmStudioModelId", lmStudioModelId) @@ -1857,6 +1860,7 @@ export class ClineProvider implements vscode.WebviewViewProvider { openAiBaseUrl, openAiApiKey, openAiModelId, + openAiCustomModelInfo, ollamaModelId, ollamaBaseUrl, lmStudioModelId, @@ -1920,6 +1924,7 @@ export class ClineProvider implements vscode.WebviewViewProvider { this.getGlobalState("openAiBaseUrl") as Promise, this.getSecret("openAiApiKey") as Promise, 
this.getGlobalState("openAiModelId") as Promise, + this.getGlobalState("openAiCustomModelInfo") as Promise, this.getGlobalState("ollamaModelId") as Promise, this.getGlobalState("ollamaBaseUrl") as Promise, this.getGlobalState("lmStudioModelId") as Promise, @@ -2000,6 +2005,7 @@ export class ClineProvider implements vscode.WebviewViewProvider { openAiBaseUrl, openAiApiKey, openAiModelId, + openAiCustomModelInfo, ollamaModelId, ollamaBaseUrl, lmStudioModelId, diff --git a/src/shared/WebviewMessage.ts b/src/shared/WebviewMessage.ts index ce05976..5705378 100644 --- a/src/shared/WebviewMessage.ts +++ b/src/shared/WebviewMessage.ts @@ -76,6 +76,7 @@ export interface WebviewMessage { | "autoApprovalEnabled" | "updateCustomMode" | "deleteCustomMode" + | "setopenAiCustomModelInfo" text?: string disabled?: boolean askResponse?: ClineAskResponse diff --git a/src/shared/api.ts b/src/shared/api.ts index 8f65c67..8d7b919 100644 --- a/src/shared/api.ts +++ b/src/shared/api.ts @@ -38,6 +38,7 @@ export interface ApiHandlerOptions { openAiBaseUrl?: string openAiApiKey?: string openAiModelId?: string + openAiCustomModelInfo?: ModelInfo ollamaModelId?: string ollamaBaseUrl?: string lmStudioModelId?: string diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx index 561ae5b..13e7a57 100644 --- a/webview-ui/src/components/settings/ApiOptions.tsx +++ b/webview-ui/src/components/settings/ApiOptions.tsx @@ -1,4 +1,4 @@ -import { Checkbox, Dropdown } from "vscrui" +import { Checkbox, Dropdown, Pane } from "vscrui" import type { DropdownOption } from "vscrui" import { VSCodeLink, VSCodeRadio, VSCodeRadioGroup, VSCodeTextField } from "@vscode/webview-ui-toolkit/react" import { Fragment, memo, useCallback, useEffect, useMemo, useState } from "react" @@ -45,7 +45,7 @@ interface ApiOptionsProps { } const ApiOptions = ({ apiErrorMessage, modelIdErrorMessage }: ApiOptionsProps) => { - const { apiConfiguration, setApiConfiguration, 
uriScheme, handleInputChange } = useExtensionState() + const { apiConfiguration, uriScheme, handleInputChange } = useExtensionState() const [ollamaModels, setOllamaModels] = useState([]) const [lmStudioModels, setLmStudioModels] = useState([]) const [vsCodeLmModels, setVsCodeLmModels] = useState([]) @@ -166,7 +166,11 @@ const ApiOptions = ({ apiErrorMessage, modelIdErrorMessage }: ApiOptionsProps) = onChange={(checked: boolean) => { setAnthropicBaseUrlSelected(checked) if (!checked) { - setApiConfiguration({ ...apiConfiguration, anthropicBaseUrl: "" }) + handleInputChange("anthropicBaseUrl")({ + target: { + value: "", + }, + }) } }}> Use custom base URL @@ -537,7 +541,11 @@ const ApiOptions = ({ apiErrorMessage, modelIdErrorMessage }: ApiOptionsProps) = onChange={(checked: boolean) => { setAzureApiVersionSelected(checked) if (!checked) { - setApiConfiguration({ ...apiConfiguration, azureApiVersion: "" }) + handleInputChange("azureApiVersion")({ + target: { + value: "", + }, + }) } }}> Set Azure API version @@ -550,6 +558,432 @@ const ApiOptions = ({ apiErrorMessage, modelIdErrorMessage }: ApiOptionsProps) = placeholder={`Default: ${azureOpenAiDefaultApiVersion}`} /> )} + +
+ + handleInputChange("openAiCustomModelInfo")({ + target: { value: openAiModelInfoSaneDefaults }, + }), + }, + ]}> +
+

+ Configure the capabilities and pricing for your custom OpenAI-compatible model.
+ Be careful with the model capabilities, as they can affect how Roo Code works. +

+ + {/* Capabilities Section */} +
+ + Model Capabilities + +
+
+ { + const value = apiConfiguration?.openAiCustomModelInfo?.maxTokens + if (!value) return "var(--vscode-input-border)" + return value > 0 + ? "var(--vscode-charts-green)" + : "var(--vscode-errorForeground)" + })(), + }} + title="Maximum number of tokens the model can generate in a single response" + onChange={(e: any) => { + const value = parseInt(e.target.value) + handleInputChange("openAiCustomModelInfo")({ + target: { + value: { + ...(apiConfiguration?.openAiCustomModelInfo || + openAiModelInfoSaneDefaults), + maxTokens: isNaN(value) ? undefined : value, + }, + }, + }) + }} + placeholder="e.g. 4096"> + Max Output Tokens + +
+ + + Maximum number of tokens the model can generate in a response.
+ (-1 means it depends on the server) +
+
+
+ +
+ { + const value = apiConfiguration?.openAiCustomModelInfo?.contextWindow + if (!value) return "var(--vscode-input-border)" + return value > 0 + ? "var(--vscode-charts-green)" + : "var(--vscode-errorForeground)" + })(), + }} + title="Total number of tokens (input + output) the model can process in a single request" + onChange={(e: any) => { + const parsed = parseInt(e.target.value) + handleInputChange("openAiCustomModelInfo")({ + target: { + value: { + ...(apiConfiguration?.openAiCustomModelInfo || + openAiModelInfoSaneDefaults), + contextWindow: + e.target.value === "" + ? undefined + : isNaN(parsed) + ? openAiModelInfoSaneDefaults.contextWindow + : parsed, + }, + }, + }) + }} + placeholder="e.g. 128000"> + Context Window Size + +
+ + + Total tokens (input + output) the model can process. This will help Roo + Code run correctly. + +
+
+ +
+ + Model Features + + +
+
+
+ { + handleInputChange("openAiCustomModelInfo")({ + target: { + value: { + ...(apiConfiguration?.openAiCustomModelInfo || + openAiModelInfoSaneDefaults), + supportsImages: checked, + }, + }, + }) + }}> + Image Support + + +
+

+ Allows the model to analyze and understand images, essential for + visual code assistance +

+
+ +
+
+ { + handleInputChange("openAiCustomModelInfo")({ + target: { + value: { + ...(apiConfiguration?.openAiCustomModelInfo || + openAiModelInfoSaneDefaults), + supportsComputerUse: checked, + }, + }, + }) + }}> + Computer Use + + +
+

+ Enable this if the model supports computer use, like Claude 3.5 Sonnet

+
+
+
+
+
+ + {/* Pricing Section */} +
+
+ + Model Pricing + + + Configure token-based pricing in USD per million tokens + +
+ +
+
+ { + const value = apiConfiguration?.openAiCustomModelInfo?.inputPrice + if (!value && value !== 0) return "var(--vscode-input-border)" + return value >= 0 + ? "var(--vscode-charts-green)" + : "var(--vscode-errorForeground)" + })(), + }} + onChange={(e: any) => { + const parsed = parseFloat(e.target.value) + handleInputChange("openAiCustomModelInfo")({ + target: { + value: { + ...(apiConfiguration?.openAiCustomModelInfo ?? + openAiModelInfoSaneDefaults), + inputPrice: + e.target.value === "" + ? undefined + : isNaN(parsed) + ? openAiModelInfoSaneDefaults.inputPrice + : parsed, + }, + }, + }) + }} + placeholder="e.g. 0.0001"> +
+ Input Price + +
+
+
+ +
+ { + const value = apiConfiguration?.openAiCustomModelInfo?.outputPrice + if (!value && value !== 0) return "var(--vscode-input-border)" + return value >= 0 + ? "var(--vscode-charts-green)" + : "var(--vscode-errorForeground)" + })(), + }} + onChange={(e: any) => { + const parsed = parseFloat(e.target.value) + handleInputChange("openAiCustomModelInfo")({ + target: { + value: { + ...(apiConfiguration?.openAiCustomModelInfo || + openAiModelInfoSaneDefaults), + outputPrice: + e.target.value === "" + ? undefined + : isNaN(parsed) + ? openAiModelInfoSaneDefaults.outputPrice + : parsed, + }, + }, + }) + }} + placeholder="e.g. 0.0002"> +
+ Output Price + +
+
+
+
+
+
+
+
+ + {/* end Model Info Configuration */} +