import {
	VSCodeCheckbox,
	VSCodeDropdown,
	VSCodeLink,
	VSCodeOption,
	VSCodeRadio,
	VSCodeRadioGroup,
	VSCodeTextField,
} from "@vscode/webview-ui-toolkit/react"
import { memo, useCallback, useEffect, useMemo, useState } from "react"
import { useEvent, useInterval } from "react-use"
import {
	ApiConfiguration,
	ModelInfo,
	OpenAiNativeModelId,
	anthropicDefaultModelId,
	anthropicModels,
	bedrockDefaultModelId,
	bedrockModels,
	geminiDefaultModelId,
	geminiModels,
	openAiModelInfoSaneDefaults,
	openAiNativeDefaultModelId,
	openAiNativeModels,
	openRouterDefaultModelId,
	openRouterModels,
	vertexDefaultModelId,
	vertexModels,
} from "../../../src/shared/api"
import { ExtensionMessage } from "../../../src/shared/ExtensionMessage"
import { useExtensionState } from "../context/ExtensionStateContext"
import { vscode } from "../utils/vscode"
import VSCodeButtonLink from "./VSCodeButtonLink"

interface ApiOptionsProps {
	showModelOptions: boolean
	apiErrorMessage?: string
}

const ApiOptions = ({ showModelOptions, apiErrorMessage }: ApiOptionsProps) => {
	const { apiConfiguration, setApiConfiguration, uriScheme } = useExtensionState()
	const [ollamaModels, setOllamaModels] = useState<string[]>([])
	const [anthropicBaseUrlSelected, setAnthropicBaseUrlSelected] = useState(!!apiConfiguration?.anthropicBaseUrl)

	const handleInputChange = (field: keyof ApiConfiguration) => (event: any) => {
		setApiConfiguration({ ...apiConfiguration, [field]: event.target.value })
	}

	const { selectedProvider, selectedModelId, selectedModelInfo } = useMemo(() => {
		return normalizeApiConfiguration(apiConfiguration)
	}, [apiConfiguration])

	// Poll ollama models
	const requestOllamaModels = useCallback(() => {
		if (selectedProvider === "ollama") {
			vscode.postMessage({ type: "requestOllamaModels", text: apiConfiguration?.ollamaBaseUrl })
		}
	}, [selectedProvider, apiConfiguration?.ollamaBaseUrl])
	useEffect(() => {
		if (selectedProvider === "ollama") {
			requestOllamaModels()
		}
	}, [selectedProvider, requestOllamaModels])
	useInterval(requestOllamaModels, selectedProvider === "ollama" ? 2000 : null)

	const handleMessage = useCallback((event: MessageEvent) => {
		const message: ExtensionMessage = event.data
		if (message.type === "ollamaModels" && message.models) {
			setOllamaModels(message.models)
		}
	}, [])
	useEvent("message", handleMessage)

	/*
	VSCodeDropdown has an open bug where dynamically rendered options don't auto-select the provided value prop.
	You can see this for yourself by comparing it with normal select/option elements, which work as expected.
	https://github.com/microsoft/vscode-webview-ui-toolkit/issues/433

	In our case, when the user switches between providers, we recalculate the selectedModelId depending on the
	provider, the default model for that provider, and a modelId that the user may have selected. Unfortunately,
	the VSCodeDropdown component wouldn't select this calculated value, and would default to the first
	"Select a model..." option instead, which makes it seem like the model was cleared out when it wasn't.

	As a workaround, we create separate instances of the dropdown for each provider, and then conditionally
	render the one that matches the current provider.
	*/
	const createDropdown = (models: Record<string, ModelInfo>) => {
		return (
			<VSCodeDropdown
				id="model-id"
				value={selectedModelId}
				onChange={handleInputChange("apiModelId")}
				style={{ width: "100%" }}>
				<VSCodeOption value="">Select a model...</VSCodeOption>
				{Object.keys(models).map((modelId) => (
					<VSCodeOption key={modelId} value={modelId}>
						{modelId}
					</VSCodeOption>
				))}
			</VSCodeDropdown>
		)
	}
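	/*
	For context on the polling above: the webview posts "requestOllamaModels" and expects the extension
	host to reply with an "ollamaModels" message. A minimal sketch of that host-side handler (hypothetical,
	the real handler lives in the extension, not in this file) could look like:

		case "requestOllamaModels": {
			// Ollama's local server lists installed models at GET /api/tags
			const baseUrl = message.text || "http://localhost:11434"
			const response = await fetch(`${baseUrl}/api/tags`)
			const json = (await response.json()) as { models?: { name: string }[] }
			webview.postMessage({ type: "ollamaModels", models: json.models?.map((m) => m.name) ?? [] })
			break
		}
	*/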
	return (
		<div style={{ display: "flex", flexDirection: "column", gap: 5 }}>
			<div className="dropdown-container">
				<label htmlFor="api-provider">
					<span style={{ fontWeight: 500 }}>API Provider</span>
				</label>
				<VSCodeDropdown
					id="api-provider"
					value={selectedProvider}
					onChange={handleInputChange("apiProvider")}
					style={{ minWidth: 130 }}>
					<VSCodeOption value="anthropic">Anthropic</VSCodeOption>
					<VSCodeOption value="openai-native">OpenAI</VSCodeOption>
					<VSCodeOption value="openrouter">OpenRouter</VSCodeOption>
					<VSCodeOption value="gemini">Google Gemini</VSCodeOption>
					<VSCodeOption value="bedrock">AWS Bedrock</VSCodeOption>
					<VSCodeOption value="vertex">GCP Vertex AI</VSCodeOption>
					<VSCodeOption value="openai">OpenAI Compatible</VSCodeOption>
					<VSCodeOption value="ollama">Ollama</VSCodeOption>
				</VSCodeDropdown>
			</div>
{selectedProvider === "anthropic" && (
Anthropic API Key
{ const isChecked = e.target.checked === true setAnthropicBaseUrlSelected(isChecked) if (!isChecked) { setApiConfiguration({ ...apiConfiguration, anthropicBaseUrl: "" }) } }}> Use custom base URL
{anthropicBaseUrlSelected && ( )}

This key is stored locally and only used to make API requests from this extension. {!apiConfiguration?.apiKey && ( You can get an Anthropic API key by signing up here. )}

)} {selectedProvider === "openai-native" && (
OpenAI API Key

This key is stored locally and only used to make API requests from this extension. {!apiConfiguration?.openAiNativeApiKey && ( You can get an OpenAI API key by signing up here. )}

)} {selectedProvider === "openrouter" && (
OpenRouter API Key {!apiConfiguration?.openRouterApiKey && ( Get OpenRouter API Key )}

This key is stored locally and only used to make API requests from this extension.{" "} {/* {!apiConfiguration?.openRouterApiKey && ( (Note: OpenRouter is recommended for high rate limits, prompt caching, and wider selection of models.) )} */}

)} {selectedProvider === "bedrock" && (
AWS Access Key AWS Secret Key AWS Session Token
Select a region... {/* The user will have to choose a region that supports the model they use, but this shouldn't be a problem since they'd have to request access for it in that region in the first place. */} us-east-1 {/* us-east-2 */} {/* us-west-1 */} us-west-2 {/* af-south-1 */} {/* ap-east-1 */} ap-south-1 ap-northeast-1 {/* ap-northeast-2 */} {/* ap-northeast-3 */} ap-southeast-1 ap-southeast-2 ca-central-1 eu-central-1 eu-west-1 eu-west-2 eu-west-3 {/* eu-north-1 */} {/* me-south-1 */} sa-east-1

Authenticate by either providing the keys above or use the default AWS credential providers, i.e. ~/.aws/credentials or environment variables. These credentials are only used locally to make API requests from this extension.

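					{/* For reference (assumption: standard AWS SDK behavior, not specific to this extension):
					the "default credential providers" are resolved in the extension host by the AWS SDK's
					default chain: environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
					AWS_SESSION_TOKEN), then the shared ~/.aws/credentials file, then SSO/instance
					credentials. */}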
)} {apiConfiguration?.apiProvider === "vertex" && (
Google Cloud Project ID
Select a region... us-east5 us-central1 europe-west1 europe-west4 asia-southeast1

To use Google Cloud Vertex AI, you need to { "1) create a Google Cloud account › enable the Vertex AI API › enable the desired Claude models," } {" "} {"2) install the Google Cloud CLI › configure Application Default Credentials."}

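					{/* Typical Application Default Credentials setup with the Google Cloud CLI (assumption:
					the standard gcloud workflow; the extension doesn't prescribe these exact commands):
						gcloud auth application-default login
						gcloud config set project YOUR_PROJECT_ID */}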
)} {selectedProvider === "gemini" && (
Gemini API Key

This key is stored locally and only used to make API requests from this extension. {!apiConfiguration?.geminiApiKey && ( You can get a Gemini API key by signing up here. )}

)} {selectedProvider === "openai" && (
Base URL API Key Model ID

You can use any OpenAI compatible API with models that support tool use.{" "} (Note: Claude Dev uses complex prompts and works best with Claude models. Less capable models may not work as expected.)

)} {selectedProvider === "ollama" && (
Base URL (optional) Model ID {ollamaModels.length > 0 && ( { const value = (e.target as HTMLInputElement)?.value // need to check value first since radio group returns empty string sometimes if (value) { handleInputChange("ollamaModelId")({ target: { value }, }) } }}> {ollamaModels.map((model) => ( {model} ))} )}

Ollama allows you to run models locally on your computer. For instructions on how to get started, see their quickstart guide. {" "} You can use any model that supports{" "} tool use. (Note: Claude Dev uses complex prompts and works best with Claude models. Less capable models may not work as expected.)

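					{/* Example first run (assumption: standard Ollama CLI usage, model tag illustrative):
					`ollama pull llama3.1` downloads a model locally; once the local server is running, the
					model shows up in the radio list above via the "ollamaModels" polling. */}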
				</div>
			)}
			{apiErrorMessage && (
				<p
					style={{
						margin: "-10px 0 4px 0",
						fontSize: 12,
						color: "var(--vscode-errorForeground)",
					}}>
					{apiErrorMessage}
				</p>
)} {selectedProvider !== "openai" && selectedProvider !== "ollama" && showModelOptions && ( <>
{selectedProvider === "anthropic" && createDropdown(anthropicModels)} {selectedProvider === "openrouter" && createDropdown(openRouterModels)} {selectedProvider === "bedrock" && createDropdown(bedrockModels)} {selectedProvider === "vertex" && createDropdown(vertexModels)} {selectedProvider === "gemini" && createDropdown(geminiModels)} {selectedProvider === "openai-native" && createDropdown(openAiNativeModels)}
)}
		</div>
	)
}

export function getOpenRouterAuthUrl(uriScheme?: string) {
	return `https://openrouter.ai/auth?callback_url=${uriScheme || "vscode"}://saoudrizwan.claude-dev/openrouter`
}

export const formatPrice = (price: number) => {
	return new Intl.NumberFormat("en-US", {
		style: "currency",
		currency: "USD",
		minimumFractionDigits: 2,
		maximumFractionDigits: 2,
	}).format(price)
}
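// Illustrative outputs of the helpers above (the uriScheme value is hypothetical):
//   getOpenRouterAuthUrl("cursor") => "https://openrouter.ai/auth?callback_url=cursor://saoudrizwan.claude-dev/openrouter"
//   formatPrice(3)    => "$3.00"   (model prices are quoted per million tokens)
//   formatPrice(0.25) => "$0.25"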
const ModelInfoView = ({ selectedModelId, modelInfo }: { selectedModelId: string; modelInfo: ModelInfo }) => {
	const isGemini = Object.keys(geminiModels).includes(selectedModelId)
	const isO1 = (["o1-preview", "o1-mini"] as OpenAiNativeModelId[]).includes(selectedModelId as OpenAiNativeModelId)

	return (
		<p style={{ fontSize: "12px", marginTop: "2px", color: "var(--vscode-descriptionForeground)" }}>
			<ModelInfoSupportsItem
				isSupported={modelInfo.supportsImages}
				supportsLabel="Supports images"
				doesNotSupportLabel="Does not support images"
			/>
			<br />
			{!isGemini && (
				<>
					<ModelInfoSupportsItem
						isSupported={modelInfo.supportsPromptCache}
						supportsLabel="Supports prompt caching"
						doesNotSupportLabel="Does not support prompt caching"
					/>
					<br />
				</>
			)}
			<span style={{ fontWeight: 500 }}>Max output:</span> {modelInfo?.maxTokens?.toLocaleString()} tokens
			{modelInfo.inputPrice > 0 && (
				<>
					<br />
					<span style={{ fontWeight: 500 }}>Input price:</span> {formatPrice(modelInfo.inputPrice)}/million
					tokens
				</>
			)}
			{modelInfo.supportsPromptCache && modelInfo.cacheWritesPrice && modelInfo.cacheReadsPrice && (
				<>
					<br />
					<span style={{ fontWeight: 500 }}>Cache writes price:</span>{" "}
					{formatPrice(modelInfo.cacheWritesPrice || 0)}/million tokens
					<br />
					<span style={{ fontWeight: 500 }}>Cache reads price:</span>{" "}
					{formatPrice(modelInfo.cacheReadsPrice || 0)}/million tokens
				</>
			)}
			{modelInfo.outputPrice > 0 && (
				<>
					<br />
					<span style={{ fontWeight: 500 }}>Output price:</span> {formatPrice(modelInfo.outputPrice)}/million
					tokens
				</>
			)}
			{isGemini && (
				<>
					<br />
					<span style={{ fontStyle: "italic" }}>
						* Free up to {selectedModelId === geminiDefaultModelId ? "15" : "2"} requests per minute.
						After that, billing depends on prompt size.{" "}
						<VSCodeLink href="https://ai.google.dev/pricing" style={{ display: "inline", fontSize: "inherit" }}>
							For more info, see pricing details.
						</VSCodeLink>
					</span>
				</>
			)}
			{isO1 && (
				<>
					<br />
					<span style={{ fontStyle: "italic" }}>
						* This model is newly released and may not be accessible to all users yet.
					</span>
				</>
			)}
		</p>
	)
}

const ModelInfoSupportsItem = ({
	isSupported,
	supportsLabel,
	doesNotSupportLabel,
}: {
	isSupported: boolean
	supportsLabel: string
	doesNotSupportLabel: string
}) => (
	<span
		style={{
			fontWeight: 500,
			color: isSupported ? "var(--vscode-charts-green)" : "var(--vscode-errorForeground)",
		}}>
		{isSupported ? supportsLabel : doesNotSupportLabel}
	</span>
)

export function normalizeApiConfiguration(apiConfiguration?: ApiConfiguration) {
	const provider = apiConfiguration?.apiProvider || "anthropic"
	const modelId = apiConfiguration?.apiModelId

	const getProviderData = (models: Record<string, ModelInfo>, defaultId: string) => {
		let selectedModelId: string
		let selectedModelInfo: ModelInfo
		if (modelId && modelId in models) {
			selectedModelId = modelId
			selectedModelInfo = models[modelId]
		} else {
			selectedModelId = defaultId
			selectedModelInfo = models[defaultId]
		}
		return { selectedProvider: provider, selectedModelId, selectedModelInfo }
	}

	switch (provider) {
		case "anthropic":
			return getProviderData(anthropicModels, anthropicDefaultModelId)
		case "openrouter":
			return getProviderData(openRouterModels, openRouterDefaultModelId)
		case "bedrock":
			return getProviderData(bedrockModels, bedrockDefaultModelId)
		case "vertex":
			return getProviderData(vertexModels, vertexDefaultModelId)
		case "gemini":
			return getProviderData(geminiModels, geminiDefaultModelId)
		case "openai-native":
			return getProviderData(openAiNativeModels, openAiNativeDefaultModelId)
		case "openai":
			return {
				selectedProvider: provider,
				selectedModelId: apiConfiguration?.openAiModelId ?? "",
				selectedModelInfo: openAiModelInfoSaneDefaults,
			}
		case "ollama":
			return {
				selectedProvider: provider,
				selectedModelId: apiConfiguration?.ollamaModelId ?? "",
				selectedModelInfo: openAiModelInfoSaneDefaults,
			}
		default:
			return getProviderData(anthropicModels, anthropicDefaultModelId)
	}
}

export default memo(ApiOptions)
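// Usage note (illustrative, based on the logic above): when the stored apiModelId isn't offered
// by the selected provider, normalizeApiConfiguration falls back to that provider's default model:
//   normalizeApiConfiguration({ apiProvider: "anthropic", apiModelId: "no-such-model" })
//     => { selectedProvider: "anthropic", selectedModelId: anthropicDefaultModelId,
//          selectedModelInfo: anthropicModels[anthropicDefaultModelId] }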