import { VSCodeDropdown, VSCodeLink, VSCodeOption, VSCodeTextField } from "@vscode/webview-ui-toolkit/react"
import React, { useMemo } from "react"
import {
	ApiConfiguration,
	ApiModelId,
	ModelInfo,
	anthropicDefaultModelId,
	anthropicModels,
	bedrockDefaultModelId,
	bedrockModels,
	openRouterDefaultModelId,
	openRouterModels,
} from "../../../src/shared/api"

interface ApiOptionsProps {
	showModelOptions: boolean
	apiConfiguration?: ApiConfiguration
	setApiConfiguration: React.Dispatch<React.SetStateAction<ApiConfiguration | undefined>>
}

const ApiOptions: React.FC<ApiOptionsProps> = ({ showModelOptions, apiConfiguration, setApiConfiguration }) => {
	const handleInputChange = (field: keyof ApiConfiguration) => (event: any) => {
		setApiConfiguration((prev) => ({ ...prev, [field]: event.target.value }))
	}

	const { selectedProvider, selectedModelId, selectedModelInfo } = useMemo(() => {
		return normalizeApiConfiguration(apiConfiguration)
	}, [apiConfiguration])

	/*
	VSCodeDropdown has an open bug where dynamically rendered options don't auto-select the provided value prop.
	You can see this for yourself by comparing it with normal select/option elements, which work as expected.
	https://github.com/microsoft/vscode-webview-ui-toolkit/issues/433

	In our case, when the user switches between providers, we recalculate the selectedModelId depending on the
	provider, the default model for that provider, and a modelId that the user may have selected. Unfortunately,
	the VSCodeDropdown component wouldn't select this calculated value, and would default to the first
	"Select a model..." option instead, which makes it seem like the model was cleared out when it wasn't.

	As a workaround, we create separate instances of the dropdown for each provider, and then conditionally
	render the one that matches the current provider.
	*/
	const createDropdown = (models: Record<string, ModelInfo>) => {
		return (
			<VSCodeDropdown value={selectedModelId} onChange={handleInputChange("apiModelId")}>
				<VSCodeOption value="">Select a model...</VSCodeOption>
				{Object.keys(models).map((modelId) => (
					<VSCodeOption key={modelId} value={modelId}>
						{modelId}
					</VSCodeOption>
				))}
			</VSCodeDropdown>
		)
	}

	return (
		<div>
			{/* NOTE: the markup below is a reconstruction; ids, styles, hrefs, and ApiConfiguration
			    field names are assumptions where they could not be recovered from the original. */}
			<VSCodeDropdown value={selectedProvider} onChange={handleInputChange("apiProvider")}>
				<VSCodeOption value="anthropic">Anthropic</VSCodeOption>
				<VSCodeOption value="bedrock">AWS Bedrock</VSCodeOption>
				<VSCodeOption value="openrouter">OpenRouter</VSCodeOption>
			</VSCodeDropdown>
{selectedProvider === "anthropic" && (
Anthropic API Key

This key is stored locally and only used to make API requests from this extension. You can get an Anthropic API key by signing up here.

)} {selectedProvider === "openrouter" && (
OpenRouter API Key

This key is stored locally and only used to make API requests from this extension. You can get an OpenRouter API key by signing up here. {" "} (Note: OpenRouter support is experimental and may not work well with large files.)

)} {selectedProvider === "bedrock" && (
AWS Access Key AWS Secret Key
Select a region... {/* The user will have to choose a region that supports the model they use, but this shouldn't be a problem since they'd have to request access for it in that region in the first place. */} us-east-1 {/* us-east-2 */} {/* us-west-1 */} us-west-2 {/* af-south-1 */} {/* ap-east-1 */} ap-south-1 ap-northeast-1 {/* ap-northeast-2 */} {/* ap-northeast-3 */} ap-southeast-1 ap-southeast-2 ca-central-1 eu-central-1 eu-west-1 eu-west-2 eu-west-3 {/* eu-north-1 */} {/* me-south-1 */} sa-east-1

These credentials are stored locally and only used to make API requests from this extension. You can find your AWS access key and secret key here.

			)}

			{showModelOptions && (
				<>
					{selectedProvider === "anthropic" && createDropdown(anthropicModels)}
					{selectedProvider === "openrouter" && createDropdown(openRouterModels)}
					{selectedProvider === "bedrock" && createDropdown(bedrockModels)}
					{/* pricing and feature details for the currently selected model */}
					<ModelInfoView modelInfo={selectedModelInfo} />
				</>
			)}
		</div>
	)
}

const ModelInfoView = ({ modelInfo }: { modelInfo: ModelInfo }) => {
	const formatPrice = (price: number) => {
		return new Intl.NumberFormat("en-US", {
			style: "currency",
			currency: "USD",
			minimumFractionDigits: 2,
			maximumFractionDigits: 2,
		}).format(price)
	}

	const showPromptCachingPrices =
		modelInfo.supportsPromptCache && modelInfo.cacheWritesPrice && modelInfo.cacheReadsPrice

	return (
		<p>
			{/* the supports/doesNotSupport labels and the supportsImages item are assumptions;
			    they were passed as props in markup that did not survive */}
			<ModelInfoSupportsItem
				isSupported={modelInfo.supportsImages}
				supportsLabel="Supports images"
				doesNotSupportLabel="Does not support images"
			/>
			<br />
			<ModelInfoSupportsItem
				isSupported={modelInfo.supportsPromptCache}
				supportsLabel="Supports prompt caching"
				doesNotSupportLabel="Does not support prompt caching"
			/>{" "}
			<VSCodeLink href="https://www.anthropic.com/news/prompt-caching">(what is this?)</VSCodeLink>
			<br />
			<span style={{ fontWeight: 500 }}>Max output:</span> {modelInfo.maxTokens.toLocaleString()} tokens
			<br />
			<span style={{ fontWeight: 500 }}>
				{showPromptCachingPrices ? "Base input price:" : "Input price:"}
			</span>{" "}
			{formatPrice(modelInfo.inputPrice)} per million tokens
			{showPromptCachingPrices && (
				<>
					<br />
					<span style={{ fontWeight: 500 }}>Prompt caching write price:</span>{" "}
					{formatPrice(modelInfo.cacheWritesPrice || 0)} per million tokens
					<br />
					<span style={{ fontWeight: 500 }}>Prompt caching read price:</span>{" "}
					{formatPrice(modelInfo.cacheReadsPrice || 0)} per million tokens
				</>
			)}
			<br />
			<span style={{ fontWeight: 500 }}>Output price:</span> {formatPrice(modelInfo.outputPrice)} per million tokens
		</p>
	)
}

const ModelInfoSupportsItem = ({
	isSupported,
	supportsLabel,
	doesNotSupportLabel,
}: {
	isSupported: boolean
	supportsLabel: string
	doesNotSupportLabel: string
}) => (
	<span>{isSupported ? supportsLabel : doesNotSupportLabel}</span>
)

export function normalizeApiConfiguration(apiConfiguration?: ApiConfiguration) {
	const provider = apiConfiguration?.apiProvider || "anthropic"
	const modelId = apiConfiguration?.apiModelId

	const getProviderData = (models: Record<string, ModelInfo>, defaultId: ApiModelId) => {
		let selectedModelId: ApiModelId
		let selectedModelInfo: ModelInfo
		if (modelId && modelId in models) {
			selectedModelId = modelId
			selectedModelInfo = models[modelId]
		} else {
			selectedModelId = defaultId
			selectedModelInfo = models[defaultId]
		}
		return { selectedProvider: provider, selectedModelId, selectedModelInfo }
	}

	switch (provider) {
		case "anthropic":
			return getProviderData(anthropicModels, anthropicDefaultModelId)
		case "openrouter":
			return getProviderData(openRouterModels, openRouterDefaultModelId)
		case "bedrock":
			return getProviderData(bedrockModels, bedrockDefaultModelId)
	}
}

export default ApiOptions
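
/*
Usage sketch (illustrative only, not part of this module): a parent webview view could hold the
configuration in React state and pass it down, matching the ApiOptionsProps interface above.
The state shape and placement are assumptions.

	const [apiConfiguration, setApiConfiguration] = useState<ApiConfiguration | undefined>(undefined)

	<ApiOptions
		showModelOptions={true}
		apiConfiguration={apiConfiguration}
		setApiConfiguration={setApiConfiguration}
	/>

normalizeApiConfiguration falls back to the provider's default model when no model id is set, e.g.
normalizeApiConfiguration({ apiProvider: "bedrock" }) returns bedrockDefaultModelId as selectedModelId
and bedrockModels[bedrockDefaultModelId] as selectedModelInfo.
*/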