Add prompt caching to openrouter; remove o1 since it doesn't support tool use

Saoud Rizwan
2024-09-13 00:49:04 -04:00
parent c29fdaa520
commit dc617a92a9
5 changed files with 115 additions and 63 deletions
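
For context on the OpenRouter side of this change (the API handler file isn't part of this excerpt): prompt caching over OpenRouter is typically requested by attaching Anthropic-style cache_control breakpoints to the system prompt and the most recent user turn, so follow-up requests in the same task can reuse the cached prefix. The sketch below is a hedged illustration, not code from this commit; the client setup, env var, model id, and function name are assumptions, and it presumes the extension talks to OpenRouter through the OpenAI SDK.

// Hedged sketch, not the commit's handler code.
import OpenAI from "openai"

const client = new OpenAI({
  baseURL: "https://openrouter.ai/api/v1",
  apiKey: process.env.OPENROUTER_API_KEY, // illustrative env var name
})

// Mark the stable system prompt and the latest user message as cacheable.
async function createCachedCompletion(systemPrompt: string, userText: string) {
  return client.chat.completions.create({
    model: "anthropic/claude-3.5-sonnet", // example model id; caching only helps models that support it
    stream: true,
    messages: [
      {
        role: "system",
        // cache_control is an OpenRouter/Anthropic extension, so the part is cast past the SDK types
        content: [{ type: "text", text: systemPrompt, cache_control: { type: "ephemeral" } } as any],
      },
      {
        role: "user",
        content: [{ type: "text", text: userText, cache_control: { type: "ephemeral" } } as any],
      },
    ],
  })
}

The TaskHeader change further down then stops assuming cache statistics exist for OpenRouter, showing the Cache row only when cache read/write counts actually come back.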


@@ -12,7 +12,6 @@ import { useEvent, useInterval } from "react-use"
 import {
 ApiConfiguration,
 ModelInfo,
-OpenAiNativeModelId,
 anthropicDefaultModelId,
 anthropicModels,
 bedrockDefaultModelId,
@@ -115,11 +114,11 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage }: ApiOptionsProps) => {
 onChange={handleInputChange("apiProvider")}
 style={{ minWidth: 130 }}>
 <VSCodeOption value="anthropic">Anthropic</VSCodeOption>
-<VSCodeOption value="openai-native">OpenAI</VSCodeOption>
 <VSCodeOption value="openrouter">OpenRouter</VSCodeOption>
 <VSCodeOption value="gemini">Google Gemini</VSCodeOption>
-<VSCodeOption value="bedrock">AWS Bedrock</VSCodeOption>
 <VSCodeOption value="vertex">GCP Vertex AI</VSCodeOption>
+<VSCodeOption value="bedrock">AWS Bedrock</VSCodeOption>
+<VSCodeOption value="openai-native">OpenAI</VSCodeOption>
 <VSCodeOption value="openai">OpenAI Compatible</VSCodeOption>
 <VSCodeOption value="ollama">Ollama</VSCodeOption>
 </VSCodeDropdown>
@@ -547,7 +546,7 @@ export const formatPrice = (price: number) => {
 const ModelInfoView = ({ selectedModelId, modelInfo }: { selectedModelId: string; modelInfo: ModelInfo }) => {
 const isGemini = Object.keys(geminiModels).includes(selectedModelId)
-const isO1 = (["o1-preview", "o1-mini"] as OpenAiNativeModelId[]).includes(selectedModelId as OpenAiNativeModelId)
+const isO1 = false //(["o1-preview", "o1-mini"] as OpenAiNativeModelId[]).includes(selectedModelId as OpenAiNativeModelId)
 return (
 <p style={{ fontSize: "12px", marginTop: "2px", color: "var(--vscode-descriptionForeground)" }}>
 <ModelInfoSupportsItem


@@ -98,6 +98,8 @@ const TaskHeader: React.FC<TaskHeaderProps> = ({
 )
 }, [apiConfiguration?.apiProvider])
+const shouldShowPromptCacheInfo = doesModelSupportPromptCache && apiConfiguration?.apiProvider !== "openrouter"
 return (
 <div style={{ padding: "10px 13px 10px 13px" }}>
 <div
@@ -265,7 +267,7 @@ const TaskHeader: React.FC<TaskHeaderProps> = ({
 {!isCostAvailable && <ExportButton />}
 </div>
-{(doesModelSupportPromptCache || cacheReads !== undefined || cacheWrites !== undefined) && (
+{(shouldShowPromptCacheInfo || cacheReads !== undefined || cacheWrites !== undefined) && (
 <div style={{ display: "flex", alignItems: "center", gap: "4px", flexWrap: "wrap" }}>
 <span style={{ fontWeight: "bold" }}>Cache:</span>
 <span style={{ display: "flex", alignItems: "center", gap: "3px" }}>