Add ollama provider option

Saoud Rizwan
2024-09-03 23:03:30 -04:00
parent 4b99561294
commit 286e569e09
8 changed files with 140 additions and 8 deletions

View File

@@ -79,6 +79,7 @@ const ApiOptions: React.FC<ApiOptionsProps> = ({ showModelOptions, apiErrorMessa
<VSCodeOption value="bedrock">AWS Bedrock</VSCodeOption>
<VSCodeOption value="vertex">GCP Vertex AI</VSCodeOption>
<VSCodeOption value="openai">OpenAI Compatible</VSCodeOption>
<VSCodeOption value="ollama">Ollama</VSCodeOption>
</VSCodeDropdown>
</div>
@@ -268,7 +269,7 @@ const ApiOptions: React.FC<ApiOptionsProps> = ({ showModelOptions, apiErrorMessa
style={{ width: "100%" }}
type="url"
onInput={handleInputChange("openAiBaseUrl")}
placeholder={"e.g. http://localhost:11434/v1"}>
placeholder={"Enter base URL..."}>
<span style={{ fontWeight: 500 }}>Base URL</span>
</VSCodeTextField>
<VSCodeTextField
@@ -276,14 +277,14 @@ const ApiOptions: React.FC<ApiOptionsProps> = ({ showModelOptions, apiErrorMessa
style={{ width: "100%" }}
type="password"
onInput={handleInputChange("openAiApiKey")}
placeholder="e.g. ollama">
placeholder="Enter API Key...">
<span style={{ fontWeight: 500 }}>API Key</span>
</VSCodeTextField>
<VSCodeTextField
value={apiConfiguration?.openAiModelId || ""}
style={{ width: "100%" }}
onInput={handleInputChange("openAiModelId")}
placeholder={"e.g. llama3.1"}>
placeholder={"Enter Model ID..."}>
<span style={{ fontWeight: 500 }}>Model ID</span>
</VSCodeTextField>
<p
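Before this commit, the OpenAI Compatible placeholders above doubled as the Ollama hints; with a dedicated provider entry they become generic. A hedged sketch of the two resulting configurations, using field names from this diff and values taken from the old placeholders (illustrative only, not canonical settings):

// Pointing Claude Dev at a local Ollama server, sketched from fields in this diff.
// Route 1 (still possible): the OpenAI Compatible provider, since Ollama exposes an
// OpenAI-style endpoint. These are the old placeholder values.
const viaOpenAiCompatible = {
	apiProvider: "openai",
	openAiBaseUrl: "http://localhost:11434/v1",
	openAiApiKey: "ollama",
	openAiModelId: "llama3.1",
}
// Route 2 (new in this commit): the dedicated provider only asks for a model ID.
const viaOllamaProvider = {
	apiProvider: "ollama",
	ollamaModelId: "llama3.1",
}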
@@ -301,6 +302,40 @@ const ApiOptions: React.FC<ApiOptionsProps> = ({ showModelOptions, apiErrorMessa
</div>
)}
{selectedProvider === "ollama" && (
<div>
<VSCodeTextField
value={apiConfiguration?.ollamaModelId || ""}
style={{ width: "100%" }}
onInput={handleInputChange("ollamaModelId")}
placeholder={"e.g. llama3.1"}>
<span style={{ fontWeight: 500 }}>Model ID</span>
</VSCodeTextField>
<p
style={{
fontSize: "12px",
marginTop: "5px",
color: "var(--vscode-descriptionForeground)",
}}>
Ollama allows you to run models locally on your computer. For instructions on how to get started
with Ollama, see their
<VSCodeLink
href="https://github.com/ollama/ollama/blob/main/README.md"
style={{ display: "inline" }}>
quickstart guide.
</VSCodeLink>{" "}
You can use any models that support{" "}
<VSCodeLink href="https://ollama.com/search?c=tools" style={{ display: "inline" }}>
tool use.
</VSCodeLink>
<span style={{ color: "var(--vscode-errorForeground)" }}>
(<span style={{ fontWeight: 500 }}>Note:</span> Claude Dev uses complex prompts, so less
capable models may not work as expected.)
</span>
</p>
</div>
)}
{apiErrorMessage && (
<p
style={{
@@ -312,7 +347,7 @@ const ApiOptions: React.FC<ApiOptionsProps> = ({ showModelOptions, apiErrorMessa
</p>
)}
{selectedProvider !== "openai" && showModelOptions && (
{selectedProvider !== "openai" && selectedProvider !== "ollama" && showModelOptions && (
<>
<div className="dropdown-container">
<label htmlFor="model-id">
@@ -437,6 +472,12 @@ export function normalizeApiConfiguration(apiConfiguration?: ApiConfiguration) {
selectedModelId: apiConfiguration?.openAiModelId ?? "",
selectedModelInfo: openAiModelInfoSaneDefaults,
}
case "ollama":
return {
selectedProvider: provider,
selectedModelId: apiConfiguration?.ollamaModelId ?? "",
selectedModelInfo: openAiModelInfoSaneDefaults,
}
default:
return getProviderData(anthropicModels, anthropicDefaultModelId)
}
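Taken together with the new dropdown value, the added normalizeApiConfiguration case implies the shared ApiConfiguration type gains an "ollama" provider and an optional ollamaModelId field; note that the case reuses openAiModelInfoSaneDefaults for model info. A minimal sketch of the assumed shape (the real type is defined elsewhere in this changeset and is not shown here):

// Assumed shape, reconstructed only from fields referenced in this file's diff.
type ApiProvider = "bedrock" | "vertex" | "openai" | "ollama" // ...plus the other providers
interface ApiConfiguration {
	apiProvider?: ApiProvider
	openAiBaseUrl?: string
	openAiApiKey?: string
	openAiModelId?: string
	ollamaModelId?: string // new in this commit
	// ...fields for the remaining providers
}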

View File

@@ -226,7 +226,9 @@ const TaskHeader: React.FC<TaskHeaderProps> = ({
{tokensOut?.toLocaleString()}
</span>
</div>
{apiConfiguration?.apiProvider === "openai" && <ExportButton />}
{(apiConfiguration?.apiProvider === "openai" || apiConfiguration?.apiProvider === "ollama") && (
<ExportButton />
)}
</div>
{(doesModelSupportPromptCache || cacheReads !== undefined || cacheWrites !== undefined) && (
@@ -248,7 +250,7 @@ const TaskHeader: React.FC<TaskHeaderProps> = ({
</span>
</div>
)}
{apiConfiguration?.apiProvider !== "openai" && (
{apiConfiguration?.apiProvider !== "openai" && apiConfiguration?.apiProvider !== "ollama" && (
<div
style={{
display: "flex",

View File

@@ -32,6 +32,11 @@ export function validateApiConfiguration(apiConfiguration?: ApiConfiguration): s
return "You must provide a valid base URL, API key, and model ID."
}
break
case "ollama":
if (!apiConfiguration.ollamaModelId) {
return "You must provide a valid model ID."
}
break
}
}
return undefined
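For the new branch, only a model ID is required, since this commit's Ollama settings expose no base URL or API key field. A quick usage sketch, assuming the switch runs on apiConfiguration.apiProvider as in normalizeApiConfiguration:

// Illustrative only, based on the branches shown above.
validateApiConfiguration({ apiProvider: "ollama" })
// => "You must provide a valid model ID."
validateApiConfiguration({ apiProvider: "ollama", ollamaModelId: "llama3.1" })
// => undefined, i.e. the configuration is considered valid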