Mirror of https://github.com/pacnpal/Roo-Code.git (synced 2025-12-21 12:51:17 -05:00)

Commit: Merge remote-tracking branch 'upstream/main'
@@ -6,6 +6,7 @@ import { OpenRouterHandler } from "./providers/openrouter"
import { VertexHandler } from "./providers/vertex"
import { OpenAiHandler } from "./providers/openai"
import { OllamaHandler } from "./providers/ollama"
import { LmStudioHandler } from "./providers/lmstudio"
import { GeminiHandler } from "./providers/gemini"
import { OpenAiNativeHandler } from "./providers/openai-native"
import { ApiStream } from "./transform/stream"
@@ -30,6 +31,8 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
			return new OpenAiHandler(options)
		case "ollama":
			return new OllamaHandler(options)
		case "lmstudio":
			return new LmStudioHandler(options)
		case "gemini":
			return new GeminiHandler(options)
		case "openai-native":
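For orientation, this is roughly how the new case gets exercised: buildApiHandler receives the user's ApiConfiguration and returns the matching handler, whose createMessage streams the reply. A minimal sketch, assuming the option names introduced in this diff; the import path, model id value, and surrounding scaffolding are hypothetical:

import { buildApiHandler } from "../src/api" // hypothetical path to the module patched above

const handler = buildApiHandler({
	apiProvider: "lmstudio",
	lmStudioModelId: "llama-3.1-8b-instruct", // whatever model is loaded in LM Studio
	lmStudioBaseUrl: "http://localhost:1234", // optional; also the default, per the file below
})

// createMessage is an async generator, so the stream can be consumed directly.
for await (const chunk of handler.createMessage("You are a helpful assistant.", [
	{ role: "user", content: "Hello!" },
])) {
	if (chunk.type === "text") {
		process.stdout.write(chunk.text)
	}
}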
src/api/providers/lmstudio.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import { ApiHandler } from "../"
import { ApiHandlerOptions, ModelInfo, openAiModelInfoSaneDefaults } from "../../shared/api"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { ApiStream } from "../transform/stream"

export class LmStudioHandler implements ApiHandler {
	private options: ApiHandlerOptions
	private client: OpenAI

	constructor(options: ApiHandlerOptions) {
		this.options = options
		this.client = new OpenAI({
			baseURL: (this.options.lmStudioBaseUrl || "http://localhost:1234") + "/v1",
			apiKey: "noop",
		})
	}

	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
			{ role: "system", content: systemPrompt },
			...convertToOpenAiMessages(messages),
		]

		try {
			const stream = await this.client.chat.completions.create({
				model: this.getModel().id,
				messages: openAiMessages,
				temperature: 0,
				stream: true,
			})
			for await (const chunk of stream) {
				const delta = chunk.choices[0]?.delta
				if (delta?.content) {
					yield {
						type: "text",
						text: delta.content,
					}
				}
			}
		} catch (error) {
			// LM Studio doesn't return an error code/body for now
			throw new Error(
				"Please check the LM Studio developer logs to debug what went wrong. You may need to load the model with a larger context length to work with Cline's prompts."
			)
		}
	}

	getModel(): { id: string; info: ModelInfo } {
		return {
			id: this.options.lmStudioModelId || "",
			info: openAiModelInfoSaneDefaults,
		}
	}
}
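Worth noting in the file above: LM Studio speaks the OpenAI wire protocol, so the handler reuses the openai SDK with a local baseURL plus a placeholder apiKey ("noop") that the SDK requires but LM Studio ignores; temperature is presumably pinned to 0 for reproducible output, and the catch block substitutes a human-readable hint because LM Studio currently returns no structured error body. A quick standalone connectivity check, useful when debugging that error path (a sketch, not part of the commit):

import OpenAI from "openai"

// Verify the local server answers at all before blaming prompt/context size.
async function pingLmStudio(baseUrl = "http://localhost:1234"): Promise<boolean> {
	const client = new OpenAI({ baseURL: baseUrl + "/v1", apiKey: "noop" })
	try {
		await client.models.list() // GET /v1/models, the same endpoint getLmStudioModels uses below
		return true
	} catch {
		return false
	}
}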
@@ -31,9 +31,19 @@ export class OpenRouterHandler implements ApiHandler {
		]

		// prompt caching: https://openrouter.ai/docs/prompt-caching
		// this is specifically for claude models (some models may 'support prompt caching' automatically without this)
		switch (this.getModel().id) {
			case "anthropic/claude-3.5-sonnet":
			case "anthropic/claude-3.5-sonnet:beta":
			case "anthropic/claude-3.5-sonnet-20240620":
			case "anthropic/claude-3.5-sonnet-20240620:beta":
			case "anthropic/claude-3-5-haiku":
			case "anthropic/claude-3-5-haiku:beta":
			case "anthropic/claude-3-5-haiku-20241022":
			case "anthropic/claude-3-5-haiku-20241022:beta":
			case "anthropic/claude-3-haiku":
			case "anthropic/claude-3-haiku:beta":
			case "anthropic/claude-3-opus":
			case "anthropic/claude-3-opus:beta":
				openAiMessages[0] = {
					role: "system",
@@ -76,6 +86,12 @@ export class OpenRouterHandler implements ApiHandler {
		switch (this.getModel().id) {
			case "anthropic/claude-3.5-sonnet":
			case "anthropic/claude-3.5-sonnet:beta":
			case "anthropic/claude-3.5-sonnet-20240620":
			case "anthropic/claude-3.5-sonnet-20240620:beta":
			case "anthropic/claude-3-5-haiku":
			case "anthropic/claude-3-5-haiku:beta":
			case "anthropic/claude-3-5-haiku-20241022":
			case "anthropic/claude-3-5-haiku-20241022:beta":
				maxTokens = 8_192
				break
		}

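The two OpenRouter hunks extend existing Claude-specific handling to the 3.5 Haiku family and the dated Sonnet ids: the first opts them into prompt caching, and the second raises maxTokens to 8_192 to match those models' larger output limit. The first hunk truncates right after role: "system", so the rewritten body is not shown here; for reference only, the cache-breakpoint shape documented at https://openrouter.ai/docs/prompt-caching looks like this (an illustrative sketch of the documented pattern, not code from this commit):

const systemPrompt = "You are Cline..." // stand-in for the real (large) system prompt

const cachedSystemMessage = {
	role: "system" as const,
	content: [
		{
			type: "text" as const,
			text: systemPrompt, // the stable prefix that benefits from caching across requests
			cache_control: { type: "ephemeral" as const }, // Anthropic-style cache breakpoint
		},
	],
}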
@@ -53,6 +53,8 @@ type GlobalStateKey =
	| "openAiModelId"
	| "ollamaModelId"
	| "ollamaBaseUrl"
	| "lmStudioModelId"
	| "lmStudioBaseUrl"
	| "anthropicBaseUrl"
	| "azureApiVersion"
	| "openRouterModelId"
@@ -363,6 +365,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			openAiModelId,
			ollamaModelId,
			ollamaBaseUrl,
			lmStudioModelId,
			lmStudioBaseUrl,
			anthropicBaseUrl,
			geminiApiKey,
			openAiNativeApiKey,
@@ -386,6 +390,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
		await this.updateGlobalState("openAiModelId", openAiModelId)
		await this.updateGlobalState("ollamaModelId", ollamaModelId)
		await this.updateGlobalState("ollamaBaseUrl", ollamaBaseUrl)
		await this.updateGlobalState("lmStudioModelId", lmStudioModelId)
		await this.updateGlobalState("lmStudioBaseUrl", lmStudioBaseUrl)
		await this.updateGlobalState("anthropicBaseUrl", anthropicBaseUrl)
		await this.storeSecret("geminiApiKey", geminiApiKey)
		await this.storeSecret("openAiNativeApiKey", openAiNativeApiKey)
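The provider-settings hunks follow a consistent persistence split: non-sensitive values such as model ids and base URLs go through updateGlobalState, while API keys go through storeSecret. A sketch of what those wrappers presumably delegate to, using the standard VS Code extension APIs (the wrapper bodies themselves are not part of this diff):

import * as vscode from "vscode"

async function persistLmStudioSettings(
	context: vscode.ExtensionContext,
	lmStudioModelId?: string,
	lmStudioBaseUrl?: string,
): Promise<void> {
	// Memento-backed global state: appropriate for non-secret configuration.
	await context.globalState.update("lmStudioModelId", lmStudioModelId)
	await context.globalState.update("lmStudioBaseUrl", lmStudioBaseUrl)
	// LM Studio runs locally and needs no key; a hosted provider's key would
	// instead use the encrypted store: await context.secrets.store(key, value)
}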
@@ -460,6 +466,10 @@ export class ClineProvider implements vscode.WebviewViewProvider {
				const ollamaModels = await this.getOllamaModels(message.text)
				this.postMessageToWebview({ type: "ollamaModels", ollamaModels })
				break
			case "requestLmStudioModels":
				const lmStudioModels = await this.getLmStudioModels(message.text)
				this.postMessageToWebview({ type: "lmStudioModels", lmStudioModels })
				break
			case "refreshOpenRouterModels":
				await this.refreshOpenRouterModels()
				break
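On the webview side, the matching flow is a plain request/response over postMessage: the UI sends a WebviewMessage of type "requestLmStudioModels" (with the base URL in text) and the provider answers with an ExtensionMessage of type "lmStudioModels". A hypothetical webview-side sketch using the standard acquireVsCodeApi global:

declare function acquireVsCodeApi(): { postMessage(message: unknown): void }
const vscode = acquireVsCodeApi()

// Ask the extension host to enumerate models at a given base URL.
vscode.postMessage({ type: "requestLmStudioModels", text: "http://localhost:1234" })

// The reply sent via postMessageToWebview arrives as a window message event.
window.addEventListener("message", (event: MessageEvent) => {
	const message = event.data
	if (message.type === "lmStudioModels") {
		console.log("LM Studio models:", message.lmStudioModels)
	}
})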
@@ -527,6 +537,25 @@ export class ClineProvider implements vscode.WebviewViewProvider {
		}
	}

	// LM Studio

	async getLmStudioModels(baseUrl?: string) {
		try {
			if (!baseUrl) {
				baseUrl = "http://localhost:1234"
			}
			if (!URL.canParse(baseUrl)) {
				return []
			}
			const response = await axios.get(`${baseUrl}/v1/models`)
			const modelsArray = response.data?.data?.map((model: any) => model.id) || []
			const models = [...new Set<string>(modelsArray)]
			return models
		} catch (error) {
			return []
		}
	}

	// OpenRouter

	async handleOpenRouterCallback(code: string) {
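getLmStudioModels leans on the same OpenAI compatibility: LM Studio serves the standard model listing at /v1/models, and the method dedupes the returned ids and fails soft to an empty list on any problem (unreachable server, unparsable URL). An equivalent standalone sketch with fetch instead of axios, assuming the usual OpenAI list shape { data: [{ id }] }:

async function listLmStudioModels(baseUrl = "http://localhost:1234"): Promise<string[]> {
	try {
		const res = await fetch(`${baseUrl}/v1/models`)
		if (!res.ok) return []
		const body = (await res.json()) as { data?: { id: string }[] }
		return [...new Set((body.data ?? []).map((m) => m.id))] // dedupe, like the original
	} catch {
		return [] // fail soft: the settings UI simply shows no models
	}
}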
@@ -855,6 +884,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			openAiModelId,
			ollamaModelId,
			ollamaBaseUrl,
			lmStudioModelId,
			lmStudioBaseUrl,
			anthropicBaseUrl,
			geminiApiKey,
			openAiNativeApiKey,
@@ -884,6 +915,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			this.getGlobalState("openAiModelId") as Promise<string | undefined>,
			this.getGlobalState("ollamaModelId") as Promise<string | undefined>,
			this.getGlobalState("ollamaBaseUrl") as Promise<string | undefined>,
			this.getGlobalState("lmStudioModelId") as Promise<string | undefined>,
			this.getGlobalState("lmStudioBaseUrl") as Promise<string | undefined>,
			this.getGlobalState("anthropicBaseUrl") as Promise<string | undefined>,
			this.getSecret("geminiApiKey") as Promise<string | undefined>,
			this.getSecret("openAiNativeApiKey") as Promise<string | undefined>,
@@ -930,6 +963,8 @@ export class ClineProvider implements vscode.WebviewViewProvider {
			openAiModelId,
			ollamaModelId,
			ollamaBaseUrl,
			lmStudioModelId,
			lmStudioBaseUrl,
			anthropicBaseUrl,
			geminiApiKey,
			openAiNativeApiKey,
@@ -10,6 +10,7 @@ export interface ExtensionMessage {
		| "state"
		| "selectedImages"
		| "ollamaModels"
		| "lmStudioModels"
		| "theme"
		| "workspaceUpdated"
		| "invoke"
@@ -21,6 +22,7 @@ export interface ExtensionMessage {
	state?: ExtensionState
	images?: string[]
	ollamaModels?: string[]
	lmStudioModels?: string[]
	filePaths?: string[]
	partialMessage?: ClineMessage
	openRouterModels?: Record<string, ModelInfo>
@@ -19,6 +19,7 @@ export interface WebviewMessage {
		| "exportTaskWithId"
		| "resetState"
		| "requestOllamaModels"
		| "requestLmStudioModels"
		| "openImage"
		| "openFile"
		| "openMention"
@@ -5,6 +5,7 @@ export type ApiProvider =
	| "vertex"
	| "openai"
	| "ollama"
	| "lmstudio"
	| "gemini"
	| "openai-native"
@@ -27,6 +28,8 @@ export interface ApiHandlerOptions {
	openAiModelId?: string
	ollamaModelId?: string
	ollamaBaseUrl?: string
	lmStudioModelId?: string
	lmStudioBaseUrl?: string
	geminiApiKey?: string
	openAiNativeApiKey?: string
	azureApiVersion?: string