Deepseek fixes
@@ -71,7 +71,7 @@ describe("DeepSeekHandler", () => {
 	beforeEach(() => {
 		mockOptions = {
 			deepSeekApiKey: "test-api-key",
-			deepSeekModelId: "deepseek-chat",
+			apiModelId: "deepseek-chat",
 			deepSeekBaseUrl: "https://api.deepseek.com/v1",
 		}
 		handler = new DeepSeekHandler(mockOptions)
@@ -81,7 +81,7 @@ describe("DeepSeekHandler", () => {
 	describe("constructor", () => {
 		it("should initialize with provided options", () => {
 			expect(handler).toBeInstanceOf(DeepSeekHandler)
-			expect(handler.getModel().id).toBe(mockOptions.deepSeekModelId)
+			expect(handler.getModel().id).toBe(mockOptions.apiModelId)
 		})

 		it("should throw error if API key is missing", () => {
@@ -96,7 +96,7 @@ describe("DeepSeekHandler", () => {
 		it("should use default model ID if not provided", () => {
 			const handlerWithoutModel = new DeepSeekHandler({
 				...mockOptions,
-				deepSeekModelId: undefined,
+				apiModelId: undefined,
 			})
 			expect(handlerWithoutModel.getModel().id).toBe(deepSeekDefaultModelId)
 		})
@@ -144,7 +144,7 @@ describe("DeepSeekHandler", () => {
 	describe("getModel", () => {
 		it("should return model info for valid model ID", () => {
 			const model = handler.getModel()
-			expect(model.id).toBe(mockOptions.deepSeekModelId)
+			expect(model.id).toBe(mockOptions.apiModelId)
 			expect(model.info).toBeDefined()
 			expect(model.info.maxTokens).toBe(8192)
 			expect(model.info.contextWindow).toBe(64_000)
@@ -155,7 +155,7 @@ describe("DeepSeekHandler", () => {
 		it("should return provided model ID with default model info if model does not exist", () => {
 			const handlerWithInvalidModel = new DeepSeekHandler({
 				...mockOptions,
-				deepSeekModelId: "invalid-model",
+				apiModelId: "invalid-model",
 			})
 			const model = handlerWithInvalidModel.getModel()
 			expect(model.id).toBe("invalid-model") // Returns provided ID
@@ -166,7 +166,7 @@ describe("DeepSeekHandler", () => {
 		it("should return default model if no model ID is provided", () => {
 			const handlerWithoutModel = new DeepSeekHandler({
 				...mockOptions,
-				deepSeekModelId: undefined,
+				apiModelId: undefined,
 			})
 			const model = handlerWithoutModel.getModel()
 			expect(model.id).toBe(deepSeekDefaultModelId)
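Context for the renames above: the tests now select the DeepSeek model through the shared apiModelId option instead of the provider-specific deepSeekModelId. A minimal sketch of the fallback behaviour those tests assert, using hypothetical local names rather than the repo's real types:

// Hypothetical, trimmed option shape; the real fields live in the repo's shared ApiHandlerOptions.
interface DeepSeekOptionsSketch {
	deepSeekApiKey?: string
	deepSeekBaseUrl?: string
	apiModelId?: string // shared field that replaces the old deepSeekModelId
}

const deepSeekDefaultModelId = "deepseek-chat" // assumed default, matching the fallback the tests expect

// Model id the handler is expected to report: the shared apiModelId, else the DeepSeek default.
function resolveModelId(options: DeepSeekOptionsSketch): string {
	return options.apiModelId ?? deepSeekDefaultModelId
}

console.log(resolveModelId({ deepSeekApiKey: "test-api-key", apiModelId: "deepseek-chat" })) // "deepseek-chat"
console.log(resolveModelId({ deepSeekApiKey: "test-api-key" })) // falls back to "deepseek-chat"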
@@ -193,7 +193,6 @@ describe("OpenAiHandler", () => {
 			expect(mockCreate).toHaveBeenCalledWith({
 				model: mockOptions.openAiModelId,
 				messages: [{ role: "user", content: "Test prompt" }],
-				temperature: 0,
 			})
 		})

@@ -10,14 +10,15 @@ export class DeepSeekHandler extends OpenAiHandler {
 		super({
 			...options,
 			openAiApiKey: options.deepSeekApiKey,
-			openAiModelId: options.deepSeekModelId ?? deepSeekDefaultModelId,
+			openAiModelId: options.apiModelId ?? deepSeekDefaultModelId,
 			openAiBaseUrl: options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1",
+			openAiStreamingEnabled: true,
 			includeMaxTokens: true,
 		})
 	}

 	override getModel(): { id: string; info: ModelInfo } {
-		const modelId = this.options.deepSeekModelId ?? deepSeekDefaultModelId
+		const modelId = this.options.apiModelId ?? deepSeekDefaultModelId
 		return {
 			id: modelId,
 			info: deepSeekModels[modelId as keyof typeof deepSeekModels] || deepSeekModels[deepSeekDefaultModelId],
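Why openAiStreamingEnabled: true matters here: combined with the deepseek-reasoner guard added in the OpenAiHandler hunks below, it routes deepseek-chat through the streaming branch while deepseek-reasoner drops into the non-streaming branch. A small sketch of that predicate, with illustrative names rather than code taken from the repo:

// Mirrors the condition `!deepseekReasoner && (openAiStreamingEnabled ?? true)` from the hunk below.
function usesStreamingPath(modelId: string, openAiStreamingEnabled?: boolean): boolean {
	const deepseekReasoner = modelId.includes("deepseek-reasoner")
	return !deepseekReasoner && (openAiStreamingEnabled ?? true)
}

console.log(usesStreamingPath("deepseek-chat", true)) // true  -> streaming request
console.log(usesStreamingPath("deepseek-reasoner", true)) // false -> non-streaming request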
@@ -36,7 +36,9 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 		const modelInfo = this.getModel().info
 		const modelId = this.options.openAiModelId ?? ""

-		if (this.options.openAiStreamingEnabled ?? true) {
+		const deepseekReasoner = modelId.includes("deepseek-reasoner")
+
+		if (!deepseekReasoner && (this.options.openAiStreamingEnabled ?? true)) {
 			const systemMessage: OpenAI.Chat.ChatCompletionSystemMessageParam = {
 				role: "system",
 				content: systemPrompt,
@@ -71,11 +73,20 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 				}
 			}
 		} else {
+			let systemMessage: OpenAI.Chat.ChatCompletionUserMessageParam | OpenAI.Chat.ChatCompletionSystemMessageParam
+
 			// o1 for instance doesnt support streaming, non-1 temp, or system prompt
-			const systemMessage: OpenAI.Chat.ChatCompletionUserMessageParam = {
+			// deepseek reasoner supports system prompt
+			systemMessage = deepseekReasoner
+				? {
+						role: "system",
+						content: systemPrompt,
+					}
+				: {
 						role: "user",
 						content: systemPrompt,
 					}
+
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 				model: modelId,
 				messages: [systemMessage, ...convertToOpenAiMessages(messages)],
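A self-contained sketch of the system-message selection introduced above, with the surrounding handler omitted; the message param types come from the openai SDK already used in this file, and the helper name is illustrative:

import OpenAI from "openai"

// deepseek-reasoner accepts a real system prompt in the non-streaming path;
// models like o1 still receive it as a user message.
function buildSystemMessage(
	modelId: string,
	systemPrompt: string,
): OpenAI.Chat.ChatCompletionSystemMessageParam | OpenAI.Chat.ChatCompletionUserMessageParam {
	const deepseekReasoner = modelId.includes("deepseek-reasoner")
	return deepseekReasoner
		? { role: "system", content: systemPrompt }
		: { role: "user", content: systemPrompt }
}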
@@ -106,7 +117,6 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 			model: this.getModel().id,
 			messages: [{ role: "user", content: prompt }],
-			temperature: 0,
 		}

 		const response = await this.client.chat.completions.create(requestOptions)
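The temperature: 0 removal above (and the matching test change earlier) means the non-streaming completion no longer pins temperature, so the provider default applies. A minimal sketch of the resulting request, assuming an already-constructed OpenAI client; the helper name is illustrative, not the repo's method:

import OpenAI from "openai"

// Hypothetical helper mirroring the non-streaming completion request above,
// with temperature left to the provider default.
async function completePrompt(client: OpenAI, modelId: string, prompt: string): Promise<string> {
	const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
		model: modelId,
		messages: [{ role: "user", content: prompt }],
	}
	const response = await client.chat.completions.create(requestOptions)
	return response.choices[0]?.message?.content ?? ""
}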
@@ -51,7 +51,6 @@ export interface ApiHandlerOptions {
 	setAzureApiVersion?: boolean
 	deepSeekBaseUrl?: string
 	deepSeekApiKey?: string
-	deepSeekModelId?: string
 	includeMaxTokens?: boolean
 }

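With deepSeekModelId gone, DeepSeek relies on the shared apiModelId field defined elsewhere in the same interface. A trimmed, hypothetical excerpt limited to fields visible in this diff plus that assumed shared field:

// Hypothetical excerpt of the options shape after this change; not the full interface.
interface ApiHandlerOptionsExcerpt {
	apiModelId?: string // shared model selector now used by the DeepSeek handler
	setAzureApiVersion?: boolean
	deepSeekBaseUrl?: string
	deepSeekApiKey?: string
	includeMaxTokens?: boolean
}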