Add support for perplexity/sonar-reasoning

This commit is contained in:
Piotr Rogowski
2025-01-29 09:54:19 +01:00
parent 4026a87d2c
commit 1534a9c047

View File

@@ -114,11 +114,21 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
}
let temperature = 0
let topP: number | undefined = undefined
// Handle models based on deepseek-r1
if (
this.getModel().id === "deepseek/deepseek-r1" ||
this.getModel().id.startsWith("deepseek/deepseek-r1:") ||
this.getModel().id === "perplexity/sonar-reasoning"
) {
// Recommended temperature for DeepSeek reasoning models
temperature = 0.6
// DeepSeek highly recommends using user instead of system role
openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
// Some providers support top_p, and 0.95 is the value DeepSeek used in their benchmarks
topP = 0.95
}
// https://openrouter.ai/docs/transforms
@@ -127,6 +137,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
model: this.getModel().id,
max_tokens: maxTokens,
temperature: temperature,
top_p: topP,
messages: openAiMessages,
stream: true,
include_reasoning: true,