Add support for displaying reasoning for OpenRouter models

Piotr Rogowski
2025-01-26 02:11:56 +01:00
parent 663747eb2d
commit c6607065b9
6 changed files with 109 additions and 2 deletions

@@ -10,6 +10,7 @@ import delay from "delay"
 // Add custom interface for OpenRouter params
 type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
 	transforms?: string[]
+	include_reasoning?: boolean
 }
 
 // Add custom interface for OpenRouter usage chunk
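The first hunk extends the OpenAI SDK's request type with OpenRouter-only fields via an intersection type, so the extra parameters type-check and still end up in the JSON request body. A minimal sketch of the pattern, outside the handler; the model id and message are illustrative, not taken from this commit:

import OpenAI from "openai"

// The SDK's own request shape, widened with OpenRouter-specific fields.
// Extra fields are serialized into the request body even though the upstream
// OpenAI types do not declare them.
type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
	transforms?: string[]
	include_reasoning?: boolean
}

// Annotating with the extended type (or casting at the call site, as the
// handler does with "as OpenRouterChatCompletionParams") keeps the compiler
// happy about the extra fields:
const params: OpenRouterChatCompletionParams = {
	model: "anthropic/claude-3.5-sonnet", // illustrative model id
	messages: [{ role: "user", content: "hello" }],
	stream: true,
	include_reasoning: true,
}
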
@@ -126,6 +127,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 			temperature: temperature,
 			messages: openAiMessages,
 			stream: true,
+			include_reasoning: true,
 			// This way, the transforms field will only be included in the parameters when openRouterUseMiddleOutTransform is true.
 			...(this.options.openRouterUseMiddleOutTransform && { transforms: ["middle-out"] }),
 		} as OpenRouterChatCompletionParams)
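With include_reasoning: true on the streaming request, OpenRouter adds a non-standard reasoning field to the streamed deltas for models that actually emit reasoning tokens; content continues to arrive as usual. A standalone sketch of the same request outside the handler, assuming an OpenAI client pointed at OpenRouter; the client setup, environment variable name, and model id are illustrative, not part of this commit:

import OpenAI from "openai"

// Streaming variant of the extended params type, kept local to this sketch.
type OpenRouterStreamingParams = OpenAI.Chat.ChatCompletionCreateParamsStreaming & {
	transforms?: string[]
	include_reasoning?: boolean
}

const client = new OpenAI({
	baseURL: "https://openrouter.ai/api/v1",
	apiKey: process.env.OPENROUTER_API_KEY, // illustrative env var name
})

async function main() {
	const params: OpenRouterStreamingParams = {
		model: "deepseek/deepseek-r1", // illustrative reasoning-capable model
		messages: [{ role: "user", content: "What is 17 * 24?" }],
		stream: true,
		include_reasoning: true,
	}

	for await (const chunk of await client.chat.completions.create(params)) {
		// The extra reasoning field is not in the SDK's delta type, so widen it here.
		const delta = chunk.choices[0]?.delta as
			| { content?: string | null; reasoning?: string | null }
			| undefined
		if (delta?.reasoning) {
			process.stdout.write(`[reasoning] ${delta.reasoning}`)
		}
		if (delta?.content) {
			process.stdout.write(delta.content)
		}
	}
}

main().catch(console.error)
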
@@ -145,6 +147,12 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 			}
 
 			const delta = chunk.choices[0]?.delta
+			if ("reasoning" in delta && delta.reasoning) {
+				yield {
+					type: "reasoning",
+					text: delta.reasoning,
+				} as ApiStreamChunk
+			}
 			if (delta?.content) {
 				fullResponseText += delta.content
 				yield {
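With this change the handler yields chunks tagged type: "reasoning" alongside the usual text chunks, so a consumer can route the model's reasoning to a separate display (for example a collapsible "thinking" section) instead of mixing it into the answer. A sketch of such a consumer; the ApiStreamChunk union below is inferred from this diff (the real union also carries at least a usage variant, per the usage-chunk comment in the first hunk), and collect is a hypothetical helper:

// Minimal stand-in for the stream's chunk union, inferred from the diff above.
type ApiStreamChunk =
	| { type: "reasoning"; text: string }
	| { type: "text"; text: string }

// Accumulate reasoning and answer text separately from any such stream.
async function collect(stream: AsyncIterable<ApiStreamChunk>) {
	let reasoning = ""
	let answer = ""
	for await (const chunk of stream) {
		if (chunk.type === "reasoning") {
			reasoning += chunk.text // e.g. rendered in a collapsible "thinking" panel
		} else if (chunk.type === "text") {
			answer += chunk.text // the user-visible response
		}
	}
	return { reasoning, answer }
}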