Add support for displaying reasoning for OpenRouter models

This commit is contained in:
Piotr Rogowski
2025-01-26 02:11:56 +01:00
parent 663747eb2d
commit c6607065b9
6 changed files with 109 additions and 2 deletions

View File

@@ -10,6 +10,7 @@ import delay from "delay"
// OpenRouter accepts extra, non-standard fields on top of the OpenAI
// chat-completion params; model them here so requests stay type-checked.
type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
transforms?: string[] // server-side prompt transforms, e.g. ["middle-out"]
include_reasoning?: boolean // when true, OpenRouter streams reasoning tokens in the response delta
}
// Add custom interface for OpenRouter usage chunk
@@ -126,6 +127,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
temperature: temperature,
messages: openAiMessages,
stream: true,
include_reasoning: true,
// This way, the transforms field will only be included in the parameters when openRouterUseMiddleOutTransform is true.
...(this.options.openRouterUseMiddleOutTransform && { transforms: ["middle-out"] }),
} as OpenRouterChatCompletionParams)
@@ -145,6 +147,12 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
}
const delta = chunk.choices[0]?.delta
if ("reasoning" in delta && delta.reasoning) {
yield {
type: "reasoning",
text: delta.reasoning,
} as ApiStreamChunk
}
if (delta?.content) {
fullResponseText += delta.content
yield {

View File

@@ -1,11 +1,16 @@
// Async stream of typed chunks yielded while an API request is in flight.
export type ApiStream = AsyncGenerator<ApiStreamChunk>
export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk
// Discriminated union of every chunk kind the stream can yield; narrow on `type`.
export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk | ApiStreamReasoningChunk
// Incremental assistant-response text emitted as it arrives from the model.
export interface ApiStreamTextChunk {
type: "text"
text: string
}
// Incremental reasoning ("thinking") text, streamed separately from the answer text.
export interface ApiStreamReasoningChunk {
type: "reasoning"
text: string
}
export interface ApiStreamUsageChunk {
type: "usage"
inputTokens: number

View File

@@ -2219,7 +2219,7 @@ export class Cline {
}
/*
Seeing out of bounds is fine, it means that the next tool call is being built up and ready to add to assistantMessageContent to present.
Seeing out of bounds is fine, it means that the next tool call is being built up and ready to add to assistantMessageContent to present.
When you see the UI inactive during this, it means that a tool is breaking without presenting any UI. For example the write_to_file tool was breaking when relpath was undefined, and for invalid relpath it never presented UI.
*/
this.presentAssistantMessageLocked = false // this needs to be placed here, if not then calling this.presentAssistantMessage below would fail (sometimes) since it's locked
@@ -2391,9 +2391,14 @@ export class Cline {
const stream = this.attemptApiRequest(previousApiReqIndex) // yields only if the first chunk is successful, otherwise will allow the user to retry the request (most likely due to rate limit error, which gets thrown on the first chunk)
let assistantMessage = ""
let reasoningMessage = ""
try {
for await (const chunk of stream) {
switch (chunk.type) {
case "reasoning":
reasoningMessage += chunk.text
await this.say("reasoning", reasoningMessage, undefined, true)
break
case "usage":
inputTokens += chunk.inputTokens
outputTokens += chunk.outputTokens

View File

@@ -121,6 +121,7 @@ export interface ClineMessage {
text?: string
images?: string[]
partial?: boolean
reasoning?: string
}
export type ClineAsk =
@@ -142,6 +143,7 @@ export type ClineSay =
| "api_req_started"
| "api_req_finished"
| "text"
| "reasoning"
| "completion_result"
| "user_feedback"
| "user_feedback_diff"