Merge pull request #563 from Szpadel/r1-display-reason
Display reasoning for supported OpenRouter models
@@ -10,6 +10,7 @@ import delay from "delay"
 // Add custom interface for OpenRouter params
 type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
 	transforms?: string[]
+	include_reasoning?: boolean
 }
 
 // Add custom interface for OpenRouter usage chunk
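A note on the widened params type: the OpenAI SDK's ChatCompletionCreateParams does not declare OpenRouter-only fields, so the intersection type above is what lets include_reasoning (and transforms) be handed to client.chat.completions.create without a compile error. A minimal sketch of the idea, assuming a client pointed at the OpenRouter base URL (the API key is a placeholder; the real request is built inside the handler):

import OpenAI from "openai"

type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
	transforms?: string[]
	include_reasoning?: boolean
}

async function demo() {
	// OpenRouter is reached through the OpenAI SDK by overriding baseURL.
	const client = new OpenAI({ baseURL: "https://openrouter.ai/api/v1", apiKey: "sk-or-..." })
	// Unknown fields pass through to OpenRouter unchanged; the cast only satisfies the SDK types.
	return client.chat.completions.create({
		model: "deepseek/deepseek-r1",
		messages: [{ role: "user", content: "hello" }],
		stream: true,
		include_reasoning: true,
	} as OpenRouterChatCompletionParams)
}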
@@ -110,14 +111,23 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 				maxTokens = 8_192
 				break
 		}
+
+		let temperature = 0
+		switch (this.getModel().id) {
+			case "deepseek/deepseek-r1":
+				// Recommended temperature for DeepSeek reasoning models
+				temperature = 0.6
+		}
+
 		// https://openrouter.ai/docs/transforms
 		let fullResponseText = ""
 		const stream = await this.client.chat.completions.create({
 			model: this.getModel().id,
 			max_tokens: maxTokens,
-			temperature: 0,
+			temperature: temperature,
 			messages: openAiMessages,
 			stream: true,
+			include_reasoning: true,
 			// This way, the transforms field will only be included in the parameters when openRouterUseMiddleOutTransform is true.
 			...(this.options.openRouterUseMiddleOutTransform && { transforms: ["middle-out"] }),
 		} as OpenRouterChatCompletionParams)
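The conditional spread on the transforms line is the usual trick for optionally including a key: spreading false or undefined contributes nothing, while spreading an object adds its properties. A small self-contained illustration (the flag name is just a stand-in for the option used above):

// Illustration of the `...(flag && { key: value })` idiom used for `transforms` above.
const useMiddleOut: boolean = false // stand-in for this.options.openRouterUseMiddleOutTransform

const requestExtras = {
	model: "deepseek/deepseek-r1",
	...(useMiddleOut && { transforms: ["middle-out"] }),
}

// When the flag is false, spreading `false` adds nothing, so the key is simply absent.
console.log("transforms" in requestExtras)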
@@ -137,6 +147,12 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 			}
 
 			const delta = chunk.choices[0]?.delta
+			if ("reasoning" in delta && delta.reasoning) {
+				yield {
+					type: "reasoning",
+					text: delta.reasoning,
+				} as ApiStreamChunk
+			}
 			if (delta?.content) {
 				fullResponseText += delta.content
 				yield {
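OpenRouter appears to stream the model's reasoning as a non-standard reasoning field on the delta, which the OpenAI SDK types do not declare; the `in` check above both narrows the type and skips chunks without it. Roughly, the delta is assumed to look like this (a sketch, not the SDK's actual type):

// Assumed shape of a streamed OpenRouter delta when include_reasoning is set.
interface OpenRouterDelta {
	content?: string | null
	reasoning?: string | null // reasoning tokens, only for models that emit them
}

// Sketch: route a delta into the two kinds of stream chunks yielded above.
function chunksFromDelta(delta: OpenRouterDelta): Array<{ type: "reasoning" | "text"; text: string }> {
	const chunks: Array<{ type: "reasoning" | "text"; text: string }> = []
	if (delta.reasoning) chunks.push({ type: "reasoning", text: delta.reasoning })
	if (delta.content) chunks.push({ type: "text", text: delta.content })
	return chunks
}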
@@ -1,11 +1,16 @@
 export type ApiStream = AsyncGenerator<ApiStreamChunk>
-export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk
+export type ApiStreamChunk = ApiStreamTextChunk | ApiStreamUsageChunk | ApiStreamReasoningChunk
 
 export interface ApiStreamTextChunk {
 	type: "text"
 	text: string
 }
 
+export interface ApiStreamReasoningChunk {
+	type: "reasoning"
+	text: string
+}
+
 export interface ApiStreamUsageChunk {
 	type: "usage"
 	inputTokens: number
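ApiStreamChunk is a discriminated union keyed on the type field, so consumers can switch on it and get the correctly narrowed payload in each branch. A minimal consumer sketch using the types defined in this file:

// Consumer sketch: each case below sees the correctly narrowed chunk shape.
async function collect(stream: ApiStream): Promise<{ text: string; reasoning: string }> {
	let text = ""
	let reasoning = ""
	for await (const chunk of stream) {
		switch (chunk.type) {
			case "text":
				text += chunk.text
				break
			case "reasoning":
				reasoning += chunk.text
				break
			case "usage":
				// chunk.inputTokens / chunk.outputTokens are available here
				break
		}
	}
	return { text, reasoning }
}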
@@ -2219,7 +2219,7 @@ export class Cline {
 				}
 
 				/*
 				Seeing out of bounds is fine, it means that the next tool call is being built up and ready to add to assistantMessageContent to present.
 				When you see the UI inactive during this, it means that a tool is breaking without presenting any UI. For example the write_to_file tool was breaking when relpath was undefined, and for invalid relpath it never presented UI.
 				*/
 				this.presentAssistantMessageLocked = false // this needs to be placed here, if not then calling this.presentAssistantMessage below would fail (sometimes) since it's locked
@@ -2391,9 +2391,14 @@ export class Cline {
 
 			const stream = this.attemptApiRequest(previousApiReqIndex) // yields only if the first chunk is successful, otherwise will allow the user to retry the request (most likely due to rate limit error, which gets thrown on the first chunk)
 			let assistantMessage = ""
+			let reasoningMessage = ""
 			try {
 				for await (const chunk of stream) {
 					switch (chunk.type) {
+						case "reasoning":
+							reasoningMessage += chunk.text
+							await this.say("reasoning", reasoningMessage, undefined, true)
+							break
 						case "usage":
 							inputTokens += chunk.inputTokens
 							outputTokens += chunk.outputTokens
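Note that each chunk re-sends the whole accumulated reasoningMessage with the last argument (the partial flag) set to true, so the webview keeps updating one in-progress message rather than appending a new row per chunk. Sketched in isolation below; the say signature is inferred only from the call above, and the streaming source is simulated:

// Inferred, simplified signature; not the full Cline API.
type Say = (type: string, text?: string, images?: string[], partial?: boolean) => Promise<void>

// Simulated reasoning chunks standing in for the provider stream.
async function* fakeReasoningChunks(): AsyncGenerator<string> {
	yield "Weighing the options... "
	yield "the change belongs in the OpenRouter handler."
}

async function streamReasoning(say: Say) {
	let reasoningMessage = ""
	for await (const piece of fakeReasoningChunks()) {
		reasoningMessage += piece
		await say("reasoning", reasoningMessage, undefined, true) // partial: keep replacing one message
	}
}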
@@ -121,6 +121,7 @@ export interface ClineMessage {
 	text?: string
 	images?: string[]
 	partial?: boolean
+	reasoning?: string
 }
 
 export type ClineAsk =
@@ -142,6 +143,7 @@ export type ClineSay =
 	| "api_req_started"
 	| "api_req_finished"
 	| "text"
+	| "reasoning"
 	| "completion_result"
 	| "user_feedback"
 	| "user_feedback_diff"
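With these two additions, a reasoning update travels through the normal say pipeline as an ordinary ClineMessage whose say field is "reasoning". An illustrative value (the ts, type, and say fields come from the rest of ClineMessage, not from this hunk, and the values are made up):

// Hypothetical partial reasoning message as the webview would receive it.
const reasoningUpdate = {
	ts: Date.now(),
	type: "say" as const,
	say: "reasoning" as const,
	text: "Checking which provider emits reasoning tokens...",
	partial: true, // still streaming; the UI keeps replacing this message's text
}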
@@ -15,6 +15,7 @@ import { vscode } from "../../utils/vscode"
 import CodeAccordian, { removeLeadingNonAlphanumeric } from "../common/CodeAccordian"
 import CodeBlock, { CODE_BLOCK_BG_COLOR } from "../common/CodeBlock"
 import MarkdownBlock from "../common/MarkdownBlock"
+import ReasoningBlock from "./ReasoningBlock"
 import Thumbnails from "../common/Thumbnails"
 import McpResourceRow from "../mcp/McpResourceRow"
 import McpToolRow from "../mcp/McpToolRow"
@@ -79,6 +80,14 @@ export const ChatRowContent = ({
 	isStreaming,
 }: ChatRowContentProps) => {
 	const { mcpServers, alwaysAllowMcp } = useExtensionState()
+	const [reasoningCollapsed, setReasoningCollapsed] = useState(false)
+
+	// Auto-collapse reasoning when new messages arrive
+	useEffect(() => {
+		if (!isLast && message.say === "reasoning") {
+			setReasoningCollapsed(true)
+		}
+	}, [isLast, message.say])
 	const [cost, apiReqCancelReason, apiReqStreamingFailedMessage] = useMemo(() => {
 		if (message.text != null && message.say === "api_req_started") {
 			const info: ClineApiReqInfo = JSON.parse(message.text)
@@ -472,6 +481,14 @@ export const ChatRowContent = ({
 	switch (message.type) {
 		case "say":
 			switch (message.say) {
+				case "reasoning":
+					return (
+						<ReasoningBlock
+							content={message.text || ""}
+							isCollapsed={reasoningCollapsed}
+							onToggleCollapse={() => setReasoningCollapsed(!reasoningCollapsed)}
+						/>
+					)
 				case "api_req_started":
 					return (
 						<>
webview-ui/src/components/chat/ReasoningBlock.tsx (new file, 70 lines)
@@ -0,0 +1,70 @@
+import React, { useEffect, useRef } from "react"
+import { CODE_BLOCK_BG_COLOR } from "../common/CodeBlock"
+import MarkdownBlock from "../common/MarkdownBlock"
+
+interface ReasoningBlockProps {
+	content: string
+	isCollapsed?: boolean
+	onToggleCollapse?: () => void
+	autoHeight?: boolean
+}
+
+const ReasoningBlock: React.FC<ReasoningBlockProps> = ({
+	content,
+	isCollapsed = false,
+	onToggleCollapse,
+	autoHeight = false,
+}) => {
+	const contentRef = useRef<HTMLDivElement>(null)
+
+	// Scroll to bottom when content updates
+	useEffect(() => {
+		if (contentRef.current && !isCollapsed) {
+			contentRef.current.scrollTop = contentRef.current.scrollHeight
+		}
+	}, [content, isCollapsed])
+
+	return (
+		<div
+			style={{
+				backgroundColor: CODE_BLOCK_BG_COLOR,
+				border: "1px solid var(--vscode-editorGroup-border)",
+				borderRadius: "3px",
+				overflow: "hidden",
+			}}>
+			<div
+				onClick={onToggleCollapse}
+				style={{
+					padding: "8px 12px",
+					cursor: "pointer",
+					userSelect: "none",
+					display: "flex",
+					alignItems: "center",
+					justifyContent: "space-between",
+					borderBottom: isCollapsed ? "none" : "1px solid var(--vscode-editorGroup-border)",
+				}}>
+				<span style={{ fontWeight: "bold" }}>Reasoning</span>
+				<span className={`codicon codicon-chevron-${isCollapsed ? "right" : "down"}`}></span>
+			</div>
+			{!isCollapsed && (
+				<div
+					ref={contentRef}
+					style={{
+						padding: "8px 12px",
+						maxHeight: autoHeight ? "none" : "160px",
+						overflowY: "auto",
+					}}>
+					<div
+						style={{
+							fontSize: "13px",
+							opacity: 0.9,
+						}}>
+						<MarkdownBlock markdown={content} />
+					</div>
+				</div>
+			)}
+		</div>
+	)
+}
+
+export default ReasoningBlock
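For reference, a minimal standalone usage of the new component, with the collapse state owned by the parent the same way ChatRow does above (the wrapper component here is purely illustrative):

import React, { useState } from "react"
import ReasoningBlock from "./ReasoningBlock"

// Illustrative parent: holds the collapsed flag and passes the toggle down.
const ReasoningPreview: React.FC<{ text: string }> = ({ text }) => {
	const [collapsed, setCollapsed] = useState(false)
	return (
		<ReasoningBlock
			content={text}
			isCollapsed={collapsed}
			onToggleCollapse={() => setCollapsed(!collapsed)}
			autoHeight // grow with the content instead of scrolling at 160px
		/>
	)
}

export default ReasoningPreview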