mirror of https://github.com/pacnpal/Roo-Code.git
synced 2025-12-20 12:21:13 -05:00

Use an exponential delay for API retries

.changeset/tame-walls-kiss.md | 5 (normal file)
.changeset/tame-walls-kiss.md (new file)
@@ -0,0 +1,5 @@
+---
+"roo-cline": patch
+---
+
+Use an exponential backoff for API retries
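For context on the new backoff, the formula in the diff below multiplies the configured base delay by two for each retry attempt. A minimal sketch of the resulting schedule, assuming the 5-second fallback used when requestDelaySeconds is unset:

// Sketch only: reproduces the delay formula from the diff below with a 5-second base.
const baseDelay = 5

for (let retryAttempt = 0; retryAttempt < 4; retryAttempt++) {
	const exponentialDelay = Math.ceil(baseDelay * Math.pow(2, retryAttempt))
	console.log(`attempt ${retryAttempt + 1}: wait ${exponentialDelay}s`)
}
// attempt 1: wait 5s, attempt 2: wait 10s, attempt 3: wait 20s, attempt 4: wait 40s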
@@ -793,7 +793,7 @@ export class Cline {
 		}
 	}

-	async *attemptApiRequest(previousApiReqIndex: number): ApiStream {
+	async *attemptApiRequest(previousApiReqIndex: number, retryAttempt: number = 0): ApiStream {
 		let mcpHub: McpHub | undefined

 		const { mcpEnabled, alwaysApproveResubmit, requestDelaySeconds } =
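Note that retryAttempt defaults to 0, so existing callers that pass only previousApiReqIndex keep working unchanged; only the recursive retry path below supplies a higher attempt count. A tiny sketch of that call shape (standalone stand-in, not the real Cline method):

// Sketch only: illustrates the defaulted parameter, not the real implementation.
async function* attemptApiRequest(previousApiReqIndex: number, retryAttempt: number = 0) {
	yield { previousApiReqIndex, retryAttempt }
}

attemptApiRequest(0) // existing call sites: retryAttempt defaults to 0
attemptApiRequest(0, 2) // retry path: explicit attempt count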
@@ -887,21 +887,29 @@ export class Cline {
 			// note that this api_req_failed ask is unique in that we only present this option if the api hasn't streamed any content yet (ie it fails on the first chunk due), as it would allow them to hit a retry button. However if the api failed mid-stream, it could be in any arbitrary state where some tools may have executed, so that error is handled differently and requires cancelling the task entirely.
 			if (alwaysApproveResubmit) {
 				const errorMsg = error.message ?? "Unknown error"
-				const requestDelay = requestDelaySeconds || 5
-				// Automatically retry with delay
-				// Show countdown timer in error color
-				for (let i = requestDelay; i > 0; i--) {
+				const baseDelay = requestDelaySeconds || 5
+				const exponentialDelay = Math.ceil(baseDelay * Math.pow(2, retryAttempt))
+
+				// Show countdown timer with exponential backoff
+				for (let i = exponentialDelay; i > 0; i--) {
 					await this.say(
 						"api_req_retry_delayed",
-						`${errorMsg}\n\nRetrying in ${i} seconds...`,
+						`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
 						undefined,
 						true,
 					)
 					await delay(1000)
 				}
-				await this.say("api_req_retry_delayed", `${errorMsg}\n\nRetrying now...`, undefined, false)
-				// delegate generator output from the recursive call
-				yield* this.attemptApiRequest(previousApiReqIndex)
+
+				await this.say(
+					"api_req_retry_delayed",
+					`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying now...`,
+					undefined,
+					false,
+				)
+
+				// delegate generator output from the recursive call with incremented retry count
+				yield* this.attemptApiRequest(previousApiReqIndex, retryAttempt + 1)
 				return
 			} else {
 				const { response } = await this.ask(
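Putting the pieces together: on a first-chunk failure with alwaysApproveResubmit enabled, the generator counts down exponentialDelay one-second ticks, reports the attempt number, and then delegates to itself with retryAttempt + 1 so the next failure waits twice as long. A condensed standalone sketch of that control flow, with hypothetical fetchChunks and onTick callbacks standing in for the real API stream and this.say:

// Sketch only: condensed version of the retry control flow above.
// fetchChunks and onTick are hypothetical stand-ins for the real API stream and this.say.
const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms))

async function* requestWithBackoff(
	fetchChunks: () => AsyncIterable<string>,
	onTick: (message: string) => Promise<void>,
	baseDelay = 5,
	retryAttempt = 0,
): AsyncGenerator<string> {
	try {
		yield* fetchChunks()
	} catch (error) {
		const errorMsg = error instanceof Error ? error.message : "Unknown error"
		const exponentialDelay = Math.ceil(baseDelay * Math.pow(2, retryAttempt))

		// Count down one second at a time so a UI can display progress.
		for (let i = exponentialDelay; i > 0; i--) {
			await onTick(`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`)
			await sleep(1000)
		}

		// Delegate to the recursive call with an incremented retry count,
		// mirroring the yield* in the diff above.
		yield* requestWithBackoff(fetchChunks, onTick, baseDelay, retryAttempt + 1)
	}
}

Unlike the production code, this sketch retries any failure; the real path only offers automatic retry when the error happens before any content has streamed, as the comment in the diff explains.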
@@ -730,25 +730,19 @@ describe("Cline", () => {
 			const iterator = cline.attemptApiRequest(0)
 			await iterator.next()

+			// Calculate expected delay for first retry
+			const baseDelay = 3 // from requestDelaySeconds
+
 			// Verify countdown messages
-			expect(saySpy).toHaveBeenCalledWith(
-				"api_req_retry_delayed",
-				expect.stringContaining("Retrying in 3 seconds"),
-				undefined,
-				true,
-			)
-			expect(saySpy).toHaveBeenCalledWith(
-				"api_req_retry_delayed",
-				expect.stringContaining("Retrying in 2 seconds"),
-				undefined,
-				true,
-			)
-			expect(saySpy).toHaveBeenCalledWith(
-				"api_req_retry_delayed",
-				expect.stringContaining("Retrying in 1 seconds"),
-				undefined,
-				true,
-			)
+			for (let i = baseDelay; i > 0; i--) {
+				expect(saySpy).toHaveBeenCalledWith(
+					"api_req_retry_delayed",
+					expect.stringContaining(`Retrying in ${i} seconds`),
+					undefined,
+					true,
+				)
+			}
+
 			expect(saySpy).toHaveBeenCalledWith(
 				"api_req_retry_delayed",
 				expect.stringContaining("Retrying now"),
@@ -757,12 +751,14 @@ describe("Cline", () => {
 			)

 			// Verify delay was called correctly
-			expect(mockDelay).toHaveBeenCalledTimes(3)
+			expect(mockDelay).toHaveBeenCalledTimes(baseDelay)
 			expect(mockDelay).toHaveBeenCalledWith(1000)

 			// Verify error message content
 			const errorMessage = saySpy.mock.calls.find((call) => call[1]?.includes(mockError.message))?.[1]
-			expect(errorMessage).toBe(`${mockError.message}\n\nRetrying in 3 seconds...`)
+			expect(errorMessage).toBe(
+				`${mockError.message}\n\nRetry attempt 1\nRetrying in ${baseDelay} seconds...`,
+			)
 		})

 		describe("loadContext", () => {
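The assertions above are driven by baseDelay (3 seconds, from the mocked requestDelaySeconds), so the countdown loop and toHaveBeenCalledTimes(baseDelay) stay in sync if that setting changes. For later attempts the same arithmetic doubles the expected tick count; a hypothetical helper mirroring it:

// Sketch only: hypothetical helper mirroring the arithmetic the tests rely on.
function expectedCountdownTicks(requestDelaySeconds: number, retryAttempt: number): number {
	return Math.ceil(requestDelaySeconds * Math.pow(2, retryAttempt))
}

expectedCountdownTicks(3, 0) // 3 — the first retry, matching toHaveBeenCalledTimes(baseDelay)
expectedCountdownTicks(3, 1) // 6 — a second consecutive failure would count down twice as long
expectedCountdownTicks(3, 2) // 12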