Merge pull request #662 from RooVetGit/use_exponential_retry_for_api_failures

Use an exponential delay for API retries
Matt Rubens
2025-01-30 11:51:53 -05:00
committed by GitHub
3 changed files with 38 additions and 29 deletions
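
In short: instead of waiting a fixed `requestDelaySeconds` between automatic retries, the delay now doubles with each consecutive failure. A minimal sketch of the resulting schedule, using the formula from the patch below (the standalone `backoffSeconds` helper is illustrative only; in the actual change the calculation is inlined in `Cline.attemptApiRequest`):

```typescript
// Sketch of the backoff schedule introduced by this change.
// baseDelay falls back to 5 when requestDelaySeconds is not configured (per the patch below).
function backoffSeconds(baseDelay: number, retryAttempt: number): number {
	return Math.ceil(baseDelay * Math.pow(2, retryAttempt))
}

// With the default base of 5 seconds, consecutive retries wait:
// attempt 0 -> 5s, attempt 1 -> 10s, attempt 2 -> 20s, attempt 3 -> 40s, ...
const schedule = [0, 1, 2, 3].map((attempt) => backoffSeconds(5, attempt)) // [5, 10, 20, 40]
```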

@@ -0,0 +1,5 @@
+---
+"roo-cline": patch
+---
+
+Use an exponential backoff for API retries

@@ -793,7 +793,7 @@ export class Cline {
 		}
 	}

-	async *attemptApiRequest(previousApiReqIndex: number): ApiStream {
+	async *attemptApiRequest(previousApiReqIndex: number, retryAttempt: number = 0): ApiStream {
 		let mcpHub: McpHub | undefined

 		const { mcpEnabled, alwaysApproveResubmit, requestDelaySeconds } =
@@ -887,21 +887,29 @@ export class Cline {
 				// note that this api_req_failed ask is unique in that we only present this option if the api hasn't streamed any content yet (ie it fails on the first chunk due), as it would allow them to hit a retry button. However if the api failed mid-stream, it could be in any arbitrary state where some tools may have executed, so that error is handled differently and requires cancelling the task entirely.
 				if (alwaysApproveResubmit) {
 					const errorMsg = error.message ?? "Unknown error"
-					const requestDelay = requestDelaySeconds || 5
-					// Automatically retry with delay
-					// Show countdown timer in error color
-					for (let i = requestDelay; i > 0; i--) {
+					const baseDelay = requestDelaySeconds || 5
+					const exponentialDelay = Math.ceil(baseDelay * Math.pow(2, retryAttempt))
+					// Show countdown timer with exponential backoff
+					for (let i = exponentialDelay; i > 0; i--) {
 						await this.say(
 							"api_req_retry_delayed",
-							`${errorMsg}\n\nRetrying in ${i} seconds...`,
+							`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
 							undefined,
 							true,
 						)
 						await delay(1000)
 					}
-					await this.say("api_req_retry_delayed", `${errorMsg}\n\nRetrying now...`, undefined, false)
-					// delegate generator output from the recursive call
-					yield* this.attemptApiRequest(previousApiReqIndex)
+					await this.say(
+						"api_req_retry_delayed",
+						`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying now...`,
+						undefined,
+						false,
+					)
+					// delegate generator output from the recursive call with incremented retry count
+					yield* this.attemptApiRequest(previousApiReqIndex, retryAttempt + 1)
 					return
 				} else {
 					const { response } = await this.ask(
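
Two details worth noting from the hunk above: the countdown loop still emits one `api_req_retry_delayed` message per second (via `delay(1000)`), so the full `exponentialDelay` is ticked down visibly before the request is re-attempted, and the attempt counter is threaded through the recursive `yield*` call rather than stored on the instance. As written here, no upper bound is applied to the delay, so it keeps doubling on every consecutive failure.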

@@ -730,25 +730,19 @@ describe("Cline", () => {
 			const iterator = cline.attemptApiRequest(0)
 			await iterator.next()

+			// Calculate expected delay for first retry
+			const baseDelay = 3 // from requestDelaySeconds
+
 			// Verify countdown messages
-			expect(saySpy).toHaveBeenCalledWith(
-				"api_req_retry_delayed",
-				expect.stringContaining("Retrying in 3 seconds"),
-				undefined,
-				true,
-			)
-			expect(saySpy).toHaveBeenCalledWith(
-				"api_req_retry_delayed",
-				expect.stringContaining("Retrying in 2 seconds"),
-				undefined,
-				true,
-			)
-			expect(saySpy).toHaveBeenCalledWith(
-				"api_req_retry_delayed",
-				expect.stringContaining("Retrying in 1 seconds"),
-				undefined,
-				true,
-			)
+			for (let i = baseDelay; i > 0; i--) {
+				expect(saySpy).toHaveBeenCalledWith(
+					"api_req_retry_delayed",
+					expect.stringContaining(`Retrying in ${i} seconds`),
+					undefined,
+					true,
+				)
+			}
 			expect(saySpy).toHaveBeenCalledWith(
 				"api_req_retry_delayed",
 				expect.stringContaining("Retrying now"),
@@ -757,12 +751,14 @@ describe("Cline", () => {
 			)

 			// Verify delay was called correctly
-			expect(mockDelay).toHaveBeenCalledTimes(3)
+			expect(mockDelay).toHaveBeenCalledTimes(baseDelay)
 			expect(mockDelay).toHaveBeenCalledWith(1000)

 			// Verify error message content
 			const errorMessage = saySpy.mock.calls.find((call) => call[1]?.includes(mockError.message))?.[1]
-			expect(errorMessage).toBe(`${mockError.message}\n\nRetrying in 3 seconds...`)
+			expect(errorMessage).toBe(
+				`${mockError.message}\n\nRetry attempt 1\nRetrying in ${baseDelay} seconds...`,
+			)
 		})

 	describe("loadContext", () => {