Merge pull request #662 from RooVetGit/use_exponential_retry_for_api_failures

Use an exponential delay for API retries
Matt Rubens authored on 2025-01-30 11:51:53 -05:00, committed by GitHub
3 changed files with 38 additions and 29 deletions


@@ -0,0 +1,5 @@
+---
+"roo-cline": patch
+---
+
+Use an exponential backoff for API retries

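For context: the change below replaces the fixed per-attempt wait with ceil(baseDelay * 2^retryAttempt) seconds, where baseDelay falls back to 5 when requestDelaySeconds is not configured. A minimal sketch (not part of the diff) of the resulting delay progression, assuming the default base of 5 seconds:

// Hypothetical illustration of the backoff formula introduced in this PR
const baseDelay = 5 // requestDelaySeconds || 5
for (let retryAttempt = 0; retryAttempt < 4; retryAttempt++) {
    // ceil(5 * 2^n) => 5, 10, 20, 40 seconds for attempts 0..3
    console.log(Math.ceil(baseDelay * Math.pow(2, retryAttempt)))
}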

@@ -793,7 +793,7 @@ export class Cline {
 }
 }
-async *attemptApiRequest(previousApiReqIndex: number): ApiStream {
+async *attemptApiRequest(previousApiReqIndex: number, retryAttempt: number = 0): ApiStream {
 let mcpHub: McpHub | undefined
 const { mcpEnabled, alwaysApproveResubmit, requestDelaySeconds } =
@@ -887,21 +887,29 @@ export class Cline {
 // note that this api_req_failed ask is unique in that we only present this option if the api hasn't streamed any content yet (ie it fails on the first chunk due), as it would allow them to hit a retry button. However if the api failed mid-stream, it could be in any arbitrary state where some tools may have executed, so that error is handled differently and requires cancelling the task entirely.
 if (alwaysApproveResubmit) {
 const errorMsg = error.message ?? "Unknown error"
-const requestDelay = requestDelaySeconds || 5
-// Automatically retry with delay
-// Show countdown timer in error color
-for (let i = requestDelay; i > 0; i--) {
+const baseDelay = requestDelaySeconds || 5
+const exponentialDelay = Math.ceil(baseDelay * Math.pow(2, retryAttempt))
+// Show countdown timer with exponential backoff
+for (let i = exponentialDelay; i > 0; i--) {
 await this.say(
 "api_req_retry_delayed",
-`${errorMsg}\n\nRetrying in ${i} seconds...`,
+`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
 undefined,
 true,
 )
 await delay(1000)
 }
-await this.say("api_req_retry_delayed", `${errorMsg}\n\nRetrying now...`, undefined, false)
-// delegate generator output from the recursive call
-yield* this.attemptApiRequest(previousApiReqIndex)
+await this.say(
+"api_req_retry_delayed",
+`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying now...`,
+undefined,
+false,
+)
+// delegate generator output from the recursive call with incremented retry count
+yield* this.attemptApiRequest(previousApiReqIndex, retryAttempt + 1)
 return
 } else {
 const { response } = await this.ask(

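The hunk above makes the catch path wait for the exponential delay and then recurse with retryAttempt + 1, so each automatic resubmit waits twice as long as the previous one (no retry cap appears in the code shown). A rough standalone sketch of the same pattern (hypothetical helper, not code from this PR):

// Hypothetical generic helper illustrating the exponential-backoff retry pattern
async function retryWithBackoff<T>(
    fn: () => Promise<T>,
    baseDelaySeconds: number = 5,
    attempt: number = 0,
): Promise<T> {
    try {
        return await fn()
    } catch (error) {
        // Delay doubles on every failed attempt: 5s, 10s, 20s, ...
        const waitSeconds = Math.ceil(baseDelaySeconds * Math.pow(2, attempt))
        await new Promise((resolve) => setTimeout(resolve, waitSeconds * 1000))
        return retryWithBackoff(fn, baseDelaySeconds, attempt + 1)
    }
}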

@@ -730,25 +730,19 @@ describe("Cline", () => {
 const iterator = cline.attemptApiRequest(0)
 await iterator.next()
+// Calculate expected delay for first retry
+const baseDelay = 3 // from requestDelaySeconds
 // Verify countdown messages
-expect(saySpy).toHaveBeenCalledWith(
-"api_req_retry_delayed",
-expect.stringContaining("Retrying in 3 seconds"),
-undefined,
-true,
-)
-expect(saySpy).toHaveBeenCalledWith(
-"api_req_retry_delayed",
-expect.stringContaining("Retrying in 2 seconds"),
-undefined,
-true,
-)
-expect(saySpy).toHaveBeenCalledWith(
-"api_req_retry_delayed",
-expect.stringContaining("Retrying in 1 seconds"),
-undefined,
-true,
-)
+for (let i = baseDelay; i > 0; i--) {
+expect(saySpy).toHaveBeenCalledWith(
+"api_req_retry_delayed",
+expect.stringContaining(`Retrying in ${i} seconds`),
+undefined,
+true,
+)
+}
 expect(saySpy).toHaveBeenCalledWith(
 "api_req_retry_delayed",
 expect.stringContaining("Retrying now"),
@@ -757,12 +751,14 @@ describe("Cline", () => {
 )
 // Verify delay was called correctly
-expect(mockDelay).toHaveBeenCalledTimes(3)
+expect(mockDelay).toHaveBeenCalledTimes(baseDelay)
 expect(mockDelay).toHaveBeenCalledWith(1000)
 // Verify error message content
 const errorMessage = saySpy.mock.calls.find((call) => call[1]?.includes(mockError.message))?.[1]
-expect(errorMessage).toBe(`${mockError.message}\n\nRetrying in 3 seconds...`)
+expect(errorMessage).toBe(
+`${mockError.message}\n\nRetry attempt 1\nRetrying in ${baseDelay} seconds...`,
+)
 })
 describe("loadContext", () => {