Add rate limit aware retries

Enhance retry logic to detect HTTP 429 rate-limit responses, parse Retry-After headers, and apply rate-limit-aware backoff across all entity submissions. Retries now respect server-provided guidance, and user feedback is preserved via UI events.
gpt-engineer-app[bot]
2025-11-10 19:05:31 +00:00
parent 8ed5edbe24
commit 73e847015d
3 changed files with 381 additions and 35 deletions

@@ -23,6 +23,97 @@ export interface RetryOptions {
shouldRetry?: (error: unknown) => boolean;
}
/**
* Extract Retry-After value from error headers
* @param error - The error object
* @returns Delay in milliseconds, or null if not found
*/
export function extractRetryAfter(error: unknown): number | null {
if (!error || typeof error !== 'object') return null;
// Check for Retry-After in error object
const errorWithHeaders = error as { headers?: Headers | Record<string, string>; retryAfter?: number | string };
// Direct retryAfter property
if (errorWithHeaders.retryAfter) {
const retryAfter = errorWithHeaders.retryAfter;
if (typeof retryAfter === 'number') {
return retryAfter * 1000; // Convert seconds to milliseconds
}
if (typeof retryAfter === 'string') {
// Try parsing as number first (delay-seconds)
const seconds = parseInt(retryAfter, 10);
if (!isNaN(seconds)) {
return seconds * 1000;
}
// Try parsing as HTTP-date
const date = new Date(retryAfter);
if (!isNaN(date.getTime())) {
const delay = date.getTime() - Date.now();
return Math.max(0, delay);
}
}
}
// Check headers object
if (errorWithHeaders.headers) {
let retryAfterValue: string | null = null;
if (errorWithHeaders.headers instanceof Headers) {
retryAfterValue = errorWithHeaders.headers.get('retry-after');
} else if (typeof errorWithHeaders.headers === 'object') {
// Check both lowercase and capitalized versions
retryAfterValue = errorWithHeaders.headers['retry-after']
|| errorWithHeaders.headers['Retry-After']
|| null;
}
if (retryAfterValue) {
// Try parsing as number first (delay-seconds)
const seconds = parseInt(retryAfterValue, 10);
if (!isNaN(seconds)) {
return seconds * 1000;
}
// Try parsing as HTTP-date
const date = new Date(retryAfterValue);
if (!isNaN(date.getTime())) {
const delay = date.getTime() - Date.now();
return Math.max(0, delay);
}
}
}
return null;
}
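
For reference, Retry-After can arrive either as delta-seconds or as an HTTP-date, and the helper normalizes both to milliseconds. A minimal sketch of the expected behaviour; the import path is a guess, since the diff does not show the file name:

import { extractRetryAfter } from './utils/retry'; // assumed path

// A numeric retryAfter property is treated as seconds and converted to milliseconds.
extractRetryAfter({ retryAfter: 30 });                                 // 30000

// A header given as delta-seconds is parsed the same way.
extractRetryAfter({ headers: { 'Retry-After': '120' } });              // 120000

// A header given as an HTTP-date yields the time remaining until that date.
const later = new Date(Date.now() + 5000).toUTCString();
extractRetryAfter({ headers: new Headers({ 'retry-after': later }) }); // roughly 5000

// Anything without usable information falls through to null.
extractRetryAfter(new Error('boom'));                                  // null
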
/**
* Check if error is a rate limit (429) error
* @param error - The error to check
* @returns true if error is a rate limit error
*/
export function isRateLimitError(error: unknown): boolean {
if (!error || typeof error !== 'object') return false;
const errorWithStatus = error as { status?: number; code?: string };
// HTTP 429 status
if (errorWithStatus.status === 429) return true;
// Check error message for rate limit indicators
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('rate limit') ||
message.includes('too many requests') ||
message.includes('quota exceeded')) {
return true;
}
}
return false;
}
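
Detection relies on either an explicit 429 status or well-known phrases in the error message. A quick illustrative sketch; the error shapes below are made up for the example:

// Supabase/PostgREST-style error object carrying an HTTP status.
isRateLimitError({ status: 429 });                                  // true

// Plain Error whose message mentions a known rate-limit phrase.
isRateLimitError(new Error('Too many requests, please slow down')); // true

// Unrelated failures are not classified as rate limits.
isRateLimitError(new Error('network timeout'));                     // false
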
/**
* Determines if an error is transient and retryable
* @param error - The error to check
@@ -56,7 +147,7 @@ export function isRetryableError(error: unknown): boolean {
if (supabaseError.code === 'PGRST000') return true; // Connection error
// HTTP status codes indicating transient failures
if (supabaseError.status === 429) return true; // Rate limit - ALWAYS retry
if (supabaseError.status === 503) return true; // Service unavailable
if (supabaseError.status === 504) return true; // Gateway timeout
if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
@@ -78,12 +169,46 @@ export function isRetryableError(error: unknown): boolean {
}
/**
* Calculates delay for next retry attempt using exponential backoff or Retry-After header
* @param attempt - Current attempt number (0-indexed)
* @param options - Retry configuration
* @param error - The error that triggered the retry (to check for Retry-After)
* @returns Delay in milliseconds
*/
function calculateBackoffDelay(
attempt: number,
options: Required<RetryOptions>,
error?: unknown
): number {
// Check for rate limit with Retry-After header
if (error && isRateLimitError(error)) {
const retryAfter = extractRetryAfter(error);
if (retryAfter !== null) {
// Respect the Retry-After header, but cap it at maxDelay
const cappedRetryAfter = Math.min(retryAfter, options.maxDelay);
logger.info('[Retry] Rate limit detected - respecting Retry-After header', {
retryAfterMs: retryAfter,
cappedMs: cappedRetryAfter,
attempt
});
return cappedRetryAfter;
}
// No Retry-After header but is rate limit - use aggressive backoff
const rateLimitDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt + 1);
const cappedDelay = Math.min(rateLimitDelay, options.maxDelay);
logger.info('[Retry] Rate limit detected - using aggressive backoff', {
delayMs: cappedDelay,
attempt
});
return cappedDelay;
}
// Standard exponential backoff
const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
const cappedDelay = Math.min(exponentialDelay, options.maxDelay);
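
To make the three delay paths concrete, here is the arithmetic with assumed numbers; the real baseDelay, backoffMultiplier, and maxDelay defaults are defined elsewhere in this file and are not part of this hunk:

// Assumed configuration, for illustration only.
const baseDelay = 1_000;         // ms
const backoffMultiplier = 2;
const maxDelay = 30_000;         // ms
const attempt = 2;               // third attempt, 0-indexed

// Standard exponential backoff: 1000 * 2^2 = 4000 ms.
const standard = Math.min(baseDelay * Math.pow(backoffMultiplier, attempt), maxDelay);

// Rate limit without Retry-After: one extra backoff step, 1000 * 2^3 = 8000 ms.
const aggressive = Math.min(baseDelay * Math.pow(backoffMultiplier, attempt + 1), maxDelay);

// Rate limit with Retry-After: the header value wins, capped at maxDelay.
const retryAfterMs = 45_000;     // server asked for 45 s
const respected = Math.min(retryAfterMs, maxDelay); // 30000 ms
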
@@ -246,18 +371,23 @@ export async function withRetry<T>(
throw error;
}
// Calculate delay for next attempt (respects Retry-After for rate limits)
const delay = calculateBackoffDelay(attempt, config, error);
// Log retry attempt with rate limit detection
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
// Log retry attempt
logger.warn('Retrying after error', {
attempt: attempt + 1,
maxAttempts: config.maxAttempts,
delay,
isRateLimit,
retryAfterMs: retryAfter,
error: error instanceof Error ? error.message : String(error)
});
// Invoke callback with additional context
config.onRetry(attempt + 1, error, delay);
// Wait before retrying
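
Putting the pieces together, a hedged usage sketch of the retry wrapper: the full withRetry signature is cut off in the hunk above, so the (operation, options) call shape and the submitEntity / notifyUser helpers below are assumptions for illustration, not code from this commit.

import { withRetry } from './utils/retry'; // assumed path

// Hypothetical stand-ins for the app's real submission and UI-event code.
declare function submitEntity(payload: unknown): Promise<{ id: string }>;
declare function notifyUser(message: string): void;

async function saveWithFeedback(payload: unknown) {
  return withRetry(
    () => submitEntity(payload),
    {
      maxAttempts: 5,
      onRetry: (attempt, error, delay) => {
        // Matches the onRetry(attempt, error, delay) call seen in the diff:
        // surface progress to the UI, including rate-limit-driven waits.
        notifyUser(`Attempt ${attempt} failed, retrying in ${Math.round(delay / 1000)}s`);
      },
    }
  );
}
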