feat: Implement retry logic for composite submissions

Author: gpt-engineer-app[bot]
Date: 2025-11-05 13:16:30 +00:00
parent 876119c079
commit 5e0640252c
3 changed files with 288 additions and 32 deletions

src/lib/retryHelpers.ts (new file, 190 lines)

@@ -0,0 +1,190 @@
/**
* Retry utility with exponential backoff
* Handles transient failures gracefully with configurable retry logic
*/
import { logger } from './logger';
export interface RetryOptions {
/** Maximum number of attempts (default: 3) */
maxAttempts?: number;
/** Base delay in milliseconds (default: 1000) */
baseDelay?: number;
/** Maximum delay in milliseconds (default: 10000) */
maxDelay?: number;
/** Multiplier for exponential backoff (default: 2) */
backoffMultiplier?: number;
/** Add jitter to prevent thundering herd (default: true) */
jitter?: boolean;
/** Callback invoked before each retry attempt */
onRetry?: (attempt: number, error: unknown, delay: number) => void;
/** Custom function to determine if error is retryable (default: isRetryableError) */
shouldRetry?: (error: unknown) => boolean;
}
/**
* Determines if an error is transient and retryable
* @param error - The error to check
* @returns true if error is retryable, false otherwise
*/
export function isRetryableError(error: unknown): boolean {
// Network/timeout errors from fetch
if (error instanceof TypeError && error.message.includes('fetch')) {
return true;
}
// Network/timeout errors
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('network') ||
message.includes('timeout') ||
message.includes('connection') ||
message.includes('econnrefused') ||
message.includes('enotfound')) {
return true;
}
}
// Supabase/PostgreSQL errors
if (error && typeof error === 'object') {
const supabaseError = error as { code?: string; status?: number };
// Connection/timeout errors
if (supabaseError.code === 'PGRST301') return true; // Connection timeout
if (supabaseError.code === 'PGRST204') return true; // Temporary failure
if (supabaseError.code === 'PGRST000') return true; // Connection error
// HTTP status codes indicating transient failures
if (supabaseError.status === 429) return true; // Rate limit
if (supabaseError.status === 503) return true; // Service unavailable
if (supabaseError.status === 504) return true; // Gateway timeout
if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
return true; // Server errors (5xx)
}
// Database-level transient errors
if (supabaseError.code === '40001') return true; // Serialization failure
if (supabaseError.code === '40P01') return true; // Deadlock detected
if (supabaseError.code === '57014') return true; // Query cancelled
if (supabaseError.code === '08000') return true; // Connection exception
if (supabaseError.code === '08003') return true; // Connection does not exist
if (supabaseError.code === '08006') return true; // Connection failure
if (supabaseError.code === '08001') return true; // Unable to connect
if (supabaseError.code === '08004') return true; // Server rejected connection
}
return false;
}
/**
* Calculates delay for next retry attempt using exponential backoff
* @param attempt - Current attempt number (0-indexed)
* @param options - Retry configuration
* @returns Delay in milliseconds
*/
function calculateBackoffDelay(attempt: number, options: Required<RetryOptions>): number {
const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
const cappedDelay = Math.min(exponentialDelay, options.maxDelay);
// Add jitter (randomness) to prevent thundering herd
if (options.jitter) {
const jitterAmount = cappedDelay * 0.3; // ±30% jitter
const jitterOffset = (Math.random() * 2 - 1) * jitterAmount;
return Math.max(0, cappedDelay + jitterOffset);
}
return cappedDelay;
}
/**
* Executes a function with retry logic and exponential backoff
*
* @param fn - Async function to execute
* @param options - Retry configuration options
* @returns Result of the function execution
* @throws Last error if all retry attempts fail
*
* @example
* ```typescript
* const result = await withRetry(
* async () => await supabase.rpc('my_function', { data }),
* {
* maxAttempts: 3,
* onRetry: (attempt, error, delay) => {
* toast.info(`Retrying... (${attempt}/3)`);
* }
* }
* );
* ```
*/
export async function withRetry<T>(
fn: () => Promise<T>,
options?: RetryOptions
): Promise<T> {
const config: Required<RetryOptions> = {
maxAttempts: options?.maxAttempts ?? 3,
baseDelay: options?.baseDelay ?? 1000,
maxDelay: options?.maxDelay ?? 10000,
backoffMultiplier: options?.backoffMultiplier ?? 2,
jitter: options?.jitter ?? true,
onRetry: options?.onRetry ?? (() => {}),
shouldRetry: options?.shouldRetry ?? isRetryableError,
};
let lastError: unknown;
for (let attempt = 0; attempt < config.maxAttempts; attempt++) {
try {
// Execute the function
const result = await fn();
// Log successful retry if not first attempt
if (attempt > 0) {
logger.info('Retry succeeded', {
attempt: attempt + 1,
totalAttempts: config.maxAttempts
});
}
return result;
} catch (error) {
lastError = error;
// Check if we should retry
const isLastAttempt = attempt === config.maxAttempts - 1;
const shouldRetry = config.shouldRetry(error);
if (isLastAttempt || !shouldRetry) {
// Log final failure
logger.error('Retry exhausted or non-retryable error', {
attempt: attempt + 1,
maxAttempts: config.maxAttempts,
isRetryable: shouldRetry,
error: error instanceof Error ? error.message : String(error)
});
throw error;
}
// Calculate delay for next attempt
const delay = calculateBackoffDelay(attempt, config);
// Log retry attempt
logger.warn('Retrying after error', {
attempt: attempt + 1,
maxAttempts: config.maxAttempts,
delay,
error: error instanceof Error ? error.message : String(error)
});
// Invoke callback
config.onRetry(attempt + 1, error, delay);
// Wait before retrying
await new Promise(resolve => setTimeout(resolve, delay));
}
}
// This should never be reached, but TypeScript requires it
throw lastError;
}
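
Below is a minimal usage sketch (not part of this commit) showing how `withRetry` could wrap a composite submission. The RPC name `submit_composite`, the payload shape, and the `sonner`/Supabase client import paths are illustrative assumptions, not names taken from the diff.

```typescript
// Hypothetical caller: wraps a Supabase RPC call in withRetry.
// 'submit_composite', the payload shape, and the import paths are assumptions
// for illustration only; substitute the project's real submission call.
import { toast } from 'sonner';
import { supabase } from '@/integrations/supabase/client';
import { withRetry } from '@/lib/retryHelpers';

export async function submitComposite(payload: Record<string, unknown>) {
  return withRetry(
    async () => {
      const { data, error } = await supabase.rpc('submit_composite', { payload });
      // Throw so withRetry can classify the error via shouldRetry (isRetryableError by default)
      if (error) throw error;
      return data;
    },
    {
      maxAttempts: 3,
      onRetry: (attempt, _error, delay) => {
        toast.info(`Submission failed, retrying (${attempt}/3) in ${Math.round(delay)}ms...`);
      },
    }
  );
}
```

Because the Supabase client resolves with an `{ data, error }` pair rather than rejecting, the wrapper must rethrow `error` for the retry and backoff logic to engage.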