Mirror of https://github.com/pacnpal/thrilltrack-explorer.git (synced 2025-12-22 05:51:12 -05:00)
Enhance retry logic to detect 429 rate limits, parse Retry-After headers, and apply rate-limit-aware backoff across all entity submissions. Preserves user feedback via UI events and ensures retries respect server-provided guidance.
401 lines · 13 KiB · TypeScript
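A minimal usage sketch of the behavior described above (illustrative only: the `./retry` import path, `submitRide`, and `payload` are hypothetical stand-ins; `toast` mirrors the UI feedback helper used in the file's own `@example`):

```typescript
import { withRetry, isRateLimitError } from './retry'; // hypothetical path to this module

// Wrap a hypothetical entity submission; on a 429 the delay honors the server's Retry-After header.
const result = await withRetry(() => submitRide(payload), {
  onRetry: (attempt, error, delay) => {
    const reason = isRateLimitError(error) ? 'Rate limited' : 'Temporary error';
    toast.info(`${reason}, retrying in ${Math.round(delay / 1000)}s (attempt ${attempt})`);
  },
});
```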
/**
 * Retry utility with exponential backoff
 * Handles transient failures gracefully with configurable retry logic
 */

import { logger } from './logger';
import { supabase } from './supabaseClient';

export interface RetryOptions {
  /** Maximum number of attempts (default: 3) */
  maxAttempts?: number;
  /** Base delay in milliseconds (default: 1000) */
  baseDelay?: number;
  /** Maximum delay in milliseconds (default: 10000) */
  maxDelay?: number;
  /** Multiplier for exponential backoff (default: 2) */
  backoffMultiplier?: number;
  /** Add jitter to prevent thundering herd (default: true) */
  jitter?: boolean;
  /** Callback invoked before each retry attempt */
  onRetry?: (attempt: number, error: unknown, delay: number) => void;
  /** Custom function to determine if error is retryable (default: isRetryableError) */
  shouldRetry?: (error: unknown) => boolean;
}

/**
 * Extract Retry-After value from error headers
 * @param error - The error object
 * @returns Delay in milliseconds, or null if not found
 */
export function extractRetryAfter(error: unknown): number | null {
  if (!error || typeof error !== 'object') return null;

  // Check for Retry-After in error object
  const errorWithHeaders = error as { headers?: Headers | Record<string, string>; retryAfter?: number | string };

  // Direct retryAfter property
  if (errorWithHeaders.retryAfter) {
    const retryAfter = errorWithHeaders.retryAfter;
    if (typeof retryAfter === 'number') {
      return retryAfter * 1000; // Convert seconds to milliseconds
    }
    if (typeof retryAfter === 'string') {
      // Try parsing as number first (delay-seconds)
      const seconds = parseInt(retryAfter, 10);
      if (!isNaN(seconds)) {
        return seconds * 1000;
      }

      // Try parsing as HTTP-date
      const date = new Date(retryAfter);
      if (!isNaN(date.getTime())) {
        const delay = date.getTime() - Date.now();
        return Math.max(0, delay);
      }
    }
  }

  // Check headers object
  if (errorWithHeaders.headers) {
    let retryAfterValue: string | null = null;

    if (errorWithHeaders.headers instanceof Headers) {
      retryAfterValue = errorWithHeaders.headers.get('retry-after');
    } else if (typeof errorWithHeaders.headers === 'object') {
      // Check both lowercase and capitalized versions
      retryAfterValue = errorWithHeaders.headers['retry-after']
        || errorWithHeaders.headers['Retry-After']
        || null;
    }

    if (retryAfterValue) {
      // Try parsing as number first (delay-seconds)
      const seconds = parseInt(retryAfterValue, 10);
      if (!isNaN(seconds)) {
        return seconds * 1000;
      }

      // Try parsing as HTTP-date
      const date = new Date(retryAfterValue);
      if (!isNaN(date.getTime())) {
        const delay = date.getTime() - Date.now();
        return Math.max(0, delay);
      }
    }
  }

  return null;
}

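// Illustrative results, assuming the error shapes handled above (a sketch, not exhaustive):
//   extractRetryAfter({ retryAfter: 30 })                    -> 30000 (seconds converted to ms)
//   extractRetryAfter({ headers: { 'Retry-After': '10' } })  -> 10000
//   extractRetryAfter({ headers: { 'retry-after': 'Wed, 21 Oct 2026 07:28:00 GMT' } })
//                                                            -> ms remaining until that date (0 if past)
//   extractRetryAfter(new Error('boom'))                     -> null (no header information)
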
/**
 * Check if error is a rate limit (429) error
 * @param error - The error to check
 * @returns true if error is a rate limit error
 */
export function isRateLimitError(error: unknown): boolean {
  if (!error || typeof error !== 'object') return false;

  const errorWithStatus = error as { status?: number; code?: string };

  // HTTP 429 status
  if (errorWithStatus.status === 429) return true;

  // Check error message for rate limit indicators
  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    if (message.includes('rate limit') ||
        message.includes('too many requests') ||
        message.includes('quota exceeded')) {
      return true;
    }
  }

  return false;
}

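// Illustrative checks (assumed error shapes):
//   isRateLimitError({ status: 429 })                 -> true   (HTTP status)
//   isRateLimitError(new Error('Too Many Requests'))  -> true   (message match)
//   isRateLimitError({ status: 500 })                 -> false
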
/**
 * Determines if an error is transient and retryable
 * @param error - The error to check
 * @returns true if error is retryable, false otherwise
 */
export function isRetryableError(error: unknown): boolean {
  // Network/timeout errors from fetch
  if (error instanceof TypeError && error.message.includes('fetch')) {
    return true;
  }

  // Network/timeout errors
  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    if (message.includes('network') ||
        message.includes('timeout') ||
        message.includes('connection') ||
        message.includes('econnrefused') ||
        message.includes('enotfound')) {
      return true;
    }
  }

  // Supabase/PostgreSQL errors
  if (error && typeof error === 'object') {
    const supabaseError = error as { code?: string; status?: number };

    // Connection/timeout errors
    if (supabaseError.code === 'PGRST301') return true; // Connection timeout
    if (supabaseError.code === 'PGRST204') return true; // Temporary failure
    if (supabaseError.code === 'PGRST000') return true; // Connection error

    // HTTP status codes indicating transient failures
    if (supabaseError.status === 429) return true; // Rate limit - ALWAYS retry
    if (supabaseError.status === 503) return true; // Service unavailable
    if (supabaseError.status === 504) return true; // Gateway timeout
    if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
      return true; // Server errors (5xx)
    }

    // Database-level transient errors
    if (supabaseError.code === '40001') return true; // Serialization failure
    if (supabaseError.code === '40P01') return true; // Deadlock detected
    if (supabaseError.code === '57014') return true; // Query cancelled
    if (supabaseError.code === '08000') return true; // Connection exception
    if (supabaseError.code === '08003') return true; // Connection does not exist
    if (supabaseError.code === '08006') return true; // Connection failure
    if (supabaseError.code === '08001') return true; // Unable to connect
    if (supabaseError.code === '08004') return true; // Server rejected connection
  }

  return false;
}

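// Illustrative classification (assumed error shapes):
//   isRetryableError({ status: 503 })                   -> true   (service unavailable)
//   isRetryableError({ code: '40P01' })                 -> true   (deadlock detected)
//   isRetryableError(new TypeError('Failed to fetch'))  -> true   (network error from fetch)
//   isRetryableError({ status: 400 })                   -> false  (client error, not retried)
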
/**
 * Calculates delay for next retry attempt using exponential backoff or Retry-After header
 * @param attempt - Current attempt number (0-indexed)
 * @param options - Retry configuration
 * @param error - The error that triggered the retry (to check for Retry-After)
 * @returns Delay in milliseconds
 */
function calculateBackoffDelay(
  attempt: number,
  options: Required<RetryOptions>,
  error?: unknown
): number {
  // Check for rate limit with Retry-After header
  if (error && isRateLimitError(error)) {
    const retryAfter = extractRetryAfter(error);
    if (retryAfter !== null) {
      // Respect the Retry-After header, but cap it at maxDelay
      const cappedRetryAfter = Math.min(retryAfter, options.maxDelay);

      logger.info('[Retry] Rate limit detected - respecting Retry-After header', {
        retryAfterMs: retryAfter,
        cappedMs: cappedRetryAfter,
        attempt
      });

      return cappedRetryAfter;
    }

    // No Retry-After header but is rate limit - use aggressive backoff
    const rateLimitDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt + 1);
    const cappedDelay = Math.min(rateLimitDelay, options.maxDelay);

    logger.info('[Retry] Rate limit detected - using aggressive backoff', {
      delayMs: cappedDelay,
      attempt
    });

    return cappedDelay;
  }

  // Standard exponential backoff
  const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
  const cappedDelay = Math.min(exponentialDelay, options.maxDelay);

  // Add jitter (randomness) to prevent thundering herd
  if (options.jitter) {
    const jitterAmount = cappedDelay * 0.3; // ±30% jitter
    const jitterOffset = (Math.random() * 2 - 1) * jitterAmount;
    return Math.max(0, cappedDelay + jitterOffset);
  }

  return cappedDelay;
}

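// Worked example with the defaults (baseDelay 1000ms, multiplier 2, maxDelay 10000ms), jitter ignored:
//   ordinary errors:      attempt 0 -> 1000ms, attempt 1 -> 2000ms, attempt 2 -> 4000ms
//   429 without header:   attempt 0 -> 2000ms, attempt 1 -> 4000ms  (one extra doubling)
//   429 with Retry-After: 30 seconds -> min(30000, 10000) = 10000ms (header respected, capped at maxDelay)
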
/**
 * Load retry configuration from admin settings
 */
export async function loadRetryConfig(): Promise<Required<RetryOptions>> {
  try {
    const { data: settings } = await supabase
      .from('admin_settings')
      .select('setting_key, setting_value')
      .in('setting_key', [
        'retry.max_attempts',
        'retry.base_delay',
        'retry.max_delay',
        'retry.backoff_multiplier'
      ]);

    if (!settings || settings.length === 0) {
      return getDefaultRetryConfig();
    }

    const config: any = {};
    settings.forEach(s => {
      const key = s.setting_key.replace('retry.', '');
      const camelKey = key.replace(/_([a-z])/g, (g) => g[1].toUpperCase());

      if (key === 'backoff_multiplier') {
        config[camelKey] = parseFloat(String(s.setting_value));
      } else {
        config[camelKey] = parseInt(String(s.setting_value), 10);
      }
    });

    return {
      maxAttempts: config.maxAttempts ?? 3,
      baseDelay: config.baseDelay ?? 1000,
      maxDelay: config.maxDelay ?? 10000,
      backoffMultiplier: config.backoffMultiplier ?? 2,
      jitter: true,
      onRetry: () => {},
      shouldRetry: isRetryableError
    };
  } catch (error) {
    logger.error('Failed to load retry config', { error });
    return getDefaultRetryConfig();
  }
}

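// Illustrative mapping, assuming admin_settings rows store the values as text:
//   { setting_key: 'retry.max_attempts', setting_value: '5' }          -> maxAttempts: 5
//   { setting_key: 'retry.backoff_multiplier', setting_value: '1.5' }  -> backoffMultiplier: 1.5
// Keys that are missing fall back to the same defaults used by getDefaultRetryConfig().
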
function getDefaultRetryConfig(): Required<RetryOptions> {
  return {
    maxAttempts: 3,
    baseDelay: 1000,
    maxDelay: 10000,
    backoffMultiplier: 2,
    jitter: true,
    onRetry: () => {},
    shouldRetry: isRetryableError
  };
}

// Cache admin config for 5 minutes
let cachedRetryConfig: Required<RetryOptions> | null = null;
let configCacheTime: number = 0;
const CONFIG_CACHE_TTL = 5 * 60 * 1000; // 5 minutes

async function getCachedRetryConfig(): Promise<Required<RetryOptions>> {
  const now = Date.now();

  if (cachedRetryConfig && (now - configCacheTime < CONFIG_CACHE_TTL)) {
    return cachedRetryConfig;
  }

  cachedRetryConfig = await loadRetryConfig();
  configCacheTime = now;
  return cachedRetryConfig;
}

/**
 * Executes a function with retry logic and exponential backoff
 *
 * @param fn - Async function to execute
 * @param options - Retry configuration options
 * @returns Result of the function execution
 * @throws Last error if all retry attempts fail
 *
 * @example
 * ```typescript
 * const result = await withRetry(
 *   async () => await supabase.rpc('my_function', { data }),
 *   {
 *     maxAttempts: 3,
 *     onRetry: (attempt, error, delay) => {
 *       toast.info(`Retrying... (${attempt}/3)`);
 *     }
 *   }
 * );
 * ```
 */
export async function withRetry<T>(
  fn: () => Promise<T>,
  options?: RetryOptions
): Promise<T> {
  // Load config from admin settings
  const adminConfig = await getCachedRetryConfig();

  // Merge: options override admin settings
  const config: Required<RetryOptions> = {
    maxAttempts: options?.maxAttempts ?? adminConfig.maxAttempts,
    baseDelay: options?.baseDelay ?? adminConfig.baseDelay,
    maxDelay: options?.maxDelay ?? adminConfig.maxDelay,
    backoffMultiplier: options?.backoffMultiplier ?? adminConfig.backoffMultiplier,
    jitter: options?.jitter ?? adminConfig.jitter,
    onRetry: options?.onRetry ?? adminConfig.onRetry,
    shouldRetry: options?.shouldRetry ?? adminConfig.shouldRetry,
  };

  let lastError: unknown;

  for (let attempt = 0; attempt < config.maxAttempts; attempt++) {
    try {
      // Execute the function directly
      const result = await fn();

      // Log successful retry if not first attempt
      if (attempt > 0) {
        logger.info('Retry succeeded', {
          attempt: attempt + 1,
          totalAttempts: config.maxAttempts
        });
      }

      return result;
    } catch (error) {
      lastError = error;

      // Check if we should retry
      const isLastAttempt = attempt === config.maxAttempts - 1;
      const shouldRetry = config.shouldRetry(error);

      if (isLastAttempt || !shouldRetry) {
        // Log final failure
        logger.error('Retry exhausted or non-retryable error', {
          attempt: attempt + 1,
          maxAttempts: config.maxAttempts,
          isRetryable: shouldRetry,
          error: error instanceof Error ? error.message : String(error)
        });

        throw error;
      }

      // Calculate delay for next attempt (respects Retry-After for rate limits)
      const delay = calculateBackoffDelay(attempt, config, error);

      // Log retry attempt with rate limit detection
      const isRateLimit = isRateLimitError(error);
      const retryAfter = isRateLimit ? extractRetryAfter(error) : null;

      logger.warn('Retrying after error', {
        attempt: attempt + 1,
        maxAttempts: config.maxAttempts,
        delay,
        isRateLimit,
        retryAfterMs: retryAfter,
        error: error instanceof Error ? error.message : String(error)
      });

      // Invoke callback with additional context
      config.onRetry(attempt + 1, error, delay);

      // Wait before retrying
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }

  // This should never be reached, but TypeScript requires it
  throw lastError;
}
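
// Illustrative override (a sketch; `submitEntity` is a hypothetical submission call):
// retry only on rate limits and let every other error fail fast:
//   await withRetry(() => submitEntity(data), { shouldRetry: isRateLimitError });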