Mirror of https://github.com/pacnpal/thrilltrack-explorer.git, synced 2025-12-21 20:11:14 -05:00
Refactor code structure and remove redundant changes
src-old/lib/retryHelpers.ts (new file, 270 lines)
@@ -0,0 +1,270 @@
/**
 * Retry utility with exponential backoff.
 * Handles transient failures gracefully with configurable retry logic.
 */

import { logger } from './logger';
import { supabase } from './supabaseClient';

export interface RetryOptions {
  /** Maximum number of attempts (default: 3) */
  maxAttempts?: number;
  /** Base delay in milliseconds (default: 1000) */
  baseDelay?: number;
  /** Maximum delay in milliseconds (default: 10000) */
  maxDelay?: number;
  /** Multiplier for exponential backoff (default: 2) */
  backoffMultiplier?: number;
  /** Add jitter to prevent thundering herd (default: true) */
  jitter?: boolean;
  /** Callback invoked before each retry attempt */
  onRetry?: (attempt: number, error: unknown, delay: number) => void;
  /** Custom predicate deciding whether an error is retryable (default: isRetryableError) */
  shouldRetry?: (error: unknown) => boolean;
}
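
// Illustrative only (not part of the original file): a typical RetryOptions
// value wiring retries into UI feedback. `showToast` is a hypothetical helper
// standing in for whatever notification mechanism the app uses.
//
//   const uiRetryOptions: RetryOptions = {
//     maxAttempts: 5,
//     baseDelay: 500,
//     onRetry: (attempt, _error, delay) =>
//       showToast(`Request failed, retry ${attempt} in ${Math.round(delay)}ms`),
//   };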

/**
 * Determines whether an error is transient and retryable.
 * @param error - The error to check
 * @returns true if the error is retryable, false otherwise
 */
export function isRetryableError(error: unknown): boolean {
  // Network/timeout errors from fetch
  if (error instanceof TypeError && error.message.includes('fetch')) {
    return true;
  }

  // Network/timeout errors surfaced as generic Error messages
  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    if (message.includes('network') ||
        message.includes('timeout') ||
        message.includes('connection') ||
        message.includes('econnrefused') ||
        message.includes('enotfound')) {
      return true;
    }
  }

  // Supabase/PostgreSQL errors
  if (error && typeof error === 'object') {
    const supabaseError = error as { code?: string; status?: number };

    // Connection/timeout errors
    if (supabaseError.code === 'PGRST301') return true; // Connection timeout
    if (supabaseError.code === 'PGRST204') return true; // Temporary failure
    if (supabaseError.code === 'PGRST000') return true; // Connection error

    // HTTP status codes indicating transient failures
    if (supabaseError.status === 429) return true; // Rate limit
    if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
      return true; // Server errors (5xx), including 503 and 504
    }

    // Database-level transient errors
    if (supabaseError.code === '40001') return true; // Serialization failure
    if (supabaseError.code === '40P01') return true; // Deadlock detected
    if (supabaseError.code === '57014') return true; // Query cancelled
    if (supabaseError.code === '08000') return true; // Connection exception
    if (supabaseError.code === '08003') return true; // Connection does not exist
    if (supabaseError.code === '08006') return true; // Connection failure
    if (supabaseError.code === '08001') return true; // Unable to connect
    if (supabaseError.code === '08004') return true; // Server rejected connection
  }

  return false;
}
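
// Illustrative only (not part of the original file): how the predicate
// classifies a few representative errors.
//
//   isRetryableError(new TypeError('Failed to fetch'));   // true  (fetch/network)
//   isRetryableError({ status: 503 });                    // true  (5xx)
//   isRetryableError({ code: '40P01' });                  // true  (deadlock)
//   isRetryableError(new Error('permission denied'));     // false (not transient)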

/**
 * Calculates the delay before the next retry attempt using exponential backoff.
 * @param attempt - Current attempt number (0-indexed)
 * @param options - Retry configuration
 * @returns Delay in milliseconds
 */
function calculateBackoffDelay(attempt: number, options: Required<RetryOptions>): number {
  const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
  const cappedDelay = Math.min(exponentialDelay, options.maxDelay);

  // Add jitter (randomness) to prevent a thundering herd of simultaneous retries
  if (options.jitter) {
    const jitterAmount = cappedDelay * 0.3; // ±30% jitter
    const jitterOffset = (Math.random() * 2 - 1) * jitterAmount;
    return Math.max(0, cappedDelay + jitterOffset);
  }

  return cappedDelay;
}
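
// Worked example (illustrative, not part of the original file): with the
// defaults (baseDelay = 1000, backoffMultiplier = 2, maxDelay = 10000),
// successive attempts wait approximately:
//
//   attempt 0 -> 1000ms, attempt 1 -> 2000ms, attempt 2 -> 4000ms,
//   attempt 3 -> 8000ms, attempt 4 -> 10000ms (capped)
//
// With jitter enabled, each value is perturbed uniformly by up to ±30%.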

/**
 * Loads retry configuration from admin settings, falling back to defaults
 * when the settings are missing or the query fails.
 */
export async function loadRetryConfig(): Promise<Required<RetryOptions>> {
  try {
    const { data: settings } = await supabase
      .from('admin_settings')
      .select('setting_key, setting_value')
      .in('setting_key', [
        'retry.max_attempts',
        'retry.base_delay',
        'retry.max_delay',
        'retry.backoff_multiplier'
      ]);

    if (!settings || settings.length === 0) {
      return getDefaultRetryConfig();
    }

    const config: Record<string, number> = {};
    settings.forEach(s => {
      // Strip the 'retry.' prefix and convert snake_case to camelCase
      const key = s.setting_key.replace('retry.', '');
      const camelKey = key.replace(/_([a-z])/g, (g) => g[1].toUpperCase());

      if (key === 'backoff_multiplier') {
        config[camelKey] = parseFloat(String(s.setting_value));
      } else {
        config[camelKey] = parseInt(String(s.setting_value), 10);
      }
    });

    return {
      maxAttempts: config.maxAttempts ?? 3,
      baseDelay: config.baseDelay ?? 1000,
      maxDelay: config.maxDelay ?? 10000,
      backoffMultiplier: config.backoffMultiplier ?? 2,
      jitter: true,
      onRetry: () => {},
      shouldRetry: isRetryableError
    };
  } catch (error) {
    logger.error('Failed to load retry config', { error });
    return getDefaultRetryConfig();
  }
}
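
// For illustration (not part of the original file): the transform above maps
// admin setting keys onto RetryOptions fields, e.g.
//
//   'retry.max_attempts'       -> maxAttempts       (parsed as an integer)
//   'retry.backoff_multiplier' -> backoffMultiplier (parsed as a float)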

function getDefaultRetryConfig(): Required<RetryOptions> {
  return {
    maxAttempts: 3,
    baseDelay: 1000,
    maxDelay: 10000,
    backoffMultiplier: 2,
    jitter: true,
    onRetry: () => {},
    shouldRetry: isRetryableError
  };
}

// Cache the admin config for 5 minutes to avoid hitting the database on every call
let cachedRetryConfig: Required<RetryOptions> | null = null;
let configCacheTime: number = 0;
const CONFIG_CACHE_TTL = 5 * 60 * 1000; // 5 minutes

async function getCachedRetryConfig(): Promise<Required<RetryOptions>> {
  const now = Date.now();

  if (cachedRetryConfig && (now - configCacheTime < CONFIG_CACHE_TTL)) {
    return cachedRetryConfig;
  }

  cachedRetryConfig = await loadRetryConfig();
  configCacheTime = now;
  return cachedRetryConfig;
}
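
// Sketch (not part of the original file): if admin settings can change at
// runtime, a hypothetical invalidation helper would force the next call to
// reload from admin_settings instead of waiting out the TTL.
//
//   export function invalidateRetryConfigCache(): void {
//     cachedRetryConfig = null;
//     configCacheTime = 0;
//   }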

/**
 * Executes a function with retry logic and exponential backoff.
 *
 * @param fn - Async function to execute
 * @param options - Retry configuration options
 * @returns Result of the function execution
 * @throws Last error if all retry attempts fail
 *
 * @example
 * ```typescript
 * // Note: supabase-js resolves with { data, error } rather than throwing,
 * // so throw the error explicitly to give the retry logic something to catch.
 * const result = await withRetry(
 *   async () => {
 *     const { data, error } = await supabase.rpc('my_function', { data: payload });
 *     if (error) throw error;
 *     return data;
 *   },
 *   {
 *     maxAttempts: 3,
 *     onRetry: (attempt, error, delay) => {
 *       toast.info(`Retrying... (${attempt}/3)`);
 *     }
 *   }
 * );
 * ```
 */
export async function withRetry<T>(
  fn: () => Promise<T>,
  options?: RetryOptions
): Promise<T> {
  // Load config from admin settings
  const adminConfig = await getCachedRetryConfig();

  // Merge: explicit options override admin settings
  const config: Required<RetryOptions> = {
    maxAttempts: options?.maxAttempts ?? adminConfig.maxAttempts,
    baseDelay: options?.baseDelay ?? adminConfig.baseDelay,
    maxDelay: options?.maxDelay ?? adminConfig.maxDelay,
    backoffMultiplier: options?.backoffMultiplier ?? adminConfig.backoffMultiplier,
    jitter: options?.jitter ?? adminConfig.jitter,
    onRetry: options?.onRetry ?? adminConfig.onRetry,
    shouldRetry: options?.shouldRetry ?? adminConfig.shouldRetry,
  };

  let lastError: unknown;

  for (let attempt = 0; attempt < config.maxAttempts; attempt++) {
    try {
      const result = await fn();

      // Log a successful retry if this was not the first attempt
      if (attempt > 0) {
        logger.info('Retry succeeded', {
          attempt: attempt + 1,
          totalAttempts: config.maxAttempts
        });
      }

      return result;
    } catch (error) {
      lastError = error;

      // Stop if this was the last attempt or the error is not retryable
      const isLastAttempt = attempt === config.maxAttempts - 1;
      const shouldRetry = config.shouldRetry(error);

      if (isLastAttempt || !shouldRetry) {
        logger.error('Retry exhausted or non-retryable error', {
          attempt: attempt + 1,
          maxAttempts: config.maxAttempts,
          isRetryable: shouldRetry,
          error: error instanceof Error ? error.message : String(error)
        });

        throw error;
      }

      // Calculate delay for the next attempt
      const delay = calculateBackoffDelay(attempt, config);

      logger.warn('Retrying after error', {
        attempt: attempt + 1,
        maxAttempts: config.maxAttempts,
        delay,
        error: error instanceof Error ? error.message : String(error)
      });

      // Invoke the caller's retry callback before waiting
      config.onRetry(attempt + 1, error, delay);

      // Wait before retrying
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }

  // Unreachable: the loop either returns or throws, but TypeScript requires it
  throw lastError;
}
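
// Usage sketch (illustrative, not part of the original file): wrapping a
// Supabase query and overriding shouldRetry so that only rate limits are
// retried. The table name 'parks' is a placeholder.
//
//   const rows = await withRetry(
//     async () => {
//       const { data, error } = await supabase.from('parks').select('*');
//       if (error) throw error;
//       return data;
//     },
//     { shouldRetry: (e) => (e as { status?: number }).status === 429 }
//   );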