Fix migration for admin settings

gpt-engineer-app[bot]
2025-11-05 13:40:25 +00:00
parent ec5181b9e6
commit 80826a83a8
5 changed files with 619 additions and 112 deletions
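The loaders added in this commit read numeric settings from an admin_settings table with setting_key / setting_value columns. For context, a rough sketch of the row shape they appear to assume (the table and column names come from the diff below; the value type is an assumption, since setting_value is coerced with String() before parsing):

// Hypothetical row shape assumed by the config loaders below (not part of this commit).
interface AdminSettingRow {
  setting_key: string;             // e.g. 'circuit_breaker.failure_threshold'
  setting_value: string | number;  // numeric values stored as text or JSON numbers
}

// Example rows the loaders would map to camelCase config fields:
const exampleRows: AdminSettingRow[] = [
  { setting_key: 'circuit_breaker.failure_threshold', setting_value: '5' },
  { setting_key: 'retry.backoff_multiplier', setting_value: '2' },
];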

View File

@@ -8,6 +8,7 @@
*/
import { logger } from './logger';
import { supabase } from './supabaseClient';
export interface CircuitBreakerConfig {
/** Number of failures before opening circuit (default: 5) */
@@ -29,7 +30,7 @@ export class CircuitBreaker {
private failures: number[] = []; // Timestamps of recent failures
private lastFailureTime: number | null = null;
private successCount: number = 0;
private readonly config: Required<CircuitBreakerConfig>;
private config: Required<CircuitBreakerConfig>;
constructor(config: Partial<CircuitBreakerConfig> = {}) {
this.config = {
@@ -39,6 +40,18 @@ export class CircuitBreaker {
};
}
/**
* Update configuration from admin settings
*/
async updateConfig(newConfig: Partial<CircuitBreakerConfig>): Promise<void> {
this.config = {
...this.config,
...newConfig
};
logger.info('Circuit breaker config updated', { config: this.config });
}
/**
* Execute a function through the circuit breaker
* @throws Error if circuit is OPEN (service unavailable)
@@ -140,12 +153,62 @@ export class CircuitBreaker {
}
}
/**
* Load circuit breaker configuration from admin settings
* Falls back to defaults if settings unavailable
*/
export async function loadCircuitBreakerConfig(): Promise<CircuitBreakerConfig> {
try {
const { data: settings } = await supabase
.from('admin_settings')
.select('setting_key, setting_value')
.in('setting_key', [
'circuit_breaker.failure_threshold',
'circuit_breaker.reset_timeout',
'circuit_breaker.monitoring_window'
]);
if (!settings || settings.length === 0) {
return {
failureThreshold: 5,
resetTimeout: 60000,
monitoringWindow: 120000
};
}
const config: any = {};
settings.forEach(s => {
const key = s.setting_key.replace('circuit_breaker.', '');
const camelKey = key.replace(/_([a-z])/g, (g) => g[1].toUpperCase());
config[camelKey] = parseInt(String(s.setting_value));
});
return {
failureThreshold: config.failureThreshold ?? 5,
resetTimeout: config.resetTimeout ?? 60000,
monitoringWindow: config.monitoringWindow ?? 120000
};
} catch (error) {
logger.error('Failed to load circuit breaker config from admin settings', { error });
return {
failureThreshold: 5,
resetTimeout: 60000,
monitoringWindow: 120000
};
}
}
/**
* Singleton circuit breaker for Supabase operations
* Shared across all submission flows to detect service-wide outages
*/
export const supabaseCircuitBreaker = new CircuitBreaker({
failureThreshold: 5,
resetTimeout: 60000, // 1 minute
monitoringWindow: 120000 // 2 minutes
resetTimeout: 60000,
monitoringWindow: 120000
});
// Load config from admin settings on startup
loadCircuitBreakerConfig().then(config => {
supabaseCircuitBreaker.updateConfig(config);
});
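A minimal usage sketch of the shared breaker with the admin-loaded config, assuming the class exposes the execute() method described by the doc comment above (the profiles query and error handling are illustrative, not part of this commit):

// Illustrative wrapper: repeated Supabase failures open the circuit, and once it is
// OPEN, execute() throws immediately instead of issuing more requests.
async function fetchProfile(id: string) {
  return supabaseCircuitBreaker.execute(async () => {
    const { data, error } = await supabase
      .from('profiles')            // hypothetical table, for illustration only
      .select('*')
      .eq('id', id)
      .single();
    if (error) throw error;        // surface errors so the breaker counts them as failures
    return data;
  });
}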

View File

@@ -5,6 +5,7 @@
import { logger } from './logger';
import { supabaseCircuitBreaker } from './circuitBreaker';
import { supabase } from './supabaseClient';
export interface RetryOptions {
/** Maximum number of attempts (default: 3) */
@@ -97,6 +98,81 @@ function calculateBackoffDelay(attempt: number, options: Required<RetryOptions>)
return cappedDelay;
}
/**
* Load retry configuration from admin settings
*/
export async function loadRetryConfig(): Promise<Required<RetryOptions>> {
try {
const { data: settings } = await supabase
.from('admin_settings')
.select('setting_key, setting_value')
.in('setting_key', [
'retry.max_attempts',
'retry.base_delay',
'retry.max_delay',
'retry.backoff_multiplier'
]);
if (!settings || settings.length === 0) {
return getDefaultRetryConfig();
}
const config: any = {};
settings.forEach(s => {
const key = s.setting_key.replace('retry.', '');
const camelKey = key.replace(/_([a-z])/g, (g) => g[1].toUpperCase());
if (key === 'backoff_multiplier') {
config[camelKey] = parseFloat(String(s.setting_value));
} else {
config[camelKey] = parseInt(String(s.setting_value));
}
});
return {
maxAttempts: config.maxAttempts ?? 3,
baseDelay: config.baseDelay ?? 1000,
maxDelay: config.maxDelay ?? 10000,
backoffMultiplier: config.backoffMultiplier ?? 2,
jitter: true,
onRetry: () => {},
shouldRetry: isRetryableError
};
} catch (error) {
logger.error('Failed to load retry config', { error });
return getDefaultRetryConfig();
}
}
function getDefaultRetryConfig(): Required<RetryOptions> {
return {
maxAttempts: 3,
baseDelay: 1000,
maxDelay: 10000,
backoffMultiplier: 2,
jitter: true,
onRetry: () => {},
shouldRetry: isRetryableError
};
}
// Cache admin config for 5 minutes
let cachedRetryConfig: Required<RetryOptions> | null = null;
let configCacheTime: number = 0;
const CONFIG_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
async function getCachedRetryConfig(): Promise<Required<RetryOptions>> {
const now = Date.now();
if (cachedRetryConfig && (now - configCacheTime < CONFIG_CACHE_TTL)) {
return cachedRetryConfig;
}
cachedRetryConfig = await loadRetryConfig();
configCacheTime = now;
return cachedRetryConfig;
}
/**
* Executes a function with retry logic and exponential backoff
*
@@ -122,14 +198,18 @@ export async function withRetry<T>(
fn: () => Promise<T>,
options?: RetryOptions
): Promise<T> {
// Load config from admin settings
const adminConfig = await getCachedRetryConfig();
// Merge: options override admin settings
const config: Required<RetryOptions> = {
maxAttempts: options?.maxAttempts ?? 3,
baseDelay: options?.baseDelay ?? 1000,
maxDelay: options?.maxDelay ?? 10000,
backoffMultiplier: options?.backoffMultiplier ?? 2,
jitter: options?.jitter ?? true,
onRetry: options?.onRetry ?? (() => {}),
shouldRetry: options?.shouldRetry ?? isRetryableError,
maxAttempts: options?.maxAttempts ?? adminConfig.maxAttempts,
baseDelay: options?.baseDelay ?? adminConfig.baseDelay,
maxDelay: options?.maxDelay ?? adminConfig.maxDelay,
backoffMultiplier: options?.backoffMultiplier ?? adminConfig.backoffMultiplier,
jitter: options?.jitter ?? adminConfig.jitter,
onRetry: options?.onRetry ?? adminConfig.onRetry,
shouldRetry: options?.shouldRetry ?? adminConfig.shouldRetry,
};
let lastError: unknown;
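For reference, a hedged sketch of how per-call options interact with the admin-backed defaults above: fields passed by the caller win, and everything else falls back to the cached admin config refreshed at most every five minutes by getCachedRetryConfig() (the submissions insert is a hypothetical example, not part of this commit):

// maxAttempts is overridden per call; baseDelay, maxDelay, backoffMultiplier, etc.
// come from the cached admin settings (or the hard-coded defaults if loading fails).
async function submitWithRetry(payload: Record<string, unknown>) {
  return withRetry(async () => {
    return supabaseCircuitBreaker.execute(async () => {
      const { data, error } = await supabase
        .from('submissions')       // hypothetical table name
        .insert(payload)
        .select()
        .single();
      if (error) throw error;      // throw so shouldRetry / the circuit breaker can react
      return data;
    });
  }, { maxAttempts: 5 });
}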