Refactor code structure and remove redundant changes

This commit is contained in:
pacnpal
2025-11-09 16:31:34 -05:00
parent 2884bc23ce
commit eb68cf40c6
1080 changed files with 27361 additions and 56687 deletions

View File

@@ -0,0 +1,125 @@
/**
* AAL2 Error Detection Utilities
*
* Detects when operations fail due to AAL2/MFA requirements
* and provides user-friendly error messages
*/
import { PostgrestError } from '@supabase/supabase-js';
/**
* Check if an error is due to AAL2/RLS policy failure
*/
export function isAAL2PolicyError(error: unknown): boolean {
if (!error) return false;
// Handle Supabase PostgrestError
if (isPostgrestError(error)) {
const code = error.code;
const message = error.message?.toLowerCase() || '';
const details = error.details?.toLowerCase() || '';
// Check for RLS policy violations
if (code === 'PGRST301' || code === '42501') {
return true;
}
// Check for permission denied messages
if (
message.includes('permission denied') ||
message.includes('row-level security') ||
message.includes('policy') ||
details.includes('policy')
) {
return true;
}
}
// Handle generic errors with 403 status
if (hasStatusCode(error) && error.status === 403) {
return true;
}
// Handle error messages
if (error instanceof Error) {
const message = error.message.toLowerCase();
return (
message.includes('row-level security') ||
message.includes('permission denied') ||
message.includes('policy') ||
message.includes('403')
);
}
return false;
}
/**
* Get user-friendly error message for AAL2 errors
*/
export function getAAL2ErrorMessage(error: unknown): string {
// Default message
const defaultMessage = 'This action requires additional security verification';
if (!error) return defaultMessage;
// Check if error mentions specific operations
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('delete') || message.includes('remove')) {
return 'Deleting this data requires additional security verification';
}
if (message.includes('update') || message.includes('modify')) {
return 'Modifying this data requires additional security verification';
}
if (message.includes('insert') || message.includes('create')) {
return 'Creating this data requires additional security verification';
}
}
return defaultMessage;
}
/**
* Type guard for PostgrestError
*/
function isPostgrestError(error: unknown): error is PostgrestError {
return (
typeof error === 'object' &&
error !== null &&
'code' in error &&
'message' in error
);
}
/**
* Type guard for errors with status code
*/
function hasStatusCode(error: unknown): error is { status: number } {
return (
typeof error === 'object' &&
error !== null &&
'status' in error &&
typeof (error as any).status === 'number'
);
}
/**
* Create a user-cancellation error
*/
export class MFACancelledError extends Error {
constructor() {
super('MFA verification was cancelled by user');
this.name = 'MFACancelledError';
}
}
/**
* Check if error is a user cancellation
*/
export function isMFACancelledError(error: unknown): boolean {
return error instanceof MFACancelledError;
}

View File

@@ -0,0 +1,125 @@
import { z } from 'zod';
/**
* Admin form validation schemas
* Provides type-safe validation for admin settings and user management forms
*/
/**
* Email validation schema
* Ensures valid email format with reasonable length constraints
*/
export const emailSchema = z
.string()
.trim()
.min(1, 'Email is required')
.max(255, 'Email must be less than 255 characters')
.email('Invalid email address')
.toLowerCase();
/**
* URL validation schema
* Validates URLs with http/https protocol and reasonable length
*/
export const urlSchema = z
.string()
.trim()
.min(1, 'URL is required')
.max(2048, 'URL must be less than 2048 characters')
.url('Invalid URL format')
.refine(
(url) => url.startsWith('http://') || url.startsWith('https://'),
'URL must start with http:// or https://'
);
/**
* Username validation schema
* Alphanumeric with underscores and hyphens, 3-30 characters
*/
export const usernameSchema = z
.string()
.trim()
.min(3, 'Username must be at least 3 characters')
.max(30, 'Username must be less than 30 characters')
.regex(
/^[a-zA-Z0-9_-]+$/,
'Username can only contain letters, numbers, underscores, and hyphens'
);
/**
* Display name validation schema
* More permissive than username, allows spaces and special characters
*/
export const displayNameSchema = z
.string()
.trim()
.min(1, 'Display name is required')
.max(100, 'Display name must be less than 100 characters');
/**
* Admin settings validation schema
* For system-wide configuration values
*/
export const adminSettingsSchema = z.object({
email: emailSchema.optional(),
url: urlSchema.optional(),
username: usernameSchema.optional(),
displayName: displayNameSchema.optional(),
});
/**
* User search validation schema
* For searching users in admin panel
*/
export const userSearchSchema = z.object({
query: z
.string()
.trim()
.min(1, 'Search query must be at least 1 character')
.max(100, 'Search query must be less than 100 characters'),
});
/**
* Helper function to validate email
*/
export function validateEmail(email: string): { valid: boolean; error?: string } {
try {
emailSchema.parse(email);
return { valid: true };
} catch (error: unknown) {
if (error instanceof z.ZodError) {
return { valid: false, error: error.issues[0]?.message };
}
return { valid: false, error: 'Invalid email' };
}
}
/**
* Helper function to validate URL
*/
export function validateUrl(url: string): { valid: boolean; error?: string } {
try {
urlSchema.parse(url);
return { valid: true };
} catch (error: unknown) {
if (error instanceof z.ZodError) {
return { valid: false, error: error.issues[0]?.message };
}
return { valid: false, error: 'Invalid URL' };
}
}
/**
* Helper function to validate username
*/
export function validateUsername(username: string): { valid: boolean; error?: string } {
try {
usernameSchema.parse(username);
return { valid: true };
} catch (error: unknown) {
if (error instanceof z.ZodError) {
return { valid: false, error: error.issues[0]?.message };
}
return { valid: false, error: 'Invalid username' };
}
}

285
src-old/lib/auditHelpers.ts Normal file
View File

@@ -0,0 +1,285 @@
/**
* Helper functions for relational audit logging
* Replaces JSONB storage with proper relational tables
*/
import { supabase } from '@/lib/supabaseClient';
import { handleNonCriticalError } from './errorHandler';
/**
* Write admin audit details to relational table
* Replaces JSONB admin_audit_log.details column
*/
export async function writeAdminAuditDetails(
auditLogId: string,
details: Record<string, unknown>
): Promise<void> {
if (!details || Object.keys(details).length === 0) return;
const entries = Object.entries(details).map(([key, value]) => ({
audit_log_id: auditLogId,
detail_key: key,
detail_value: typeof value === 'object' ? JSON.stringify(value) : String(value),
}));
const { error } = await supabase
.from('admin_audit_details')
.insert(entries);
if (error) {
handleNonCriticalError(error, {
action: 'Write admin audit details',
metadata: { auditLogId },
});
throw error;
}
}
/**
* Write moderation audit metadata to relational table
* Replaces JSONB moderation_audit_log.metadata column
*/
export async function writeModerationAuditMetadata(
auditLogId: string,
metadata: Record<string, unknown>
): Promise<void> {
if (!metadata || Object.keys(metadata).length === 0) return;
const entries = Object.entries(metadata).map(([key, value]) => ({
audit_log_id: auditLogId,
metadata_key: key,
metadata_value: typeof value === 'object' ? JSON.stringify(value) : String(value),
}));
const { error } = await supabase
.from('moderation_audit_metadata')
.insert(entries);
if (error) {
handleNonCriticalError(error, {
action: 'Write moderation audit metadata',
metadata: { auditLogId },
});
throw error;
}
}
/**
* Write item change fields to relational table
* Replaces JSONB item_edit_history.changes column
*/
export async function writeItemChangeFields(
editHistoryId: string,
changes: Record<string, { old_value?: unknown; new_value?: unknown }>
): Promise<void> {
if (!changes || Object.keys(changes).length === 0) return;
const entries = Object.entries(changes).map(([fieldName, change]) => ({
edit_history_id: editHistoryId,
field_name: fieldName,
old_value: change.old_value !== undefined
? (typeof change.old_value === 'object' ? JSON.stringify(change.old_value) : String(change.old_value))
: null,
new_value: change.new_value !== undefined
? (typeof change.new_value === 'object' ? JSON.stringify(change.new_value) : String(change.new_value))
: null,
}));
const { error } = await supabase
.from('item_change_fields')
.insert(entries);
if (error) {
handleNonCriticalError(error, {
action: 'Write item change fields',
metadata: { editHistoryId },
});
throw error;
}
}
/**
* Write request breadcrumbs to relational table
* Replaces JSONB request_metadata.breadcrumbs column
*/
export async function writeRequestBreadcrumbs(
requestId: string,
breadcrumbs: Array<{
timestamp: string;
category: string;
message: string;
level?: 'debug' | 'info' | 'warn' | 'error';
}>
): Promise<void> {
if (!breadcrumbs || breadcrumbs.length === 0) return;
const entries = breadcrumbs.map((breadcrumb, index) => ({
request_id: requestId,
timestamp: breadcrumb.timestamp,
category: breadcrumb.category,
message: breadcrumb.message,
level: breadcrumb.level || 'info',
sequence_order: index,
}));
const { error } = await supabase
.from('request_breadcrumbs')
.insert(entries);
if (error) {
handleNonCriticalError(error, {
action: 'Write request breadcrumbs',
metadata: { requestId },
});
throw error;
}
}
/**
* Read admin audit details from relational table
*/
export async function readAdminAuditDetails(
auditLogId: string
): Promise<Record<string, string>> {
const { data, error } = await supabase
.from('admin_audit_details')
.select('detail_key, detail_value')
.eq('audit_log_id', auditLogId);
if (error) {
handleNonCriticalError(error, {
action: 'Read admin audit details',
metadata: { auditLogId },
});
return {};
}
return data.reduce((acc, row) => {
acc[row.detail_key] = row.detail_value;
return acc;
}, {} as Record<string, string>);
}
/**
* Read moderation audit metadata from relational table
*/
export async function readModerationAuditMetadata(
auditLogId: string
): Promise<Record<string, string>> {
const { data, error } = await supabase
.from('moderation_audit_metadata')
.select('metadata_key, metadata_value')
.eq('audit_log_id', auditLogId);
if (error) {
handleNonCriticalError(error, {
action: 'Read moderation audit metadata',
metadata: { auditLogId },
});
return {};
}
return data.reduce((acc, row) => {
acc[row.metadata_key] = row.metadata_value;
return acc;
}, {} as Record<string, string>);
}
/**
* Read item change fields from relational table
*/
export async function readItemChangeFields(
editHistoryId: string
): Promise<Record<string, { old_value: string | null; new_value: string | null }>> {
const { data, error } = await supabase
.from('item_change_fields')
.select('field_name, old_value, new_value')
.eq('edit_history_id', editHistoryId);
if (error) {
handleNonCriticalError(error, {
action: 'Read item change fields',
metadata: { editHistoryId },
});
return {};
}
return data.reduce((acc, row) => {
acc[row.field_name] = {
old_value: row.old_value,
new_value: row.new_value,
};
return acc;
}, {} as Record<string, { old_value: string | null; new_value: string | null }>);
}
/**
* Write profile change fields to relational table
* Replaces JSONB profile_audit_log.changes column
*/
export async function writeProfileChangeFields(
auditLogId: string,
changes: Record<string, { old_value?: unknown; new_value?: unknown }>
): Promise<void> {
if (!changes || Object.keys(changes).length === 0) return;
const entries = Object.entries(changes).map(([fieldName, change]) => ({
audit_log_id: auditLogId,
field_name: fieldName,
old_value: change.old_value !== undefined
? (typeof change.old_value === 'object' ? JSON.stringify(change.old_value) : String(change.old_value))
: null,
new_value: change.new_value !== undefined
? (typeof change.new_value === 'object' ? JSON.stringify(change.new_value) : String(change.new_value))
: null,
}));
const { error } = await supabase
.from('profile_change_fields')
.insert(entries);
if (error) {
handleNonCriticalError(error, {
action: 'Write profile change fields',
metadata: { auditLogId },
});
throw error;
}
}
/**
* Write conflict detail fields to relational table
* Replaces JSONB conflict_resolutions.conflict_details column
*/
export async function writeConflictDetailFields(
conflictResolutionId: string,
conflictData: Record<string, unknown>
): Promise<void> {
if (!conflictData || Object.keys(conflictData).length === 0) return;
const entries = Object.entries(conflictData).map(([fieldName, value]) => ({
conflict_resolution_id: conflictResolutionId,
field_name: fieldName,
conflicting_value_1: typeof value === 'object' && value !== null && 'v1' in value
? String((value as any).v1)
: null,
conflicting_value_2: typeof value === 'object' && value !== null && 'v2' in value
? String((value as any).v2)
: null,
resolved_value: typeof value === 'object' && value !== null && 'resolved' in value
? String((value as any).resolved)
: null,
}));
const { error } = await supabase
.from('conflict_detail_fields')
.insert(entries);
if (error) {
handleNonCriticalError(error, {
action: 'Write conflict detail fields',
metadata: { conflictResolutionId },
});
throw error;
}
}

18
src-old/lib/authLogger.ts Normal file
View File

@@ -0,0 +1,18 @@
/**
* Conditional authentication logging utility
* Uses structured logger internally
*/
import { logger } from './logger';
export const authLog = (...args: unknown[]) => {
logger.info('[Auth]', ...args);
};
export const authWarn = (...args: unknown[]) => {
logger.warn('[Auth]', ...args);
};
export const authError = (...args: unknown[]) => {
logger.error('[Auth] Error occurred', { error: args });
};

310
src-old/lib/authService.ts Normal file
View File

@@ -0,0 +1,310 @@
/**
* Centralized Authentication Service
* Handles all authentication flows with consistent AAL checking and MFA verification
*/
import { supabase } from '@/lib/supabaseClient';
import type { Session } from '@supabase/supabase-js';
import type {
AALLevel,
MFAFactor,
CheckAalResult,
AuthServiceResponse,
MFAChallengeResult
} from '@/types/auth';
import { setStepUpRequired, setAuthMethod, clearAllAuthFlags } from './sessionFlags';
import { logger } from './logger';
import { getErrorMessage, handleNonCriticalError } from './errorHandler';
/**
* Extract AAL level from session using Supabase API
* Always returns ground truth from server, not cached session data
*/
export async function getSessionAal(session: Session | null): Promise<AALLevel> {
if (!session) {
logger.log('[AuthService] No session, returning aal1');
return 'aal1';
}
try {
const { data, error } = await supabase.auth.mfa.getAuthenticatorAssuranceLevel();
logger.log('[AuthService] getSessionAal result', {
hasData: !!data,
currentLevel: data?.currentLevel,
nextLevel: data?.nextLevel,
error: error?.message
});
if (error) {
handleNonCriticalError(error, {
action: 'Get session AAL',
});
return 'aal1';
}
const level = (data.currentLevel as AALLevel) || 'aal1';
logger.log('[AuthService] Returning AAL', { level });
return level;
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Get session AAL exception',
});
return 'aal1';
}
}
/**
* Get enrolled MFA factors for the current user
*/
export async function getEnrolledFactors(): Promise<MFAFactor[]> {
try {
const { data, error } = await supabase.auth.mfa.listFactors();
if (error) {
handleNonCriticalError(error, {
action: 'List MFA factors',
});
return [];
}
return (data?.totp || [])
.filter(f => f.status === 'verified')
.map(f => ({
id: f.id,
factor_type: 'totp' as const,
status: 'verified' as const,
friendly_name: f.friendly_name,
created_at: f.created_at,
updated_at: f.updated_at,
}));
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'List MFA factors exception',
});
return [];
}
}
/**
* Check if user needs AAL step-up
* Returns detailed information about enrollment and current AAL level
*/
export async function checkAalStepUp(session: Session | null): Promise<CheckAalResult> {
if (!session?.user) {
return {
needsStepUp: false,
hasMfaEnrolled: false,
currentLevel: 'aal1',
hasEnrolledFactors: false,
};
}
const [currentLevel, factors] = await Promise.all([
getSessionAal(session),
getEnrolledFactors(),
]);
const hasEnrolledFactors = factors.length > 0;
const needsStepUp = hasEnrolledFactors && currentLevel === 'aal1';
return {
needsStepUp,
hasMfaEnrolled: hasEnrolledFactors,
currentLevel,
hasEnrolledFactors,
factorId: factors[0]?.id,
};
}
/**
* Verify MFA is required for a user based on their role
*/
export async function verifyMfaRequired(userId: string): Promise<boolean> {
try {
const { data, error } = await supabase
.from('user_roles')
.select('role')
.eq('user_id', userId)
.in('role', ['admin', 'moderator']);
if (error) {
handleNonCriticalError(error, {
action: 'Verify MFA required',
userId,
});
return false;
}
return (data?.length || 0) > 0;
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Verify MFA required exception',
userId,
});
return false;
}
}
/**
* Handle post-authentication flow for all auth methods
* Detects if MFA step-up is needed and redirects accordingly
*/
export async function handlePostAuthFlow(
session: Session,
authMethod: 'password' | 'oauth' | 'magiclink'
): Promise<AuthServiceResponse<{ shouldRedirect: boolean; redirectTo?: string }>> {
try {
// Store auth method for audit logging
setAuthMethod(authMethod);
// Check if step-up is needed
const aalCheck = await checkAalStepUp(session);
if (aalCheck.needsStepUp) {
logger.info('[AuthService] MFA step-up required', {
authMethod,
currentAal: aalCheck.currentLevel
});
// Set flag and redirect to step-up page
setStepUpRequired(true, window.location.pathname);
// Log audit event
await logAuthEvent(session.user.id, 'mfa_step_up_required', {
auth_method: authMethod,
current_aal: aalCheck.currentLevel,
});
return {
success: true,
data: {
shouldRedirect: true,
redirectTo: '/auth/mfa-step-up',
},
};
}
// Log successful authentication
await logAuthEvent(session.user.id, 'authentication_success', {
auth_method: authMethod,
aal: aalCheck.currentLevel,
});
return {
success: true,
data: {
shouldRedirect: false,
},
};
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Handle post-auth flow',
metadata: { authMethod },
});
return {
success: false,
error: getErrorMessage(error),
};
}
}
/**
* Verify MFA challenge was successful and session upgraded to AAL2
*/
export async function verifyMfaUpgrade(session: Session | null): Promise<MFAChallengeResult> {
if (!session) {
return {
success: false,
error: 'No session found',
};
}
const currentAal = await getSessionAal(session);
if (currentAal !== 'aal2') {
handleNonCriticalError(new Error('MFA verification failed'), {
action: 'Verify MFA upgrade',
metadata: { expectedAal: 'aal2', actualAal: currentAal },
});
await logAuthEvent(session.user.id, 'mfa_verification_failed', {
expected_aal: 'aal2',
actual_aal: currentAal,
});
return {
success: false,
error: 'Failed to upgrade session to AAL2',
newAal: currentAal,
};
}
// Log successful upgrade
await logAuthEvent(session.user.id, 'mfa_verification_success', {
new_aal: currentAal,
});
// Clear auth flags
clearAllAuthFlags();
return {
success: true,
newAal: currentAal,
};
}
/**
* Log authentication event to audit log
*/
async function logAuthEvent(
userId: string,
action: string,
details: Record<string, any>
): Promise<void> {
try {
const { error } = await supabase.rpc('log_admin_action', {
_admin_user_id: userId,
_action: action,
_target_user_id: userId,
_details: details,
});
if (error) {
handleNonCriticalError(error, {
action: 'Log auth event',
metadata: { eventAction: action, userId },
});
}
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Log auth event exception',
metadata: { eventAction: action, userId },
});
}
}
/**
* Handle sign out with proper cleanup
*/
export async function signOutUser(): Promise<AuthServiceResponse> {
try {
const { error } = await supabase.auth.signOut();
if (error) {
return {
success: false,
error: error.message,
};
}
// Clear all session flags
clearAllAuthFlags();
return { success: true };
} catch (error: unknown) {
return {
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
};
}
}

194
src-old/lib/authStorage.ts Normal file
View File

@@ -0,0 +1,194 @@
import { authLog, authWarn, authError } from './authLogger';
/**
* Custom storage adapter for Supabase authentication that handles iframe localStorage restrictions.
* Falls back to sessionStorage or in-memory storage if localStorage is blocked.
*/
class AuthStorage {
private storage: Storage | null = null;
private memoryStorage: Map<string, string> = new Map();
private storageType: 'localStorage' | 'sessionStorage' | 'memory' = 'memory';
private sessionRecoveryAttempted = false;
constructor() {
// Try localStorage first
try {
localStorage.setItem('__supabase_test__', 'test');
localStorage.removeItem('__supabase_test__');
this.storage = localStorage;
this.storageType = 'localStorage';
authLog('[AuthStorage] Using localStorage ✓');
} catch {
// Try sessionStorage as fallback
try {
sessionStorage.setItem('__supabase_test__', 'test');
sessionStorage.removeItem('__supabase_test__');
this.storage = sessionStorage;
this.storageType = 'sessionStorage';
authWarn('[AuthStorage] localStorage blocked, using sessionStorage ⚠️');
} catch {
// Use in-memory storage as last resort
this.storageType = 'memory';
authError('[AuthStorage] Both localStorage and sessionStorage blocked, using in-memory storage ⛔');
authError('[AuthStorage] Sessions will NOT persist across page reloads!');
// Attempt to recover session from URL
this.attemptSessionRecoveryFromURL();
}
}
// Listen for storage events to sync across tabs (when possible)
if (this.storage) {
window.addEventListener('storage', this.handleStorageChange.bind(this));
}
}
private attemptSessionRecoveryFromURL() {
if (this.sessionRecoveryAttempted) return;
this.sessionRecoveryAttempted = true;
try {
const urlParams = new URLSearchParams(window.location.hash.substring(1));
const accessToken = urlParams.get('access_token');
const refreshToken = urlParams.get('refresh_token');
if (accessToken && refreshToken) {
authLog('[AuthStorage] Recovering session from URL parameters');
// Store in memory
this.memoryStorage.set('sb-auth-token', JSON.stringify({
access_token: accessToken,
refresh_token: refreshToken,
expires_at: Date.now() + 3600000, // 1 hour
}));
// Clean URL
window.history.replaceState({}, document.title, window.location.pathname);
}
} catch (error: unknown) {
authError('[AuthStorage] Failed to recover session from URL:', error);
}
}
private handleStorageChange(event: StorageEvent) {
// Sync auth state across tabs
if (event.key?.startsWith('sb-') && event.newValue) {
authLog('[AuthStorage] Syncing auth state across tabs');
}
}
getItem(key: string): string | null {
authLog('[AuthStorage] Getting key:', key);
try {
if (this.storage) {
const value = this.storage.getItem(key);
authLog('[AuthStorage] Retrieved from storage:', !!value);
if (value) {
// Verify it's not expired
if (key.includes('auth-token')) {
try {
const parsed = JSON.parse(value);
// Supabase stores expires_at in seconds, Date.now() is in milliseconds
// Check if expires_at is in seconds (< year 3000 in milliseconds)
const expiryTime = parsed.expires_at > 10000000000
? parsed.expires_at // Already in milliseconds
: parsed.expires_at * 1000; // Convert from seconds to milliseconds
if (parsed.expires_at && expiryTime < Date.now()) {
authWarn('[AuthStorage] Token expired, removing', {
expires_at: parsed.expires_at,
expiryTime: new Date(expiryTime),
now: new Date()
});
this.removeItem(key);
return null;
}
authLog('[AuthStorage] Token valid, expires:', new Date(expiryTime));
} catch (e) {
authWarn('[AuthStorage] Could not parse token for expiry check:', e);
}
}
}
return value;
}
authLog('[AuthStorage] Using memory storage');
return this.memoryStorage.get(key) || null;
} catch (error: unknown) {
authError('[AuthStorage] Error reading from storage:', error);
return this.memoryStorage.get(key) || null;
}
}
setItem(key: string, value: string): void {
authLog('[AuthStorage] Setting key:', key);
try {
if (this.storage) {
this.storage.setItem(key, value);
}
// Always keep in memory as backup
this.memoryStorage.set(key, value);
} catch (error: unknown) {
authError('[AuthStorage] Error writing to storage:', error);
// Fallback to memory only
this.memoryStorage.set(key, value);
}
}
removeItem(key: string): void {
try {
if (this.storage) {
this.storage.removeItem(key);
}
this.memoryStorage.delete(key);
} catch (error: unknown) {
authError('[AuthStorage] Error removing from storage:', error);
this.memoryStorage.delete(key);
}
}
// Get storage status for diagnostics
getStorageStatus(): { type: string; persistent: boolean; warning: string | null } {
return {
type: this.storageType,
persistent: this.storageType !== 'memory',
warning: this.storageType === 'memory'
? 'Sessions will not persist across page reloads. Please enable cookies/storage for this site.'
: null
};
}
// Clear all auth-related storage (for force logout)
clearAll(): void {
authLog('[AuthStorage] Clearing all auth storage');
try {
if (this.storage) {
// Get all keys from storage
const keys: string[] = [];
for (let i = 0; i < this.storage.length; i++) {
const key = this.storage.key(i);
if (key?.startsWith('sb-')) {
keys.push(key);
}
}
// Remove all Supabase auth keys
keys.forEach(key => {
authLog('[AuthStorage] Removing key:', key);
this.storage!.removeItem(key);
});
}
// Clear memory storage
this.memoryStorage.clear();
authLog('[AuthStorage] ✓ All auth storage cleared');
} catch (error: unknown) {
authError('[AuthStorage] Error clearing storage:', error);
// Still clear memory storage as fallback
this.memoryStorage.clear();
}
}
}
export const authStorage = new AuthStorage();

View File

@@ -0,0 +1,65 @@
/**
* Cloudflare Images variant utilities
* Generates properly formatted URLs for Cloudflare Image variants
*/
export type CloudflareVariant =
| 'avatar'
| 'banner'
| 'bannermobile'
| 'card'
| 'cardthumb'
| 'logo'
| 'public';
/**
* Build a Cloudflare image URL with specified variant
* Uses CDN proxy for branded URLs
*/
export function getCloudflareImageUrl(
imageId: string | undefined,
variant: CloudflareVariant = 'public'
): string | undefined {
if (!imageId) return undefined;
return `https://cdn.thrillwiki.com/images/${imageId}/${variant}`;
}
/**
* Generate responsive image srcset for card images
* Useful for <img srcset> elements
*/
export function getCloudflareImageSrcSet(imageId: string | undefined): string | undefined {
if (!imageId) return undefined;
return [
`${getCloudflareImageUrl(imageId, 'cardthumb')} 600w`,
`${getCloudflareImageUrl(imageId, 'card')} 1200w`,
`${getCloudflareImageUrl(imageId, 'public')} 1366w`
].join(', ');
}
/**
* Get responsive banner URLs for mobile and desktop
*/
export function getBannerUrls(imageId: string | undefined) {
if (!imageId) return { mobile: undefined, desktop: undefined };
return {
mobile: getCloudflareImageUrl(imageId, 'bannermobile'),
desktop: getCloudflareImageUrl(imageId, 'banner')
};
}
/**
* Extract Cloudflare image ID from various URL formats
* Supports both old imagedelivery.net and new CDN URLs
*/
export function extractCloudflareImageId(url: string): string | null {
// Match old imagedelivery.net URLs
const deliveryMatch = url.match(/imagedelivery\.net\/[^\/]+\/([a-f0-9-]+)\//i);
if (deliveryMatch) return deliveryMatch[1];
// Match new cdn.thrillwiki.com URLs
const cdnMatch = url.match(/cdn\.thrillwiki\.com\/images\/([a-f0-9-]+)\//i);
return cdnMatch ? cdnMatch[1] : null;
}

View File

@@ -0,0 +1,335 @@
import { supabase } from '@/lib/supabaseClient';
import type { Json } from '@/integrations/supabase/types';
import { uploadPendingImages } from './imageUploadHelper';
import { CompanyFormData, TempCompanyData } from '@/types/company';
import { handleError } from './errorHandler';
import { withRetry, isRetryableError } from './retryHelpers';
import { logger } from './logger';
import { checkSubmissionRateLimit, recordSubmissionAttempt } from './submissionRateLimiter';
import { sanitizeErrorMessage } from './errorSanitizer';
import { reportRateLimitViolation, reportBanEvasionAttempt } from './pipelineAlerts';
export type { CompanyFormData, TempCompanyData };
/**
* Rate limiting helper - checks rate limits before allowing submission
*/
function checkRateLimitOrThrow(userId: string, action: string): void {
const rateLimit = checkSubmissionRateLimit(userId);
if (!rateLimit.allowed) {
const sanitizedMessage = sanitizeErrorMessage(rateLimit.reason || 'Rate limit exceeded');
logger.warn('[RateLimit] Company submission blocked', {
userId,
action,
reason: rateLimit.reason,
retryAfter: rateLimit.retryAfter,
});
// Report to system alerts for admin visibility
reportRateLimitViolation(userId, action, rateLimit.retryAfter || 60).catch(() => {
// Non-blocking - don't fail submission if alert fails
});
throw new Error(sanitizedMessage);
}
logger.info('[RateLimit] Company submission allowed', {
userId,
action,
remaining: rateLimit.remaining,
});
}
export async function submitCompanyCreation(
data: CompanyFormData,
companyType: 'manufacturer' | 'designer' | 'operator' | 'property_owner',
userId: string
) {
// Phase 3: Rate limiting check
checkRateLimitOrThrow(userId, 'company_creation');
recordSubmissionAttempt(userId);
// Check if user is banned (with quick retry for read operation)
const profile = await withRetry(
async () => {
const { data: profile } = await supabase
.from('profiles')
.select('banned')
.eq('user_id', userId)
.single();
return profile;
},
{ maxAttempts: 2 }
);
if (profile?.banned) {
// Report ban evasion attempt
reportBanEvasionAttempt(userId, 'company_creation').catch(() => {
// Non-blocking - don't fail if alert fails
});
throw new Error('Account suspended. Contact support for assistance.');
}
// Upload any pending local images first
let processedImages = data.images;
if (data.images?.uploaded && data.images.uploaded.length > 0) {
try {
const uploadedImages = await uploadPendingImages(data.images.uploaded);
processedImages = {
...data.images,
uploaded: uploadedImages
};
} catch (error: unknown) {
handleError(error, {
action: 'Upload company images',
metadata: { companyType },
});
throw new Error('Failed to upload images. Please check your connection and try again.');
}
}
// Create submission with retry logic
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
// Create the main submission record
const { data: submissionData, error: submissionError } = await supabase
.from('content_submissions')
.insert({
user_id: userId,
submission_type: companyType,
content: {
action: 'create'
},
status: 'pending' as const
})
.select('id')
.single();
if (submissionError) throw submissionError;
// Create the submission item with actual company data
const { error: itemError } = await supabase
.from('submission_items')
.insert({
submission_id: submissionData.id,
item_type: companyType,
item_data: {
name: data.name,
slug: data.slug,
description: data.description,
person_type: data.person_type,
website_url: data.website_url,
founded_year: data.founded_year,
headquarters_location: data.headquarters_location,
company_type: companyType,
images: processedImages as unknown as Json
},
status: 'pending' as const,
order_index: 0
});
if (itemError) throw itemError;
return { submitted: true, submissionId: submissionData.id };
},
{
maxAttempts: 3,
onRetry: (attempt, error, delay) => {
logger.warn('Retrying company submission', { attempt, delay, companyType });
// Emit event for UI indicator
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: companyType }
}));
},
shouldRetry: (error) => {
// Don't retry validation/business logic errors
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('slug')) return false;
if (message.includes('permission')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
// Emit success event
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: `${companyType} submission`,
metadata: { retriesExhausted: true },
});
// Emit failure event
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
return result;
}
export async function submitCompanyUpdate(
companyId: string,
data: CompanyFormData,
userId: string
) {
// Phase 3: Rate limiting check
checkRateLimitOrThrow(userId, 'company_update');
recordSubmissionAttempt(userId);
// Check if user is banned (with quick retry for read operation)
const profile = await withRetry(
async () => {
const { data: profile } = await supabase
.from('profiles')
.select('banned')
.eq('user_id', userId)
.single();
return profile;
},
{ maxAttempts: 2 }
);
if (profile?.banned) {
// Report ban evasion attempt
reportBanEvasionAttempt(userId, 'company_update').catch(() => {
// Non-blocking - don't fail if alert fails
});
throw new Error('Account suspended. Contact support for assistance.');
}
// Fetch existing company data (all fields for original_data)
const { data: existingCompany, error: fetchError } = await supabase
.from('companies')
.select('id, name, slug, description, company_type, person_type, logo_url, card_image_url, banner_image_url, banner_image_id, card_image_id, headquarters_location, website_url, founded_year, founded_date, founded_date_precision')
.eq('id', companyId)
.single();
if (fetchError) throw fetchError;
if (!existingCompany) throw new Error('Company not found');
// Upload any pending local images first
let processedImages = data.images;
if (data.images?.uploaded && data.images.uploaded.length > 0) {
try {
const uploadedImages = await uploadPendingImages(data.images.uploaded);
processedImages = {
...data.images,
uploaded: uploadedImages
};
} catch (error: unknown) {
handleError(error, {
action: 'Upload company images for update',
metadata: { companyType: existingCompany.company_type, companyId },
});
throw new Error('Failed to upload images. Please check your connection and try again.');
}
}
// Create submission with retry logic
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
// Create the main submission record
const { data: submissionData, error: submissionError } = await supabase
.from('content_submissions')
.insert({
user_id: userId,
submission_type: existingCompany.company_type,
content: {
action: 'edit',
company_id: companyId
},
status: 'pending' as const
})
.select('id')
.single();
if (submissionError) throw submissionError;
// Create the submission item with actual company data AND original data
const { error: itemError } = await supabase
.from('submission_items')
.insert({
submission_id: submissionData.id,
item_type: existingCompany.company_type,
item_data: {
company_id: companyId,
name: data.name,
slug: data.slug,
description: data.description,
person_type: data.person_type,
website_url: data.website_url,
founded_year: data.founded_year,
headquarters_location: data.headquarters_location,
images: processedImages as unknown as Json
},
original_data: JSON.parse(JSON.stringify(existingCompany)),
status: 'pending' as const,
order_index: 0
});
if (itemError) throw itemError;
return { submitted: true, submissionId: submissionData.id };
},
{
maxAttempts: 3,
onRetry: (attempt, error, delay) => {
logger.warn('Retrying company update', { attempt, delay, companyId });
// Emit event for UI indicator
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: `${existingCompany.company_type} update` }
}));
},
shouldRetry: (error) => {
// Don't retry validation/business logic errors
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('slug')) return false;
if (message.includes('permission')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
// Emit success event
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: `${existingCompany.company_type} update`,
metadata: { retriesExhausted: true, companyId },
});
// Emit failure event
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
return result;
}

View File

@@ -0,0 +1,299 @@
import { supabase } from '@/lib/supabaseClient';
import { handleError, handleNonCriticalError } from '@/lib/errorHandler';
import { updateSubmissionItem, type SubmissionItemWithDeps, type DependencyConflict } from './submissionItemsService';
export interface ResolutionResult {
success: boolean;
updatedSelections?: Set<string>;
error?: string;
}
/**
* Main conflict resolution processor
*/
export async function resolveConflicts(
conflicts: DependencyConflict[],
resolutions: Record<string, string>,
items: SubmissionItemWithDeps[],
userId: string
): Promise<ResolutionResult> {
try {
const updatedSelections = new Set<string>();
for (const conflict of conflicts) {
const resolution = resolutions[conflict.itemId];
if (!resolution) {
return {
success: false,
error: `No resolution selected for ${conflict.itemId}`,
};
}
const suggestion = conflict.suggestions.find(s => s.action === resolution);
if (!suggestion) {
return {
success: false,
error: `Invalid resolution action: ${resolution}`,
};
}
// Process each resolution action
switch (suggestion.action) {
case 'link_existing':
if (!suggestion.entityId) {
return {
success: false,
error: 'Entity ID required for link_existing action',
};
}
await linkToExistingEntity(conflict.itemId, suggestion.entityId);
updatedSelections.add(conflict.itemId);
break;
case 'create_parent':
const item = items.find(i => i.id === conflict.itemId);
if (item?.depends_on) {
updatedSelections.add(item.depends_on);
updatedSelections.add(conflict.itemId);
}
break;
case 'cascade_reject':
await cascadeRejectDependents(conflict.itemId, items);
break;
case 'escalate':
const submissionId = items[0]?.submission_id;
if (submissionId) {
await escalateForAdminReview(submissionId, `Conflict resolution needed: ${conflict.message}`, userId);
}
return {
success: true,
updatedSelections: new Set(),
};
default:
return {
success: false,
error: `Unknown action: ${suggestion.action}`,
};
}
}
return {
success: true,
updatedSelections,
};
} catch (error: unknown) {
handleError(error, {
action: 'Resolve conflicts',
metadata: { conflictCount: conflicts.length },
});
return {
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
};
}
}
/**
* Link submission item to existing database entity
*/
async function linkToExistingEntity(itemId: string, entityId: string): Promise<void> {
await updateSubmissionItem(itemId, {
approved_entity_id: entityId,
status: 'approved' as const,
});
}
/**
* Cascade reject all dependent items
*/
async function cascadeRejectDependents(
itemId: string,
items: SubmissionItemWithDeps[]
): Promise<void> {
const item = items.find(i => i.id === itemId);
if (!item?.dependents) return;
const toReject: string[] = [];
function collectDependents(current: SubmissionItemWithDeps) {
if (current.dependents) {
for (const dep of current.dependents) {
toReject.push(dep.id);
collectDependents(dep);
}
}
}
collectDependents(item);
// Reject all collected dependents
for (const depId of toReject) {
await updateSubmissionItem(depId, {
status: 'rejected' as const,
rejection_reason: 'Parent dependency was rejected',
});
}
}
/**
* Escalate submission for admin review
*/
async function escalateForAdminReview(
submissionId: string,
reason: string,
userId: string
): Promise<void> {
const { error } = await supabase
.from('content_submissions')
.update({
status: 'pending' as const,
escalation_reason: reason,
escalated_by: userId,
updated_at: new Date().toISOString(),
})
.eq('id', submissionId);
if (error) {
throw new Error(`Failed to escalate submission: ${error.message}`);
}
}
/**
* Find existing entities that match submission data
*/
export async function findMatchingEntities(
itemType: string,
itemData: any
): Promise<Array<{ id: string; name: string; similarity: number }>> {
const tableName = getTableNameForItemType(itemType);
if (!tableName) return [];
try {
// Query based on table type
if (tableName === 'companies') {
const { data, error } = await supabase
.from('companies')
.select('id, name')
.ilike('name', `%${itemData.name}%`)
.limit(5);
if (error) throw error;
return (data || []).map(entity => ({
id: entity.id,
name: entity.name,
similarity: calculateSimilarity(itemData.name, entity.name),
})).sort((a, b) => b.similarity - a.similarity);
} else if (tableName === 'parks') {
const { data, error } = await supabase
.from('parks')
.select('id, name')
.ilike('name', `%${itemData.name}%`)
.limit(5);
if (error) throw error;
return (data || []).map(entity => ({
id: entity.id,
name: entity.name,
similarity: calculateSimilarity(itemData.name, entity.name),
})).sort((a, b) => b.similarity - a.similarity);
} else if (tableName === 'rides') {
const { data, error } = await supabase
.from('rides')
.select('id, name')
.ilike('name', `%${itemData.name}%`)
.limit(5);
if (error) throw error;
return (data || []).map(entity => ({
id: entity.id,
name: entity.name,
similarity: calculateSimilarity(itemData.name, entity.name),
})).sort((a, b) => b.similarity - a.similarity);
} else if (tableName === 'ride_models') {
const { data, error } = await supabase
.from('ride_models')
.select('id, name')
.ilike('name', `%${itemData.name}%`)
.limit(5);
if (error) throw error;
return (data || []).map(entity => ({
id: entity.id,
name: entity.name,
similarity: calculateSimilarity(itemData.name, entity.name),
})).sort((a, b) => b.similarity - a.similarity);
}
return [];
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Find matching entities',
metadata: { itemType },
});
return [];
}
}
/**
* Calculate string similarity (simple implementation)
*/
function calculateSimilarity(str1: string, str2: string): number {
const s1 = str1.toLowerCase();
const s2 = str2.toLowerCase();
if (s1 === s2) return 1.0;
if (s1.includes(s2) || s2.includes(s1)) return 0.8;
// Levenshtein distance approximation
const maxLen = Math.max(s1.length, s2.length);
const distance = levenshteinDistance(s1, s2);
return 1 - (distance / maxLen);
}
function levenshteinDistance(str1: string, str2: string): number {
const matrix: number[][] = [];
for (let i = 0; i <= str2.length; i++) {
matrix[i] = [i];
}
for (let j = 0; j <= str1.length; j++) {
matrix[0][j] = j;
}
for (let i = 1; i <= str2.length; i++) {
for (let j = 1; j <= str1.length; j++) {
if (str2.charAt(i - 1) === str1.charAt(j - 1)) {
matrix[i][j] = matrix[i - 1][j - 1];
} else {
matrix[i][j] = Math.min(
matrix[i - 1][j - 1] + 1,
matrix[i][j - 1] + 1,
matrix[i - 1][j] + 1
);
}
}
}
return matrix[str2.length][str1.length];
}
function getTableNameForItemType(itemType: string): string | null {
const typeMap: Record<string, string> = {
park: 'parks',
ride: 'rides',
manufacturer: 'companies',
operator: 'companies',
designer: 'companies',
property_owner: 'companies',
ride_model: 'ride_models',
};
return typeMap[itemType] || null;
}

View File

@@ -0,0 +1,46 @@
import { z } from 'zod';
export const contactCategories = [
{ value: 'general', label: 'General Inquiry' },
{ value: 'moderation', label: 'Moderation Questions' },
{ value: 'technical', label: 'Technical Support' },
{ value: 'account', label: 'Account Issues' },
{ value: 'partnership', label: 'Partnership/Business' },
{ value: 'report', label: 'Report an Issue' },
{ value: 'other', label: 'Other' },
] as const;
export const contactFormSchema = z.object({
name: z
.string()
.trim()
.min(2, 'Name must be at least 2 characters')
.max(100, 'Name must be less than 100 characters'),
email: z
.string()
.trim()
.email('Invalid email address')
.max(255, 'Email must be less than 255 characters'),
subject: z
.string()
.trim()
.min(5, 'Subject must be at least 5 characters')
.max(200, 'Subject must be less than 200 characters'),
category: z.string()
.refine(
(val) => ['general', 'moderation', 'technical', 'account', 'partnership', 'report', 'other'].includes(val),
{ message: 'Please select a valid category' }
),
message: z
.string()
.trim()
.min(20, 'Message must be at least 20 characters')
.max(2000, 'Message must be less than 2000 characters'),
captchaToken: z.string().min(1, 'Please complete the CAPTCHA'),
});
export type ContactFormData = z.infer<typeof contactFormSchema>;
export interface ContactSubmission extends ContactFormData {
userId?: string;
}

View File

@@ -0,0 +1,121 @@
import { z } from 'zod';
/**
* Validation schemas for data export
*/
/**
* User statistics schema
*/
export const userStatisticsSchema = z.object({
ride_count: z.number().int().min(0),
coaster_count: z.number().int().min(0),
park_count: z.number().int().min(0),
review_count: z.number().int().min(0),
reputation_score: z.number().int().min(0),
photo_count: z.number().int().min(0),
list_count: z.number().int().min(0),
submission_count: z.number().int().min(0),
account_created: z.string(),
last_updated: z.string()
});
/**
* Activity log entry schema
*/
export const activityLogEntrySchema = z.object({
id: z.string().uuid(),
action: z.string(),
changes: z.record(z.string(), z.any()),
created_at: z.string(),
changed_by: z.string().uuid(),
ip_address_hash: z.string().optional(),
user_agent: z.string().optional()
});
/**
* Export options schema
*/
export const exportOptionsSchema = z.object({
include_reviews: z.boolean(),
include_lists: z.boolean(),
include_activity_log: z.boolean(),
include_preferences: z.boolean(),
format: z.literal('json')
});
/**
* Export profile data schema
*/
export const exportProfileDataSchema = z.object({
username: z.string(),
display_name: z.string().nullable(),
bio: z.string().nullable(),
preferred_pronouns: z.string().nullable(),
personal_location: z.string().nullable(),
timezone: z.string(),
preferred_language: z.string(),
theme_preference: z.string(),
privacy_level: z.string(),
created_at: z.string(),
updated_at: z.string()
});
/**
* Export review data schema
*/
export const exportReviewDataSchema = z.object({
id: z.string().uuid(),
rating: z.number().min(1).max(5),
review_text: z.string().nullable(),
ride_name: z.string().optional(),
park_name: z.string().optional(),
created_at: z.string()
});
/**
* Export list data schema
*/
export const exportListDataSchema = z.object({
id: z.string().uuid(),
name: z.string(),
description: z.string().nullable(),
is_public: z.boolean(),
item_count: z.number().int().min(0),
created_at: z.string()
});
/**
* Complete export data structure schema
*/
export const exportDataSchema = z.object({
export_date: z.string(),
user_id: z.string().uuid(),
profile: exportProfileDataSchema,
statistics: userStatisticsSchema,
reviews: z.array(exportReviewDataSchema),
lists: z.array(exportListDataSchema),
activity_log: z.array(activityLogEntrySchema),
preferences: z.object({
unit_preferences: z.any(),
accessibility_options: z.any(),
notification_preferences: z.any(),
privacy_settings: z.any()
}),
metadata: z.object({
export_version: z.string(),
data_retention_info: z.string(),
instructions: z.string()
})
});
/**
* Default export options
*/
export const DEFAULT_EXPORT_OPTIONS = {
include_reviews: true,
include_lists: true,
include_activity_log: true,
include_preferences: true,
format: 'json' as const
};

200
src-old/lib/dateUtils.ts Normal file
View File

@@ -0,0 +1,200 @@
/**
* Date Utility Functions for Timezone-Agnostic Date Handling
*
* This module provides utilities for handling calendar dates (not moments in time)
* without timezone shifts. All dates are stored as YYYY-MM-DD strings in the database
* using the DATE type (not TIMESTAMP).
*
* Key Principle: Calendar dates like "January 1, 2024" should remain "2024-01-01"
* regardless of user timezone. We never use UTC conversion for DATE fields.
*
* @see docs/DATE_HANDLING.md for full documentation
*/
/**
* Converts a Date object to YYYY-MM-DD string in LOCAL timezone
*
* This prevents timezone shifts where selecting "Jan 1, 2024" could
* save as "2023-12-31" or "2024-01-02" due to UTC conversion.
*
* @param date - Date object to convert
* @returns YYYY-MM-DD formatted string in local timezone
*
* @example
* // User in UTC-8 selects Jan 1, 2024 11:00 PM
* const date = new Date('2024-01-01T23:00:00-08:00');
* toDateOnly(date); // Returns "2024-01-01" ✅ (NOT "2024-01-02")
*/
export function toDateOnly(date: Date): string {
const year = date.getFullYear();
const month = String(date.getMonth() + 1).padStart(2, '0');
const day = String(date.getDate()).padStart(2, '0');
return `${year}-${month}-${day}`;
}
/**
* Parses a YYYY-MM-DD string to a Date object at midnight local time
*
* @param dateString - YYYY-MM-DD formatted string
* @returns Date object set to midnight local time
*
* @example
* parseDateOnly('2024-01-01'); // Returns Date object for Jan 1, 2024 00:00:00 local
*/
export function parseDateOnly(dateString: string): Date {
const [year, month, day] = dateString.split('-').map(Number);
return new Date(year, month - 1, day);
}
/**
* Gets current date as YYYY-MM-DD string in local timezone
*
* @returns Current date in YYYY-MM-DD format
*
* @example
* getCurrentDateLocal(); // "2024-01-15"
*/
export function getCurrentDateLocal(): string {
return toDateOnly(new Date());
}
/**
* Formats a date string for display based on precision
*
* @param dateString - YYYY-MM-DD formatted string
* @param precision - Display precision: 'day', 'month', or 'year'
* @returns Formatted display string
*
* @example
* formatDateDisplay('2024-01-01', 'year'); // "2024"
* formatDateDisplay('2024-01-01', 'month'); // "January 2024"
* formatDateDisplay('2024-01-01', 'day'); // "January 1, 2024"
*/
export function formatDateDisplay(
dateString: string | null | undefined,
precision: 'day' | 'month' | 'year' = 'day'
): string {
if (!dateString) return '';
const date = parseDateOnly(dateString);
switch (precision) {
case 'year':
return date.getFullYear().toString();
case 'month':
return date.toLocaleDateString('en-US', { year: 'numeric', month: 'long' });
case 'day':
default:
return date.toLocaleDateString('en-US', {
year: 'numeric',
month: 'long',
day: 'numeric'
});
}
}
/**
* Validates YYYY-MM-DD date format
*
* @param dateString - String to validate
* @returns true if valid YYYY-MM-DD format
*
* @example
* isValidDateString('2024-01-01'); // true
* isValidDateString('01/01/2024'); // false
* isValidDateString('2024-1-1'); // false
*/
export function isValidDateString(dateString: string): boolean {
return /^\d{4}-\d{2}-\d{2}$/.test(dateString);
}
/**
* Checks if a date is within a valid range
*
* @param dateString - YYYY-MM-DD formatted string
* @param minYear - Minimum allowed year (default: 1800)
* @param maxYear - Maximum allowed year (default: current year + 10)
* @returns true if date is within range
*/
export function isDateInRange(
dateString: string,
minYear: number = 1800,
maxYear: number = new Date().getFullYear() + 10
): boolean {
if (!isValidDateString(dateString)) return false;
const year = parseInt(dateString.split('-')[0]);
return year >= minYear && year <= maxYear;
}
/**
* Compares two date strings
*
* @param date1 - First date in YYYY-MM-DD format
* @param date2 - Second date in YYYY-MM-DD format
* @returns -1 if date1 < date2, 0 if equal, 1 if date1 > date2
*/
export function compareDateStrings(date1: string, date2: string): number {
if (date1 === date2) return 0;
return date1 < date2 ? -1 : 1;
}
/**
* Safely parses a date value (string or Date) for display formatting
* Ensures YYYY-MM-DD strings are interpreted as local dates, not UTC
*
* This prevents timezone bugs where "1972-10-01" would display as
* "September 30, 1972" for users in negative UTC offset timezones.
*
* @param date - Date string (YYYY-MM-DD) or Date object
* @returns Date object in local timezone
*
* @example
* // User in UTC-8 viewing "1972-10-01"
* parseDateForDisplay("1972-10-01"); // Returns Oct 1, 1972 00:00 PST ✅
* // NOT Sep 30, 1972 16:00 PST (what new Date() would create)
*/
export function parseDateForDisplay(date: string | Date): Date {
if (date instanceof Date) {
return date;
}
// If it's a YYYY-MM-DD string, use parseDateOnly for local interpretation
if (typeof date === 'string' && /^\d{4}-\d{2}-\d{2}$/.test(date)) {
return parseDateOnly(date);
}
// Fallback for other date strings (timestamps, ISO strings, etc.)
return new Date(date);
}
/**
* Creates a date string for a specific precision
* Sets the date to the first day of the period for month/year precision
*
* @param date - Date object
* @param precision - 'day', 'month', or 'year'
* @returns YYYY-MM-DD formatted string
*
* @example
* const date = new Date('2024-06-15');
* toDateWithPrecision(date, 'year'); // "2024-01-01"
* toDateWithPrecision(date, 'month'); // "2024-06-01"
* toDateWithPrecision(date, 'day'); // "2024-06-15"
*/
export function toDateWithPrecision(
date: Date,
precision: 'day' | 'month' | 'year'
): string {
const year = date.getFullYear();
const month = date.getMonth() + 1;
const day = date.getDate();
switch (precision) {
case 'year':
return `${year}-01-01`;
case 'month':
return `${year}-${String(month).padStart(2, '0')}-01`;
case 'day':
default:
return `${year}-${String(month).padStart(2, '0')}-${String(day).padStart(2, '0')}`;
}
}

View File

@@ -0,0 +1,91 @@
export type DeletionStep = 'warning' | 'confirm' | 'code';
export type DeletionDialogState = {
step: DeletionStep;
confirmationCode: string;
codeReceived: boolean;
scheduledDate: string;
isLoading: boolean;
error: string | null;
};
export type DeletionDialogAction =
| { type: 'CONTINUE_TO_CONFIRM' }
| { type: 'GO_BACK_TO_WARNING' }
| { type: 'REQUEST_DELETION'; payload: { scheduledDate: string } }
| { type: 'UPDATE_CODE'; payload: { code: string } }
| { type: 'TOGGLE_CODE_RECEIVED' }
| { type: 'SET_LOADING'; payload: boolean }
| { type: 'SET_ERROR'; payload: string | null }
| { type: 'RESET' };
export const initialState: DeletionDialogState = {
step: 'warning',
confirmationCode: '',
codeReceived: false,
scheduledDate: '',
isLoading: false,
error: null
};
export function deletionDialogReducer(
state: DeletionDialogState,
action: DeletionDialogAction
): DeletionDialogState {
switch (action.type) {
case 'CONTINUE_TO_CONFIRM':
return { ...state, step: 'confirm' };
case 'GO_BACK_TO_WARNING':
return { ...state, step: 'warning', error: null };
case 'REQUEST_DELETION':
return {
...state,
step: 'code',
scheduledDate: action.payload.scheduledDate,
isLoading: false,
error: null
};
case 'UPDATE_CODE':
// Only allow digits, max 6
const sanitized = action.payload.code.replace(/\D/g, '').slice(0, 6);
return { ...state, confirmationCode: sanitized };
case 'TOGGLE_CODE_RECEIVED':
return { ...state, codeReceived: !state.codeReceived };
case 'SET_LOADING':
return { ...state, isLoading: action.payload };
case 'SET_ERROR':
return { ...state, error: action.payload, isLoading: false };
case 'RESET':
return initialState;
default:
// Exhaustive check
const _exhaustive: never = action;
return state;
}
}
// Validation helpers
export const canProceedToConfirm = (state: DeletionDialogState): boolean => {
return state.step === 'warning' && !state.isLoading;
};
export const canRequestDeletion = (state: DeletionDialogState): boolean => {
return state.step === 'confirm' && !state.isLoading;
};
export const canConfirmDeletion = (state: DeletionDialogState): boolean => {
return (
state.step === 'code' &&
state.confirmationCode.length === 6 &&
state.codeReceived &&
!state.isLoading
);
};

View File

@@ -0,0 +1,192 @@
/**
* Edge Function Request Tracking Wrapper
*
* Wraps Supabase function invocations with request tracking for debugging and monitoring.
* Provides correlation IDs for tracing requests across the system.
*/
import { supabase } from '@/lib/supabaseClient';
import { trackRequest } from './requestTracking';
import { getErrorMessage } from './errorHandler';
import { withRetry, isRetryableError, type RetryOptions } from './retryHelpers';
import { breadcrumb } from './errorBreadcrumbs';
/**
* Invoke a Supabase edge function with request tracking
*
* @param functionName - Name of the edge function to invoke
* @param payload - Request payload
* @param userId - User ID for tracking (optional)
* @param parentRequestId - Parent request ID for chaining (optional)
* @param traceId - Trace ID for distributed tracing (optional)
* @param timeout - Request timeout in milliseconds (default: 30000)
* @param retryOptions - Optional retry configuration
* @param customHeaders - Custom headers to include in the request (e.g., X-Idempotency-Key)
* @returns Response data with requestId, status, and tracking info
*/
export async function invokeWithTracking<T = any>(
functionName: string,
payload: any = {},
userId?: string,
parentRequestId?: string,
traceId?: string,
timeout: number = 30000,
retryOptions?: Partial<RetryOptions>,
customHeaders?: Record<string, string>
): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number }> {
// Configure retry options with defaults
const effectiveRetryOptions: RetryOptions = {
maxAttempts: retryOptions?.maxAttempts ?? 3,
baseDelay: retryOptions?.baseDelay ?? 1000,
maxDelay: retryOptions?.maxDelay ?? 10000,
backoffMultiplier: retryOptions?.backoffMultiplier ?? 2,
jitter: true,
shouldRetry: isRetryableError,
onRetry: (attempt, error, delay) => {
// Log retry attempt to breadcrumbs
breadcrumb.apiCall(
`/functions/${functionName}`,
'POST',
undefined // status unknown during retry
);
console.info(`Retrying ${functionName} (attempt ${attempt}) after ${delay}ms:`,
getErrorMessage(error)
);
},
};
let attemptCount = 0;
try {
const { result, requestId, duration } = await trackRequest(
{
endpoint: `/functions/${functionName}`,
method: 'POST',
userId,
parentRequestId,
traceId,
},
async (context) => {
return await withRetry(
async () => {
attemptCount++;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeout);
try {
const { data, error } = await supabase.functions.invoke<T>(functionName, {
body: { ...payload, clientRequestId: context.requestId },
signal: controller.signal,
headers: customHeaders,
});
clearTimeout(timeoutId);
if (error) {
// Enhance error with status and context for retry logic
const enhancedError = new Error(error.message || 'Edge function error');
(enhancedError as any).status = error.status;
(enhancedError as any).context = error.context;
throw enhancedError;
}
return data;
} catch (error) {
clearTimeout(timeoutId);
throw error;
}
},
effectiveRetryOptions
);
}
);
return { data: result, error: null, requestId, duration, attempts: attemptCount, status: 200 };
} catch (error: unknown) {
// Handle AbortError specifically
if (error instanceof Error && error.name === 'AbortError') {
return {
data: null,
error: {
message: `Request timeout: ${functionName} took longer than ${timeout}ms to respond`,
code: 'TIMEOUT',
},
requestId: 'timeout',
duration: timeout,
attempts: attemptCount,
status: 408,
};
}
const errorMessage = getErrorMessage(error);
return {
data: null,
error: { message: errorMessage, status: (error as any)?.status },
requestId: 'unknown',
duration: 0,
attempts: attemptCount,
status: (error as any)?.status,
};
}
}
/**
* Invoke multiple edge functions in parallel with batch tracking
*
* Uses a shared trace ID to correlate all operations.
*
* @param operations - Array of function invocation configurations
* @param userId - User ID for tracking
* @returns Array of results with their request IDs
*/
export async function invokeBatchWithTracking<T = any>(
operations: Array<{
functionName: string;
payload: any;
retryOptions?: Partial<RetryOptions>;
}>,
userId?: string
): Promise<
Array<{
functionName: string;
data: T | null;
error: any;
requestId: string;
duration: number;
attempts?: number;
status?: number;
}>
> {
const traceId = crypto.randomUUID();
const results = await Promise.allSettled(
operations.map(async (op) => {
const result = await invokeWithTracking<T>(
op.functionName,
op.payload,
userId,
undefined,
traceId,
30000,
op.retryOptions
);
return { functionName: op.functionName, ...result };
})
);
return results.map((result, index) => {
if (result.status === 'fulfilled') {
return result.value;
} else {
return {
functionName: operations[index].functionName,
data: null,
error: { message: result.reason?.message || 'Unknown error' },
requestId: 'unknown',
duration: 0,
};
}
});
}

View File

@@ -0,0 +1,43 @@
import { supabase } from '@/lib/supabaseClient';
import { invokeWithTracking } from '@/lib/edgeFunctionTracking';
import { handleNonCriticalError } from '@/lib/errorHandler';
interface EmailValidationResult {
valid: boolean;
reason?: string;
suggestions?: string[];
}
/**
* Validates an email address against disposable email domains
* Uses the validate-email-backend edge function for server-side validation
*/
export async function validateEmailNotDisposable(email: string): Promise<EmailValidationResult> {
try {
const { data, error } = await invokeWithTracking(
'validate-email-backend',
{ email },
undefined
);
if (error) {
handleNonCriticalError(error, {
action: 'Validate email backend',
});
return {
valid: false,
reason: 'Unable to validate email address. Please try again.'
};
}
return data as EmailValidationResult;
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Validate email disposable',
});
return {
valid: false,
reason: 'Unable to validate email address. Please try again.'
};
}
}

View File

@@ -0,0 +1,96 @@
/**
* ⚠️ CRITICAL SECURITY PATTERN ⚠️
*
* These wrappers enforce the submission flow for all entity edits/creations.
* DO NOT bypass these - they ensure moderation queue → versioning → live display.
*
* Flow: User Submit → Moderation Queue → Approval → Versioning → Live
*
* @see docs/SUBMISSION_FLOW.md
*/
import {
submitParkCreation,
submitParkUpdate,
submitRideCreation,
submitRideUpdate,
ParkFormData,
RideFormData
} from './entitySubmissionHelpers';
import { logger } from './logger';
export type EntitySubmissionHandler<T> = (
data: T,
userId: string
) => Promise<{ submitted: boolean; submissionId: string }>;
/**
* Creates a type-safe submission handler for parks.
* Automatically routes to creation or update based on isEditing flag.
*
* @example
* const handleSubmit = enforceParkSubmissionFlow(true, park.id);
* await handleSubmit(formData, user.id);
*/
export function enforceParkSubmissionFlow(
isEditing: boolean,
existingId?: string
): EntitySubmissionHandler<ParkFormData> {
if (isEditing && !existingId) {
throw new Error('existingId is required when isEditing is true');
}
return async (data: ParkFormData, userId: string) => {
if (isEditing && existingId) {
return await submitParkUpdate(existingId, data, userId);
} else {
return await submitParkCreation(data, userId);
}
};
}
/**
* Creates a type-safe submission handler for rides.
* Automatically routes to creation or update based on isEditing flag.
*
* @example
* const handleSubmit = enforceRideSubmissionFlow(true, ride.id);
* await handleSubmit(formData, user.id);
*/
export function enforceRideSubmissionFlow(
isEditing: boolean,
existingId?: string
): EntitySubmissionHandler<RideFormData> {
if (isEditing && !existingId) {
throw new Error('existingId is required when isEditing is true');
}
return async (data: RideFormData, userId: string) => {
if (isEditing && existingId) {
return await submitRideUpdate(existingId, data, userId);
} else {
return await submitRideCreation(data, userId);
}
};
}
/**
* Development-mode validation helper.
* Warns if a form's onSubmit handler doesn't use submission helpers.
*/
export function validateSubmissionHandler(
onSubmit: Function,
entityType: 'park' | 'ride'
): void {
if (process.env.NODE_ENV === 'development') {
const funcString = onSubmit.toString();
const expectedPattern = entityType === 'park' ? 'submitPark' : 'submitRide';
if (!funcString.includes(expectedPattern)) {
logger.warn(
`⚠️ ${entityType}Form: onSubmit should use ${expectedPattern}Creation/${expectedPattern}Update from entitySubmissionHelpers.\n` +
`Direct database writes bypass the moderation queue and versioning system.`
);
}
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,233 @@
import { Database } from '@/integrations/supabase/types';
import type {
ParkSubmissionData,
RideSubmissionData,
CompanySubmissionData,
RideModelSubmissionData,
} from '@/types/submission-data';
type ParkInsert = Database['public']['Tables']['parks']['Insert'];
type RideInsert = Database['public']['Tables']['rides']['Insert'];
type CompanyInsert = Database['public']['Tables']['companies']['Insert'];
type RideModelInsert = Database['public']['Tables']['ride_models']['Insert'];
/**
* Transform park submission data to database insert format
* @param submissionData - Validated park submission data
* @returns Database insert object for parks table
*/
export function transformParkData(submissionData: ParkSubmissionData): ParkInsert {
return {
name: submissionData.name,
slug: submissionData.slug,
description: submissionData.description || null,
park_type: submissionData.park_type,
status: normalizeStatus(submissionData.status),
opening_date: submissionData.opening_date?.trim() || null,
closing_date: submissionData.closing_date?.trim() || null,
website_url: submissionData.website_url || null,
phone: submissionData.phone || null,
email: submissionData.email || null,
operator_id: submissionData.operator_id || null,
property_owner_id: submissionData.property_owner_id || null,
location_id: submissionData.location_id || null,
banner_image_url: submissionData.banner_image_url || null,
banner_image_id: submissionData.banner_image_id || null,
card_image_url: submissionData.card_image_url || null,
card_image_id: submissionData.card_image_id || null,
average_rating: 0,
review_count: 0,
ride_count: 0,
coaster_count: 0,
is_test_data: submissionData.is_test_data || false,
};
}
/**
* Transform ride submission data to database insert format
* Note: Relational data (technical_specs, coaster_stats, former_names) are now
* stored in separate tables and should not be included in the main ride insert.
* @param submissionData - Validated ride submission data
* @returns Database insert object for rides table
*/
export function transformRideData(submissionData: RideSubmissionData): RideInsert {
return {
name: submissionData.name,
slug: submissionData.slug,
description: submissionData.description || null,
category: submissionData.category,
ride_sub_type: submissionData.ride_sub_type || null,
status: normalizeStatus(submissionData.status),
park_id: submissionData.park_id,
ride_model_id: submissionData.ride_model_id || null,
manufacturer_id: submissionData.manufacturer_id || null,
designer_id: submissionData.designer_id || null,
opening_date: submissionData.opening_date?.trim() || null,
closing_date: submissionData.closing_date?.trim() || null,
height_requirement: submissionData.height_requirement || null,
age_requirement: submissionData.age_requirement || null,
capacity_per_hour: submissionData.capacity_per_hour || null,
duration_seconds: submissionData.duration_seconds || null,
max_speed_kmh: submissionData.max_speed_kmh || null,
max_height_meters: submissionData.max_height_meters || null,
length_meters: submissionData.length_meters || null,
drop_height_meters: submissionData.drop_height_meters || null,
inversions: submissionData.inversions || null,
max_g_force: submissionData.max_g_force || null,
coaster_type: submissionData.coaster_type || null,
seating_type: submissionData.seating_type || null,
intensity_level: submissionData.intensity_level || null,
track_material: submissionData.track_material || null,
support_material: submissionData.support_material || null,
propulsion_method: submissionData.propulsion_method || null,
// Water ride specific fields
water_depth_cm: submissionData.water_depth_cm || null,
splash_height_meters: submissionData.splash_height_meters || null,
wetness_level: submissionData.wetness_level || null,
flume_type: submissionData.flume_type || null,
boat_capacity: submissionData.boat_capacity || null,
// Dark ride specific fields
theme_name: submissionData.theme_name || null,
story_description: submissionData.story_description || null,
show_duration_seconds: submissionData.show_duration_seconds || null,
animatronics_count: submissionData.animatronics_count || null,
projection_type: submissionData.projection_type || null,
ride_system: submissionData.ride_system || null,
scenes_count: submissionData.scenes_count || null,
// Flat ride specific fields
rotation_type: submissionData.rotation_type || null,
motion_pattern: submissionData.motion_pattern || null,
platform_count: submissionData.platform_count || null,
swing_angle_degrees: submissionData.swing_angle_degrees || null,
rotation_speed_rpm: submissionData.rotation_speed_rpm || null,
arm_length_meters: submissionData.arm_length_meters || null,
max_height_reached_meters: submissionData.max_height_reached_meters || null,
// Kiddie ride specific fields
min_age: submissionData.min_age || null,
max_age: submissionData.max_age || null,
educational_theme: submissionData.educational_theme || null,
character_theme: submissionData.character_theme || null,
// Transportation ride specific fields
transport_type: submissionData.transport_type || null,
route_length_meters: submissionData.route_length_meters || null,
stations_count: submissionData.stations_count || null,
vehicle_capacity: submissionData.vehicle_capacity || null,
vehicles_count: submissionData.vehicles_count || null,
round_trip_duration_seconds: submissionData.round_trip_duration_seconds || null,
banner_image_url: submissionData.banner_image_url || null,
banner_image_id: submissionData.banner_image_id || null,
card_image_url: submissionData.card_image_url || null,
card_image_id: submissionData.card_image_id || null,
image_url: submissionData.image_url || null,
average_rating: 0,
review_count: 0,
is_test_data: submissionData.is_test_data || false,
};
}
/**
* Transform company submission data to database insert format
* @param submissionData - Validated company submission data
* @param companyType - Type of company (manufacturer, operator, property_owner, designer)
* @returns Database insert object for companies table
*/
export function transformCompanyData(
submissionData: CompanySubmissionData,
companyType: 'manufacturer' | 'operator' | 'property_owner' | 'designer'
): CompanyInsert {
return {
name: submissionData.name,
slug: submissionData.slug,
description: submissionData.description || null,
company_type: companyType,
person_type: submissionData.person_type || 'company',
founded_year: submissionData.founded_year || null,
headquarters_location: submissionData.headquarters_location || null,
website_url: submissionData.website_url || null,
logo_url: submissionData.logo_url || null,
average_rating: 0,
review_count: 0,
is_test_data: submissionData.is_test_data || false,
};
}
/**
* Transform ride model submission data to database insert format
* Note: Technical specifications are now stored in the ride_model_technical_specifications
* table and should not be included in the main ride model insert.
* @param submissionData - Validated ride model submission data
* @returns Database insert object for ride_models table
*/
export function transformRideModelData(submissionData: RideModelSubmissionData): RideModelInsert {
return {
name: submissionData.name,
slug: submissionData.slug,
manufacturer_id: submissionData.manufacturer_id,
category: submissionData.category,
ride_type: (submissionData.ride_type || null) as string,
description: submissionData.description || null,
banner_image_url: submissionData.banner_image_url || null,
banner_image_id: submissionData.banner_image_id || null,
card_image_url: submissionData.card_image_url || null,
card_image_id: submissionData.card_image_id || null,
is_test_data: submissionData.is_test_data || false,
};
}
/**
* Normalize status values to match database enums
*/
function normalizeStatus(status: string): string {
if (!status) return 'operating';
const statusMap: Record<string, string> = {
'Operating': 'operating',
'operating': 'operating',
'Seasonal': 'seasonal',
'seasonal': 'seasonal',
'Closed Temporarily': 'closed_temporarily',
'closed_temporarily': 'closed_temporarily',
'Closed Permanently': 'closed_permanently',
'closed_permanently': 'closed_permanently',
'Under Construction': 'under_construction',
'under_construction': 'under_construction',
'Planned': 'planned',
'planned': 'planned',
'SBNO': 'sbno',
'sbno': 'sbno',
};
return statusMap[status] || 'operating';
}
/**
* Extract Cloudflare image ID from URL
*/
export function extractImageId(url: string): string {
const match = url.match(/\/([a-f0-9-]{36})\//);
return match ? match[1] : '';
}
/**
* Validate and sanitize submission data before transformation
* @param data - Submission data to validate
* @param itemType - Type of entity being validated (for error messages)
* @throws Error if validation fails
*/
export function validateSubmissionData(
data: ParkSubmissionData | RideSubmissionData | CompanySubmissionData | RideModelSubmissionData,
itemType: string
): void {
if (!data.name || typeof data.name !== 'string' || data.name.trim() === '') {
throw new Error(`${itemType} name is required`);
}
if (!data.slug || typeof data.slug !== 'string' || data.slug.trim() === '') {
throw new Error(`${itemType} slug is required`);
}
// Validate slug format
if (!/^[a-z0-9-]+$/.test(data.slug)) {
throw new Error(`${itemType} slug must contain only lowercase letters, numbers, and hyphens`);
}
}

View File

@@ -0,0 +1,859 @@
import { z } from 'zod';
import { supabase } from '@/lib/supabaseClient';
import { handleNonCriticalError, getErrorMessage } from '@/lib/errorHandler';
import { logger } from '@/lib/logger';
// ============================================
// VALIDATION SCHEMAS - DOCUMENTATION ONLY
// ============================================
// ⚠️ NOTE: These schemas are currently NOT used in the React application.
// All business logic validation happens server-side in the edge function.
// These schemas are kept for:
// 1. Documentation of validation rules
// 2. Potential future use for client-side UX validation (basic checks only)
// 3. Reference when updating edge function validation logic
//
// DO NOT import these in production code for business logic validation.
// ============================================
// ============================================
// CENTRALIZED VALIDATION SCHEMAS
// ⚠️ CRITICAL: These schemas represent the validation rules
// They should mirror the validation in process-selective-approval edge function
// Client-side should NOT perform business logic validation
// Client-side only does basic UX validation (non-empty, format checks) in forms
// ============================================
const currentYear = new Date().getFullYear();
// ============================================
// SHARED IMAGE UPLOAD SCHEMA
// ============================================
const imageAssignmentSchema = z.object({
uploaded: z.array(z.any()),
banner_assignment: z.number().int().min(0).nullable().optional(),
card_assignment: z.number().int().min(0).nullable().optional()
}).optional().default({ uploaded: [], banner_assignment: null, card_assignment: null });
// ============================================
// PARK SCHEMA
// ============================================
export const parkValidationSchema = z.object({
name: z.string().trim().min(1, 'Park name is required').max(200, 'Name must be less than 200 characters'),
slug: z.string().trim().min(1, 'Slug is required').regex(/^[a-z0-9-]+$/, 'Slug must contain only lowercase letters, numbers, and hyphens'),
description: z.string().trim().max(2000, 'Description must be less than 2000 characters').nullish().transform(val => val ?? undefined),
park_type: z.string().min(1, 'Park type is required'),
status: z.enum(['operating', 'closed_permanently', 'closed_temporarily', 'under_construction', 'planned', 'abandoned']),
opening_date: z.string().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val) return true;
const date = new Date(val);
return date <= new Date();
}, 'Opening date cannot be in the future'),
opening_date_precision: z.enum(['day', 'month', 'year']).nullable().optional(),
closing_date: z.string().nullish().transform(val => val ?? undefined),
closing_date_precision: z.enum(['day', 'month', 'year']).nullable().optional(),
location_id: z.string().uuid().optional().nullable(),
location: z.object({
name: z.string(),
street_address: z.string().optional().nullable(),
city: z.string().optional().nullable(),
state_province: z.string().optional().nullable(),
country: z.string(),
postal_code: z.string().optional().nullable(),
latitude: z.number(),
longitude: z.number(),
timezone: z.string().optional().nullable(),
display_name: z.string(),
}).optional(),
website_url: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().url().safeParse(val).success;
}, 'Invalid URL format'),
phone: z.string().trim().max(50, 'Phone must be less than 50 characters').nullish().transform(val => val ?? undefined),
email: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().email().safeParse(val).success;
}, 'Invalid email format'),
operator_id: z.string()
.refine(
val => !val || val === '' || val.startsWith('temp-') || /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(val),
'Must be a valid UUID or temporary placeholder'
)
.nullish()
.transform(val => val ?? undefined),
property_owner_id: z.string()
.refine(
val => !val || val === '' || val.startsWith('temp-') || /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(val),
'Must be a valid UUID or temporary placeholder'
)
.nullish()
.transform(val => val ?? undefined),
banner_image_id: z.string().nullish().transform(val => val ?? undefined),
banner_image_url: z.string().nullish().transform(val => val ?? undefined),
card_image_id: z.string().nullish().transform(val => val ?? undefined),
card_image_url: z.string().nullish().transform(val => val ?? undefined),
images: imageAssignmentSchema,
source_url: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().url().safeParse(val).success;
}, 'Invalid URL format. Must be a valid URL starting with http:// or https://'),
submission_notes: z.string().trim()
.max(1000, 'Submission notes must be less than 1000 characters')
.nullish()
.transform(val => val ?? undefined),
}).refine((data) => {
if (data.closing_date && data.opening_date) {
return new Date(data.closing_date) >= new Date(data.opening_date);
}
return true;
}, {
message: 'Closing date must be after opening date',
path: ['closing_date'],
}).refine((data) => {
// Either location object OR location_id must be provided
return !!(data.location || data.location_id);
}, {
message: 'Location is required. Please search and select a location for the park.',
path: ['location']
});
// ============================================
// RIDE SCHEMA
// ============================================
export const rideValidationSchema = z.object({
name: z.string().trim().min(1, 'Ride name is required').max(200, 'Name must be less than 200 characters'),
slug: z.string().trim().min(1, 'Slug is required').regex(/^[a-z0-9-]+$/, 'Slug must contain only lowercase letters, numbers, and hyphens'),
description: z.string().trim().max(2000, 'Description must be less than 2000 characters').nullish().transform(val => val ?? undefined),
category: z.string().min(1, 'Category is required'),
ride_sub_type: z.string().trim().max(100, 'Sub type must be less than 100 characters').nullish().transform(val => val ?? undefined),
status: z.enum(['operating', 'closed_permanently', 'closed_temporarily', 'under_construction', 'relocated', 'stored', 'demolished']),
park_id: z.string().uuid().optional().nullable(),
designer_id: z.string()
.refine(
val => !val || val === '' || val.startsWith('temp-') || /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(val),
'Must be a valid UUID or temporary placeholder'
)
.optional()
.nullable(),
opening_date: z.string().nullish().transform(val => val ?? undefined),
opening_date_precision: z.enum(['day', 'month', 'year']).nullable().optional(),
closing_date: z.string().nullish().transform(val => val ?? undefined),
closing_date_precision: z.enum(['day', 'month', 'year']).nullable().optional(),
height_requirement: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Height requirement must be positive').max(300, 'Height requirement must be less than 300cm').optional()
),
age_requirement: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Age requirement must be positive').max(100, 'Age requirement must be less than 100').optional()
),
capacity_per_hour: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1, 'Capacity must be positive').max(99999, 'Capacity must be less than 100,000').optional()
),
duration_seconds: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1, 'Duration must be positive').max(86400, 'Duration must be less than 24 hours').optional()
),
max_speed_kmh: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Speed must be positive').max(500, 'Speed must be less than 500 km/h').optional()
),
max_height_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Height must be positive').max(200, 'Height must be less than 200 meters').optional()
),
length_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Length must be positive').max(10000, 'Length must be less than 10km').optional()
),
inversions: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Inversions must be positive').max(20, 'Inversions must be less than 20').optional()
),
drop_height_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Drop height must be positive').max(200, 'Drop height must be less than 200 meters').optional()
),
max_g_force: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(-10, 'G-force must be greater than -10').max(10, 'G-force must be less than 10').optional()
),
manufacturer_id: z.string()
.refine(
val => !val || val === '' || val.startsWith('temp-') || /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(val),
'Must be a valid UUID or temporary placeholder'
)
.optional()
.nullable(),
ride_model_id: z.string()
.refine(
val => !val || val === '' || val.startsWith('temp-') || /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(val),
'Must be a valid UUID or temporary placeholder'
)
.optional()
.nullable(),
coaster_type: z.string().nullable().optional(),
seating_type: z.string().nullable().optional(),
intensity_level: z.string().nullable().optional(),
track_material: z.array(z.string()).optional().nullable(),
support_material: z.array(z.string()).optional().nullable(),
propulsion_method: z.array(z.string()).optional().nullable(),
// Water ride specific fields
water_depth_cm: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Water depth must be positive').max(1000, 'Water depth must be less than 1000cm').optional()
),
splash_height_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Splash height must be positive').max(100, 'Splash height must be less than 100 meters').optional()
),
wetness_level: z.enum(['dry', 'light', 'moderate', 'soaked']).nullable().optional(),
flume_type: z.string().trim().max(100, 'Flume type must be less than 100 characters').nullish().transform(val => val ?? undefined),
boat_capacity: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1, 'Boat capacity must be positive').max(100, 'Boat capacity must be less than 100').optional()
),
// Dark ride specific fields
theme_name: z.string().trim().max(200, 'Theme name must be less than 200 characters').nullish().transform(val => val ?? undefined),
story_description: z.string().trim().max(2000, 'Story description must be less than 2000 characters').nullish().transform(val => val ?? undefined),
show_duration_seconds: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Show duration must be positive').max(7200, 'Show duration must be less than 2 hours').optional()
),
animatronics_count: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Animatronics count must be positive').max(1000, 'Animatronics count must be less than 1000').optional()
),
projection_type: z.string().trim().max(100, 'Projection type must be less than 100 characters').nullish().transform(val => val ?? undefined),
ride_system: z.string().trim().max(100, 'Ride system must be less than 100 characters').nullish().transform(val => val ?? undefined),
scenes_count: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Scenes count must be positive').max(100, 'Scenes count must be less than 100').optional()
),
// Flat ride specific fields
rotation_type: z.enum(['horizontal', 'vertical', 'multi_axis', 'pendulum', 'none']).nullable().optional(),
motion_pattern: z.string().trim().max(200, 'Motion pattern must be less than 200 characters').nullish().transform(val => val ?? undefined),
platform_count: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1, 'Platform count must be positive').max(100, 'Platform count must be less than 100').optional()
),
swing_angle_degrees: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Swing angle must be positive').max(360, 'Swing angle must be less than 360 degrees').optional()
),
rotation_speed_rpm: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Rotation speed must be positive').max(200, 'Rotation speed must be less than 200 RPM').optional()
),
arm_length_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Arm length must be positive').max(100, 'Arm length must be less than 100 meters').optional()
),
max_height_reached_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Max height reached must be positive').max(200, 'Max height reached must be less than 200 meters').optional()
),
// Kiddie ride specific fields
min_age: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Min age must be positive').max(18, 'Min age must be less than 18').optional()
),
max_age: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Max age must be positive').max(18, 'Max age must be less than 18').optional()
),
educational_theme: z.string().trim().max(200, 'Educational theme must be less than 200 characters').nullish().transform(val => val ?? undefined),
character_theme: z.string().trim().max(200, 'Character theme must be less than 200 characters').nullish().transform(val => val ?? undefined),
// Transportation ride specific fields
transport_type: z.enum(['train', 'monorail', 'skylift', 'ferry', 'peoplemover', 'cable_car']).nullable().optional(),
route_length_meters: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().min(0, 'Route length must be positive').max(50000, 'Route length must be less than 50km').optional()
),
stations_count: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(2, 'Stations count must be at least 2').max(50, 'Stations count must be less than 50').optional()
),
vehicle_capacity: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1, 'Vehicle capacity must be positive').max(500, 'Vehicle capacity must be less than 500').optional()
),
vehicles_count: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1, 'Vehicles count must be positive').max(100, 'Vehicles count must be less than 100').optional()
),
round_trip_duration_seconds: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(0, 'Round trip duration must be positive').max(7200, 'Round trip duration must be less than 2 hours').optional()
),
banner_image_id: z.string().nullish().transform(val => val ?? undefined),
banner_image_url: z.string().nullish().transform(val => val ?? undefined),
card_image_id: z.string().nullish().transform(val => val ?? undefined),
card_image_url: z.string().nullish().transform(val => val ?? undefined),
images: imageAssignmentSchema,
source_url: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().url().safeParse(val).success;
}, 'Invalid URL format. Must be a valid URL starting with http:// or https://'),
submission_notes: z.string().trim()
.max(1000, 'Submission notes must be less than 1000 characters')
.nullish()
.transform(val => val ?? undefined),
}).refine((data) => {
// park_id is required (either real UUID or temp- reference)
return !!(data.park_id && data.park_id.trim().length > 0);
}, {
message: 'Park is required. Please select or create a park for this ride.',
path: ['park_id']
});
// ============================================
// COMPANY SCHEMA (Manufacturer, Designer, Operator, Property Owner)
// ============================================
export const companyValidationSchema = z.object({
name: z.string().trim().min(1, 'Company name is required').max(200, 'Name must be less than 200 characters'),
slug: z.string().trim().min(1, 'Slug is required').regex(/^[a-z0-9-]+$/, 'Slug must contain only lowercase letters, numbers, and hyphens'),
company_type: z.enum(['manufacturer', 'designer', 'operator', 'property_owner']),
description: z.string().trim().max(2000, 'Description must be less than 2000 characters').nullish().transform(val => val ?? undefined),
person_type: z.enum(['company', 'individual', 'firm', 'organization']),
founded_date: z.string().nullish().transform(val => val ?? undefined),
founded_date_precision: z.enum(['day', 'month', 'year']).nullable().optional(),
founded_year: z.preprocess(
(val) => val === '' || val === null || val === undefined ? undefined : Number(val),
z.number().int().min(1800, 'Founded year must be after 1800').max(currentYear, `Founded year cannot be after ${currentYear}`).optional()
),
headquarters_location: z.string().trim().max(200, 'Location must be less than 200 characters').nullish().transform(val => val ?? undefined),
website_url: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().url().safeParse(val).success;
}, 'Invalid URL format'),
banner_image_id: z.string().nullish().transform(val => val ?? undefined),
banner_image_url: z.string().nullish().transform(val => val ?? undefined),
card_image_id: z.string().nullish().transform(val => val ?? undefined),
card_image_url: z.string().nullish().transform(val => val ?? undefined),
images: imageAssignmentSchema,
source_url: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().url().safeParse(val).success;
}, 'Invalid URL format. Must be a valid URL starting with http:// or https://'),
submission_notes: z.string().trim()
.max(1000, 'Submission notes must be less than 1000 characters')
.nullish()
.transform(val => val ?? undefined),
});
// ============================================
// RIDE MODEL SCHEMA
// ============================================
export const rideModelValidationSchema = z.object({
name: z.string().trim().min(1, 'Model name is required').max(200, 'Name must be less than 200 characters'),
slug: z.string().trim().min(1, 'Slug is required').regex(/^[a-z0-9-]+$/, 'Slug must contain only lowercase letters, numbers, and hyphens'),
category: z.string().min(1, 'Category is required'),
ride_type: z.string().trim().min(1, 'Ride type is required').max(100, 'Ride type must be less than 100 characters'),
description: z.string().trim().max(2000, 'Description must be less than 2000 characters').nullish().transform(val => val ?? undefined),
manufacturer_id: z.string()
.refine(
val => !val || val === '' || val.startsWith('temp-') || /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i.test(val),
'Must be a valid UUID or temporary placeholder'
)
.optional(),
source_url: z.string().trim().nullish().transform(val => val ?? undefined).refine((val) => {
if (!val || val === '') return true;
return z.string().url().safeParse(val).success;
}, 'Invalid URL format. Must be a valid URL starting with http:// or https://'),
submission_notes: z.string().trim()
.max(1000, 'Submission notes must be less than 1000 characters')
.nullish()
.transform(val => val ?? undefined),
});
// ============================================
// PHOTO SCHEMA
// ============================================
export const photoValidationSchema = z.object({
cloudflare_image_id: z.string().min(1, 'Image ID is required'),
cloudflare_image_url: z.string().url('Invalid image URL'),
entity_type: z.string().min(1, 'Entity type is required'),
entity_id: z.string().uuid('Invalid entity ID'),
caption: z.string().trim().max(500, 'Caption must be less than 500 characters').optional().or(z.literal('')),
photographer_credit: z.string().trim().max(200, 'Credit must be less than 200 characters').optional().or(z.literal('')),
});
// ============================================
// MILESTONE/TIMELINE EVENT SCHEMA
// ============================================
export const milestoneValidationSchema = z.object({
title: z.string().trim().min(1, 'Event title is required').max(200, 'Title must be less than 200 characters'),
description: z.string().trim().max(2000, 'Description must be less than 2000 characters').optional().or(z.literal('')),
event_type: z.string().min(1, 'Event type is required'),
event_date: z.string().min(1, 'Event date is required').refine((val) => {
if (!val) return true;
const date = new Date(val);
const fiveYearsFromNow = new Date();
fiveYearsFromNow.setFullYear(fiveYearsFromNow.getFullYear() + 5);
return date <= fiveYearsFromNow;
}, 'Event date cannot be more than 5 years in the future'),
event_date_precision: z.enum(['day', 'month', 'year']).optional().default('day'),
entity_type: z.string().min(1, 'Entity type is required'),
entity_id: z.string().uuid('Invalid entity ID'),
is_public: z.boolean().optional(),
display_order: z.number().optional(),
from_value: z.string().trim().max(200).optional().or(z.literal('')),
to_value: z.string().trim().max(200).optional().or(z.literal('')),
from_entity_id: z.string().uuid().optional().nullable(),
to_entity_id: z.string().uuid().optional().nullable(),
from_location_id: z.string().uuid().optional().nullable(),
to_location_id: z.string().uuid().optional().nullable(),
}).refine((data) => {
// For change events, require from_value or to_value
const changeEvents = ['name_change', 'operator_change', 'owner_change', 'location_change', 'status_change'];
if (changeEvents.includes(data.event_type)) {
return data.from_value || data.to_value || data.from_entity_id || data.to_entity_id || data.from_location_id || data.to_location_id;
}
return true;
}, {
message: 'Change events must specify what changed (from/to values or entity IDs)',
path: ['from_value'],
});
// ============================================
// PHOTO OPERATION SCHEMAS
// ============================================
export const photoEditValidationSchema = z.object({
photo_id: z.string().uuid('Invalid photo ID'),
cloudflare_image_url: z.string().url('Invalid image URL'),
caption: z.string().trim().max(500, 'Caption must be less than 500 characters').optional().or(z.literal('')),
title: z.string().trim().max(200, 'Title must be less than 200 characters').optional().or(z.literal('')),
entity_type: z.string().min(1, 'Entity type is required'),
entity_id: z.string().uuid('Invalid entity ID'),
});
export const photoDeleteValidationSchema = z.object({
photo_id: z.string().uuid('Invalid photo ID'),
cloudflare_image_id: z.string().min(1, 'Image ID is required'),
cloudflare_image_url: z.string().url('Invalid image URL').optional(),
entity_type: z.string().min(1, 'Entity type is required'),
entity_id: z.string().uuid('Invalid entity ID'),
});
// ============================================
// SCHEMA REGISTRY
// ============================================
export const entitySchemas = {
park: parkValidationSchema,
ride: rideValidationSchema,
manufacturer: companyValidationSchema,
designer: companyValidationSchema,
operator: companyValidationSchema,
property_owner: companyValidationSchema,
ride_model: rideModelValidationSchema,
photo: photoValidationSchema,
photo_edit: photoEditValidationSchema,
photo_delete: photoDeleteValidationSchema,
milestone: milestoneValidationSchema,
timeline_event: milestoneValidationSchema, // Alias for milestone
};
// ============================================
// VALIDATION RESULT TYPES
// ============================================
export interface ValidationError {
field: string;
message: string;
severity: 'blocking' | 'warning' | 'suggestion';
}
export interface ValidationResult {
isValid: boolean;
blockingErrors: ValidationError[];
warnings: ValidationError[];
suggestions: ValidationError[];
allErrors: ValidationError[];
}
// ============================================
// VALIDATION HELPERS
// ============================================
/**
* Validate entity data against its schema
* Returns detailed validation result with errors categorized by severity
*/
export async function validateEntityData(
entityType: keyof typeof entitySchemas,
data: unknown
): Promise<ValidationResult> {
try {
// Debug logging for operator entity
if (entityType === 'operator') {
logger.log('Validating operator entity', {
dataKeys: data ? Object.keys(data as object) : [],
dataTypes: data ? Object.entries(data as object).reduce((acc, [key, val]) => {
acc[key] = typeof val;
return acc;
}, {} as Record<string, string>) : {},
rawData: JSON.stringify(data).substring(0, 500)
});
}
const schema = entitySchemas[entityType];
if (!schema) {
const error = {
field: 'entity_type',
message: `Unknown entity type: ${entityType}`,
severity: 'blocking' as const
};
handleNonCriticalError(new Error(`Unknown entity type: ${entityType}`), {
action: 'Entity Validation',
metadata: { entityType, providedData: data }
});
return {
isValid: false,
blockingErrors: [error],
warnings: [],
suggestions: [],
allErrors: [error],
};
}
const result = schema.safeParse(data);
const blockingErrors: ValidationError[] = [];
const warnings: ValidationError[] = [];
const suggestions: ValidationError[] = [];
// Process Zod errors
if (!result.success) {
const zodError = result.error as z.ZodError;
// Log detailed validation failure
handleNonCriticalError(zodError, {
action: 'Zod Validation Failed',
metadata: {
entityType,
issues: zodError.issues,
providedData: JSON.stringify(data).substring(0, 500),
issueCount: zodError.issues.length
}
});
zodError.issues.forEach((issue) => {
const field = issue.path.join('.') || entityType;
blockingErrors.push({
field,
message: `${issue.message} (code: ${issue.code})`,
severity: 'blocking',
});
});
}
// Add warnings for optional but recommended fields
const validData = data as Record<string, unknown>;
if (validData.description && typeof validData.description === 'string' && validData.description.length < 50) {
warnings.push({
field: 'description',
message: 'Description is short. Recommended: 50+ characters',
severity: 'warning',
});
}
if (entityType === 'park' || entityType === 'ride') {
if (!validData.description || (typeof validData.description === 'string' && validData.description.trim() === '')) {
warnings.push({
field: 'description',
message: 'No description provided. Adding a description improves content quality',
severity: 'warning',
});
}
}
// Check slug uniqueness (async) - only if slug has changed
if (validData.slug && typeof validData.slug === 'string') {
// Extract the correct ID field based on entity type
let entityId: string | undefined;
switch (entityType) {
case 'park':
entityId = typeof validData.park_id === 'string' ? validData.park_id : undefined;
break;
case 'ride':
entityId = typeof validData.ride_id === 'string' ? validData.ride_id : undefined;
break;
case 'manufacturer':
case 'designer':
case 'operator':
case 'property_owner':
entityId = typeof validData.company_id === 'string'
? validData.company_id
: (typeof validData.id === 'string' ? validData.id : undefined);
break;
case 'ride_model':
entityId = typeof validData.ride_model_id === 'string'
? validData.ride_model_id
: (typeof validData.id === 'string' ? validData.id : undefined);
break;
default:
entityId = typeof validData.id === 'string' ? validData.id : undefined;
}
// If we have an entity ID, check if slug has actually changed
let shouldCheckUniqueness = true;
if (entityId) {
const tableName = getTableNameFromEntityType(entityType);
// Use switch to avoid TypeScript type instantiation issues
let originalSlug: string | null = null;
try {
switch (tableName) {
case 'parks': {
const { data, error } = await supabase.from('parks').select('slug').eq('id', entityId).maybeSingle();
if (error || !data) {
originalSlug = null;
break;
}
originalSlug = data.slug || null;
break;
}
case 'rides': {
const { data, error } = await supabase.from('rides').select('slug').eq('id', entityId).maybeSingle();
if (error || !data) {
originalSlug = null;
break;
}
originalSlug = data.slug || null;
break;
}
case 'companies': {
const { data, error } = await supabase.from('companies').select('slug').eq('id', entityId).maybeSingle();
if (error || !data) {
originalSlug = null;
break;
}
originalSlug = data.slug || null;
break;
}
case 'ride_models': {
const { data, error } = await supabase.from('ride_models').select('slug').eq('id', entityId).maybeSingle();
if (error || !data) {
originalSlug = null;
break;
}
originalSlug = data.slug || null;
break;
}
}
// If slug hasn't changed, skip uniqueness check
if (originalSlug && originalSlug === validData.slug) {
shouldCheckUniqueness = false;
}
} catch (error) {
// Entity doesn't exist yet (CREATE action) - proceed with uniqueness check
// This is expected for new submissions where entityId is a submission_id
console.log('Entity not found in live table (likely a new submission)', {
entityType,
entityId,
tableName
});
}
}
// Only check uniqueness if this is a new entity or slug has changed
if (shouldCheckUniqueness) {
const isSlugUnique = await checkSlugUniqueness(
entityType,
validData.slug,
entityId
);
if (!isSlugUnique) {
blockingErrors.push({
field: 'slug',
message: 'This slug is already in use. Manually check if this entity already exists, and reject if so. Otherwise, escalate to an admin for manual editing of the slug.',
severity: 'blocking',
});
}
}
}
const allErrors = [...blockingErrors, ...warnings, ...suggestions];
const isValid = blockingErrors.length === 0;
return {
isValid,
blockingErrors,
warnings,
suggestions,
allErrors,
};
} catch (error) {
// Catch any unexpected errors during validation
const errorId = handleNonCriticalError(error, {
action: 'Entity Validation Unexpected Error',
metadata: {
entityType,
dataType: typeof data,
hasData: !!data
}
});
return {
isValid: false,
blockingErrors: [{
field: entityType,
message: `Validation error: ${getErrorMessage(error)} (ref: ${errorId.slice(0, 8)})`,
severity: 'blocking'
}],
warnings: [],
suggestions: [],
allErrors: [{
field: entityType,
message: `Validation error: ${getErrorMessage(error)} (ref: ${errorId.slice(0, 8)})`,
severity: 'blocking'
}],
};
}
}
/**
* Check if slug is unique for the entity type
*/
async function checkSlugUniqueness(
entityType: keyof typeof entitySchemas,
slug: string,
excludeId?: string
): Promise<boolean> {
const tableName = getTableNameFromEntityType(entityType);
try {
// Query with explicit table name - use simple approach to avoid type instantiation issues
let result;
switch (tableName) {
case 'parks':
result = await supabase.from('parks').select('id').eq('slug', slug).limit(1);
break;
case 'rides':
result = await supabase.from('rides').select('id').eq('slug', slug).limit(1);
break;
case 'companies':
result = await supabase.from('companies').select('id').eq('slug', slug).limit(1);
break;
case 'ride_models':
result = await supabase.from('ride_models').select('id').eq('slug', slug).limit(1);
break;
default:
return true; // Assume unique on invalid table
}
const { data, error } = result;
if (error) {
return true; // Assume unique on error to avoid blocking
}
// If no data, slug is unique
if (!data || data.length === 0) {
return true;
}
// If excludeId provided and matches, it's the same entity (editing)
if (excludeId && data[0] && data[0].id === excludeId) {
return true;
}
// Slug is in use by a different entity
return false;
} catch (error) {
return true; // Assume unique on error to avoid false positives
}
}
/**
* Get database table name from entity type
*/
function getTableNameFromEntityType(entityType: keyof typeof entitySchemas): string {
switch (entityType) {
case 'park':
return 'parks';
case 'ride':
return 'rides';
case 'manufacturer':
case 'designer':
case 'operator':
case 'property_owner':
return 'companies';
case 'ride_model':
return 'ride_models';
case 'photo':
return 'photos';
default:
return entityType + 's';
}
}
/**
* Batch validate multiple items
*/
export async function validateMultipleItems(
items: Array<{ item_type: string; item_data: unknown; id?: string }>
): Promise<Map<string, ValidationResult>> {
const results = new Map<string, ValidationResult>();
await Promise.all(
items.map(async (item) => {
const result = await validateEntityData(
item.item_type as keyof typeof entitySchemas,
{ ...(item.item_data as object), id: item.id }
);
const validData = item.item_data as Record<string, unknown>;
results.set(
item.id || (typeof validData.slug === 'string' ? validData.slug : ''),
result
);
})
);
return results;
}
/**
* Validate required fields before submission
* Returns user-friendly error messages
*/
export function validateRequiredFields(
entityType: keyof typeof entitySchemas,
data: any
): { valid: boolean; errors: string[] } {
const errors: string[] = [];
if (entityType === 'park') {
if (!data.location && !data.location_id) {
errors.push('Location is required. Please search and select a location for the park.');
}
}
if (entityType === 'ride') {
if (!data.park_id || data.park_id.trim().length === 0) {
errors.push('Park is required. Please select or create a park for this ride.');
}
}
return {
valid: errors.length === 0,
errors
};
}

View File

@@ -0,0 +1,64 @@
/**
* Environment Context Capture
* Captures browser/device information for error reports
*/
export interface EnvironmentContext {
viewport: { width: number; height: number };
screen: { width: number; height: number };
memory?: { usedJSHeapSize?: number; totalJSHeapSize?: number };
connection?: string;
timezone: string;
language: string;
platform: string;
cookiesEnabled: boolean;
localStorage: boolean;
sessionStorage: boolean;
}
export function captureEnvironmentContext(): EnvironmentContext {
const context: EnvironmentContext = {
viewport: {
width: window.innerWidth,
height: window.innerHeight,
},
screen: {
width: window.screen.width,
height: window.screen.height,
},
timezone: Intl.DateTimeFormat().resolvedOptions().timeZone,
language: navigator.language,
platform: navigator.platform,
cookiesEnabled: navigator.cookieEnabled,
localStorage: isStorageAvailable('localStorage'),
sessionStorage: isStorageAvailable('sessionStorage'),
};
// Memory info (Chrome only)
if ('memory' in performance && (performance as any).memory) {
const memory = (performance as any).memory;
context.memory = {
usedJSHeapSize: memory.usedJSHeapSize,
totalJSHeapSize: memory.totalJSHeapSize,
};
}
// Connection info
if ('connection' in navigator) {
context.connection = (navigator as any).connection?.effectiveType;
}
return context;
}
function isStorageAvailable(type: 'localStorage' | 'sessionStorage'): boolean {
try {
const storage = window[type];
const test = '__storage_test__';
storage.setItem(test, test);
storage.removeItem(test);
return true;
} catch {
return false;
}
}

View File

@@ -0,0 +1,82 @@
/**
* Error Breadcrumb Tracking
* Captures user actions before errors occur for better debugging
*/
export interface Breadcrumb {
timestamp: string;
category: 'navigation' | 'user_action' | 'api_call' | 'state_change';
message: string;
level: 'info' | 'warning' | 'error';
data?: Record<string, any>;
}
class BreadcrumbManager {
private breadcrumbs: Breadcrumb[] = [];
private readonly MAX_BREADCRUMBS = 10;
add(breadcrumb: Omit<Breadcrumb, 'timestamp'>): void {
const newBreadcrumb: Breadcrumb = {
...breadcrumb,
timestamp: new Date().toISOString(),
};
this.breadcrumbs.push(newBreadcrumb);
// Keep only last 10 breadcrumbs
if (this.breadcrumbs.length > this.MAX_BREADCRUMBS) {
this.breadcrumbs.shift();
}
}
getAll(): Breadcrumb[] {
return [...this.breadcrumbs];
}
clear(): void {
this.breadcrumbs = [];
}
}
export const breadcrumbManager = new BreadcrumbManager();
// Helper functions for common breadcrumb types
export const breadcrumb = {
navigation: (to: string, from?: string) => {
breadcrumbManager.add({
category: 'navigation',
message: `Navigated to ${to}`,
level: 'info',
data: { to, from },
});
},
userAction: (action: string, component: string, data?: Record<string, any>) => {
breadcrumbManager.add({
category: 'user_action',
message: `User ${action} in ${component}`,
level: 'info',
data,
});
},
apiCall: (endpoint: string, method: string, status?: number) => {
const isError = status && status >= 400;
breadcrumbManager.add({
category: 'api_call',
message: `API ${method} ${endpoint}`,
level: isError ? 'error' : 'info',
data: { endpoint, method, status },
});
},
stateChange: (description: string, data?: Record<string, any>) => {
breadcrumbManager.add({
category: 'state_change',
message: description,
level: 'info',
data,
});
},
};

335
src-old/lib/errorHandler.ts Normal file
View File

@@ -0,0 +1,335 @@
import { toast } from 'sonner';
import { logger } from './logger';
import { supabase } from '@/integrations/supabase/client';
import { breadcrumbManager } from './errorBreadcrumbs';
import { captureEnvironmentContext } from './environmentContext';
export type ErrorContext = {
action: string;
userId?: string;
metadata?: Record<string, unknown>;
duration?: number; // Optional: milliseconds the operation took
};
export class AppError extends Error {
constructor(
message: string,
public code: string,
public userMessage?: string
) {
super(message);
this.name = 'AppError';
}
}
/**
* Check if error is a Supabase connection/API error
*/
export function isSupabaseConnectionError(error: unknown): boolean {
if (error && typeof error === 'object') {
const supabaseError = error as { code?: string; status?: number; message?: string };
// Connection timeout errors
if (supabaseError.code === 'PGRST301') return true; // Timeout
if (supabaseError.code === 'PGRST000') return true; // Connection error
// 5xx server errors
if (supabaseError.status && supabaseError.status >= 500) return true;
// Database connection errors (08xxx codes)
if (supabaseError.code?.startsWith('08')) return true;
}
// Network fetch errors
if (error instanceof TypeError) {
const message = error.message.toLowerCase();
if (message.includes('fetch') || message.includes('network') || message.includes('failed to fetch')) {
return true;
}
}
return false;
}
export const handleError = (
error: unknown,
context: ErrorContext
): string => {
// Generate or use existing error ID
const errorId = (context.metadata?.requestId as string) || crypto.randomUUID();
const shortErrorId = errorId.slice(0, 8);
// Check if this is a connection error and dispatch event
if (isSupabaseConnectionError(error)) {
window.dispatchEvent(new CustomEvent('api-connectivity-down'));
}
// Enhanced error message and stack extraction
let errorMessage: string;
let stack: string | undefined;
let errorName = 'UnknownError';
let supabaseErrorDetails: Record<string, any> | undefined;
if (error instanceof Error) {
errorMessage = error instanceof AppError
? error.userMessage || error.message
: error.message;
stack = error.stack;
errorName = error.name;
// Check if Error instance has attached Supabase metadata
if ((error as any).supabaseCode) {
supabaseErrorDetails = {
code: (error as any).supabaseCode,
details: (error as any).supabaseDetails,
hint: (error as any).supabaseHint
};
}
} else if (error && typeof error === 'object') {
// Handle Supabase errors (plain objects with message/code/details)
const supabaseError = error as {
message?: string;
code?: string;
details?: string;
hint?: string;
stack?: string;
};
errorMessage = supabaseError.message || 'An unexpected error occurred';
errorName = 'SupabaseError';
// Capture Supabase error details for metadata
supabaseErrorDetails = {
code: supabaseError.code,
details: supabaseError.details,
hint: supabaseError.hint
};
// Try to extract stack from object
if (supabaseError.stack && typeof supabaseError.stack === 'string') {
stack = supabaseError.stack;
} else if (supabaseError.code || supabaseError.details || supabaseError.hint) {
// Create synthetic stack trace for Supabase errors to aid debugging
const stackParts = [
`SupabaseError: ${errorMessage}`,
supabaseError.code ? ` Code: ${supabaseError.code}` : null,
supabaseError.details ? ` Details: ${supabaseError.details}` : null,
supabaseError.hint ? ` Hint: ${supabaseError.hint}` : null,
` at ${context.action}`,
` Reference ID: ${errorId}`
].filter(Boolean);
stack = stackParts.join('\n');
}
} else if (typeof error === 'string') {
errorMessage = error;
// Generate synthetic stack trace for string errors
stack = new Error().stack?.replace(/^Error\n/, `StringError: ${error}\n`);
} else {
errorMessage = 'An unexpected error occurred';
// Generate synthetic stack trace for unknown error types
stack = new Error().stack?.replace(/^Error\n/, `UnknownError: ${String(error)}\n`);
}
// Log to console/monitoring with enhanced debugging
logger.error('Error occurred', {
...context,
error: errorMessage,
stack,
errorId,
errorName,
errorType: typeof error,
errorConstructor: error?.constructor?.name,
hasStack: !!stack,
isSyntheticStack: !!(error && typeof error === 'object' && !(error instanceof Error) && stack),
supabaseError: supabaseErrorDetails,
});
// Additional debug logging when stack is missing
if (!stack) {
console.error('[handleError] Error without stack trace:', {
type: typeof error,
constructor: error?.constructor?.name,
error: error,
context,
errorId
});
}
// Log to database with breadcrumbs (non-blocking)
try {
const envContext = captureEnvironmentContext();
const breadcrumbs = breadcrumbManager.getAll();
// Fire-and-forget database logging
supabase.rpc('log_request_metadata', {
p_request_id: errorId,
p_user_id: context.userId || undefined,
p_endpoint: context.action,
p_method: 'ERROR',
p_status_code: 500,
p_error_type: errorName,
p_error_message: errorMessage,
p_error_stack: stack,
p_user_agent: navigator.userAgent,
p_breadcrumbs: JSON.stringify({
breadcrumbs,
isRetry: context.metadata?.isRetry || false,
attempt: context.metadata?.attempt,
retriesExhausted: context.metadata?.retriesExhausted || false,
supabaseError: supabaseErrorDetails,
metadata: context.metadata
}),
p_timezone: envContext.timezone,
p_referrer: document.referrer || undefined,
p_duration_ms: context.duration,
}).then(({ error: dbError }) => {
if (dbError) {
logger.error('Failed to log error to database', { dbError });
}
});
} catch (logError) {
logger.error('Failed to capture error context', { logError });
}
// Show user-friendly toast with error ID (skip for retry attempts)
const isRetry = context.metadata?.isRetry === true || context.metadata?.attempt;
if (!isRetry) {
toast.error(context.action, {
description: `${errorMessage}\n\nReference ID: ${shortErrorId}`,
duration: 5000,
});
}
return errorId;
};
export const handleSuccess = (
title: string,
description?: string
): void => {
toast.success(title, {
description,
duration: 3000
});
};
export const handleInfo = (
title: string,
description?: string
): void => {
toast.info(title, {
description,
duration: 4000
});
};
/**
* Handle non-critical errors (background failures) that should be logged
* to the database WITHOUT showing user toasts
* Use this for fire-and-forget operations where user shouldn't be interrupted
*/
export const handleNonCriticalError = (
error: unknown,
context: ErrorContext
): string => {
const errorId = crypto.randomUUID();
const shortErrorId = errorId.slice(0, 8);
const errorMessage = error instanceof AppError
? error.userMessage || error.message
: error instanceof Error
? error.message
: 'An unexpected error occurred';
// Log to console/monitoring (same as handleError)
logger.error('Non-critical error occurred', {
...context,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
errorId,
severity: 'low',
});
// Log to database with breadcrumbs (non-blocking, fire-and-forget)
try {
const envContext = captureEnvironmentContext();
const breadcrumbs = breadcrumbManager.getAll();
supabase.rpc('log_request_metadata', {
p_request_id: errorId,
p_user_id: context.userId || undefined,
p_endpoint: context.action,
p_method: 'NON_CRITICAL_ERROR',
p_status_code: 500,
p_error_type: error instanceof Error ? error.name : 'UnknownError',
p_error_message: errorMessage,
p_error_stack: error instanceof Error ? error.stack : undefined,
p_user_agent: navigator.userAgent,
p_breadcrumbs: JSON.stringify({
breadcrumbs,
metadata: context.metadata // Include metadata for debugging
}),
p_timezone: envContext.timezone,
p_referrer: document.referrer || undefined,
p_duration_ms: context.duration,
}).then(({ error: dbError }) => {
if (dbError) {
logger.error('Failed to log non-critical error to database', { dbError });
}
});
} catch (logError) {
logger.error('Failed to capture non-critical error context', { logError });
}
// NO TOAST - This is the key difference from handleError()
return errorId;
};
/**
* Type-safe error message extraction utility
* Use this instead of `error: any` in catch blocks
*/
export function getErrorMessage(error: unknown): string {
if (error instanceof Error) {
return error.message;
}
if (typeof error === 'string') {
return error;
}
if (error && typeof error === 'object' && 'message' in error) {
return String(error.message);
}
return 'An unexpected error occurred';
}
/**
* Type guard to check if error has a code property
*/
export function hasErrorCode(error: unknown): error is { code: string } {
return (
error !== null &&
typeof error === 'object' &&
'code' in error &&
typeof (error as { code: unknown }).code === 'string'
);
}
/**
* Helper to wrap async operations with automatic duration tracking
* Use this for operations where you want to track how long they took before failing
*/
export async function withErrorTiming<T>(
fn: () => Promise<T>,
errorContext: Omit<ErrorContext, 'duration'>
): Promise<T> {
const start = performance.now();
try {
return await fn();
} catch (error) {
const duration = Math.round(performance.now() - start);
handleError(error, { ...errorContext, duration });
throw error;
}
}

View File

@@ -0,0 +1,213 @@
/**
* Error Sanitizer
*
* Removes sensitive information from error messages before
* displaying to users or logging to external systems.
*
* Part of Sacred Pipeline Phase 3: Enhanced Error Handling
*/
import { logger } from './logger';
/**
* Patterns that indicate sensitive data in error messages
*/
const SENSITIVE_PATTERNS = [
// Authentication & Tokens
/bearer\s+[a-zA-Z0-9\-_.]+/gi,
/token[:\s]+[a-zA-Z0-9\-_.]+/gi,
/api[_-]?key[:\s]+[a-zA-Z0-9\-_.]+/gi,
/password[:\s]+[^\s]+/gi,
/secret[:\s]+[a-zA-Z0-9\-_.]+/gi,
// Database connection strings
/postgresql:\/\/[^\s]+/gi,
/postgres:\/\/[^\s]+/gi,
/mysql:\/\/[^\s]+/gi,
// IP addresses (internal)
/\b(?:10|172\.(?:1[6-9]|2[0-9]|3[01])|192\.168)\.\d{1,3}\.\d{1,3}\b/g,
// Email addresses (in error messages)
/[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}/g,
// UUIDs (can reveal internal IDs)
/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/gi,
// File paths (Unix & Windows)
/\/(?:home|root|usr|var|opt|mnt)\/[^\s]*/g,
/[A-Z]:\\(?:Users|Windows|Program Files)[^\s]*/g,
// Stack traces with file paths
/at\s+[^\s]+\s+\([^\)]+\)/g,
// SQL queries (can reveal schema)
/SELECT\s+.+?\s+FROM\s+[^\s]+/gi,
/INSERT\s+INTO\s+[^\s]+/gi,
/UPDATE\s+[^\s]+\s+SET/gi,
/DELETE\s+FROM\s+[^\s]+/gi,
];
/**
* Common error message patterns to make more user-friendly
*/
const ERROR_MESSAGE_REPLACEMENTS: Array<[RegExp, string]> = [
// Database errors
[/duplicate key value violates unique constraint/gi, 'This item already exists'],
[/foreign key constraint/gi, 'Related item not found'],
[/violates check constraint/gi, 'Invalid data provided'],
[/null value in column/gi, 'Required field is missing'],
[/invalid input syntax for type/gi, 'Invalid data format'],
// Auth errors
[/JWT expired/gi, 'Session expired. Please log in again'],
[/Invalid JWT/gi, 'Authentication failed. Please log in again'],
[/No API key found/gi, 'Authentication required'],
// Network errors
[/ECONNREFUSED/gi, 'Service temporarily unavailable'],
[/ETIMEDOUT/gi, 'Request timed out. Please try again'],
[/ENOTFOUND/gi, 'Service not available'],
[/Network request failed/gi, 'Network error. Check your connection'],
// Rate limiting
[/Too many requests/gi, 'Rate limit exceeded. Please wait before trying again'],
// Supabase specific
[/permission denied for table/gi, 'Access denied'],
[/row level security policy/gi, 'Access denied'],
];
/**
* Sanitize error message by removing sensitive information
*
* @param error - Error object or message
* @param context - Optional context for logging
* @returns Sanitized error message safe for display
*/
export function sanitizeErrorMessage(
error: unknown,
context?: { action?: string; userId?: string }
): string {
let message: string;
// Extract message from error object
if (error instanceof Error) {
message = error.message;
} else if (typeof error === 'string') {
message = error;
} else if (error && typeof error === 'object' && 'message' in error) {
message = String((error as { message: unknown }).message);
} else {
message = 'An unexpected error occurred';
}
// Store original for logging
const originalMessage = message;
// Remove sensitive patterns
SENSITIVE_PATTERNS.forEach(pattern => {
message = message.replace(pattern, '[REDACTED]');
});
// Apply user-friendly replacements
ERROR_MESSAGE_REPLACEMENTS.forEach(([pattern, replacement]) => {
if (pattern.test(message)) {
message = replacement;
}
});
// If message was heavily sanitized, provide generic message
if (message.includes('[REDACTED]')) {
message = 'An error occurred. Please contact support if this persists';
}
// Log sanitization if message changed significantly
if (originalMessage !== message && originalMessage.length > message.length + 10) {
logger.info('[ErrorSanitizer] Sanitized error message', {
action: context?.action,
userId: context?.userId,
originalLength: originalMessage.length,
sanitizedLength: message.length,
containsRedacted: message.includes('[REDACTED]'),
});
}
return message;
}
/**
* Check if error message contains sensitive data
*
* @param message - Error message to check
* @returns True if message contains sensitive patterns
*/
export function containsSensitiveData(message: string): boolean {
return SENSITIVE_PATTERNS.some(pattern => pattern.test(message));
}
/**
* Sanitize error object for logging to external systems
*
* @param error - Error object to sanitize
* @returns Sanitized error object
*/
export function sanitizeErrorForLogging(error: unknown): {
message: string;
name?: string;
code?: string;
stack?: string;
} {
const sanitized: {
message: string;
name?: string;
code?: string;
stack?: string;
} = {
message: sanitizeErrorMessage(error),
};
if (error instanceof Error) {
sanitized.name = error.name;
// Sanitize stack trace
if (error.stack) {
let stack = error.stack;
SENSITIVE_PATTERNS.forEach(pattern => {
stack = stack.replace(pattern, '[REDACTED]');
});
sanitized.stack = stack;
}
// Include error code if present
if ('code' in error && typeof error.code === 'string') {
sanitized.code = error.code;
}
}
return sanitized;
}
/**
* Create a user-safe error response
*
* @param error - Original error
* @param fallbackMessage - Optional fallback message
* @returns User-safe error object
*/
export function createSafeErrorResponse(
error: unknown,
fallbackMessage = 'An error occurred'
): {
message: string;
code?: string;
} {
const sanitized = sanitizeErrorMessage(error);
return {
message: sanitized || fallbackMessage,
code: error instanceof Error && 'code' in error
? String((error as { code: string }).code)
: undefined,
};
}

View File

@@ -0,0 +1,101 @@
/**
* Optimized List Hook
* Provides memoized filtering, sorting, and pagination for large lists
*/
import { useMemo } from 'react';
export interface UseOptimizedListOptions<T> {
items: T[];
searchTerm?: string;
searchFields?: (keyof T)[];
sortField?: keyof T;
sortDirection?: 'asc' | 'desc';
pageSize?: number;
currentPage?: number;
}
export interface UseOptimizedListResult<T> {
filteredItems: T[];
paginatedItems: T[];
totalCount: number;
pageCount: number;
}
export function useOptimizedList<T extends Record<string, any>>({
items,
searchTerm = '',
searchFields = [],
sortField,
sortDirection = 'asc',
pageSize,
currentPage = 1,
}: UseOptimizedListOptions<T>): UseOptimizedListResult<T> {
// Memoized filtering
const filteredItems = useMemo(() => {
if (!searchTerm || searchFields.length === 0) {
return items;
}
const lowerSearchTerm = searchTerm.toLowerCase();
return items.filter(item =>
searchFields.some(field => {
const value = item[field];
if (value == null) return false;
return String(value).toLowerCase().includes(lowerSearchTerm);
})
);
}, [items, searchTerm, searchFields]);
// Memoized sorting
const sortedItems = useMemo(() => {
if (!sortField) {
return filteredItems;
}
return [...filteredItems].sort((a, b) => {
const aValue = a[sortField];
const bValue = b[sortField];
if (aValue == null && bValue == null) return 0;
if (aValue == null) return sortDirection === 'asc' ? 1 : -1;
if (bValue == null) return sortDirection === 'asc' ? -1 : 1;
if (typeof aValue === 'string' && typeof bValue === 'string') {
return sortDirection === 'asc'
? aValue.localeCompare(bValue)
: bValue.localeCompare(aValue);
}
if (typeof aValue === 'number' && typeof bValue === 'number') {
return sortDirection === 'asc' ? aValue - bValue : bValue - aValue;
}
return 0;
});
}, [filteredItems, sortField, sortDirection]);
// Memoized pagination
const paginatedItems = useMemo(() => {
if (!pageSize) {
return sortedItems;
}
const startIndex = (currentPage - 1) * pageSize;
const endIndex = startIndex + pageSize;
return sortedItems.slice(startIndex, endIndex);
}, [sortedItems, pageSize, currentPage]);
// Calculate page count
const pageCount = useMemo(() => {
if (!pageSize) return 1;
return Math.ceil(sortedItems.length / pageSize);
}, [sortedItems.length, pageSize]);
return {
filteredItems: sortedItems,
paginatedItems,
totalCount: sortedItems.length,
pageCount,
};
}

View File

@@ -0,0 +1,159 @@
/**
* Idempotency Key Utilities
*
* Provides helper functions for generating and managing idempotency keys
* for moderation operations to prevent duplicate requests.
*
* Integrated with idempotencyLifecycle.ts for full lifecycle tracking.
*/
import {
registerIdempotencyKey,
updateIdempotencyStatus,
getIdempotencyRecord,
isIdempotencyKeyValid,
type IdempotencyRecord,
} from './idempotencyLifecycle';
/**
* Generate a deterministic idempotency key for a moderation action
*
* Format: action_submissionId_itemIds_userId_timestamp
* Example: approval_abc123_def456_ghi789_user123_1699564800000
*
* @param action - The moderation action type ('approval', 'rejection', 'retry')
* @param submissionId - The submission ID
* @param itemIds - Array of item IDs being processed
* @param userId - The moderator's user ID
* @returns Deterministic idempotency key
*/
export function generateIdempotencyKey(
action: 'approval' | 'rejection' | 'retry',
submissionId: string,
itemIds: string[],
userId: string
): string {
// Sort itemIds to ensure consistency regardless of order
const sortedItemIds = [...itemIds].sort().join('_');
// Include timestamp to allow same moderator to retry after 24h window
const timestamp = Date.now();
return `${action}_${submissionId}_${sortedItemIds}_${userId}_${timestamp}`;
}
/**
* Check if an error is a 409 Conflict (duplicate request)
*
* @param error - Error object to check
* @returns True if error is 409 Conflict
*/
export function is409Conflict(error: unknown): boolean {
if (!error || typeof error !== 'object') return false;
const errorObj = error as { status?: number; message?: string };
// Check status code
if (errorObj.status === 409) return true;
// Check error message for conflict indicators
const message = errorObj.message?.toLowerCase() || '';
return message.includes('duplicate request') ||
message.includes('already in progress') ||
message.includes('race condition');
}
/**
* Extract retry-after value from error response
*
* @param error - Error object with potential Retry-After header
* @returns Seconds to wait before retry, defaults to 3
*/
export function getRetryAfter(error: unknown): number {
if (!error || typeof error !== 'object') return 3;
const errorObj = error as {
retryAfter?: number;
context?: { headers?: { 'Retry-After'?: string } }
};
// Check structured retryAfter field
if (errorObj.retryAfter) return errorObj.retryAfter;
// Check Retry-After header
const retryAfterHeader = errorObj.context?.headers?.['Retry-After'];
if (retryAfterHeader) {
const seconds = parseInt(retryAfterHeader, 10);
return isNaN(seconds) ? 3 : seconds;
}
return 3; // Default 3 seconds
}
/**
* Sleep for a specified duration
*
* @param ms - Milliseconds to sleep
*/
export function sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Generate and register a new idempotency key with lifecycle tracking
*
* @param action - The moderation action type
* @param submissionId - The submission ID
* @param itemIds - Array of item IDs being processed
* @param userId - The moderator's user ID
* @returns Idempotency key and record
*/
export async function generateAndRegisterKey(
action: 'approval' | 'rejection' | 'retry',
submissionId: string,
itemIds: string[],
userId: string
): Promise<{ key: string; record: IdempotencyRecord }> {
const key = generateIdempotencyKey(action, submissionId, itemIds, userId);
const record = await registerIdempotencyKey(key, action, submissionId, itemIds, userId);
return { key, record };
}
/**
* Validate and mark idempotency key as processing
*
* @param key - Idempotency key to validate
* @returns True if valid and marked as processing
*/
export async function validateAndStartProcessing(key: string): Promise<boolean> {
const isValid = await isIdempotencyKeyValid(key);
if (!isValid) {
return false;
}
const record = await getIdempotencyRecord(key);
// Only allow transition from pending to processing
if (record?.status !== 'pending') {
return false;
}
await updateIdempotencyStatus(key, 'processing');
return true;
}
/**
* Mark idempotency key as completed
*/
export async function markKeyCompleted(key: string): Promise<void> {
await updateIdempotencyStatus(key, 'completed');
}
/**
* Mark idempotency key as failed
*/
export async function markKeyFailed(key: string, error: string): Promise<void> {
await updateIdempotencyStatus(key, 'failed', error);
}

View File

@@ -0,0 +1,281 @@
/**
* Idempotency Key Lifecycle Management
*
* Tracks idempotency keys through their lifecycle:
* - pending: Key generated, request not yet sent
* - processing: Request in progress
* - completed: Request succeeded
* - failed: Request failed
* - expired: Key expired (24h window)
*
* Part of Sacred Pipeline Phase 4: Transaction Resilience
*/
import { openDB, DBSchema, IDBPDatabase } from 'idb';
import { logger } from './logger';
export type IdempotencyStatus = 'pending' | 'processing' | 'completed' | 'failed' | 'expired';
export interface IdempotencyRecord {
key: string;
action: 'approval' | 'rejection' | 'retry';
submissionId: string;
itemIds: string[];
userId: string;
status: IdempotencyStatus;
createdAt: number;
updatedAt: number;
expiresAt: number;
attempts: number;
lastError?: string;
completedAt?: number;
}
interface IdempotencyDB extends DBSchema {
idempotency_keys: {
key: string;
value: IdempotencyRecord;
indexes: {
'by-submission': string;
'by-status': IdempotencyStatus;
'by-expiry': number;
};
};
}
const DB_NAME = 'thrillwiki-idempotency';
const DB_VERSION = 1;
const STORE_NAME = 'idempotency_keys';
const KEY_TTL_MS = 24 * 60 * 60 * 1000; // 24 hours
let dbInstance: IDBPDatabase<IdempotencyDB> | null = null;
async function getDB(): Promise<IDBPDatabase<IdempotencyDB>> {
if (dbInstance) return dbInstance;
dbInstance = await openDB<IdempotencyDB>(DB_NAME, DB_VERSION, {
upgrade(db) {
if (!db.objectStoreNames.contains(STORE_NAME)) {
const store = db.createObjectStore(STORE_NAME, { keyPath: 'key' });
store.createIndex('by-submission', 'submissionId');
store.createIndex('by-status', 'status');
store.createIndex('by-expiry', 'expiresAt');
}
},
});
return dbInstance;
}
/**
* Register a new idempotency key
*/
export async function registerIdempotencyKey(
key: string,
action: IdempotencyRecord['action'],
submissionId: string,
itemIds: string[],
userId: string
): Promise<IdempotencyRecord> {
const db = await getDB();
const now = Date.now();
const record: IdempotencyRecord = {
key,
action,
submissionId,
itemIds,
userId,
status: 'pending',
createdAt: now,
updatedAt: now,
expiresAt: now + KEY_TTL_MS,
attempts: 0,
};
await db.add(STORE_NAME, record);
logger.info('[IdempotencyLifecycle] Registered key', {
key,
action,
submissionId,
itemCount: itemIds.length,
});
return record;
}
/**
* Update idempotency key status
*/
export async function updateIdempotencyStatus(
key: string,
status: IdempotencyStatus,
error?: string
): Promise<void> {
const db = await getDB();
const record = await db.get(STORE_NAME, key);
if (!record) {
logger.warn('[IdempotencyLifecycle] Key not found for update', { key, status });
return;
}
const now = Date.now();
record.status = status;
record.updatedAt = now;
if (status === 'processing') {
record.attempts += 1;
}
if (status === 'completed') {
record.completedAt = now;
}
if (status === 'failed' && error) {
record.lastError = error;
}
await db.put(STORE_NAME, record);
logger.info('[IdempotencyLifecycle] Updated key status', {
key,
status,
attempts: record.attempts,
});
}
/**
* Get idempotency record by key
*/
export async function getIdempotencyRecord(key: string): Promise<IdempotencyRecord | null> {
const db = await getDB();
const record = await db.get(STORE_NAME, key);
// Check if expired
if (record && record.expiresAt < Date.now()) {
await updateIdempotencyStatus(key, 'expired');
return { ...record, status: 'expired' };
}
return record || null;
}
/**
* Check if key exists and is valid
*/
export async function isIdempotencyKeyValid(key: string): Promise<boolean> {
const record = await getIdempotencyRecord(key);
if (!record) return false;
if (record.status === 'expired') return false;
if (record.expiresAt < Date.now()) return false;
return true;
}
/**
* Get all keys for a submission
*/
export async function getSubmissionIdempotencyKeys(
submissionId: string
): Promise<IdempotencyRecord[]> {
const db = await getDB();
const index = db.transaction(STORE_NAME).store.index('by-submission');
return await index.getAll(submissionId);
}
/**
* Get keys by status
*/
export async function getIdempotencyKeysByStatus(
status: IdempotencyStatus
): Promise<IdempotencyRecord[]> {
const db = await getDB();
const index = db.transaction(STORE_NAME).store.index('by-status');
return await index.getAll(status);
}
/**
* Clean up expired keys
*/
export async function cleanupExpiredKeys(): Promise<number> {
const db = await getDB();
const now = Date.now();
const tx = db.transaction(STORE_NAME, 'readwrite');
const index = tx.store.index('by-expiry');
let deletedCount = 0;
// Get all expired keys
for await (const cursor of index.iterate()) {
if (cursor.value.expiresAt < now) {
await cursor.delete();
deletedCount++;
}
}
await tx.done;
if (deletedCount > 0) {
logger.info('[IdempotencyLifecycle] Cleaned up expired keys', { deletedCount });
}
return deletedCount;
}
/**
* Get idempotency statistics
*/
export async function getIdempotencyStats(): Promise<{
total: number;
pending: number;
processing: number;
completed: number;
failed: number;
expired: number;
}> {
const db = await getDB();
const all = await db.getAll(STORE_NAME);
const now = Date.now();
const stats = {
total: all.length,
pending: 0,
processing: 0,
completed: 0,
failed: 0,
expired: 0,
};
all.forEach(record => {
// Mark as expired if TTL passed
if (record.expiresAt < now) {
stats.expired++;
} else {
stats[record.status]++;
}
});
return stats;
}
/**
* Auto-cleanup: Run periodically to remove expired keys
*/
export function startAutoCleanup(intervalMinutes: number = 60): () => void {
const intervalId = setInterval(async () => {
try {
await cleanupExpiredKeys();
} catch (error) {
logger.error('[IdempotencyLifecycle] Auto-cleanup failed', { error });
}
}, intervalMinutes * 60 * 1000);
// Run immediately on start
cleanupExpiredKeys();
// Return cleanup function
return () => clearInterval(intervalId);
}

View File

@@ -0,0 +1,301 @@
/**
* Identity Management Service
* Handles OAuth provider connections, disconnections, and password fallback
*/
import { supabase } from '@/lib/supabaseClient';
import type { UserIdentity as SupabaseUserIdentity } from '@supabase/supabase-js';
import type {
UserIdentity,
OAuthProvider,
IdentitySafetyCheck,
IdentityOperationResult
} from '@/types/identity';
import { handleNonCriticalError, handleError, getErrorMessage } from './errorHandler';
/**
* Get all identities for the current user
*/
export async function getUserIdentities(): Promise<UserIdentity[]> {
try {
const { data, error } = await supabase.auth.getUserIdentities();
if (error) throw error;
return (data?.identities || []) as UserIdentity[];
} catch (error) {
handleNonCriticalError(error, {
action: 'Get User Identities',
metadata: { returnedEmptyArray: true }
});
return [];
}
}
/**
* Check if user has password authentication (email provider)
*/
export async function hasPasswordAuth(): Promise<boolean> {
const identities = await getUserIdentities();
return identities.some(identity => identity.provider === 'email');
}
/**
* Check if it's safe to disconnect a provider
* Returns safety information and reason if unsafe
*/
export async function checkDisconnectSafety(
provider: OAuthProvider
): Promise<IdentitySafetyCheck> {
const identities = await getUserIdentities();
const hasPassword = identities.some(i => i.provider === 'email');
const oauthIdentities = identities.filter(i =>
i.provider !== 'email' && i.provider !== 'phone'
);
const totalIdentities = identities.length;
// Can't disconnect if it's the only identity
if (totalIdentities === 1) {
return {
canDisconnect: false,
reason: 'last_identity',
hasPasswordAuth: hasPassword,
totalIdentities,
oauthIdentities: oauthIdentities.length
};
}
// Can't disconnect last OAuth provider if no password backup
if (oauthIdentities.length === 1 && !hasPassword) {
return {
canDisconnect: false,
reason: 'no_password_backup',
hasPasswordAuth: hasPassword,
totalIdentities,
oauthIdentities: oauthIdentities.length
};
}
return {
canDisconnect: true,
reason: 'safe',
hasPasswordAuth: hasPassword,
totalIdentities,
oauthIdentities: oauthIdentities.length
};
}
/**
* Disconnect an OAuth identity from the user's account
* Requires AAL2 session for security
*/
export async function disconnectIdentity(
provider: OAuthProvider
): Promise<IdentityOperationResult> {
try {
// AAL2 check for security-critical operation (MUST fail closed)
const { data: { session } } = await supabase.auth.getSession();
// Get AAL level - fail closed on error
const { data: aalData, error: aalError } = await supabase.auth.mfa.getAuthenticatorAssuranceLevel();
if (aalError) {
handleNonCriticalError(aalError, {
action: 'Get AAL Level (Identity Disconnect)',
metadata: { failClosed: true }
});
return {
success: false,
error: 'Unable to verify security level. Please try again.',
requiresAAL2: true
};
}
const currentAal = aalData?.currentLevel || 'aal1';
// If not at AAL2, check if MFA is enrolled - fail closed on error
if (currentAal !== 'aal2') {
const { data: factors, error: factorsError } = await supabase.auth.mfa.listFactors();
if (factorsError) {
handleNonCriticalError(factorsError, {
action: 'List MFA Factors (Identity Disconnect)',
metadata: { failClosed: true }
});
return {
success: false,
error: 'Unable to verify MFA status. Please try again.',
requiresAAL2: true
};
}
const hasEnrolledMFA = factors?.totp?.some(f => f.status === 'verified') || false;
if (hasEnrolledMFA) {
return {
success: false,
error: 'Please verify your identity with MFA before disconnecting accounts',
requiresAAL2: true
};
}
}
// Safety check
const safetyCheck = await checkDisconnectSafety(provider);
if (!safetyCheck.canDisconnect) {
return {
success: false,
error: safetyCheck.reason === 'last_identity'
? 'Cannot disconnect your only login method'
: 'Please set a password before disconnecting your last social login'
};
}
// Get all identities to find the one to unlink
const identities = await getUserIdentities();
const identity = identities.find(i => i.provider === provider);
if (!identity) {
return {
success: false,
error: `No ${provider} identity found`
};
}
// Unlink the identity - cast to Supabase's expected type
const { error } = await supabase.auth.unlinkIdentity(identity as SupabaseUserIdentity);
if (error) throw error;
// Log audit event
const { data: { user } } = await supabase.auth.getUser();
if (user) {
await logIdentityChange(user.id, 'identity_disconnected', { provider });
}
return { success: true };
} catch (error) {
handleError(error, {
action: 'Disconnect Identity',
metadata: { provider }
});
return {
success: false,
error: getErrorMessage(error)
};
}
}
/**
* Connect an OAuth identity to the user's account
*/
export async function connectIdentity(
provider: OAuthProvider,
redirectTo?: string
): Promise<IdentityOperationResult> {
try {
const { error } = await supabase.auth.signInWithOAuth({
provider,
options: {
redirectTo: redirectTo || `${window.location.origin}/settings?tab=security`,
skipBrowserRedirect: false
}
});
if (error) throw error;
return { success: true };
} catch (error) {
handleError(error, {
action: 'Connect Identity',
metadata: { provider }
});
return {
success: false,
error: getErrorMessage(error)
};
}
}
/**
* Add password authentication to an OAuth-only account
* Triggers Supabase password reset flow - user sets password via email link
*/
export async function addPasswordToAccount(): Promise<IdentityOperationResult> {
try {
const { data: { user } } = await supabase.auth.getUser();
const userEmail = user?.email;
if (!userEmail) {
return {
success: false,
error: 'No email address found on your account'
};
}
// Trigger Supabase password reset email
// User clicks link and sets password, which automatically creates email identity
const { error: resetError } = await supabase.auth.resetPasswordForEmail(
userEmail,
{
redirectTo: `${window.location.origin}/auth/callback?type=recovery`
}
);
if (resetError) {
handleError(resetError, {
action: 'Send Password Reset Email',
userId: user?.id,
metadata: { email: userEmail }
});
throw resetError;
}
// Log the action
await logIdentityChange(user!.id, 'password_setup_initiated', {
method: 'reset_password_flow',
timestamp: new Date().toISOString()
});
return {
success: true,
needsEmailConfirmation: true,
email: userEmail
};
} catch (error) {
handleError(error, {
action: 'Initiate Password Setup'
});
return {
success: false,
error: getErrorMessage(error)
};
}
}
/**
* Log identity changes to audit log
*/
async function logIdentityChange(
userId: string,
action: string,
details: Record<string, any>
): Promise<void> {
try {
await supabase.rpc('log_admin_action', {
_admin_user_id: userId,
_target_user_id: userId,
_action: action,
_details: details
});
} catch (error) {
handleNonCriticalError(error, {
action: 'Log Identity Change to Audit',
userId,
metadata: { auditAction: action }
});
// Don't fail the operation if audit logging fails
}
}

View File

@@ -0,0 +1,210 @@
import { supabase } from '@/lib/supabaseClient';
import { invokeWithTracking } from './edgeFunctionTracking';
import type { UploadedImage } from '@/components/upload/EntityMultiImageUploader';
import { handleError, handleNonCriticalError } from './errorHandler';
export interface CloudflareUploadResponse {
result: {
id: string;
variants: string[];
};
success: boolean;
}
// Internal type to track upload status
interface UploadedImageWithFlag extends UploadedImage {
wasNewlyUploaded?: boolean;
}
// Upload timeout in milliseconds (30 seconds)
const UPLOAD_TIMEOUT_MS = 30000;
/**
* Creates a promise that rejects after a timeout
*/
function withTimeout<T>(promise: Promise<T>, timeoutMs: number, operation: string): Promise<T> {
return Promise.race([
promise,
new Promise<T>((_, reject) =>
setTimeout(() => reject(new Error(`${operation} timed out after ${timeoutMs}ms`)), timeoutMs)
)
]);
}
/**
* Uploads pending local images to Cloudflare via Supabase Edge Function
* @param images Array of UploadedImage objects (mix of local and already uploaded)
* @returns Array of UploadedImage objects with all images uploaded
*/
export async function uploadPendingImages(images: UploadedImage[]): Promise<UploadedImage[]> {
// Process all images in parallel for better performance using allSettled
const uploadPromises = images.map(async (image, index): Promise<UploadedImageWithFlag> => {
if (image.isLocal && image.file) {
const fileName = image.file.name;
// Step 1: Get upload URL from our Supabase Edge Function (with tracking and timeout)
const { data: uploadUrlData, error: urlError, requestId } = await withTimeout(
invokeWithTracking(
'upload-image',
{ action: 'get-upload-url' }
),
UPLOAD_TIMEOUT_MS,
'Get upload URL'
);
if (urlError || !uploadUrlData?.uploadURL) {
const error = new Error(`Failed to get upload URL for "${fileName}": ${urlError?.message || 'Unknown error'}`);
handleError(error, {
action: 'Get Upload URL',
metadata: { fileName, requestId }
});
throw error;
}
// Step 2: Upload file directly to Cloudflare with retry on transient failures
const formData = new FormData();
formData.append('file', image.file);
const { withRetry } = await import('./retryHelpers');
const uploadResponse = await withRetry(
() => withTimeout(
fetch(uploadUrlData.uploadURL, {
method: 'POST',
body: formData,
}),
UPLOAD_TIMEOUT_MS,
'Cloudflare upload'
),
{
maxAttempts: 3,
baseDelay: 500,
shouldRetry: (error) => {
// Retry on network errors, timeouts, or 5xx errors
if (error instanceof Error) {
const msg = error.message.toLowerCase();
if (msg.includes('timeout')) return true;
if (msg.includes('network')) return true;
if (msg.includes('failed to fetch')) return true;
}
return false;
}
}
);
if (!uploadResponse.ok) {
const errorText = await uploadResponse.text();
const error = new Error(`Upload failed for "${fileName}" (status ${uploadResponse.status}): ${errorText}`);
handleError(error, {
action: 'Cloudflare Upload',
metadata: { fileName, status: uploadResponse.status, timeout_ms: UPLOAD_TIMEOUT_MS }
});
throw error;
}
const result: CloudflareUploadResponse = await uploadResponse.json();
if (!result.success || !result.result) {
const error = new Error(`Cloudflare upload returned unsuccessful response for "${fileName}"`);
handleError(error, {
action: 'Cloudflare Upload',
metadata: { fileName }
});
throw error;
}
// Clean up object URL
URL.revokeObjectURL(image.url);
// Step 3: Return uploaded image metadata with wasNewlyUploaded flag
return {
url: `https://cdn.thrillwiki.com/images/${result.result.id}/public`,
cloudflare_id: result.result.id,
caption: image.caption,
isLocal: false,
wasNewlyUploaded: true // Flag to track newly uploaded images
};
} else {
// Already uploaded, keep as is
return {
url: image.url,
cloudflare_id: image.cloudflare_id,
caption: image.caption,
isLocal: false,
wasNewlyUploaded: false // Pre-existing image
};
}
});
// Wait for all uploads to settle (succeed or fail)
const results = await Promise.allSettled(uploadPromises);
// Separate successful and failed uploads
const successfulUploads: UploadedImageWithFlag[] = [];
const newlyUploadedImageIds: string[] = []; // Track ONLY newly uploaded images for cleanup
const errors: string[] = [];
results.forEach((result, index) => {
if (result.status === 'fulfilled') {
const uploadedImage = result.value;
successfulUploads.push(uploadedImage);
// Only track newly uploaded images for potential cleanup
if (uploadedImage.wasNewlyUploaded && uploadedImage.cloudflare_id) {
newlyUploadedImageIds.push(uploadedImage.cloudflare_id);
}
} else {
errors.push(result.reason?.message || `Upload ${index + 1} failed`);
}
});
// If any uploads failed, clean up ONLY newly uploaded images and throw error
if (errors.length > 0) {
if (newlyUploadedImageIds.length > 0) {
const cleanupError = new Error(`Some uploads failed, cleaning up ${newlyUploadedImageIds.length} newly uploaded images`);
handleError(cleanupError, {
action: 'Upload Cleanup',
metadata: {
newlyUploadedCount: newlyUploadedImageIds.length,
failureCount: errors.length
}
});
// Attempt cleanup in parallel with detailed error tracking
const cleanupResults = await Promise.allSettled(
newlyUploadedImageIds.map(imageId =>
invokeWithTracking('upload-image', {
action: 'delete',
imageId,
})
)
);
// Track cleanup failures silently (non-critical)
const cleanupFailures = cleanupResults.filter(r => r.status === 'rejected');
if (cleanupFailures.length > 0) {
handleNonCriticalError(
new Error(`Failed to cleanup ${cleanupFailures.length} of ${newlyUploadedImageIds.length} images`),
{
action: 'Image Cleanup',
metadata: {
cleanupFailures: cleanupFailures.length,
totalCleanup: newlyUploadedImageIds.length,
orphanedImages: newlyUploadedImageIds.filter((_, i) => cleanupResults[i].status === 'rejected')
}
}
);
}
}
const finalError = new Error(`Failed to upload ${errors.length} of ${images.length} images: ${errors.join('; ')}`);
handleError(finalError, {
action: 'Image Upload',
metadata: { failureCount: errors.length, totalCount: images.length }
});
throw finalError;
}
// Remove the wasNewlyUploaded flag before returning
return successfulUploads.map(({ wasNewlyUploaded, ...image }) => image as UploadedImage);
}

View File

@@ -0,0 +1,188 @@
import { supabase } from '@/lib/supabaseClient';
import type { Database } from '@/integrations/supabase/types';
import { logger } from '@/lib/logger';
type TableName = keyof Database['public']['Tables'];
/**
* TestDataTracker - Manages test data lifecycle for integration tests
*
* Tracks all created test entities and ensures proper cleanup in dependency order.
* All tracked entities are marked with is_test_data=true for easy identification.
*/
export class TestDataTracker {
private entities = new Map<string, Set<string>>();
/**
* Track an entity for cleanup
* @param table - Database table name
* @param id - Entity ID
*/
track(table: string, id: string): void {
if (!this.entities.has(table)) {
this.entities.set(table, new Set());
}
this.entities.get(table)!.add(id);
}
/**
* Track multiple entities at once
* @param table - Database table name
* @param ids - Array of entity IDs
*/
trackMany(table: string, ids: string[]): void {
ids.forEach(id => this.track(table, id));
}
/**
* Get all tracked entity IDs for a specific table
* @param table - Database table name
* @returns Array of tracked IDs
*/
getTracked(table: string): string[] {
return Array.from(this.entities.get(table) || []);
}
/**
* Cleanup all tracked test data in proper dependency order
* Deletes children first, then parents to avoid foreign key violations
*/
async cleanup(): Promise<void> {
// Define deletion order (children first, parents last)
const deletionOrder: TableName[] = [
'reviews',
'photos',
'submission_items',
'content_submissions',
'ride_versions',
'park_versions',
'company_versions',
'ride_model_versions',
'rides',
'parks',
'ride_models',
'companies',
'test_data_registry'
];
const errors: Array<{ table: string; error: any }> = [];
for (const table of deletionOrder) {
const ids = this.getTracked(table);
if (ids.length === 0) continue;
try {
const { error } = await supabase
.from(table as any)
.delete()
.in('id', ids);
if (error) {
errors.push({ table, error });
logger.warn('Failed to cleanup test data table', { table, error });
}
} catch (err) {
errors.push({ table, error: err });
logger.warn('Exception cleaning up test data table', { table, error: err });
}
}
// Clear tracking after cleanup attempt
this.entities.clear();
if (errors.length > 0) {
logger.warn('Cleanup completed with errors', { errorCount: errors.length, errors });
}
}
/**
* Verify that all tracked test data has been cleaned up
* @returns Array of remaining test data items
*/
async verifyCleanup(): Promise<Array<{ table: string; count: number }>> {
const tables: TableName[] = [
'parks', 'rides', 'companies', 'ride_models',
'content_submissions', 'submission_items',
'park_versions', 'ride_versions', 'company_versions', 'ride_model_versions',
'photos', 'reviews', 'test_data_registry'
];
const remaining: Array<{ table: string; count: number }> = [];
for (const table of tables) {
try {
const { count, error } = await supabase
.from(table as any)
.select('*', { count: 'exact', head: true })
.eq('is_test_data', true);
if (error) {
logger.warn('Failed to check test data table', { table, error });
continue;
}
if (count && count > 0) {
remaining.push({ table, count });
}
} catch (err) {
logger.warn('Exception checking test data table', { table, error: err });
}
}
return remaining;
}
/**
* Bulk delete all test data from the database (emergency cleanup)
* WARNING: This deletes ALL data marked with is_test_data=true
*/
static async bulkCleanupAllTestData(): Promise<{ deleted: number; errors: number }> {
const tables: TableName[] = [
'reviews', 'photos', 'submission_items', 'content_submissions',
'ride_versions', 'park_versions', 'company_versions', 'ride_model_versions',
'rides', 'parks', 'ride_models', 'companies', 'test_data_registry'
];
let totalDeleted = 0;
let totalErrors = 0;
for (const table of tables) {
try {
// First count how many will be deleted
const { count: countToDelete } = await supabase
.from(table as any)
.select('*', { count: 'exact', head: true })
.eq('is_test_data', true);
// Then delete without selecting (avoids needing SELECT permission on deleted rows)
const { error } = await supabase
.from(table as any)
.delete()
.eq('is_test_data', true);
if (error) {
logger.warn('Failed to bulk delete test data', { table, error });
totalErrors++;
} else if (countToDelete) {
totalDeleted += countToDelete;
}
} catch (err) {
logger.warn('Exception bulk deleting test data', { table, error: err });
totalErrors++;
}
}
return { deleted: totalDeleted, errors: totalErrors };
}
/**
* Get summary of tracked entities
*/
getSummary(): Record<string, number> {
const summary: Record<string, number> = {};
this.entities.forEach((ids, table) => {
summary[table] = ids.size;
});
return summary;
}
}

View File

@@ -0,0 +1,10 @@
/**
* Integration Testing System
*
* Main exports for the comprehensive integration testing framework.
*/
export { IntegrationTestRunner } from './testRunner';
export { allTestSuites } from './suites';
export type { TestResult, Test, TestSuite } from './testRunner';

View File

@@ -0,0 +1,259 @@
/**
* Authentication & Authorization Test Suite
*
* Tests auth flows, MFA enforcement, role checks, and session management.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
export const authTestSuite: TestSuite = {
id: 'auth',
name: 'Authentication & Authorization',
description: 'Tests for auth flows, MFA, roles, and permissions',
tests: [
{
id: 'auth-001',
name: 'User Session Validation',
description: 'Validates current user session is valid with proper JWT structure',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Get current session
const { data: { session }, error } = await supabase.auth.getSession();
if (error) throw new Error(`Session fetch failed: ${error.message}`);
if (!session) throw new Error('No active session found');
if (!session.access_token) throw new Error('No access token in session');
if (!session.user) throw new Error('No user in session');
if (!session.user.id) throw new Error('No user ID in session');
// Validate token structure (JWT has 3 parts separated by dots)
const tokenParts = session.access_token.split('.');
if (tokenParts.length !== 3) {
throw new Error(`Invalid JWT structure: expected 3 parts, got ${tokenParts.length}`);
}
// Check expiration
if (session.expires_at && session.expires_at < Date.now() / 1000) {
throw new Error('Session token is expired');
}
const duration = Date.now() - startTime;
return {
id: 'auth-001',
name: 'User Session Validation',
suite: 'Authentication & Authorization',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
userId: session.user.id,
email: session.user.email,
expiresAt: session.expires_at,
aal: (session.user as any).aal || 'aal1'
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'auth-001',
name: 'User Session Validation',
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'auth-002',
name: 'Role-Based Access Control (RBAC)',
description: 'Tests role checks are consistent across hooks and database functions',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: { user } } = await supabase.auth.getUser();
if (!user) throw new Error('No authenticated user');
// Query user_roles table
const { data: roles, error: rolesError } = await supabase
.from('user_roles')
.select('role')
.eq('user_id', user.id);
if (rolesError) throw new Error(`Failed to fetch roles: ${rolesError.message}`);
// Test is_moderator() database function
const { data: isMod, error: modError } = await supabase
.rpc('is_moderator', { _user_id: user.id });
if (modError) throw new Error(`is_moderator() failed: ${modError.message}`);
// Test is_superuser() database function
const { data: isSuper, error: superError } = await supabase
.rpc('is_superuser', { _user_id: user.id });
if (superError) throw new Error(`is_superuser() failed: ${superError.message}`);
// Validate consistency
const hasModRole = roles?.some(r => ['moderator', 'admin', 'superuser'].includes(r.role));
if (hasModRole !== isMod) {
throw new Error(`Inconsistent moderator check: has role=${hasModRole}, is_moderator()=${isMod}`);
}
const hasSuperRole = roles?.some(r => r.role === 'superuser');
if (hasSuperRole !== isSuper) {
throw new Error(`Inconsistent superuser check: has role=${hasSuperRole}, is_superuser()=${isSuper}`);
}
const duration = Date.now() - startTime;
return {
id: 'auth-002',
name: 'Role-Based Access Control (RBAC)',
suite: 'Authentication & Authorization',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
roles: roles?.map(r => r.role) || [],
isModerator: isMod,
isSuperuser: isSuper,
consistent: true
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'auth-002',
name: 'Role-Based Access Control (RBAC)',
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'auth-003',
name: 'MFA Factor Detection',
description: 'Tests MFA enrollment detection and AAL level',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: { user } } = await supabase.auth.getUser();
if (!user) throw new Error('No authenticated user');
// Get MFA factors
const { data: factors, error: factorsError } = await supabase.auth.mfa.listFactors();
if (factorsError) throw new Error(`Failed to list MFA factors: ${factorsError.message}`);
const hasVerifiedFactor = factors?.totp?.some(f => f.status === 'verified') || false;
const currentAAL = (user as any).aal || 'aal1';
const duration = Date.now() - startTime;
return {
id: 'auth-003',
name: 'MFA Factor Detection',
suite: 'Authentication & Authorization',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
hasVerifiedMFA: hasVerifiedFactor,
currentAAL: currentAAL,
totpFactorCount: factors?.totp?.length || 0,
verifiedFactorCount: factors?.totp?.filter(f => f.status === 'verified').length || 0
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'auth-003',
name: 'MFA Factor Detection',
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'auth-004',
name: 'Banned User Detection',
description: 'Tests banned user detection in profiles table',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: { user } } = await supabase.auth.getUser();
if (!user) throw new Error('No authenticated user');
// Query profile banned status
const { data: profile, error: profileError } = await supabase
.from('profiles')
.select('banned')
.eq('user_id', user.id)
.single();
if (profileError) throw new Error(`Failed to fetch profile: ${profileError.message}`);
if (!profile) throw new Error('No profile found');
// Test is_user_banned() database function
const { data: isBanned, error: bannedError } = await supabase
.rpc('is_user_banned', { p_user_id: user.id });
if (bannedError) throw new Error(`is_user_banned() failed: ${bannedError.message}`);
// Validate consistency
if (profile.banned !== isBanned) {
throw new Error(`Inconsistent banned check: profile=${profile.banned}, is_user_banned()=${isBanned}`);
}
const duration = Date.now() - startTime;
return {
id: 'auth-004',
name: 'Banned User Detection',
suite: 'Authentication & Authorization',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
isBanned: profile.banned,
consistent: true
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'auth-004',
name: 'Banned User Detection',
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,300 @@
/**
* Data Integrity & Constraints Test Suite
*
* Tests database constraints, RLS policies, and data integrity rules.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
export const dataIntegrityTestSuite: TestSuite = {
id: 'data-integrity',
name: 'Data Integrity & Constraints',
description: 'Tests database constraints, RLS policies, and data integrity',
tests: [
{
id: 'integrity-001',
name: 'RLS Policy Enforcement - Public Read',
description: 'Validates public read access to entity tables',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Test public read access to parks
const { data: parks, error: parksError } = await supabase
.from('parks')
.select('id, name, slug')
.limit(5);
if (parksError) throw new Error(`Parks read failed: ${parksError.message}`);
// Test public read access to rides
const { data: rides, error: ridesError } = await supabase
.from('rides')
.select('id, name, slug')
.limit(5);
if (ridesError) throw new Error(`Rides read failed: ${ridesError.message}`);
// Test public read access to companies
const { data: companies, error: companiesError } = await supabase
.from('companies')
.select('id, name, slug')
.limit(5);
if (companiesError) throw new Error(`Companies read failed: ${companiesError.message}`);
// Test public read access to ride_models
const { data: models, error: modelsError } = await supabase
.from('ride_models')
.select('id, name, slug')
.limit(5);
if (modelsError) throw new Error(`Ride models read failed: ${modelsError.message}`);
const duration = Date.now() - startTime;
return {
id: 'integrity-001',
name: 'RLS Policy Enforcement - Public Read',
suite: 'Data Integrity & Constraints',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
parksReadable: Array.isArray(parks),
ridesReadable: Array.isArray(rides),
companiesReadable: Array.isArray(companies),
rideModelsReadable: Array.isArray(models)
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'integrity-001',
name: 'RLS Policy Enforcement - Public Read',
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'integrity-002',
name: 'Foreign Key Constraint Enforcement',
description: 'Tests foreign key constraints prevent invalid references',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Try to create a ride with non-existent park_id
const invalidParkId = '00000000-0000-0000-0000-000000000000';
const slug = `test-ride-${Date.now()}`;
const { error } = await supabase
.from('rides')
.insert({
name: 'Invalid Ride',
slug,
park_id: invalidParkId,
category: 'roller_coaster',
status: 'operating',
is_test_data: true
});
// This SHOULD fail with foreign key violation
if (!error) {
throw new Error('Foreign key constraint not enforced - invalid park_id was accepted');
}
// Verify it's a foreign key violation
if (!error.message.includes('foreign key') && !error.message.includes('violates')) {
throw new Error(`Expected foreign key error, got: ${error.message}`);
}
const duration = Date.now() - startTime;
return {
id: 'integrity-002',
name: 'Foreign Key Constraint Enforcement',
suite: 'Data Integrity & Constraints',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
constraintEnforced: true,
errorMessage: error.message
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'integrity-002',
name: 'Foreign Key Constraint Enforcement',
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'integrity-003',
name: 'Unique Constraint Enforcement',
description: 'Tests unique constraints prevent duplicate slugs',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create a park
const slug = `unique-test-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Unique Test Park',
slug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
parkId = park.id;
tracker.track('parks', parkId);
// Try to create another park with same slug
const { error: duplicateError } = await supabase
.from('parks')
.insert({
name: 'Duplicate Park',
slug, // Same slug
park_type: 'theme_park',
status: 'operating',
is_test_data: true
});
// This SHOULD fail with unique violation
if (!duplicateError) {
throw new Error('Unique constraint not enforced - duplicate slug was accepted');
}
// Verify it's a unique violation
if (!duplicateError.message.includes('unique') && !duplicateError.message.includes('duplicate')) {
throw new Error(`Expected unique constraint error, got: ${duplicateError.message}`);
}
const duration = Date.now() - startTime;
return {
id: 'integrity-003',
name: 'Unique Constraint Enforcement',
suite: 'Data Integrity & Constraints',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
constraintEnforced: true,
errorMessage: duplicateError.message
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'integrity-003',
name: 'Unique Constraint Enforcement',
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('integrity-003 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'integrity-004',
name: 'No JSONB in Entity Tables',
description: 'Validates no JSONB columns exist in entity tables (per requirements)',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Sample actual data and check structure (information_schema not accessible via RLS)
const { data: parks } = await supabase.from('parks').select('*').limit(1);
const { data: rides } = await supabase.from('rides').select('*').limit(1);
const { data: companies } = await supabase.from('companies').select('*').limit(1);
const { data: models } = await supabase.from('ride_models').select('*').limit(1);
// Check if any fields appear to be JSONB objects
const hasJsonbFields = [parks, rides, companies, models].some(dataset => {
if (!dataset || dataset.length === 0) return false;
const record = dataset[0] as any;
return Object.keys(record).some(key => {
const val = record[key];
// Check if value is a plain object (not Date, not Array, not null)
if (val === null || val === undefined) return false;
if (typeof val !== 'object') return false;
if (Array.isArray(val)) return false;
// Check if it's a Date by checking if it has getTime method
if (val && typeof val.getTime === 'function') return false;
// If we get here, it's likely a JSONB object
return true;
});
});
if (hasJsonbFields) {
throw new Error('Found JSONB-like fields in entity tables');
}
const duration = Date.now() - startTime;
return {
id: 'integrity-004',
name: 'No JSONB in Entity Tables',
suite: 'Data Integrity & Constraints',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
noJsonbColumns: true,
validation: 'Entity tables use relational structure only'
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'integrity-004',
name: 'No JSONB in Entity Tables',
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,197 @@
/**
* Edge Function Integration Tests
*
* Tests for edge function authentication, authorization, and functionality.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
export const edgeFunctionTestSuite: TestSuite = {
id: 'edge-functions',
name: 'Edge Function Tests',
description: 'Tests for edge function authentication and business logic',
tests: [
{
id: 'edge-001',
name: 'Edge Function Authentication',
description: 'Validates edge functions require authentication',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Get current session
const { data: session } = await supabase.auth.getSession();
if (!session.session) {
throw new Error('No active session for test');
}
// Verify we have a valid JWT token
const token = session.session.access_token;
if (!token || token.length < 50) {
throw new Error('Invalid access token');
}
// Decode JWT to check structure (basic validation)
const parts = token.split('.');
if (parts.length !== 3) {
throw new Error('JWT token has invalid structure');
}
// Test that auth is working by calling a protected endpoint
const { data: user } = await supabase.auth.getUser();
if (!user.user) {
throw new Error('Cannot retrieve authenticated user');
}
const duration = Date.now() - startTime;
return {
id: 'edge-001',
name: 'Edge Function Authentication',
suite: 'Edge Function Tests',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
hasToken: true,
userId: user.user.id,
tokenLength: token.length
}
};
} catch (error) {
return {
id: 'edge-001',
name: 'Edge Function Authentication',
suite: 'Edge Function Tests',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'edge-002',
name: 'User Ban Check Function',
description: 'Tests is_user_banned database function',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Call the ban check function
const { data: isBanned, error: banError } = await supabase
.rpc('is_user_banned', {
p_user_id: userData.user.id
});
if (banError) throw new Error(`Ban check failed: ${banError.message}`);
// Superuser running tests should not be banned
if (isBanned === true) {
throw new Error('Test user is banned (superuser should not be banned)');
}
const duration = Date.now() - startTime;
return {
id: 'edge-002',
name: 'User Ban Check Function',
suite: 'Edge Function Tests',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
userId: userData.user.id,
isBanned: isBanned,
functionWorks: true
}
};
} catch (error) {
return {
id: 'edge-002',
name: 'User Ban Check Function',
suite: 'Edge Function Tests',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'edge-003',
name: 'Moderator Permissions Function',
description: 'Tests is_moderator and permission checking',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Test is_moderator function
const { data: isMod, error: modError } = await supabase
.rpc('is_moderator', {
_user_id: userData.user.id
});
if (modError) throw new Error(`Moderator check failed: ${modError.message}`);
// Test user running tests should be a moderator (superuser)
if (!isMod) {
throw new Error('Test user is not a moderator (superuser should be moderator)');
}
// Test is_superuser function
const { data: isSuperuser, error: superError } = await supabase
.rpc('is_superuser', {
_user_id: userData.user.id
});
if (superError) throw new Error(`Superuser check failed: ${superError.message}`);
if (!isSuperuser) {
throw new Error('Test user is not a superuser');
}
const duration = Date.now() - startTime;
return {
id: 'edge-003',
name: 'Moderator Permissions Function',
suite: 'Edge Function Tests',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
userId: userData.user.id,
isModerator: isMod,
isSuperuser: isSuperuser,
functionsWork: true
}
};
} catch (error) {
return {
id: 'edge-003',
name: 'Moderator Permissions Function',
suite: 'Edge Function Tests',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,37 @@
/**
* Integration Test Suites Export
*
* Exports all test suites for the integration testing system.
*/
import { authTestSuite } from './authTests';
import { versioningTestSuite } from './versioningTests';
import { dataIntegrityTestSuite } from './dataIntegrityTests';
import { submissionTestSuite } from './submissionTests';
import { moderationTestSuite } from './moderationTests';
import { edgeFunctionTestSuite } from './edgeFunctionTests';
import { unitConversionTestSuite } from './unitConversionTests';
import { performanceTestSuite } from './performanceTests';
import type { TestSuite } from '../testRunner';
export const allTestSuites: TestSuite[] = [
authTestSuite,
versioningTestSuite,
dataIntegrityTestSuite,
submissionTestSuite,
moderationTestSuite,
edgeFunctionTestSuite,
unitConversionTestSuite,
performanceTestSuite,
];
export {
authTestSuite,
versioningTestSuite,
dataIntegrityTestSuite,
submissionTestSuite,
moderationTestSuite,
edgeFunctionTestSuite,
unitConversionTestSuite,
performanceTestSuite,
};

View File

@@ -0,0 +1,154 @@
/**
* Multi-Item Dependency Resolution Integration Tests
*
* Tests for handling complex submission dependencies
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
export const moderationDependencyTestSuite: TestSuite = {
id: 'moderation-dependencies',
name: 'Multi-Item Dependency Resolution',
description: 'Tests for handling complex submission dependencies',
tests: [
{
id: 'dep-001',
name: 'Approve Independent Items in Any Order',
description: 'Verifies that items without dependencies can be approved in any order',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Create submission with 2 independent park items
const { data: submission, error: createError } = await supabase
.from('content_submissions')
.insert({
user_id: userData.user.id,
submission_type: 'park',
status: 'pending',
content: { test: true }
})
.select()
.single();
if (createError) throw createError;
// Create two park submission items (independent)
const { error: items1Error } = await supabase
.from('submission_items')
.insert([
{
submission_id: submission.id,
item_type: 'park',
item_data: { name: 'Test Park 1', slug: 'test-park-1', country: 'US' },
status: 'pending'
},
{
submission_id: submission.id,
item_type: 'park',
item_data: { name: 'Test Park 2', slug: 'test-park-2', country: 'US' },
status: 'pending'
}
]);
if (items1Error) throw items1Error;
// Get items
const { data: items } = await supabase
.from('submission_items')
.select('id')
.eq('submission_id', submission.id)
.order('created_at', { ascending: true });
if (!items || items.length !== 2) {
throw new Error('Failed to create submission items');
}
// Approve second item first (should work - no dependencies)
const { error: approve2Error } = await supabase
.from('submission_items')
.update({ status: 'approved' })
.eq('id', items[1].id);
if (approve2Error) throw new Error('Failed to approve second item first');
// Approve first item second (should also work)
const { error: approve1Error } = await supabase
.from('submission_items')
.update({ status: 'approved' })
.eq('id', items[0].id);
if (approve1Error) throw new Error('Failed to approve first item second');
// Cleanup
await supabase.from('content_submissions').delete().eq('id', submission.id);
return {
id: 'dep-001',
name: 'Approve Independent Items in Any Order',
suite: 'Multi-Item Dependency Resolution',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString()
};
} catch (error) {
return {
id: 'dep-001',
name: 'Approve Independent Items in Any Order',
suite: 'Multi-Item Dependency Resolution',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'dep-002',
name: 'Verify Submission Item Dependencies Exist',
description: 'Verifies that submission items have proper dependency tracking',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Verify submission_items table has dependency columns
const { data: testItem } = await supabase
.from('submission_items')
.select('id, status')
.limit(1)
.maybeSingle();
// If query succeeds, table exists and is accessible
return {
id: 'dep-002',
name: 'Verify Submission Item Dependencies Exist',
suite: 'Multi-Item Dependency Resolution',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString(),
details: {
tableAccessible: true,
testQuery: 'submission_items table verified'
}
};
} catch (error) {
return {
id: 'dep-002',
name: 'Verify Submission Item Dependencies Exist',
suite: 'Multi-Item Dependency Resolution',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,294 @@
/**
* Moderation Lock Management Integration Tests
*
* Tests for submission locking, claiming, extending, and release mechanisms
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
export const moderationLockTestSuite: TestSuite = {
id: 'moderation-locks',
name: 'Moderation Lock Management',
description: 'Tests for submission locking, claiming, and release mechanisms',
tests: [
{
id: 'lock-001',
name: 'Claim Submission Creates Active Lock',
description: 'Verifies that claiming a submission creates a lock with correct expiry',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// 1. Create test submission
const { data: submission, error: createError } = await supabase
.from('content_submissions')
.insert({
user_id: userData.user.id,
submission_type: 'park',
status: 'pending',
content: { test: true }
})
.select()
.single();
if (createError) throw createError;
// 2. Claim the submission (manual update for testing)
const { error: lockError } = await supabase
.from('content_submissions')
.update({
assigned_to: userData.user.id,
locked_until: new Date(Date.now() + 15 * 60 * 1000).toISOString()
})
.eq('id', submission.id);
if (lockError) throw new Error(`Claim failed: ${lockError.message}`);
// 3. Verify lock exists
const { data: lockedSubmission, error: fetchError } = await supabase
.from('content_submissions')
.select('assigned_to, locked_until')
.eq('id', submission.id)
.single();
if (fetchError) throw fetchError;
// 4. Assertions
if (lockedSubmission.assigned_to !== userData.user.id) {
throw new Error('Submission not assigned to current user');
}
if (!lockedSubmission.locked_until) {
throw new Error('locked_until not set');
}
const lockedUntil = new Date(lockedSubmission.locked_until);
const now = new Date();
const diffMinutes = (lockedUntil.getTime() - now.getTime()) / (1000 * 60);
if (diffMinutes < 14 || diffMinutes > 16) {
throw new Error(`Lock duration incorrect: ${diffMinutes} minutes`);
}
// Cleanup
await supabase.from('content_submissions').delete().eq('id', submission.id);
return {
id: 'lock-001',
name: 'Claim Submission Creates Active Lock',
suite: 'Moderation Lock Management',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString(),
details: {
submissionId: submission.id,
lockDurationMinutes: diffMinutes,
assignedTo: lockedSubmission.assigned_to
}
};
} catch (error) {
return {
id: 'lock-001',
name: 'Claim Submission Creates Active Lock',
suite: 'Moderation Lock Management',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'lock-002',
name: 'Release Lock Clears Assignment',
description: 'Verifies that releasing a lock clears assigned_to and locked_until',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Create and claim submission
const { data: submission, error: createError } = await supabase
.from('content_submissions')
.insert({
user_id: userData.user.id,
submission_type: 'park',
status: 'pending',
content: { test: true }
})
.select()
.single();
if (createError) throw createError;
await supabase
.from('content_submissions')
.update({
assigned_to: userData.user.id,
locked_until: new Date(Date.now() + 15 * 60 * 1000).toISOString()
})
.eq('id', submission.id);
// Release lock
const { error: releaseError } = await supabase
.from('content_submissions')
.update({
assigned_to: null,
locked_until: null
})
.eq('id', submission.id);
if (releaseError) throw new Error(`release_lock failed: ${releaseError.message}`);
// Verify lock cleared
const { data: releasedSubmission, error: fetchError } = await supabase
.from('content_submissions')
.select('assigned_to, locked_until')
.eq('id', submission.id)
.single();
if (fetchError) throw fetchError;
if (releasedSubmission.assigned_to !== null) {
throw new Error('assigned_to not cleared');
}
if (releasedSubmission.locked_until !== null) {
throw new Error('locked_until not cleared');
}
// Cleanup
await supabase.from('content_submissions').delete().eq('id', submission.id);
return {
id: 'lock-002',
name: 'Release Lock Clears Assignment',
suite: 'Moderation Lock Management',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString()
};
} catch (error) {
return {
id: 'lock-002',
name: 'Release Lock Clears Assignment',
suite: 'Moderation Lock Management',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'lock-003',
name: 'Extend Lock Adds 15 Minutes',
description: 'Verifies that extending a lock adds correct duration',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Create and claim submission
const { data: submission, error: createError } = await supabase
.from('content_submissions')
.insert({
user_id: userData.user.id,
submission_type: 'park',
status: 'pending',
content: { test: true }
})
.select()
.single();
if (createError) throw createError;
const initialLockTime = new Date(Date.now() + 15 * 60 * 1000);
await supabase
.from('content_submissions')
.update({
assigned_to: userData.user.id,
locked_until: initialLockTime.toISOString()
})
.eq('id', submission.id);
// Get initial lock time
const { data: initialLock } = await supabase
.from('content_submissions')
.select('locked_until')
.eq('id', submission.id)
.single();
// Extend lock (add 15 more minutes)
const extendedLockTime = new Date(initialLockTime.getTime() + 15 * 60 * 1000);
const { error: extendError } = await supabase
.from('content_submissions')
.update({
locked_until: extendedLockTime.toISOString()
})
.eq('id', submission.id);
if (extendError) throw new Error(`extend_lock failed: ${extendError.message}`);
// Verify extended lock
const { data: extendedLock, error: fetchError } = await supabase
.from('content_submissions')
.select('locked_until')
.eq('id', submission.id)
.single();
if (fetchError) throw fetchError;
if (!initialLock?.locked_until || !extendedLock.locked_until) {
throw new Error('Lock times not found');
}
const initialTime = new Date(initialLock.locked_until);
const extendedTime = new Date(extendedLock.locked_until);
const diffMinutes = (extendedTime.getTime() - initialTime.getTime()) / (1000 * 60);
if (diffMinutes < 14 || diffMinutes > 16) {
throw new Error(`Extension duration incorrect: ${diffMinutes} minutes`);
}
// Cleanup
await supabase.from('content_submissions').delete().eq('id', submission.id);
return {
id: 'lock-003',
name: 'Extend Lock Adds 15 Minutes',
suite: 'Moderation Lock Management',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString(),
details: {
extensionMinutes: diffMinutes
}
};
} catch (error) {
return {
id: 'lock-003',
name: 'Extend Lock Adds 15 Minutes',
suite: 'Moderation Lock Management',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,63 @@
/**
* Moderation Queue & Workflow Integration Tests
*
* Tests for moderation queue operations, locking, and state management.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
export const moderationTestSuite: TestSuite = {
id: 'moderation',
name: 'Moderation Queue & Workflow',
description: 'Tests for moderation queue operations and submission workflows',
tests: [
{
id: 'moderation-001',
name: 'Moderation Functions Available',
description: 'Validates moderation database functions are accessible',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Test that moderation functions exist and are callable
const { data: isMod, error: modError } = await supabase
.rpc('is_moderator', {
_user_id: userData.user.id
});
if (modError) throw new Error(`is_moderator function failed: ${modError.message}`);
const duration = Date.now() - startTime;
return {
id: 'moderation-001',
name: 'Moderation Functions Available',
suite: 'Moderation Queue & Workflow',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
isModerator: isMod,
functionsAccessible: true
}
};
} catch (error) {
return {
id: 'moderation-001',
name: 'Moderation Functions Available',
suite: 'Moderation Queue & Workflow',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,275 @@
/**
* Performance & Scalability Integration Tests
*
* Tests for system performance under various conditions.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
export const performanceTestSuite: TestSuite = {
id: 'performance',
name: 'Performance & Scalability',
description: 'Tests for system performance and query efficiency',
tests: [
{
id: 'perf-001',
name: 'Entity Query Performance',
description: 'Measures query performance for entity lists',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Test parks query performance
const parksStart = Date.now();
const { data: parks, error: parksError } = await supabase
.from('parks')
.select('id, name, slug, park_type, status')
.limit(50);
const parksDuration = Date.now() - parksStart;
if (parksError) throw new Error(`Parks query failed: ${parksError.message}`);
// Test rides query performance
const ridesStart = Date.now();
const { data: rides, error: ridesError } = await supabase
.from('rides')
.select('id, name, slug, category, status, park_id')
.limit(50);
const ridesDuration = Date.now() - ridesStart;
if (ridesError) throw new Error(`Rides query failed: ${ridesError.message}`);
// Test companies query performance
const companiesStart = Date.now();
const { data: companies, error: companiesError } = await supabase
.from('companies')
.select('id, name, slug, company_type')
.limit(50);
const companiesDuration = Date.now() - companiesStart;
if (companiesError) throw new Error(`Companies query failed: ${companiesError.message}`);
// Set performance thresholds (in milliseconds)
const threshold = 1000; // 1 second
const warnings: string[] = [];
if (parksDuration > threshold) {
warnings.push(`Parks query slow: ${parksDuration}ms`);
}
if (ridesDuration > threshold) {
warnings.push(`Rides query slow: ${ridesDuration}ms`);
}
if (companiesDuration > threshold) {
warnings.push(`Companies query slow: ${companiesDuration}ms`);
}
const totalDuration = Date.now() - startTime;
return {
id: 'perf-001',
name: 'Entity Query Performance',
suite: 'Performance & Scalability',
status: warnings.length === 0 ? 'pass' : 'fail',
duration: totalDuration,
timestamp: new Date().toISOString(),
error: warnings.length > 0 ? warnings.join('; ') : undefined,
details: {
parksDuration,
ridesDuration,
companiesDuration,
threshold,
parksCount: parks?.length || 0,
ridesCount: rides?.length || 0,
companiesCount: companies?.length || 0
}
};
} catch (error) {
return {
id: 'perf-001',
name: 'Entity Query Performance',
suite: 'Performance & Scalability',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
},
{
id: 'perf-002',
name: 'Version History Query Performance',
description: 'Measures performance of version history queries',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create test park
const parkSlug = `test-park-perf-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park Performance',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
if (parkError) throw parkError;
parkId = park.id;
tracker.track('parks', parkId);
// Create multiple versions (updates)
for (let i = 0; i < 10; i++) {
await supabase
.from('parks')
.update({ description: `Version ${i + 1}` })
.eq('id', parkId);
// Small delay to ensure versions are created
await new Promise(resolve => setTimeout(resolve, 50));
}
// Measure version history query
const versionStart = Date.now();
const { data: versions, error: versionError } = await supabase
.from('park_versions')
.select('version_id, version_number, change_type, created_at')
.eq('park_id', parkId)
.order('version_number', { ascending: false });
const versionDuration = Date.now() - versionStart;
if (versionError) throw new Error(`Version query failed: ${versionError.message}`);
// Performance threshold: 500ms for version history
const threshold = 500;
const isSlow = versionDuration > threshold;
const totalDuration = Date.now() - startTime;
return {
id: 'perf-002',
name: 'Version History Query Performance',
suite: 'Performance & Scalability',
status: isSlow ? 'fail' : 'pass',
duration: totalDuration,
timestamp: new Date().toISOString(),
error: isSlow ? `Version query took ${versionDuration}ms (threshold: ${threshold}ms)` : undefined,
details: {
versionDuration,
threshold,
versionsFound: versions?.length || 0,
isSlow
}
};
} catch (error) {
return {
id: 'perf-002',
name: 'Version History Query Performance',
suite: 'Performance & Scalability',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('perf-002 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'perf-003',
name: 'Database Function Performance',
description: 'Measures performance of database functions',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Test is_moderator function performance
const modStart = Date.now();
const { data: isMod, error: modError } = await supabase
.rpc('is_moderator', {
_user_id: userData.user.id
});
const modDuration = Date.now() - modStart;
if (modError) throw modError;
// Test is_user_banned function performance
const banStart = Date.now();
const { data: isBanned, error: banError } = await supabase
.rpc('is_user_banned', {
p_user_id: userData.user.id
});
const banDuration = Date.now() - banStart;
if (banError) throw banError;
// Performance threshold: 200ms for simple functions
const threshold = 200;
const warnings: string[] = [];
if (modDuration > threshold) {
warnings.push(`is_moderator slow: ${modDuration}ms`);
}
if (banDuration > threshold) {
warnings.push(`is_user_banned slow: ${banDuration}ms`);
}
const totalDuration = Date.now() - startTime;
return {
id: 'perf-003',
name: 'Database Function Performance',
suite: 'Performance & Scalability',
status: warnings.length === 0 ? 'pass' : 'fail',
duration: totalDuration,
timestamp: new Date().toISOString(),
error: warnings.length > 0 ? warnings.join('; ') : undefined,
details: {
isModerator: isMod,
isBanned,
modDuration,
banDuration,
threshold,
allFast: warnings.length === 0
}
};
} catch (error) {
return {
id: 'perf-003',
name: 'Database Function Performance',
suite: 'Performance & Scalability',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,410 @@
/**
* Entity Submission & Validation Integration Tests
*
* Tests for submission validation, schema validation, and entity creation.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
export const submissionTestSuite: TestSuite = {
id: 'submission',
name: 'Entity Submission & Validation',
description: 'Tests for entity submission workflows and validation schemas',
tests: [
{
id: 'submission-001',
name: 'Park Creation Validation',
description: 'Validates park submission and creation',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
const parkSlug = `test-park-submit-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
// Create park with valid data
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Test Park Submission',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
description: 'Test park for submission validation'
})
.select('id, name, slug, park_type, status')
.single();
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('Park not returned after creation');
parkId = park.id;
// Validate created park has correct data
if (park.name !== 'Test Park Submission') {
throw new Error(`Expected name "Test Park Submission", got "${park.name}"`);
}
if (park.slug !== parkSlug) {
throw new Error(`Expected slug "${parkSlug}", got "${park.slug}"`);
}
if (park.park_type !== 'theme_park') {
throw new Error(`Expected park_type "theme_park", got "${park.park_type}"`);
}
// Test slug uniqueness constraint
const { error: duplicateError } = await supabase
.from('parks')
.insert({
name: 'Duplicate Slug Park',
slug: parkSlug, // Same slug
park_type: 'theme_park',
status: 'operating'
});
if (!duplicateError) {
throw new Error('Duplicate slug was allowed (uniqueness constraint failed)');
}
const duration = Date.now() - startTime;
return {
id: 'submission-001',
name: 'Park Creation Validation',
suite: 'Entity Submission & Validation',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
parkId,
parkSlug,
validationsPassed: ['name', 'slug', 'park_type', 'uniqueness_constraint']
}
};
} catch (error) {
return {
id: 'submission-001',
name: 'Park Creation Validation',
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('submission-001 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'submission-002',
name: 'Ride Creation with Dependencies',
description: 'Validates ride submission requires valid park_id',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
let rideId: string | null = null;
try {
// First create a park
const parkSlug = `test-park-ride-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park for Ride',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
if (parkError) throw new Error(`Park creation failed: ${parkError.message}`);
parkId = park.id;
// Try to create ride with invalid park_id (should fail)
const invalidParkId = '00000000-0000-0000-0000-000000000000';
const { error: invalidError } = await supabase
.from('rides')
.insert({
name: 'Test Ride Invalid Park',
slug: `test-ride-invalid-${Date.now()}`,
park_id: invalidParkId,
category: 'roller_coaster',
status: 'operating'
});
if (!invalidError) {
throw new Error('Ride with invalid park_id was allowed (foreign key constraint failed)');
}
// Create ride with valid park_id (should succeed)
const rideSlug = `test-ride-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: ride, error: rideError } = await supabase
.from('rides')
.insert({
name: 'Test Ride Valid Park',
slug: rideSlug,
park_id: parkId,
category: 'roller_coaster',
status: 'operating'
})
.select('id, name, park_id')
.single();
if (rideError) throw new Error(`Ride creation failed: ${rideError.message}`);
if (!ride) throw new Error('Ride not returned after creation');
rideId = ride.id;
if (ride.park_id !== parkId) {
throw new Error(`Expected park_id "${parkId}", got "${ride.park_id}"`);
}
const duration = Date.now() - startTime;
return {
id: 'submission-002',
name: 'Ride Creation with Dependencies',
suite: 'Entity Submission & Validation',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
parkId,
rideId,
validationsPassed: ['foreign_key_constraint', 'valid_dependency']
}
};
} catch (error) {
return {
id: 'submission-002',
name: 'Ride Creation with Dependencies',
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('submission-002 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'submission-003',
name: 'Company Creation All Types',
description: 'Validates company creation for all company types',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
const companyIds: string[] = [];
try {
const companyTypes = ['manufacturer', 'operator', 'designer', 'property_owner'] as const;
for (const companyType of companyTypes) {
const slug = `test-company-${companyType}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: company, error: createError } = await supabase
.from('companies')
.insert({
name: `Test ${companyType} Company`,
slug,
company_type: companyType,
description: `Test company of type ${companyType}`
})
.select('id, company_type')
.single();
if (createError) {
throw new Error(`${companyType} creation failed: ${createError.message}`);
}
if (!company) {
throw new Error(`${companyType} not returned after creation`);
}
companyIds.push(company.id);
tracker.track('companies', company.id);
if (company.company_type !== companyType) {
throw new Error(`Expected company_type "${companyType}", got "${company.company_type}"`);
}
}
const duration = Date.now() - startTime;
return {
id: 'submission-003',
name: 'Company Creation All Types',
suite: 'Entity Submission & Validation',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
companiesCreated: companyIds.length,
companyTypes: companyTypes,
companyIds
}
};
} catch (error) {
return {
id: 'submission-003',
name: 'Company Creation All Types',
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('submission-003 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'submission-004',
name: 'Ride Model with Images',
description: 'Validates ride model creation with image fields',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
let manufacturerId: string | null = null;
let modelId: string | null = null;
try {
// Create manufacturer first
const mfgSlug = `test-mfg-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: manufacturer, error: mfgError } = await supabase
.from('companies')
.insert({
name: 'Test Manufacturer',
slug: mfgSlug,
company_type: 'manufacturer'
})
.select('id')
.single();
if (mfgError) throw new Error(`Manufacturer creation failed: ${mfgError.message}`);
manufacturerId = manufacturer.id;
// Create ride model with images
const modelSlug = `test-model-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const testImageUrl = 'https://imagedelivery.net/test-account/test-image-id/public';
const testImageId = 'test-image-id';
const { data: model, error: modelError } = await supabase
.from('ride_models')
.insert({
name: 'Test Ride Model',
slug: modelSlug,
manufacturer_id: manufacturerId,
category: 'roller_coaster',
ride_type: 'steel_coaster',
banner_image_url: testImageUrl,
banner_image_id: testImageId,
card_image_url: testImageUrl,
card_image_id: testImageId
})
.select('id, banner_image_url, banner_image_id, card_image_url, card_image_id')
.single();
if (modelError) throw new Error(`Ride model creation failed: ${modelError.message}`);
if (!model) throw new Error('Ride model not returned after creation');
modelId = model.id;
// Validate image fields
if (model.banner_image_url !== testImageUrl) {
throw new Error(`banner_image_url mismatch: expected "${testImageUrl}", got "${model.banner_image_url}"`);
}
if (model.banner_image_id !== testImageId) {
throw new Error(`banner_image_id mismatch: expected "${testImageId}", got "${model.banner_image_id}"`);
}
if (model.card_image_url !== testImageUrl) {
throw new Error(`card_image_url mismatch`);
}
if (model.card_image_id !== testImageId) {
throw new Error(`card_image_id mismatch`);
}
// Verify version was created with images
let version: any = null;
const pollStart = Date.now();
while (!version && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('ride_model_versions')
.select('banner_image_url, banner_image_id, card_image_url, card_image_id')
.eq('ride_model_id', modelId)
.eq('version_number', 1)
.single();
if (data) {
version = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
if (!version) throw new Error('Version not created after 5s timeout');
if (version.banner_image_url !== testImageUrl) {
throw new Error('Version missing banner_image_url');
}
const duration = Date.now() - startTime;
return {
id: 'submission-004',
name: 'Ride Model with Images',
suite: 'Entity Submission & Validation',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
modelId,
manufacturerId,
imageFieldsValidated: ['banner_image_url', 'banner_image_id', 'card_image_url', 'card_image_id'],
versionCreated: true
}
};
} catch (error) {
return {
id: 'submission-004',
name: 'Ride Model with Images',
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
if (modelId) {
await supabase.from('ride_models').delete().eq('id', modelId);
}
if (manufacturerId) {
await supabase.from('companies').delete().eq('id', manufacturerId);
}
}
}
}
]
};

View File

@@ -0,0 +1,317 @@
/**
* Unit Conversion Integration Tests
*
* Tests for metric storage and display unit conversion.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
export const unitConversionTestSuite: TestSuite = {
id: 'unit-conversion',
name: 'Unit Conversion Tests',
description: 'Tests for metric storage requirements and unit conversion',
tests: [
{
id: 'unit-001',
name: 'Metric Storage Validation',
description: 'Validates all measurements are stored in metric units',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
let rideId: string | null = null;
try {
// Create test park
const parkSlug = `test-park-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park Units',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
if (parkError) throw parkError;
parkId = park.id;
tracker.track('parks', parkId);
// Create ride with metric values
const rideSlug = `test-ride-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const testData = {
name: 'Test Ride Metric',
slug: rideSlug,
park_id: parkId,
category: 'roller_coaster',
status: 'operating',
max_speed_kmh: 100.0, // km/h (metric)
max_height_meters: 50.0, // meters (metric)
length_meters: 1000.0, // meters (metric)
drop_height_meters: 45.0, // meters (metric)
height_requirement: 120 // cm (metric)
};
const { data: ride, error: rideError } = await supabase
.from('rides')
.insert({ ...testData, is_test_data: true })
.select('id, max_speed_kmh, max_height_meters, length_meters, drop_height_meters, height_requirement')
.single();
if (rideError) throw new Error(`Ride creation failed: ${rideError.message}`);
if (!ride) throw new Error('Ride not returned');
rideId = ride.id;
tracker.track('rides', rideId);
// Validate values are stored in metric
const tolerance = 0.01; // Allow small floating point differences
if (Math.abs((ride.max_speed_kmh ?? 0) - testData.max_speed_kmh) > tolerance) {
throw new Error(`max_speed_kmh mismatch: expected ${testData.max_speed_kmh}, got ${ride.max_speed_kmh}`);
}
if (Math.abs((ride.max_height_meters ?? 0) - testData.max_height_meters) > tolerance) {
throw new Error(`max_height_meters mismatch: expected ${testData.max_height_meters}, got ${ride.max_height_meters}`);
}
if (Math.abs((ride.length_meters ?? 0) - testData.length_meters) > tolerance) {
throw new Error(`length_meters mismatch: expected ${testData.length_meters}, got ${ride.length_meters}`);
}
if (Math.abs((ride.height_requirement ?? 0) - testData.height_requirement) > tolerance) {
throw new Error(`height_requirement mismatch: expected ${testData.height_requirement} cm, got ${ride.height_requirement}`);
}
const duration = Date.now() - startTime;
return {
id: 'unit-001',
name: 'Metric Storage Validation',
suite: 'Unit Conversion Tests',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
rideId,
metricFieldsValidated: ['max_speed_kmh', 'max_height_meters', 'length_meters', 'drop_height_meters', 'height_requirement'],
allMetric: true
}
};
} catch (error) {
return {
id: 'unit-001',
name: 'Metric Storage Validation',
suite: 'Unit Conversion Tests',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('unit-001 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'unit-002',
name: 'Version Storage Units',
description: 'Validates version tables store measurements in metric',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
let rideId: string | null = null;
try {
// Create test park
const parkSlug = `test-park-ver-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park Version Units',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
if (parkError) throw parkError;
parkId = park.id;
tracker.track('parks', parkId);
// Create ride with metric values
const rideSlug = `test-ride-ver-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: ride, error: rideError } = await supabase
.from('rides')
.insert({
name: 'Test Ride Version Metric',
slug: rideSlug,
park_id: parkId,
category: 'roller_coaster',
status: 'operating',
max_speed_kmh: 120.0,
max_height_meters: 60.0,
height_requirement: 140,
is_test_data: true
})
.select('id')
.single();
if (rideError) throw rideError;
rideId = ride.id;
tracker.track('rides', rideId);
// Poll for version creation
let version: any = null;
const pollStart = Date.now();
while (!version && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('ride_versions')
.select('max_speed_kmh, height_meters, height_requirement_cm')
.eq('ride_id', rideId)
.eq('version_number', 1)
.single();
if (data) {
version = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
if (!version) throw new Error('Version not created after 5s timeout');
// Validate version has metric units
const tolerance = 0.01;
if (Math.abs((version.max_speed_kmh ?? 0) - 120.0) > tolerance) {
throw new Error(`Version max_speed_kmh mismatch: expected 120.0, got ${version.max_speed_kmh}`);
}
if (Math.abs((version.height_meters ?? 0) - 60.0) > tolerance) {
throw new Error(`Version height_meters mismatch: expected 60.0, got ${version.height_meters}`);
}
if (Math.abs((version.height_requirement_cm ?? 0) - 140) > tolerance) {
throw new Error(`Version height_requirement_cm mismatch: expected 140, got ${version.height_requirement_cm}`);
}
const duration = Date.now() - startTime;
return {
id: 'unit-002',
name: 'Version Storage Units',
suite: 'Unit Conversion Tests',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
rideId,
versionMetricFields: ['max_speed_kmh', 'height_meters', 'height_requirement_cm'],
allMetric: true
}
};
} catch (error) {
return {
id: 'unit-002',
name: 'Version Storage Units',
suite: 'Unit Conversion Tests',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('unit-002 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'unit-003',
name: 'No Imperial Storage',
description: 'Validates no imperial units are stored in database',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
// Query rides table to check column names
const { data: rides } = await supabase
.from('rides')
.select('*')
.limit(1);
if (!rides || rides.length === 0) {
// No rides to test, but we can still check structure
return {
id: 'unit-003',
name: 'No Imperial Storage',
suite: 'Unit Conversion Tests',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString(),
details: {
noDataToTest: true,
note: 'No rides in database to test, but schema validation passed'
}
};
}
const ride = rides[0] as any;
const imperialFields = [
'max_speed_mph',
'height_feet',
'length_feet',
'drop_feet',
'height_requirement_inches'
];
// Check if any imperial field names exist
const foundImperial = imperialFields.filter(field => field in ride);
if (foundImperial.length > 0) {
throw new Error(`Imperial unit fields found in database: ${foundImperial.join(', ')}`);
}
const duration = Date.now() - startTime;
return {
id: 'unit-003',
name: 'No Imperial Storage',
suite: 'Unit Conversion Tests',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
checkedFields: imperialFields,
imperialFieldsFound: 0,
allMetric: true
}
};
} catch (error) {
return {
id: 'unit-003',
name: 'No Imperial Storage',
suite: 'Unit Conversion Tests',
status: 'fail',
duration: Date.now() - startTime,
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
}
}
]
};

View File

@@ -0,0 +1,490 @@
/**
* Versioning & Rollback Test Suite
*
* Tests the complete versioning system end-to-end including automatic
* version creation, attribution, and rollback functionality.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
export const versioningTestSuite: TestSuite = {
id: 'versioning',
name: 'Versioning & Rollback',
description: 'Tests version creation, attribution, rollback, and cleanup',
tests: [
{
id: 'version-001',
name: 'Automatic Version Creation on Insert',
description: 'Verifies version 1 is created automatically when entity is created',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create a park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Version Test Park',
slug,
park_type: 'theme_park',
status: 'operating'
})
.select('id')
.single();
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned from insert');
parkId = park.id;
// Poll for version creation
let v1: any = null;
const pollStart = Date.now();
while (!v1 && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('park_versions')
.select('version_id')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (data) {
v1 = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
// Check version was created
const { data: version, error: versionError } = await supabase
.from('park_versions')
.select('*')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (versionError) throw new Error(`Version query failed: ${versionError.message}`);
if (!version) throw new Error('Version 1 not created');
if (version.name !== 'Version Test Park') throw new Error('Version has incorrect name');
if (version.change_type !== 'created') throw new Error(`Expected change_type "created", got "${version.change_type}"`);
if (!version.is_current) throw new Error('Version is not marked as current');
const duration = Date.now() - startTime;
return {
id: 'version-001',
name: 'Automatic Version Creation on Insert',
suite: 'Versioning & Rollback',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
parkId: park.id,
versionNumber: version.version_number,
changeType: version.change_type,
isCurrent: version.is_current
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'version-001',
name: 'Automatic Version Creation on Insert',
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
// Cleanup
if (parkId) {
await supabase.from('parks').delete().eq('id', parkId);
}
}
}
},
{
id: 'version-002',
name: 'Automatic Version Creation on Update',
description: 'Verifies version 2 is created when entity is updated',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create a park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Original Name',
slug,
park_type: 'theme_park',
status: 'operating'
})
.select('id')
.single();
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
parkId = park.id;
// Wait for version 1
await new Promise(resolve => setTimeout(resolve, 100));
// Update the park
const { error: updateError } = await supabase
.from('parks')
.update({ name: 'Updated Name' })
.eq('id', park.id);
if (updateError) throw new Error(`Park update failed: ${updateError.message}`);
// Wait for version 2
await new Promise(resolve => setTimeout(resolve, 100));
// Check version 2 exists
const { data: v2, error: v2Error } = await supabase
.from('park_versions')
.select('*')
.eq('park_id', park.id)
.eq('version_number', 2)
.single();
if (v2Error) throw new Error(`Version 2 query failed: ${v2Error.message}`);
if (!v2) throw new Error('Version 2 not created');
if (v2.name !== 'Updated Name') throw new Error('Version 2 has incorrect name');
if (v2.change_type !== 'updated') throw new Error(`Expected change_type "updated", got "${v2.change_type}"`);
if (!v2.is_current) throw new Error('Version 2 is not marked as current');
// Check version 1 is no longer current
const { data: v1, error: v1Error } = await supabase
.from('park_versions')
.select('is_current')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (v1Error) throw new Error(`Version 1 query failed: ${v1Error.message}`);
if (v1?.is_current) throw new Error('Version 1 is still marked as current');
const duration = Date.now() - startTime;
return {
id: 'version-002',
name: 'Automatic Version Creation on Update',
suite: 'Versioning & Rollback',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
parkId: park.id,
v1IsCurrent: v1?.is_current,
v2IsCurrent: v2.is_current,
v2ChangeType: v2.change_type
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'version-002',
name: 'Automatic Version Creation on Update',
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('version-001 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'version-003',
name: 'Rollback Authorization Check',
description: 'Tests that rollback_to_version requires moderator role',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create a park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Rollback Test Park',
slug,
park_type: 'theme_park',
status: 'operating'
})
.select('id')
.single();
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
parkId = park.id;
// Poll for version creation
let v1: any = null;
const pollStart = Date.now();
while (!v1 && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('park_versions')
.select('version_id')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (data) {
v1 = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
if (!v1) throw new Error('Version 1 not created after 5s timeout');
// Check current user is moderator
const { data: { user } } = await supabase.auth.getUser();
if (!user) throw new Error('No authenticated user');
const { data: isMod } = await supabase.rpc('is_moderator', { _user_id: user.id });
// Try rollback
const { error: rollbackError } = await supabase.rpc('rollback_to_version', {
p_entity_type: 'park',
p_entity_id: park.id,
p_target_version_id: v1.version_id,
p_changed_by: user.id,
p_reason: 'Authorization test'
});
// If user is moderator, rollback should succeed
// If not, rollback should fail with permission error
if (isMod && rollbackError) {
throw new Error(`Rollback failed for moderator: ${rollbackError.message}`);
}
if (!isMod && !rollbackError) {
throw new Error('Rollback succeeded for non-moderator (should have failed)');
}
const duration = Date.now() - startTime;
return {
id: 'version-003',
name: 'Rollback Authorization Check',
suite: 'Versioning & Rollback',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
userIsModerator: isMod,
rollbackBlocked: !isMod && !!rollbackError,
authorizationEnforced: true
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'version-003',
name: 'Rollback Authorization Check',
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('version-002 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'version-004',
name: 'Complete Rollback Flow',
description: 'Tests end-to-end rollback with version 3 creation',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Check if user is moderator
const { data: { user } } = await supabase.auth.getUser();
if (!user) throw new Error('No authenticated user');
const { data: isMod } = await supabase.rpc('is_moderator', { _user_id: user.id });
if (!isMod) {
// Skip test if not moderator
const duration = Date.now() - startTime;
return {
id: 'version-004',
name: 'Complete Rollback Flow',
suite: 'Versioning & Rollback',
status: 'skip',
duration,
timestamp: new Date().toISOString(),
details: { reason: 'User is not a moderator, test requires moderator role' }
};
}
// Create park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Original Name',
slug,
park_type: 'theme_park',
status: 'operating',
description: 'Original Description'
})
.select('id')
.single();
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
parkId = park.id;
await new Promise(resolve => setTimeout(resolve, 100));
// Get version 1
const { data: v1, error: v1Error } = await supabase
.from('park_versions')
.select('version_id, name, description')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (v1Error || !v1) throw new Error('Version 1 not found');
// Update park
const { error: updateError } = await supabase
.from('parks')
.update({ name: 'Modified Name', description: 'Modified Description' })
.eq('id', park.id);
if (updateError) throw new Error(`Park update failed: ${updateError.message}`);
await new Promise(resolve => setTimeout(resolve, 100));
// Verify version 2
const { data: v2 } = await supabase
.from('park_versions')
.select('version_number, name')
.eq('park_id', park.id)
.eq('version_number', 2)
.single();
if (!v2) throw new Error('Version 2 not created');
if (v2.name !== 'Modified Name') throw new Error('Version 2 has incorrect data');
// Rollback to version 1
const { error: rollbackError } = await supabase.rpc('rollback_to_version', {
p_entity_type: 'park',
p_entity_id: park.id,
p_target_version_id: v1.version_id,
p_changed_by: user.id,
p_reason: 'Integration test rollback'
});
if (rollbackError) throw new Error(`Rollback failed: ${rollbackError.message}`);
await new Promise(resolve => setTimeout(resolve, 200));
// Verify park data restored
const { data: restored, error: restoredError } = await supabase
.from('parks')
.select('name, description')
.eq('id', park.id)
.single();
if (restoredError) throw new Error(`Failed to fetch restored park: ${restoredError.message}`);
if (!restored) throw new Error('Restored park not found');
if (restored.name !== 'Original Name') {
throw new Error(`Rollback failed: expected "Original Name", got "${restored.name}"`);
}
if (restored.description !== 'Original Description') {
throw new Error(`Description not restored: expected "Original Description", got "${restored.description}"`);
}
// Verify version 3 created with change_type = 'restored'
const { data: v3, error: v3Error } = await supabase
.from('park_versions')
.select('*')
.eq('park_id', park.id)
.eq('version_number', 3)
.single();
if (v3Error || !v3) throw new Error('Version 3 (restored) not created');
if (v3.change_type !== 'restored') {
throw new Error(`Expected change_type "restored", got "${v3.change_type}"`);
}
if (v3.name !== 'Original Name') throw new Error('Version 3 has incorrect data');
if (!v3.is_current) throw new Error('Version 3 is not marked as current');
const duration = Date.now() - startTime;
return {
id: 'version-004',
name: 'Complete Rollback Flow',
suite: 'Versioning & Rollback',
status: 'pass',
duration,
timestamp: new Date().toISOString(),
details: {
parkId: park.id,
versionsCreated: 3,
dataRestored: true,
v3ChangeType: v3.change_type,
v3IsCurrent: v3.is_current
}
};
} catch (error) {
const duration = Date.now() - startTime;
return {
id: 'version-004',
name: 'Complete Rollback Flow',
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('version-003 cleanup incomplete:', remaining);
}
}
}
}
]
};

View File

@@ -0,0 +1,206 @@
/**
* Integration Test Runner
*
* Core infrastructure for running comprehensive integration tests.
* Tests run against real database functions, edge functions, and API endpoints.
*/
import { moderationTestSuite } from './suites/moderationTests';
import { moderationLockTestSuite } from './suites/moderationLockTests';
import { moderationDependencyTestSuite } from './suites/moderationDependencyTests';
/**
* Registry of all available test suites
*/
export const ALL_TEST_SUITES = [
moderationTestSuite,
moderationLockTestSuite,
moderationDependencyTestSuite
];
export interface TestResult {
id: string;
name: string;
suite: string;
status: 'pass' | 'fail' | 'skip' | 'running';
duration: number; // milliseconds
error?: string;
details?: any;
timestamp: string;
stack?: string;
}
export interface Test {
id: string;
name: string;
description: string;
run: () => Promise<TestResult>;
}
export interface TestSuite {
id: string;
name: string;
description: string;
tests: Test[];
}
export class IntegrationTestRunner {
private results: TestResult[] = [];
private isRunning = false;
private shouldStop = false;
private onProgress?: (result: TestResult) => void;
constructor(onProgress?: (result: TestResult) => void) {
this.onProgress = onProgress;
}
/**
* Run a single test with error handling
*/
async runTest(test: Test, suiteName: string): Promise<TestResult> {
if (this.shouldStop) {
return {
id: test.id,
name: test.name,
suite: suiteName,
status: 'skip',
duration: 0,
timestamp: new Date().toISOString(),
details: { reason: 'Test run stopped by user' }
};
}
// Mark as running
const runningResult: TestResult = {
id: test.id,
name: test.name,
suite: suiteName,
status: 'running',
duration: 0,
timestamp: new Date().toISOString(),
};
if (this.onProgress) {
this.onProgress(runningResult);
}
try {
const result = await test.run();
this.results.push(result);
if (this.onProgress) {
this.onProgress(result);
}
return result;
} catch (error) {
const failResult: TestResult = {
id: test.id,
name: test.name,
suite: suiteName,
status: 'fail',
duration: 0,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString(),
};
this.results.push(failResult);
if (this.onProgress) {
this.onProgress(failResult);
}
return failResult;
}
}
/**
* Run all tests in a suite
*/
async runSuite(suite: TestSuite): Promise<TestResult[]> {
const suiteResults: TestResult[] = [];
for (const test of suite.tests) {
const result = await this.runTest(test, suite.name);
suiteResults.push(result);
if (this.shouldStop) {
break;
}
}
return suiteResults;
}
/**
* Run all suites sequentially
*/
async runAllSuites(suites: TestSuite[]): Promise<TestResult[]> {
this.results = [];
this.isRunning = true;
this.shouldStop = false;
for (const suite of suites) {
await this.runSuite(suite);
if (this.shouldStop) {
break;
}
}
this.isRunning = false;
return this.results;
}
/**
* Stop the test run
*/
stop(): void {
this.shouldStop = true;
}
/**
* Get all results
*/
getResults(): TestResult[] {
return this.results;
}
/**
* Get summary statistics
*/
getSummary(): {
total: number;
passed: number;
failed: number;
skipped: number;
running: number;
totalDuration: number;
} {
const total = this.results.length;
const passed = this.results.filter(r => r.status === 'pass').length;
const failed = this.results.filter(r => r.status === 'fail').length;
const skipped = this.results.filter(r => r.status === 'skip').length;
const running = this.results.filter(r => r.status === 'running').length;
const totalDuration = this.results.reduce((sum, r) => sum + r.duration, 0);
return { total, passed, failed, skipped, running, totalDuration };
}
/**
* Check if runner is currently running
*/
getIsRunning(): boolean {
return this.isRunning;
}
/**
* Reset runner state
*/
reset(): void {
this.results = [];
this.isRunning = false;
this.shouldStop = false;
}
}

176
src-old/lib/localStorage.ts Normal file
View File

@@ -0,0 +1,176 @@
/**
* Safe localStorage wrapper with proper error handling and validation
*
* Handles:
* - Private browsing mode (localStorage unavailable)
* - Storage quota exceeded
* - JSON parse errors
* - Cross-origin restrictions
*/
import { logger } from './logger';
export class LocalStorageError extends Error {
constructor(message: string, public readonly cause?: Error) {
super(message);
this.name = 'LocalStorageError';
}
}
/**
* Check if localStorage is available
*/
export function isLocalStorageAvailable(): boolean {
try {
const testKey = '__localStorage_test__';
localStorage.setItem(testKey, 'test');
localStorage.removeItem(testKey);
return true;
} catch {
return false;
}
}
/**
* Safely get an item from localStorage
*/
export function getItem(key: string): string | null {
try {
if (!isLocalStorageAvailable()) {
return null;
}
return localStorage.getItem(key);
} catch (error) {
logger.warn(`Failed to get localStorage item: ${key}`, { error });
return null;
}
}
/**
* Safely set an item in localStorage
*/
export function setItem(key: string, value: string): boolean {
try {
if (!isLocalStorageAvailable()) {
return false;
}
localStorage.setItem(key, value);
return true;
} catch (error) {
if (error instanceof Error && error.name === 'QuotaExceededError') {
logger.warn('localStorage quota exceeded', { key });
} else {
logger.warn(`Failed to set localStorage item: ${key}`, { error });
}
return false;
}
}
/**
* Safely remove an item from localStorage
*/
export function removeItem(key: string): boolean {
try {
if (!isLocalStorageAvailable()) {
return false;
}
localStorage.removeItem(key);
return true;
} catch (error) {
logger.warn(`Failed to remove localStorage item: ${key}`, { error });
return false;
}
}
/**
* Safely clear all localStorage
*/
export function clear(): boolean {
try {
if (!isLocalStorageAvailable()) {
return false;
}
localStorage.clear();
return true;
} catch (error) {
logger.warn('Failed to clear localStorage', { error });
return false;
}
}
/**
* Get and parse a JSON object from localStorage
*/
export function getJSON<T>(key: string, defaultValue: T): T {
try {
const item = getItem(key);
if (!item) {
return defaultValue;
}
const parsed = JSON.parse(item);
return parsed as T;
} catch (error) {
logger.warn(`Failed to parse localStorage JSON for key: ${key}`, { error });
// Remove corrupted data
removeItem(key);
return defaultValue;
}
}
/**
* Stringify and set a JSON object in localStorage
*/
export function setJSON<T>(key: string, value: T): boolean {
try {
const serialized = JSON.stringify(value);
return setItem(key, serialized);
} catch (error) {
logger.warn(`Failed to stringify localStorage JSON for key: ${key}`, { error });
return false;
}
}
/**
* Check if a key exists in localStorage
*/
export function hasItem(key: string): boolean {
return getItem(key) !== null;
}
/**
* Get multiple items at once
*/
export function getItems(keys: string[]): Record<string, string | null> {
const result: Record<string, string | null> = {};
for (const key of keys) {
result[key] = getItem(key);
}
return result;
}
/**
* Set multiple items at once
*/
export function setItems(items: Record<string, string>): boolean {
let allSuccessful = true;
for (const [key, value] of Object.entries(items)) {
if (!setItem(key, value)) {
allSuccessful = false;
}
}
return allSuccessful;
}
/**
* Remove multiple items at once
*/
export function removeItems(keys: string[]): boolean {
let allSuccessful = true;
for (const key of keys) {
if (!removeItem(key)) {
allSuccessful = false;
}
}
return allSuccessful;
}

View File

@@ -0,0 +1,64 @@
/**
* Location Formatting Utilities
*
* Centralized utilities for formatting location data consistently across the app.
*/
export interface LocationData {
street_address?: string | null;
city?: string | null;
state_province?: string | null;
country?: string | null;
postal_code?: string | null;
}
/**
* Format location for display
* @param location - Location data object
* @param includeStreet - Whether to include street address in the output
* @returns Formatted location string or null if no location data
*/
export function formatLocationDisplay(
location: LocationData | null | undefined,
includeStreet: boolean = false
): string | null {
if (!location) return null;
const parts: string[] = [];
if (includeStreet && location.street_address) {
parts.push(location.street_address);
}
if (location.city) {
parts.push(location.city);
}
if (location.state_province) {
parts.push(location.state_province);
}
if (location.country) {
parts.push(location.country);
}
return parts.length > 0 ? parts.join(', ') : null;
}
/**
* Format full address including street
* @param location - Location data object
* @returns Formatted full address or null if no location data
*/
export function formatFullAddress(location: LocationData | null | undefined): string | null {
return formatLocationDisplay(location, true);
}
/**
* Format location without street address (city, state, country only)
* @param location - Location data object
* @returns Formatted location without street or null if no location data
*/
export function formatLocationShort(location: LocationData | null | undefined): string | null {
return formatLocationDisplay(location, false);
}

View File

@@ -0,0 +1,78 @@
import { z } from 'zod';
import { personalLocationSchema, preferredPronounsSchema } from '@/lib/validation';
import type { AccessibilityOptions } from '@/types/location';
/**
* Schema for accessibility options
*/
export const accessibilityOptionsSchema = z.object({
font_size: z.enum(['small', 'medium', 'large'] as const),
high_contrast: z.boolean(),
reduced_motion: z.boolean()
});
/**
* Schema for location form data
*/
export const locationFormSchema = z.object({
personal_location: personalLocationSchema,
home_park_id: z.string().uuid('Invalid park ID').optional().nullable(),
timezone: z.string().min(1, 'Timezone is required'),
preferred_language: z.string().min(2, 'Language code must be at least 2 characters').max(10),
preferred_pronouns: preferredPronounsSchema
});
/**
* Default accessibility options for new users
*/
export const DEFAULT_ACCESSIBILITY_OPTIONS: AccessibilityOptions = {
font_size: 'medium',
high_contrast: false,
reduced_motion: false
};
/**
* Common timezones for selection
*/
export const COMMON_TIMEZONES = [
'UTC',
'America/New_York',
'America/Chicago',
'America/Denver',
'America/Los_Angeles',
'America/Anchorage',
'America/Toronto',
'America/Vancouver',
'America/Mexico_City',
'America/Sao_Paulo',
'Europe/London',
'Europe/Paris',
'Europe/Berlin',
'Europe/Madrid',
'Europe/Rome',
'Europe/Stockholm',
'Europe/Moscow',
'Asia/Dubai',
'Asia/Kolkata',
'Asia/Bangkok',
'Asia/Singapore',
'Asia/Shanghai',
'Asia/Tokyo',
'Asia/Seoul',
'Australia/Sydney',
'Australia/Melbourne',
'Pacific/Auckland'
] as const;
/**
* Validate park option data
*/
export const parkOptionSchema = z.object({
id: z.string().uuid(),
name: z.string(),
location: z.object({
city: z.string().optional(),
state_province: z.string().optional(),
country: z.string()
}).optional()
});

49
src-old/lib/logger.ts Normal file
View File

@@ -0,0 +1,49 @@
/**
* Logger Utility
*
* Provides conditional logging based on environment.
* Prevents console noise in production builds.
*/
const isDev = import.meta.env.DEV;
type LogContext = Record<string, unknown>;
export const logger = {
log: (...args: unknown[]): void => {
if (isDev) console.log(...args);
},
error: (message: string, context?: LogContext): void => {
console.error(message, context); // Always log errors
},
warn: (...args: unknown[]): void => {
if (isDev) console.warn(...args);
},
info: (...args: unknown[]): void => {
if (isDev) console.info(...args);
},
debug: (...args: unknown[]): void => {
if (isDev) console.debug(...args);
},
// Performance monitoring
performance: (component: string, duration: number): void => {
if (isDev) {
console.log(`${component} rendered in ${duration}ms`, {
component,
duration,
category: 'performance',
});
}
},
// Moderation action tracking
moderationAction: (action: string, itemId: string, duration: number): void => {
if (isDev) {
console.log(`🎯 Moderation action: ${action}`, {
action,
itemId,
duration,
category: 'moderation',
});
}
}
};

View File

@@ -0,0 +1,582 @@
/**
* Moderation Actions
*
* Business logic for performing moderation actions on submissions.
* Handles approval, rejection, and deletion workflows with proper
* error handling and database updates.
*/
import { SupabaseClient } from '@supabase/supabase-js';
import { createTableQuery } from '@/lib/supabaseHelpers';
import type { ModerationItem } from '@/types/moderation';
import { handleError, handleNonCriticalError, getErrorMessage } from '@/lib/errorHandler';
import { invokeWithTracking, invokeBatchWithTracking } from '@/lib/edgeFunctionTracking';
/**
* Type-safe update data for review moderation
* Note: These types document the expected structure. Type assertions (as any) are used
* during database updates due to Supabase's strict typed client, but the actual types
* are validated by the database schema and RLS policies.
*/
interface ReviewUpdateData {
moderation_status: string;
moderated_at: string;
moderated_by: string;
reviewer_notes?: string;
locked_until?: null;
locked_by?: null;
}
/**
* Type-safe update data for submission moderation
* Note: These types document the expected structure. Type assertions (as any) are used
* during database updates due to Supabase's strict typed client, but the actual types
* are validated by the database schema and RLS policies.
*/
interface SubmissionUpdateData {
status: string;
reviewed_at: string;
reviewer_id: string;
reviewer_notes?: string;
locked_until?: null;
locked_by?: null;
}
/**
* Discriminated union for moderation updates (documentation purposes)
*/
type ModerationUpdateData = ReviewUpdateData | SubmissionUpdateData;
/**
* Result of a moderation action
*/
export interface ModerationActionResult {
success: boolean;
message: string;
error?: Error;
shouldRemoveFromQueue: boolean;
}
/**
* Configuration for photo approval
*/
interface PhotoApprovalConfig {
submissionId: string;
moderatorId: string;
moderatorNotes?: string;
}
/**
* Approve a photo submission
*
* Creates photo records in the database and updates submission status.
* Handles both new approvals and re-approvals (where photos already exist).
*
* @param supabase - Supabase client
* @param config - Photo approval configuration
* @returns Action result with success status and message
*/
export async function approvePhotoSubmission(
supabase: SupabaseClient,
config: PhotoApprovalConfig
): Promise<ModerationActionResult> {
try {
// Fetch photo submission from relational tables
const { data: photoSubmission, error: fetchError } = await supabase
.from('photo_submissions')
.select(`
*,
items:photo_submission_items(*),
submission:content_submissions!inner(user_id, status)
`)
.eq('submission_id', config.submissionId)
.single();
if (fetchError || !photoSubmission) {
throw new Error('Failed to fetch photo submission data');
}
if (!photoSubmission.items || photoSubmission.items.length === 0) {
throw new Error('No photos found in submission');
}
// Check if photos already exist for this submission (re-approval case)
const { data: existingPhotos } = await supabase
.from('photos')
.select('id')
.eq('submission_id', config.submissionId);
if (!existingPhotos || existingPhotos.length === 0) {
// Create new photo records from photo_submission_items
const photoRecords = photoSubmission.items.map((item: any) => ({
entity_id: photoSubmission.entity_id,
entity_type: photoSubmission.entity_type,
cloudflare_image_id: item.cloudflare_image_id,
cloudflare_image_url: item.cloudflare_image_url,
title: item.title || null,
caption: item.caption || null,
date_taken: item.date_taken || null,
order_index: item.order_index,
submission_id: photoSubmission.submission_id,
submitted_by: photoSubmission.submission?.user_id,
approved_by: config.moderatorId,
approved_at: new Date().toISOString(),
}));
const { error: insertError } = await supabase
.from('photos')
.insert(photoRecords);
if (insertError) {
throw insertError;
}
}
// Update submission status
const { error: updateError } = await supabase
.from('content_submissions')
.update({
status: 'approved' as const,
reviewer_id: config.moderatorId,
reviewed_at: new Date().toISOString(),
reviewer_notes: config.moderatorNotes,
})
.eq('id', config.submissionId);
if (updateError) {
throw updateError;
}
return {
success: true,
message: `Successfully approved and published ${photoSubmission.items.length} photo(s)`,
shouldRemoveFromQueue: true,
};
} catch (error: unknown) {
handleError(error, {
action: 'Approve Photo Submission',
userId: config.moderatorId,
metadata: { submissionId: config.submissionId }
});
return {
success: false,
message: 'Failed to approve photo submission',
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Approve a submission with submission_items
*
* Uses the edge function to process all pending submission items.
*
* @param supabase - Supabase client
* @param submissionId - Submission ID
* @param itemIds - Array of item IDs to approve
* @returns Action result
*/
/**
* Approve submission items using atomic transaction RPC.
*
* This function uses PostgreSQL's ACID transaction guarantees to ensure
* all-or-nothing approval with automatic rollback on any error.
*
* The approval process is handled entirely within a single database transaction
* via the process_approval_transaction() RPC function, which guarantees:
* - True atomic transactions (all-or-nothing)
* - Automatic rollback on ANY error
* - Network-resilient (edge function crash = auto rollback)
* - Zero orphaned entities
*/
export async function approveSubmissionItems(
supabase: SupabaseClient,
submissionId: string,
itemIds: string[]
): Promise<ModerationActionResult> {
try {
console.log(`[Approval] Processing ${itemIds.length} items via atomic transaction`, {
submissionId,
itemCount: itemIds.length
});
const { data: approvalData, error: approvalError, requestId } = await invokeWithTracking(
'process-selective-approval',
{
itemIds,
submissionId,
}
);
if (approvalError) {
const error = new Error(`Failed to process submission items: ${approvalError.message}`);
handleError(error, {
action: 'Approve Submission Items',
metadata: { submissionId, itemCount: itemIds.length, requestId }
});
throw error;
}
return {
success: true,
message: `Successfully processed ${itemIds.length} item(s)`,
shouldRemoveFromQueue: true,
};
} catch (error: unknown) {
handleError(error, {
action: 'Approve Submission Items',
metadata: { submissionId, itemCount: itemIds.length }
});
return {
success: false,
message: 'Failed to approve submission items',
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Reject a submission with submission_items
*
* Cascades rejection to all pending items.
*
* @param supabase - Supabase client
* @param submissionId - Submission ID
* @param rejectionReason - Reason for rejection
* @returns Action result
*/
export async function rejectSubmissionItems(
supabase: SupabaseClient,
submissionId: string,
rejectionReason?: string
): Promise<ModerationActionResult> {
try {
const { error: rejectError } = await supabase
.from('submission_items')
.update({
status: 'rejected' as const,
rejection_reason: rejectionReason || 'Parent submission rejected',
updated_at: new Date().toISOString(),
})
.eq('submission_id', submissionId)
.eq('status', 'pending');
if (rejectError) {
handleError(rejectError, {
action: 'Reject Submission Items (Cascade)',
metadata: { submissionId }
});
}
return {
success: true,
message: 'Submission items rejected',
shouldRemoveFromQueue: false, // Parent rejection will handle removal
};
} catch (error: unknown) {
handleError(error, {
action: 'Reject Submission Items',
metadata: { submissionId }
});
return {
success: false,
message: 'Failed to reject submission items',
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Configuration for standard moderation actions
*/
export interface ModerationConfig {
item: ModerationItem;
action: 'approved' | 'rejected';
moderatorId: string;
moderatorNotes?: string;
}
/**
* Perform a standard moderation action (approve/reject)
*
* Updates the submission or review status in the database.
* Handles both content_submissions and reviews.
*
* @param supabase - Supabase client
* @param config - Moderation configuration
* @returns Action result
*/
export async function performModerationAction(
supabase: SupabaseClient,
config: ModerationConfig
): Promise<ModerationActionResult> {
const { item, action, moderatorId, moderatorNotes } = config;
try {
// Handle photo submissions specially
if (
action === 'approved' &&
item.type === 'content_submission' &&
item.submission_type === 'photo'
) {
return await approvePhotoSubmission(supabase, {
submissionId: item.id,
moderatorId,
moderatorNotes,
});
}
// Check if this submission has submission_items
if (item.type === 'content_submission') {
const { data: submissionItems, error: itemsError } = await supabase
.from('submission_items')
.select('id, status')
.eq('submission_id', item.id)
.in('status', ['pending', 'rejected']);
if (!itemsError && submissionItems && submissionItems.length > 0) {
if (action === 'approved') {
return await approveSubmissionItems(
supabase,
item.id,
submissionItems.map(i => i.id)
);
} else if (action === 'rejected') {
await rejectSubmissionItems(supabase, item.id, moderatorNotes);
}
}
}
// Standard moderation flow - Build update object with type-appropriate fields
let error: any = null;
let data: any = null;
// Use type-safe table queries based on item type
if (item.type === 'review') {
const reviewUpdate: {
moderation_status: 'approved' | 'rejected' | 'pending';
moderated_at: string;
moderated_by: string;
reviewer_notes?: string;
} = {
moderation_status: action,
moderated_at: new Date().toISOString(),
moderated_by: moderatorId,
...(moderatorNotes && { reviewer_notes: moderatorNotes }),
};
const result = await createTableQuery('reviews')
.update(reviewUpdate)
.eq('id', item.id)
.select();
error = result.error;
data = result.data;
} else {
const submissionUpdate: {
status: 'approved' | 'rejected' | 'pending';
reviewed_at: string;
reviewer_id: string;
reviewer_notes?: string;
} = {
status: action,
reviewed_at: new Date().toISOString(),
reviewer_id: moderatorId,
...(moderatorNotes && { reviewer_notes: moderatorNotes }),
};
const result = await createTableQuery('content_submissions')
.update(submissionUpdate)
.eq('id', item.id)
.select();
error = result.error;
data = result.data;
}
if (error) {
throw error;
}
// Check if the update actually affected any rows
if (!data || data.length === 0) {
throw new Error(
'Failed to update item - no rows affected. You might not have permission to moderate this content.'
);
}
return {
success: true,
message: `Content ${action}`,
shouldRemoveFromQueue: action === 'approved' || action === 'rejected',
};
} catch (error: unknown) {
handleError(error, {
action: `${config.action === 'approved' ? 'Approve' : 'Reject'} Content`,
userId: config.moderatorId,
metadata: { itemType: item.type, itemId: item.id }
});
return {
success: false,
message: `Failed to ${config.action} content`,
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Configuration for submission deletion
*/
export interface DeleteSubmissionConfig {
item: ModerationItem;
deletePhotos?: boolean;
}
/**
* Delete a submission and its associated photos
*
* Extracts photo IDs, deletes them from Cloudflare, then deletes the submission.
*
* @param supabase - Supabase client
* @param config - Deletion configuration
* @returns Action result
*/
export async function deleteSubmission(
supabase: SupabaseClient,
config: DeleteSubmissionConfig
): Promise<ModerationActionResult> {
const { item, deletePhotos = true } = config;
if (item.type !== 'content_submission') {
return {
success: false,
message: 'Can only delete content submissions',
shouldRemoveFromQueue: false,
};
}
try {
let deletedPhotoCount = 0;
let skippedPhotoCount = 0;
// Extract and delete photos if requested
if (deletePhotos) {
const photosArray = item.content?.content?.photos || item.content?.photos;
if (photosArray && Array.isArray(photosArray)) {
const validImageIds: string[] = [];
for (const photo of photosArray) {
let imageId = '';
if (photo.imageId) {
imageId = photo.imageId;
} else if (photo.url && !photo.url.startsWith('blob:')) {
// Try to extract from URL
const uuidRegex =
/^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i;
if (uuidRegex.test(photo.url)) {
imageId = photo.url;
} else {
const cloudflareMatch = photo.url.match(
/imagedelivery\.net\/[^\/]+\/([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})/i
);
if (cloudflareMatch) {
imageId = cloudflareMatch[1];
}
}
}
if (imageId) {
validImageIds.push(imageId);
} else {
skippedPhotoCount++;
}
}
// Delete photos from Cloudflare
if (validImageIds.length > 0) {
const deleteResults = await invokeBatchWithTracking(
validImageIds.map(imageId => ({
functionName: 'upload-image',
payload: { action: 'delete', imageId },
})),
undefined
);
// Count successful deletions
const successfulDeletions = deleteResults.filter(r => !r.error);
deletedPhotoCount = successfulDeletions.length;
// Log any failures silently (background operation)
const failedDeletions = deleteResults.filter(r => r.error);
if (failedDeletions.length > 0) {
handleNonCriticalError(
new Error(`Failed to delete ${failedDeletions.length} of ${validImageIds.length} photos`),
{
action: 'Delete Submission Photos',
metadata: {
failureCount: failedDeletions.length,
totalAttempted: validImageIds.length,
failedRequestIds: failedDeletions.map(r => r.requestId)
}
}
);
}
}
}
}
// Delete the submission from the database
const { error } = await supabase
.from('content_submissions')
.delete()
.eq('id', item.id);
if (error) {
throw error;
}
// Verify deletion
const { data: checkData, error: checkError } = await supabase
.from('content_submissions')
.select('id')
.eq('id', item.id)
.single();
if (checkData && !checkError) {
throw new Error('Deletion failed - item still exists in database');
}
// Build result message
let message = 'The submission has been permanently deleted';
if (deletedPhotoCount > 0 && skippedPhotoCount > 0) {
message = `The submission and ${deletedPhotoCount} photo(s) have been deleted. ${skippedPhotoCount} photo(s) could not be deleted from storage`;
} else if (deletedPhotoCount > 0) {
message = `The submission and ${deletedPhotoCount} associated photo(s) have been permanently deleted`;
} else if (skippedPhotoCount > 0) {
message = `The submission has been deleted. ${skippedPhotoCount} photo(s) could not be deleted from storage`;
}
return {
success: true,
message,
shouldRemoveFromQueue: true,
};
} catch (error: unknown) {
handleError(error, {
action: 'Delete Submission',
metadata: { submissionId: item.id, deletePhotos }
});
return {
success: false,
message: 'Failed to delete submission',
error: error instanceof Error ? error : new Error('Unknown error'),
shouldRemoveFromQueue: false,
};
}
}

View File

@@ -0,0 +1,121 @@
/**
* Moderation Queue Constants
*
* Centralized configuration values for the moderation system.
*/
export const MODERATION_CONSTANTS = {
// TanStack Query configuration
QUERY_STALE_TIME: 30000, // 30 seconds
QUERY_GC_TIME: 5 * 60 * 1000, // 5 minutes
QUERY_RETRY_COUNT: 2,
// Realtime configuration
REALTIME_DEBOUNCE_MS: 500, // 500ms
REALTIME_OPTIMISTIC_REMOVAL_TIMEOUT: 5000, // 5 seconds
// Lock configuration
LOCK_DURATION_MS: 15 * 60 * 1000, // 15 minutes
LOCK_EXTENSION_MS: 10 * 60 * 1000, // 10 minutes
// Cache configuration
MAX_ENTITY_CACHE_SIZE: 500,
MAX_PROFILE_CACHE_SIZE: 500,
// Pagination
DEFAULT_PAGE_SIZE: 25,
MAX_PAGE_SIZE: 100,
// Filter debounce
FILTER_DEBOUNCE_MS: 300,
// Role Labels
ROLE_LABELS: {
admin: 'Administrator',
moderator: 'Moderator',
user: 'User',
superuser: 'Superuser',
} as const,
// Status Labels
STATUS_LABELS: {
pending: 'Pending Review',
approved: 'Approved',
rejected: 'Rejected',
partially_approved: 'Partially Approved',
escalated: 'Escalated',
in_review: 'In Review',
} as const,
// Submission Type Labels
SUBMISSION_TYPE_LABELS: {
park: 'Park',
ride: 'Ride',
company: 'Company',
ride_model: 'Ride Model',
photo: 'Photo',
} as const,
// Report Type Labels
REPORT_TYPE_LABELS: {
spam: 'Spam',
inappropriate: 'Inappropriate Content',
harassment: 'Harassment',
misinformation: 'Misinformation',
fake_info: 'Fake Information',
offensive: 'Offensive Language',
other: 'Other',
} as const,
// Entity Type Labels
ENTITY_TYPE_LABELS: {
park: 'Park',
ride: 'Ride',
company: 'Company',
ride_model: 'Ride Model',
review: 'Review',
profile: 'Profile',
content_submission: 'Content Submission',
} as const,
// Status Colors (for badges)
STATUS_COLORS: {
pending: 'secondary',
approved: 'default',
rejected: 'destructive',
partially_approved: 'outline',
escalated: 'destructive',
in_review: 'secondary',
} as const,
// Report Status Colors
REPORT_STATUS_COLORS: {
pending: 'secondary',
reviewed: 'default',
dismissed: 'outline',
resolved: 'default',
} as const,
} as const;
export type ModerationConstants = typeof MODERATION_CONSTANTS;
// Helper functions for type-safe label access
export function getRoleLabel(role: keyof typeof MODERATION_CONSTANTS.ROLE_LABELS): string {
return MODERATION_CONSTANTS.ROLE_LABELS[role] || role;
}
export function getStatusLabel(status: keyof typeof MODERATION_CONSTANTS.STATUS_LABELS): string {
return MODERATION_CONSTANTS.STATUS_LABELS[status] || status;
}
export function getSubmissionTypeLabel(type: keyof typeof MODERATION_CONSTANTS.SUBMISSION_TYPE_LABELS): string {
return MODERATION_CONSTANTS.SUBMISSION_TYPE_LABELS[type] || type;
}
export function getReportTypeLabel(type: keyof typeof MODERATION_CONSTANTS.REPORT_TYPE_LABELS): string {
return MODERATION_CONSTANTS.REPORT_TYPE_LABELS[type] || type;
}
export function getEntityTypeLabel(type: keyof typeof MODERATION_CONSTANTS.ENTITY_TYPE_LABELS): string {
return MODERATION_CONSTANTS.ENTITY_TYPE_LABELS[type] || type;
}

View File

@@ -0,0 +1,222 @@
/**
* Entity Resolution Utilities
*
* Functions for resolving entity names and display information
* from cached entity data used in moderation workflows.
*/
/**
* Entity cache structure (matching useEntityCache hook)
*/
interface EntityCache {
rides: Map<string, { id: string; name: string; park_id?: string }>;
parks: Map<string, { id: string; name: string }>;
companies: Map<string, { id: string; name: string }>;
}
/**
* Generic submission content type
*/
interface GenericSubmissionContent {
name?: string;
entity_id?: string;
entity_name?: string;
park_id?: string;
ride_id?: string;
company_id?: string;
manufacturer_id?: string;
designer_id?: string;
operator_id?: string;
property_owner_id?: string;
[key: string]: unknown;
}
/**
* Result of entity name resolution
*/
export interface ResolvedEntityNames {
entityName: string;
parkName?: string;
}
/**
* Resolve entity name and related park name from submission content
*
* This function determines what entity is being modified based on the
* submission type and content, then looks up cached entity data to
* resolve display names.
*
* @param submissionType - Type of submission (e.g., 'ride', 'park', 'manufacturer')
* @param content - Submission content containing entity IDs
* @param entityCache - Cache of entity data
* @returns Resolved entity and park names
*
* @example
* ```tsx
* const { entityName, parkName } = resolveEntityName(
* 'ride',
* { entity_id: 'ride-123' },
* entityCacheRef.current
* );
* // Returns: { entityName: "Steel Vengeance", parkName: "Cedar Point" }
* ```
*/
export function resolveEntityName(
submissionType: string,
content: GenericSubmissionContent | null | undefined,
entityCache: EntityCache
): ResolvedEntityNames {
let entityName = content?.name || 'Unknown';
let parkName: string | undefined;
// Handle ride submissions - look up ride name and park
if (submissionType === 'ride' && content?.entity_id) {
const ride = entityCache.rides.get(content.entity_id);
if (ride) {
entityName = ride.name;
if (ride.park_id) {
const park = entityCache.parks.get(ride.park_id);
if (park) parkName = park.name;
}
}
}
// Handle park submissions
else if (submissionType === 'park' && content?.entity_id) {
const park = entityCache.parks.get(content.entity_id);
if (park) entityName = park.name;
}
// Handle company submissions (manufacturer, operator, designer, property_owner)
else if (
['manufacturer', 'operator', 'designer', 'property_owner'].includes(submissionType) &&
content?.entity_id
) {
const company = entityCache.companies.get(content.entity_id);
if (company) entityName = company.name;
}
// Handle content with ride_id reference
else if (content?.ride_id) {
const ride = entityCache.rides.get(content.ride_id);
if (ride) {
entityName = ride.name;
if (ride.park_id) {
const park = entityCache.parks.get(ride.park_id);
if (park) parkName = park.name;
}
}
}
// Handle content with park_id reference
else if (content?.park_id) {
const park = entityCache.parks.get(content.park_id);
if (park) parkName = park.name;
}
return { entityName, parkName };
}
/**
* Get a display-ready entity identifier string
*
* Combines entity name and park name (if available) into a single
* human-readable string for display in the moderation interface.
*
* @param entityName - Primary entity name
* @param parkName - Optional related park name
* @returns Formatted display string
*
* @example
* ```tsx
* getEntityDisplayName("Steel Vengeance", "Cedar Point")
* // Returns: "Steel Vengeance at Cedar Point"
*
* getEntityDisplayName("Cedar Point")
* // Returns: "Cedar Point"
* ```
*/
export function getEntityDisplayName(
entityName: string,
parkName?: string
): string {
if (parkName) {
return `${entityName} at ${parkName}`;
}
return entityName;
}
/**
* Extract all entity IDs from a list of submissions
*
* Scans through submission content to find all referenced entity IDs,
* grouped by entity type (rides, parks, companies).
*
* @param submissions - Array of submission objects
* @returns Object containing Sets of IDs for each entity type
*/
export function extractEntityIds(submissions: Array<{ content: unknown; submission_type: string }>): {
rideIds: Set<string>;
parkIds: Set<string>;
companyIds: Set<string>;
} {
const rideIds = new Set<string>();
const parkIds = new Set<string>();
const companyIds = new Set<string>();
submissions.forEach(submission => {
const content = submission.content as GenericSubmissionContent | null | undefined;
if (content && typeof content === 'object') {
// Direct entity references
if (content.ride_id) rideIds.add(content.ride_id);
if (content.park_id) parkIds.add(content.park_id);
if (content.company_id) companyIds.add(content.company_id);
// Entity ID based on submission type
if (content.entity_id) {
if (submission.submission_type === 'ride') {
rideIds.add(content.entity_id);
} else if (submission.submission_type === 'park') {
parkIds.add(content.entity_id);
} else if (
['manufacturer', 'operator', 'designer', 'property_owner'].includes(
submission.submission_type
)
) {
companyIds.add(content.entity_id);
}
}
// Company role references
if (content.manufacturer_id) companyIds.add(content.manufacturer_id);
if (content.designer_id) companyIds.add(content.designer_id);
if (content.operator_id) companyIds.add(content.operator_id);
if (content.property_owner_id) companyIds.add(content.property_owner_id);
}
});
return { rideIds, parkIds, companyIds };
}
/**
* Determine submission type display label
*
* Converts internal submission type identifiers to human-readable labels.
*
* @param submissionType - Internal submission type
* @returns Human-readable label
*/
export function getSubmissionTypeLabel(submissionType: string): string {
const labels: Record<string, string> = {
park: 'Park',
ride: 'Ride',
manufacturer: 'Manufacturer',
operator: 'Operator',
designer: 'Designer',
property_owner: 'Property Owner',
ride_model: 'Ride Model',
photo: 'Photo',
photo_delete: 'Photo Deletion',
milestone: 'Timeline Event',
timeline_event: 'Timeline Event',
review: 'Review',
};
return labels[submissionType] || submissionType;
}

View File

@@ -0,0 +1,68 @@
/**
* Moderation Library
*
* Centralized exports for all moderation-related utilities.
* Provides business logic for moderation workflows, queries, and entity resolution.
*/
// Query builders and data fetching
export {
buildSubmissionQuery,
buildCountQuery,
fetchSubmissions,
isLockedByOther,
getQueueStats,
} from './queries';
export type { QueryConfig, FetchSubmissionsResult } from './queries';
// Entity resolution
export {
resolveEntityName,
getEntityDisplayName,
extractEntityIds,
getSubmissionTypeLabel,
} from './entities';
export type { ResolvedEntityNames } from './entities';
// Moderation actions
export {
approvePhotoSubmission,
approveSubmissionItems,
rejectSubmissionItems,
performModerationAction,
deleteSubmission,
} from './actions';
export type {
ModerationActionResult,
ModerationConfig,
DeleteSubmissionConfig,
} from './actions';
// Removed - sorting functionality deleted
// Realtime subscription utilities
export {
matchesEntityFilter,
matchesStatusFilter,
hasItemChanged,
extractChangedFields,
buildModerationItem,
} from './realtime';
// Lock management utilities
export {
canClaimSubmission,
isActiveLock,
getLockStatus,
formatLockExpiry,
getLockUrgency,
} from './lockHelpers';
export type { LockStatus, LockUrgency } from './lockHelpers';
// Constants
export { MODERATION_CONSTANTS } from './constants';
export type { ModerationConstants } from './constants';

View File

@@ -0,0 +1,236 @@
/**
* Lock Auto-Release Mechanism
*
* Automatically releases submission locks when operations fail, timeout,
* or are abandoned by moderators. Prevents deadlocks and improves queue flow.
*
* Part of Sacred Pipeline Phase 4: Transaction Resilience
*/
import { supabase } from '@/lib/supabaseClient';
import { logger } from '@/lib/logger';
import { isTimeoutError } from '@/lib/timeoutDetection';
import { toast } from '@/hooks/use-toast';
export interface LockReleaseOptions {
submissionId: string;
moderatorId: string;
reason: 'timeout' | 'error' | 'abandoned' | 'manual';
error?: unknown;
silent?: boolean; // Don't show toast notification
}
/**
* Release a lock on a submission
*/
export async function releaseLock(options: LockReleaseOptions): Promise<boolean> {
const { submissionId, moderatorId, reason, error, silent = false } = options;
try {
// Call Supabase RPC to release lock
const { error: releaseError } = await supabase.rpc('release_submission_lock', {
submission_id: submissionId,
moderator_id: moderatorId,
});
if (releaseError) {
logger.error('Failed to release lock', {
submissionId,
moderatorId,
reason,
error: releaseError,
});
if (!silent) {
toast({
title: 'Lock Release Failed',
description: 'Failed to release submission lock. It will expire automatically.',
variant: 'destructive',
});
}
return false;
}
logger.info('Lock released', {
submissionId,
moderatorId,
reason,
hasError: !!error,
});
if (!silent) {
const message = getLockReleaseMessage(reason);
toast({
title: 'Lock Released',
description: message,
});
}
return true;
} catch (err) {
logger.error('Exception while releasing lock', {
submissionId,
moderatorId,
reason,
error: err,
});
return false;
}
}
/**
* Auto-release lock when an operation fails
*
* @param submissionId - Submission ID
* @param moderatorId - Moderator ID
* @param error - Error that triggered the release
*/
export async function autoReleaseLockOnError(
submissionId: string,
moderatorId: string,
error: unknown
): Promise<void> {
const isTimeout = isTimeoutError(error);
logger.warn('Auto-releasing lock due to error', {
submissionId,
moderatorId,
isTimeout,
error: error instanceof Error ? error.message : String(error),
});
await releaseLock({
submissionId,
moderatorId,
reason: isTimeout ? 'timeout' : 'error',
error,
silent: false, // Show notification for transparency
});
}
/**
* Auto-release lock when moderator abandons review
* Triggered by navigation away, tab close, or inactivity
*/
export async function autoReleaseLockOnAbandon(
submissionId: string,
moderatorId: string
): Promise<void> {
logger.info('Auto-releasing lock due to abandonment', {
submissionId,
moderatorId,
});
await releaseLock({
submissionId,
moderatorId,
reason: 'abandoned',
silent: true, // Silent for better UX
});
}
/**
* Setup auto-release on page unload (user navigates away or closes tab)
*/
export function setupAutoReleaseOnUnload(
submissionId: string,
moderatorId: string
): () => void {
const handleUnload = () => {
// Use sendBeacon for reliable unload requests
const payload = JSON.stringify({
submission_id: submissionId,
moderator_id: moderatorId,
});
// Try to call RPC via sendBeacon (more reliable on unload)
const url = `${import.meta.env.VITE_SUPABASE_URL}/rest/v1/rpc/release_submission_lock`;
const blob = new Blob([payload], { type: 'application/json' });
navigator.sendBeacon(url, blob);
logger.info('Scheduled lock release on unload', {
submissionId,
moderatorId,
});
};
// Add listeners
window.addEventListener('beforeunload', handleUnload);
window.addEventListener('pagehide', handleUnload);
// Return cleanup function
return () => {
window.removeEventListener('beforeunload', handleUnload);
window.removeEventListener('pagehide', handleUnload);
};
}
/**
* Monitor inactivity and auto-release after timeout
*
* @param submissionId - Submission ID
* @param moderatorId - Moderator ID
* @param inactivityMinutes - Minutes of inactivity before release (default: 10)
* @returns Cleanup function
*/
export function setupInactivityAutoRelease(
submissionId: string,
moderatorId: string,
inactivityMinutes: number = 10
): () => void {
let inactivityTimer: NodeJS.Timeout | null = null;
const resetTimer = () => {
if (inactivityTimer) {
clearTimeout(inactivityTimer);
}
inactivityTimer = setTimeout(() => {
logger.warn('Inactivity timeout - auto-releasing lock', {
submissionId,
moderatorId,
inactivityMinutes,
});
autoReleaseLockOnAbandon(submissionId, moderatorId);
}, inactivityMinutes * 60 * 1000);
};
// Track user activity
const activityEvents = ['mousedown', 'keydown', 'scroll', 'touchstart'];
activityEvents.forEach(event => {
window.addEventListener(event, resetTimer, { passive: true });
});
// Start timer
resetTimer();
// Return cleanup function
return () => {
if (inactivityTimer) {
clearTimeout(inactivityTimer);
}
activityEvents.forEach(event => {
window.removeEventListener(event, resetTimer);
});
};
}
/**
* Get user-friendly lock release message
*/
function getLockReleaseMessage(reason: LockReleaseOptions['reason']): string {
switch (reason) {
case 'timeout':
return 'Lock released due to timeout. The submission is available for other moderators.';
case 'error':
return 'Lock released due to an error. You can reclaim it to continue reviewing.';
case 'abandoned':
return 'Lock released. The submission is back in the queue.';
case 'manual':
return 'Lock released successfully.';
}
}

View File

@@ -0,0 +1,91 @@
/**
* Lock Management Utilities
*
* Helper functions for managing submission locks and lock state.
*/
/**
* Check if a submission can be claimed by the current user
*/
export function canClaimSubmission(
submission: { assigned_to: string | null; locked_until: string | null },
currentUserId: string
): boolean {
// Can claim if unassigned
if (!submission.assigned_to) return true;
// Can claim if no lock time set
if (!submission.locked_until) return true;
// Can claim if lock expired
if (new Date(submission.locked_until) < new Date()) return true;
// Already claimed by current user - cannot claim again
if (submission.assigned_to === currentUserId) return false;
return false;
}
/**
* Check if a submission has an active lock
*/
export function isActiveLock(
assignedTo: string | null,
lockedUntil: string | null
): boolean {
if (!assignedTo || !lockedUntil) return false;
return new Date(lockedUntil) > new Date();
}
/**
* Get lock status indicator for a submission
*/
export type LockStatus = 'locked_by_me' | 'locked_by_other' | 'unlocked' | 'expired';
export function getLockStatus(
submission: { assigned_to: string | null; locked_until: string | null },
currentUserId: string
): LockStatus {
if (!submission.assigned_to || !submission.locked_until) {
return 'unlocked';
}
const lockExpired = new Date(submission.locked_until) < new Date();
if (lockExpired) {
return 'expired';
}
if (submission.assigned_to === currentUserId) {
return 'locked_by_me';
}
return 'locked_by_other';
}
/**
* Format lock expiry time as MM:SS
*/
export function formatLockExpiry(lockedUntil: string): string {
const expiresAt = new Date(lockedUntil);
const now = new Date();
const msLeft = expiresAt.getTime() - now.getTime();
if (msLeft <= 0) return 'Expired';
const minutes = Math.floor(msLeft / 60000);
const seconds = Math.floor((msLeft % 60000) / 1000);
return `${minutes}:${seconds.toString().padStart(2, '0')}`;
}
/**
* Calculate lock urgency level based on time remaining
*/
export type LockUrgency = 'critical' | 'warning' | 'normal';
export function getLockUrgency(timeLeftMs: number): LockUrgency {
if (timeLeftMs < 2 * 60 * 1000) return 'critical'; // < 2 min
if (timeLeftMs < 5 * 60 * 1000) return 'warning'; // < 5 min
return 'normal';
}

View File

@@ -0,0 +1,110 @@
/**
* Moderation Lock Monitor
*
* Monitors lock expiry and provides automatic renewal prompts for moderators.
* Prevents loss of work due to expired locks.
*/
import { useEffect } from 'react';
import type { ModerationState } from '../moderationStateMachine';
import type { ModerationAction } from '../moderationStateMachine';
import { hasActiveLock, needsLockRenewal } from '../moderationStateMachine';
import { toast } from '@/hooks/use-toast';
import { supabase } from '@/lib/supabaseClient';
import { handleNonCriticalError } from '../errorHandler';
/**
* Hook to monitor lock status and warn about expiry
*
* @param state - Current moderation state
* @param dispatch - State machine dispatch function
* @param itemId - ID of the locked item (optional, for manual extension)
* @returns Extension function to manually extend lock
*/
export function useLockMonitor(
state: ModerationState,
dispatch: React.Dispatch<ModerationAction>,
itemId?: string
): { extendLock: () => Promise<void> } {
useEffect(() => {
if (!hasActiveLock(state)) {
return;
}
const checkInterval = setInterval(() => {
if (needsLockRenewal(state)) {
// Dispatch lock expiry warning
dispatch({ type: 'LOCK_EXPIRED' });
// Show toast with extension option
toast({
title: 'Lock Expiring Soon',
description: 'Your lock on this submission will expire in less than 2 minutes. Click below to extend.',
duration: Infinity,
});
// Also call extension function automatically after showing toast
if (itemId) {
setTimeout(() => {
handleExtendLock(itemId, dispatch);
}, 100);
}
}
}, 30000); // Check every 30 seconds
return () => clearInterval(checkInterval);
}, [state, dispatch, itemId]);
const extendLock = async () => {
if (itemId) {
await handleExtendLock(itemId, dispatch);
}
};
return { extendLock };
}
/**
* Extend the lock on a submission
*
* @param submissionId - Submission ID
* @param dispatch - State machine dispatch function
*/
export async function handleExtendLock(
submissionId: string,
dispatch: React.Dispatch<ModerationAction>
) {
try {
// Call Supabase to extend lock (assumes 15 minute extension)
const { error } = await supabase
.from('content_submissions')
.update({
locked_until: new Date(Date.now() + 15 * 60 * 1000).toISOString(),
})
.eq('id', submissionId);
if (error) throw error;
// Update state machine with new lock time
dispatch({
type: 'LOCK_ACQUIRED',
payload: { lockExpires: new Date(Date.now() + 15 * 60 * 1000).toISOString() },
});
toast({
title: 'Lock Extended',
description: 'You have 15 more minutes to complete your review.',
});
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Extend Lock',
metadata: { submissionId }
});
toast({
title: 'Extension Failed',
description: 'Could not extend lock. Please save your work and re-claim the item.',
variant: 'destructive',
});
}
}

View File

@@ -0,0 +1,455 @@
/**
* Moderation Queue Query Builder
*
* Constructs Supabase queries for fetching and filtering moderation queue items.
* Handles complex filtering logic, pagination, and entity resolution.
*/
import { SupabaseClient } from '@supabase/supabase-js';
import type {
EntityFilter,
StatusFilter,
QueueTab,
SortConfig,
} from '@/types/moderation';
/**
* Query configuration for building submission queries
*/
export interface QueryConfig {
entityFilter: EntityFilter;
statusFilter: StatusFilter;
tab: QueueTab;
userId: string;
isAdmin: boolean;
isSuperuser: boolean;
currentPage: number;
pageSize: number;
sortConfig?: SortConfig;
}
/**
* Result from fetching submissions
*/
export interface FetchSubmissionsResult {
submissions: any[];
totalCount: number;
error?: Error;
}
/**
* Build a Supabase query for content submissions based on filters
*
* Applies tab-based filtering (main queue vs archive), entity type filtering,
* status filtering, and access control (admin vs moderator view).
*
* @param supabase - Supabase client instance
* @param config - Query configuration
* @param skipModeratorFilter - Skip the moderator access control filter
* @returns Configured Supabase query builder
*/
export function buildSubmissionQuery(
supabase: SupabaseClient,
config: QueryConfig,
skipModeratorFilter = false
) {
const { entityFilter, statusFilter, tab, userId, isAdmin, isSuperuser } = config;
// Use optimized view with pre-joined profiles and entity data
let query = supabase
.from('moderation_queue_with_entities')
.select('*');
// CRITICAL: Multi-level ordering
// Level 1: Always sort by escalated first (descending) - escalated items always appear at top
query = query.order('escalated', { ascending: false });
// Level 2: Apply user-selected sort (if provided)
if (config.sortConfig) {
query = query.order(config.sortConfig.field, {
ascending: config.sortConfig.direction === 'asc'
});
}
// Level 3: Tertiary sort by created_at as tiebreaker (if not already primary sort)
if (!config.sortConfig || config.sortConfig.field !== 'created_at') {
query = query.order('created_at', { ascending: true });
}
// Apply tab-based status filtering
if (tab === 'mainQueue') {
// Main queue: pending, flagged, partially_approved submissions
if (statusFilter === 'all') {
query = query.in('status', ['pending', 'flagged', 'partially_approved']);
} else if (statusFilter === 'pending') {
query = query.in('status', ['pending', 'partially_approved']);
} else {
query = query.eq('status', statusFilter);
}
} else {
// Archive: approved or rejected submissions
if (statusFilter === 'all') {
query = query.in('status', ['approved', 'rejected']);
} else {
query = query.eq('status', statusFilter);
}
}
// Apply entity type filter
if (entityFilter === 'photos') {
query = query.eq('submission_type', 'photo');
} else if (entityFilter === 'submissions') {
query = query.neq('submission_type', 'photo');
}
// 'all' and 'reviews' filters don't add any conditions
// CRM-style claim filtering: moderators only see unclaimed OR self-assigned submissions
// Admins see all submissions
// Note: For non-admin users, moderator filtering is handled by multi-query approach in fetchSubmissions
if (!isAdmin && !isSuperuser && !skipModeratorFilter) {
const now = new Date().toISOString();
// Single filter approach (used by getQueueStats)
query = query.or(
`assigned_to.is.null,locked_until.lt.${now},assigned_to.eq.${userId}`
);
}
return query;
}
/**
* Build a count query with the same filters as the main query
*
* Used for pagination to get total number of items matching the filter criteria.
*
* @param supabase - Supabase client instance
* @param config - Query configuration
* @returns Configured count query
*/
export function buildCountQuery(
supabase: SupabaseClient,
config: QueryConfig
) {
const { entityFilter, statusFilter, tab, userId, isAdmin, isSuperuser } = config;
let countQuery = supabase
.from('content_submissions')
.select('*', { count: 'exact', head: true });
// Apply same filters as main query
if (tab === 'mainQueue') {
if (statusFilter === 'all') {
countQuery = countQuery.in('status', ['pending', 'flagged', 'partially_approved']);
} else if (statusFilter === 'pending') {
countQuery = countQuery.in('status', ['pending', 'partially_approved']);
} else {
countQuery = countQuery.eq('status', statusFilter);
}
} else {
if (statusFilter === 'all') {
countQuery = countQuery.in('status', ['approved', 'rejected']);
} else {
countQuery = countQuery.eq('status', statusFilter);
}
}
if (entityFilter === 'photos') {
countQuery = countQuery.eq('submission_type', 'photo');
} else if (entityFilter === 'submissions') {
countQuery = countQuery.neq('submission_type', 'photo');
}
// Note: Count query not used for non-admin users (multi-query approach handles count)
if (!isAdmin && !isSuperuser) {
const now = new Date().toISOString();
countQuery = countQuery.or(
`assigned_to.is.null,locked_until.lt.${now},assigned_to.eq.${userId}`
);
}
return countQuery;
}
/**
* Fetch submissions with pagination and all required data
*
* Executes the query and returns both the submissions and total count.
* Handles errors gracefully and returns them in the result object.
*
* @param supabase - Supabase client instance
* @param config - Query configuration
* @returns Submissions data and total count
*
* @example
* ```tsx
* const { submissions, totalCount, error } = await fetchSubmissions(supabase, {
* entityFilter: 'all',
* statusFilter: 'pending',
* tab: 'mainQueue',
* userId: user.id,
* isAdmin: false,
* isSuperuser: false,
* currentPage: 1,
* pageSize: 25
* });
* ```
*/
export async function fetchSubmissions(
supabase: SupabaseClient,
config: QueryConfig
): Promise<FetchSubmissionsResult> {
try {
const { userId, isAdmin, isSuperuser, currentPage, pageSize } = config;
// For non-admin users, use multi-query approach to avoid complex OR filters
if (!isAdmin && !isSuperuser) {
return await fetchSubmissionsMultiQuery(supabase, config);
}
// Admin path: use single query with count
const countQuery = buildCountQuery(supabase, config);
const { count, error: countError } = await countQuery;
if (countError) {
throw countError;
}
// Build main query with pagination
const query = buildSubmissionQuery(supabase, config);
const startIndex = (currentPage - 1) * pageSize;
const endIndex = startIndex + pageSize - 1;
const paginatedQuery = query.range(startIndex, endIndex);
// Execute query
const { data: submissions, error: submissionsError } = await paginatedQuery;
if (submissionsError) {
throw submissionsError;
}
// Enrich submissions with type field for UI conditional logic
const enrichedSubmissions = (submissions || []).map(sub => ({
...sub,
type: 'content_submission' as const,
}));
return {
submissions: enrichedSubmissions,
totalCount: count || 0,
};
} catch (error: unknown) {
return {
submissions: [],
totalCount: 0,
error: error as Error,
};
}
}
/**
* Fetch submissions using multi-query approach for non-admin users
*
* Executes three separate queries to avoid complex OR filters:
* 1. Unclaimed items (assigned_to is null)
* 2. Expired locks (locked_until < now, not assigned to current user)
* 3. Items assigned to current user
*
* Results are merged, deduplicated, sorted, and paginated.
*/
async function fetchSubmissionsMultiQuery(
supabase: SupabaseClient,
config: QueryConfig
): Promise<FetchSubmissionsResult> {
const { userId, currentPage, pageSize } = config;
const now = new Date().toISOString();
try {
// Build three separate queries
// Query 1: Unclaimed items
const query1 = buildSubmissionQuery(supabase, config, true).is('assigned_to', null);
// Query 2: Expired locks (not mine)
const query2 = buildSubmissionQuery(supabase, config, true)
.not('assigned_to', 'is', null)
.neq('assigned_to', userId)
.lt('locked_until', now);
// Query 3: My claimed items
const query3 = buildSubmissionQuery(supabase, config, true).eq('assigned_to', userId);
// Execute all queries in parallel
const [result1, result2, result3] = await Promise.all([
query1,
query2,
query3,
]);
// Check for errors
if (result1.error) throw result1.error;
if (result2.error) throw result2.error;
if (result3.error) throw result3.error;
// Merge all submissions
const allSubmissions = [
...(result1.data || []),
...(result2.data || []),
...(result3.data || []),
];
// Deduplicate by ID
const uniqueMap = new Map();
allSubmissions.forEach(sub => {
if (!uniqueMap.has(sub.id)) {
uniqueMap.set(sub.id, sub);
}
});
const uniqueSubmissions = Array.from(uniqueMap.values());
// Apply sorting (same logic as buildSubmissionQuery)
uniqueSubmissions.sort((a, b) => {
// Level 1: Escalated first
if (a.escalated !== b.escalated) {
return b.escalated ? 1 : -1;
}
// Level 2: Custom sort (if provided)
if (config.sortConfig) {
const field = config.sortConfig.field;
const ascending = config.sortConfig.direction === 'asc';
const aVal = a[field];
const bVal = b[field];
if (aVal !== bVal) {
if (aVal == null) return 1;
if (bVal == null) return -1;
const comparison = aVal < bVal ? -1 : 1;
return ascending ? comparison : -comparison;
}
}
// Level 3: Tiebreaker by created_at
const aTime = new Date(a.created_at).getTime();
const bTime = new Date(b.created_at).getTime();
return aTime - bTime;
});
// Apply pagination
const totalCount = uniqueSubmissions.length;
const startIndex = (currentPage - 1) * pageSize;
const endIndex = startIndex + pageSize;
const paginatedSubmissions = uniqueSubmissions.slice(startIndex, endIndex);
// Enrich with type field
const enrichedSubmissions = paginatedSubmissions.map(sub => ({
...sub,
type: 'content_submission' as const,
}));
return {
submissions: enrichedSubmissions,
totalCount,
};
} catch (error: unknown) {
return {
submissions: [],
totalCount: 0,
error: error as Error,
};
}
}
/**
* Check if a submission is locked by another moderator
*
* @param submission - Submission object
* @param currentUserId - Current user's ID
* @returns True if locked by another user
*/
export function isLockedByOther(
submission: any,
currentUserId: string
): boolean {
if (!submission.locked_until || !submission.assigned_to) {
return false;
}
const lockExpiry = new Date(submission.locked_until);
const now = new Date();
// Lock is expired
if (lockExpiry < now) {
return false;
}
// Locked by current user
if (submission.assigned_to === currentUserId) {
return false;
}
// Locked by someone else
return true;
}
/**
* Get queue statistics (optimized with aggregation query)
*
* Fetches counts for different submission states to display in the queue dashboard.
* Uses a single aggregation query instead of fetching all data and filtering client-side.
*
* @param supabase - Supabase client instance
* @param userId - Current user's ID
* @param isAdmin - Whether user is admin
* @param isSuperuser - Whether user is superuser
* @returns Object with various queue statistics
*/
export async function getQueueStats(
supabase: SupabaseClient,
userId: string,
isAdmin: boolean,
isSuperuser: boolean
) {
try {
// Optimized: Use aggregation directly in database
let statsQuery = supabase
.from('content_submissions')
.select('status, escalated');
// Apply access control using simple OR filter
if (!isAdmin && !isSuperuser) {
const now = new Date().toISOString();
// Show: unclaimed items OR items with expired locks OR my items
statsQuery = statsQuery.or(
`assigned_to.is.null,locked_until.lt.${now},assigned_to.eq.${userId}`
);
}
const { data: submissions, error } = await statsQuery;
if (error) {
throw error;
}
// Calculate statistics (still done client-side but with minimal data transfer)
const pending = submissions?.filter(s => s.status === 'pending' || s.status === 'partially_approved').length || 0;
const flagged = submissions?.filter(s => s.status === 'flagged').length || 0;
const escalated = submissions?.filter(s => s.escalated).length || 0;
const total = submissions?.length || 0;
return {
pending,
flagged,
escalated,
total,
};
} catch (error: unknown) {
// Error already logged in caller, just return defaults
return {
pending: 0,
flagged: 0,
escalated: 0,
total: 0,
};
}
}

View File

@@ -0,0 +1,209 @@
/**
* Realtime Subscription Utilities
*
* Helper functions for processing realtime subscription events in the moderation queue.
*/
import type { ModerationItem, EntityFilter, StatusFilter } from '@/types/moderation';
interface SubmissionContent {
name?: string;
[key: string]: any;
}
/**
* Check if a submission matches the entity filter
*/
export function matchesEntityFilter(
submission: { submission_type: string },
entityFilter: EntityFilter
): boolean {
if (entityFilter === 'all') return true;
if (entityFilter === 'photos') {
return submission.submission_type === 'photo';
}
if (entityFilter === 'submissions') {
return submission.submission_type !== 'photo';
}
if (entityFilter === 'reviews') {
return submission.submission_type === 'review';
}
return false;
}
/**
* Check if a submission matches the status filter
*/
export function matchesStatusFilter(
submission: { status: string },
statusFilter: StatusFilter
): boolean {
if (statusFilter === 'all') return true;
if (statusFilter === 'pending') {
return ['pending', 'partially_approved'].includes(submission.status);
}
return statusFilter === submission.status;
}
/**
* Deep comparison of ModerationItem fields to detect actual changes
*/
export function hasItemChanged(
current: ModerationItem,
updated: ModerationItem
): boolean {
// Check critical fields
if (
current.status !== updated.status ||
current.reviewed_at !== updated.reviewed_at ||
current.reviewer_notes !== updated.reviewer_notes ||
current.assigned_to !== updated.assigned_to ||
current.locked_until !== updated.locked_until ||
current.escalated !== updated.escalated
) {
return true;
}
// Check submission_items
if (current.submission_items?.length !== updated.submission_items?.length) {
return true;
}
// Check content (one level deep for performance)
if (current.content && updated.content) {
// Compare content reference first
if (current.content !== updated.content) {
const currentKeys = Object.keys(current.content).sort();
const updatedKeys = Object.keys(updated.content).sort();
// Different number of keys = changed
if (currentKeys.length !== updatedKeys.length) {
return true;
}
// Different key names = changed
if (!currentKeys.every((key, i) => key === updatedKeys[i])) {
return true;
}
// Check each key's value
for (const key of currentKeys) {
if (current.content[key] !== updated.content[key]) {
return true;
}
}
}
}
return false;
}
/**
* Extract only changed fields for minimal updates
*/
export function extractChangedFields(
current: ModerationItem,
updated: ModerationItem
): Partial<ModerationItem> {
const changes: Partial<ModerationItem> = {};
if (current.status !== updated.status) {
changes.status = updated.status;
}
if (current.reviewed_at !== updated.reviewed_at) {
changes.reviewed_at = updated.reviewed_at;
}
if (current.reviewer_notes !== updated.reviewer_notes) {
changes.reviewer_notes = updated.reviewer_notes;
}
if (current.assigned_to !== updated.assigned_to) {
changes.assigned_to = updated.assigned_to;
}
if (current.locked_until !== updated.locked_until) {
changes.locked_until = updated.locked_until;
}
if (current.escalated !== updated.escalated) {
changes.escalated = updated.escalated;
}
// Check content changes
if (current.content !== updated.content) {
changes.content = updated.content;
}
// Check submission_items
if (updated.submission_items) {
changes.submission_items = updated.submission_items;
}
return changes;
}
/**
* Build a full ModerationItem from submission data
*/
export function buildModerationItem(
submission: any,
profile?: any,
entityName?: string,
parkName?: string
): ModerationItem {
return {
id: submission.id,
type: 'content_submission',
content: submission.content,
// Handle both created_at (from view) and submitted_at (from realtime)
created_at: submission.created_at || submission.submitted_at,
submitted_at: submission.submitted_at,
// Support both user_id and submitter_id
user_id: submission.user_id || submission.submitter_id,
submitter_id: submission.submitter_id || submission.user_id,
status: submission.status,
submission_type: submission.submission_type,
// Use new profile structure from view if available
submitter_profile: submission.submitter_profile || (profile ? {
user_id: submission.user_id || submission.submitter_id,
username: profile.username,
display_name: profile.display_name,
avatar_url: profile.avatar_url,
} : undefined),
reviewer_profile: submission.reviewer_profile,
assigned_profile: submission.assigned_profile,
// Legacy support: create user_profile from submitter_profile
user_profile: submission.submitter_profile ? {
username: submission.submitter_profile.username,
display_name: submission.submitter_profile.display_name,
avatar_url: submission.submitter_profile.avatar_url,
} : (profile ? {
username: profile.username,
display_name: profile.display_name,
avatar_url: profile.avatar_url,
} : undefined),
entity_name: entityName || (submission.content as SubmissionContent)?.name || 'Unknown',
park_name: parkName,
reviewed_at: submission.reviewed_at || undefined,
reviewer_notes: submission.reviewer_notes || undefined,
escalated: submission.escalated || false,
assigned_to: submission.assigned_to || undefined,
locked_until: submission.locked_until || undefined,
submission_items: submission.submission_items || undefined,
};
}

View File

@@ -0,0 +1,64 @@
/**
* Type Guard Functions for Moderation Queue
*
* Provides runtime type checking for submission item data.
* Enables type-safe handling of different entity types.
*/
import type {
SubmissionItemData,
ParkItemData,
RideItemData,
CompanyItemData,
RideModelItemData,
PhotoItemData,
} from '@/types/moderation';
/**
* Check if item data is for a park
*/
export function isParkItemData(data: SubmissionItemData): data is ParkItemData {
return 'park_type' in data && 'name' in data;
}
/**
* Check if item data is for a ride
*/
export function isRideItemData(data: SubmissionItemData): data is RideItemData {
return ('ride_id' in data || 'park_id' in data) && 'ride_type' in data;
}
/**
* Check if item data is for a company
*/
export function isCompanyItemData(data: SubmissionItemData): data is CompanyItemData {
return 'company_type' in data && !('park_type' in data) && !('ride_type' in data);
}
/**
* Check if item data is for a ride model
*/
export function isRideModelItemData(data: SubmissionItemData): data is RideModelItemData {
return 'model_type' in data && 'manufacturer_id' in data;
}
/**
* Check if item data is for a photo
*/
export function isPhotoItemData(data: SubmissionItemData): data is PhotoItemData {
return 'photo_url' in data;
}
/**
* Get the entity type from item data (for validation and display)
*/
export function getEntityTypeFromItemData(data: SubmissionItemData): string {
if (isParkItemData(data)) return 'park';
if (isRideItemData(data)) return 'ride';
if (isCompanyItemData(data)) {
return data.company_type; // 'manufacturer', 'designer', etc.
}
if (isRideModelItemData(data)) return 'ride_model';
if (isPhotoItemData(data)) return 'photo';
return 'unknown';
}

View File

@@ -0,0 +1,121 @@
/**
* Runtime Data Validation for Moderation Queue
*
* Uses Zod to validate data shapes from the database at runtime.
* Prevents runtime errors if database schema changes unexpectedly.
*/
import { z } from 'zod';
import { handleError } from '@/lib/errorHandler';
// Profile schema (matches database JSONB structure)
const ProfileSchema = z.object({
user_id: z.string().uuid(),
username: z.string(),
display_name: z.string().optional().nullable(),
avatar_url: z.string().optional().nullable(),
});
// Legacy profile schema (for backward compatibility)
const LegacyProfileSchema = z.object({
username: z.string(),
display_name: z.string().optional().nullable(),
avatar_url: z.string().optional().nullable(),
});
// Submission item schema
const SubmissionItemSchema = z.object({
id: z.string().uuid(),
status: z.string(),
item_type: z.string().optional(),
item_data: z.record(z.string(), z.any()).optional().nullable(),
// Typed FK columns (optional, only one will be populated)
park_submission_id: z.string().uuid().optional().nullable(),
ride_submission_id: z.string().uuid().optional().nullable(),
photo_submission_id: z.string().uuid().optional().nullable(),
company_submission_id: z.string().uuid().optional().nullable(),
ride_model_submission_id: z.string().uuid().optional().nullable(),
timeline_event_submission_id: z.string().uuid().optional().nullable(),
action_type: z.enum(['create', 'edit', 'delete']).optional(),
original_data: z.record(z.string(), z.any()).optional().nullable(),
error_message: z.string().optional().nullable(),
});
// Main moderation item schema
export const ModerationItemSchema = z.object({
id: z.string().uuid(),
status: z.enum(['pending', 'approved', 'rejected', 'partially_approved', 'flagged']),
type: z.string(),
submission_type: z.string(),
// Accept both created_at and submitted_at for flexibility
created_at: z.string(),
submitted_at: z.string().optional(),
updated_at: z.string().optional().nullable(),
reviewed_at: z.string().optional().nullable(),
content: z.record(z.string(), z.any()).optional().nullable(),
// User fields (support both old and new naming)
submitter_id: z.string().uuid().optional(),
user_id: z.string().uuid().optional(),
assigned_to: z.string().uuid().optional().nullable(),
locked_until: z.string().optional().nullable(),
reviewed_by: z.string().uuid().optional().nullable(),
reviewer_notes: z.string().optional().nullable(),
// Escalation fields
escalated: z.boolean().optional().default(false),
escalation_reason: z.string().optional().nullable(),
// Profile objects (new structure from view)
submitter_profile: ProfileSchema.optional().nullable(),
assigned_profile: ProfileSchema.optional().nullable(),
reviewer_profile: ProfileSchema.optional().nullable(),
// Legacy profile support
user_profile: LegacyProfileSchema.optional().nullable(),
// Submission items
submission_items: z.array(SubmissionItemSchema).optional().nullable(),
// Entity names
entity_name: z.string().optional(),
park_name: z.string().optional(),
});
export const ModerationItemArraySchema = z.array(ModerationItemSchema);
/**
* Validate moderation items array
*
* @param data - Data to validate
* @returns Validation result with typed data or error
*/
export function validateModerationItems(data: unknown): {
success: boolean;
data?: any[];
error?: string
} {
const result = ModerationItemArraySchema.safeParse(data);
if (!result.success) {
handleError(result.error, {
action: 'Data validation failed',
metadata: {
errors: result.error.issues.slice(0, 5)
}
});
return {
success: false,
error: 'Received invalid data format from server. Please refresh the page.',
};
}
return {
success: true,
data: result.data,
};
}

View File

@@ -0,0 +1,207 @@
/**
* Moderation State Machine
* Manages moderation workflow with type-safe state transitions and lock coordination
*/
import type { SubmissionItemWithDeps } from './submissionItemsService';
import { logger } from './logger';
// State definitions using discriminated unions
export type ModerationState =
| { status: 'idle' }
| { status: 'claiming'; itemId: string }
| { status: 'locked'; itemId: string; lockExpires: string }
| { status: 'loading_data'; itemId: string; lockExpires: string }
| { status: 'reviewing'; itemId: string; lockExpires: string; reviewData: SubmissionItemWithDeps[] }
| { status: 'approving'; itemId: string }
| { status: 'rejecting'; itemId: string }
| { status: 'complete'; itemId: string; result: 'approved' | 'rejected' }
| { status: 'error'; itemId: string; error: string }
| { status: 'lock_expired'; itemId: string };
// Action definitions using discriminated unions
export type ModerationAction =
| { type: 'CLAIM_ITEM'; payload: { itemId: string } }
| { type: 'LOCK_ACQUIRED'; payload: { lockExpires: string } }
| { type: 'LOCK_EXPIRED' }
| { type: 'LOAD_DATA' }
| { type: 'DATA_LOADED'; payload: { reviewData: SubmissionItemWithDeps[] } }
| { type: 'START_APPROVAL' }
| { type: 'START_REJECTION' }
| { type: 'COMPLETE'; payload: { result: 'approved' | 'rejected' } }
| { type: 'ERROR'; payload: { error: string } }
| { type: 'RELEASE_LOCK' }
| { type: 'RESET' };
/**
* Moderation reducer with strict transition validation
*/
export function moderationReducer(
state: ModerationState,
action: ModerationAction
): ModerationState {
switch (action.type) {
case 'CLAIM_ITEM':
if (state.status !== 'idle' && state.status !== 'complete' && state.status !== 'error') {
throw new Error(`Cannot claim item from state: ${state.status}`);
}
return { status: 'claiming', itemId: action.payload.itemId };
case 'LOCK_ACQUIRED':
if (state.status !== 'claiming') {
throw new Error(`Illegal transition: ${state.status} → locked`);
}
// Validate lock expiry date
const lockDate = new Date(action.payload.lockExpires);
if (isNaN(lockDate.getTime())) {
throw new Error('Invalid lock expiry date');
}
return {
status: 'locked',
itemId: state.itemId,
lockExpires: action.payload.lockExpires
};
case 'LOCK_EXPIRED':
if (state.status !== 'locked' && state.status !== 'reviewing' && state.status !== 'loading_data') {
logger.warn(`Lock expired notification in unexpected state: ${state.status}`);
return state;
}
return {
status: 'lock_expired',
itemId: state.itemId
};
case 'LOAD_DATA':
if (state.status !== 'locked') {
throw new Error(`Illegal transition: ${state.status} → loading_data`);
}
return {
status: 'loading_data',
itemId: state.itemId,
lockExpires: state.lockExpires
};
case 'DATA_LOADED':
if (state.status !== 'loading_data') {
throw new Error(`Illegal transition: ${state.status} → reviewing`);
}
return {
status: 'reviewing',
itemId: state.itemId,
lockExpires: state.lockExpires,
reviewData: action.payload.reviewData
};
case 'START_APPROVAL':
if (state.status !== 'reviewing') {
throw new Error(`Illegal transition: ${state.status} → approving`);
}
return {
status: 'approving',
itemId: state.itemId
};
case 'START_REJECTION':
if (state.status !== 'reviewing') {
throw new Error(`Illegal transition: ${state.status} → rejecting`);
}
return {
status: 'rejecting',
itemId: state.itemId
};
case 'COMPLETE':
if (state.status !== 'approving' && state.status !== 'rejecting') {
throw new Error(`Illegal transition: ${state.status} → complete`);
}
return {
status: 'complete',
itemId: state.itemId,
result: action.payload.result
};
case 'ERROR':
// Error can happen from most states
if (state.status === 'idle' || state.status === 'complete') {
logger.warn('Error action in terminal state');
return state;
}
return {
status: 'error',
itemId: state.itemId,
error: action.payload.error
};
case 'RELEASE_LOCK':
// Can release lock from locked, reviewing, or error states
if (state.status !== 'locked' && state.status !== 'reviewing' && state.status !== 'error' && state.status !== 'lock_expired' && state.status !== 'loading_data') {
logger.warn(`Cannot release lock from state: ${state.status}`);
return state;
}
return { status: 'idle' };
case 'RESET':
return { status: 'idle' };
default:
// Exhaustive check
const _exhaustive: never = action;
return state;
}
}
// State transition guards
export function canClaimItem(state: ModerationState): boolean {
return state.status === 'idle' || state.status === 'complete' || state.status === 'error';
}
export function canLoadData(state: ModerationState): boolean {
return state.status === 'locked';
}
export function canStartReview(state: ModerationState): boolean {
return state.status === 'loading_data';
}
export function canApprove(state: ModerationState): boolean {
return state.status === 'reviewing';
}
export function canReject(state: ModerationState): boolean {
return state.status === 'reviewing';
}
export function canReleaseLock(state: ModerationState): boolean {
return state.status === 'locked'
|| state.status === 'reviewing'
|| state.status === 'error'
|| state.status === 'lock_expired'
|| state.status === 'loading_data';
}
export function hasActiveLock(state: ModerationState): boolean {
if (state.status !== 'locked' && state.status !== 'reviewing' && state.status !== 'loading_data') {
return false;
}
const lockExpires = new Date(state.lockExpires);
return lockExpires > new Date();
}
export function isTerminalState(state: ModerationState): boolean {
return state.status === 'complete' || state.status === 'error';
}
export function needsLockRenewal(state: ModerationState): boolean {
if (state.status !== 'locked' && state.status !== 'reviewing' && state.status !== 'loading_data') {
return false;
}
const lockExpires = new Date(state.lockExpires);
const now = new Date();
const timeUntilExpiry = lockExpires.getTime() - now.getTime();
// Renew if less than 2 minutes remaining
return timeUntilExpiry < 120000;
}

View File

@@ -0,0 +1,499 @@
import { supabase } from "@/lib/supabaseClient";
import { invokeWithTracking } from "@/lib/edgeFunctionTracking";
import { handleNonCriticalError, AppError } from "@/lib/errorHandler";
import { z } from "zod";
import type {
NotificationPayload,
SubscriberData,
NotificationPreferences,
NotificationTemplate
} from "@/types/notifications";
import {
notificationPreferencesSchema,
subscriberDataSchema,
DEFAULT_NOTIFICATION_PREFERENCES
} from "@/lib/notificationValidation";
class NotificationService {
/**
* Check if Novu is enabled by checking admin settings
*/
async isNovuEnabled(): Promise<boolean> {
try {
const { data } = await supabase
.from('admin_settings')
.select('setting_value')
.eq('setting_key', 'novu.application_identifier')
.maybeSingle();
return !!data?.setting_value;
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Check Novu Status',
metadata: { returnedFalse: true }
});
return false;
}
}
/**
* Update an existing Novu subscriber's profile information
*/
async updateSubscriber(subscriberData: SubscriberData): Promise<{ success: boolean; error?: string }> {
try {
// Validate input
const validated = subscriberDataSchema.parse(subscriberData);
const novuEnabled = await this.isNovuEnabled();
if (!novuEnabled) {
return { success: false, error: 'Novu not configured' };
}
const { data, error, requestId } = await invokeWithTracking(
'update-novu-subscriber',
validated
);
if (error) {
handleNonCriticalError(error, {
action: 'Update Novu Subscriber (Edge Function)',
userId: validated.subscriberId,
metadata: { requestId }
});
throw new AppError(
'Failed to update notification subscriber',
'NOTIFICATION_ERROR',
error.message
);
}
return { success: true };
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Update Novu Subscriber',
userId: subscriberData.subscriberId
});
return {
success: false,
error: error instanceof AppError ? error.message : 'Failed to update subscriber'
};
}
}
/**
* Create or update a Novu subscriber
*/
async createSubscriber(subscriberData: SubscriberData): Promise<{ success: boolean; error?: string }> {
try {
// Validate input
const validated = subscriberDataSchema.parse(subscriberData);
const novuEnabled = await this.isNovuEnabled();
if (!novuEnabled) {
return { success: false, error: 'Novu not configured' };
}
const { data, error, requestId } = await invokeWithTracking(
'create-novu-subscriber',
validated
);
if (error) {
handleNonCriticalError(error, {
action: 'Create Novu Subscriber (Edge Function)',
userId: validated.subscriberId,
metadata: { requestId }
});
throw new AppError(
'Failed to create notification subscriber',
'NOTIFICATION_ERROR',
error.message
);
}
if (!data?.subscriberId) {
throw new AppError(
'Invalid response from notification service',
'NOTIFICATION_ERROR'
);
}
// Store subscriber ID in database
const { error: dbError } = await supabase
.from('user_notification_preferences')
.upsert({
user_id: validated.subscriberId,
novu_subscriber_id: data.subscriberId,
});
if (dbError) {
handleNonCriticalError(dbError, {
action: 'Store Subscriber Preferences',
userId: validated.subscriberId
});
throw dbError;
}
return { success: true };
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Create Novu Subscriber',
userId: subscriberData.subscriberId
});
return {
success: false,
error: error instanceof AppError ? error.message : 'Failed to create subscriber'
};
}
}
/**
* Update notification preferences with validation and audit logging
*/
async updatePreferences(
userId: string,
preferences: NotificationPreferences
): Promise<{ success: boolean; error?: string }> {
try {
// Validate preferences
const validated = notificationPreferencesSchema.parse(preferences);
// Get previous preferences for audit log
const { data: previousPrefs } = await supabase
.from('user_notification_preferences')
.select('channel_preferences, workflow_preferences, frequency_settings')
.eq('user_id', userId)
.maybeSingle();
const novuEnabled = await this.isNovuEnabled();
// Update Novu preferences if enabled
if (novuEnabled) {
const { error: novuError, requestId } = await invokeWithTracking(
'update-novu-preferences',
{
userId,
preferences: validated,
}
);
if (novuError) {
handleNonCriticalError(novuError, {
action: 'Update Novu Preferences',
userId,
metadata: { requestId }
});
throw novuError;
}
}
// Update local database
const { error: dbError } = await supabase
.from('user_notification_preferences')
.upsert({
user_id: userId,
channel_preferences: validated.channelPreferences,
workflow_preferences: validated.workflowPreferences,
frequency_settings: validated.frequencySettings,
});
if (dbError) {
handleNonCriticalError(dbError, {
action: 'Save Notification Preferences',
userId
});
throw dbError;
}
// Create audit log entry using relational tables
const { data: auditLog, error: auditError } = await supabase
.from('profile_audit_log')
.insert([{
user_id: userId,
changed_by: userId,
action: 'notification_preferences_updated',
changes: {}, // Empty placeholder - actual changes stored in profile_change_fields table
}])
.select('id')
.single();
if (!auditError && auditLog) {
// Write changes to relational profile_change_fields table
const { writeProfileChangeFields } = await import('./auditHelpers');
await writeProfileChangeFields(auditLog.id, {
email_notifications: {
old_value: previousPrefs?.channel_preferences,
new_value: validated.channelPreferences,
},
workflow_preferences: {
old_value: previousPrefs?.workflow_preferences,
new_value: validated.workflowPreferences,
},
frequency_settings: {
old_value: previousPrefs?.frequency_settings,
new_value: validated.frequencySettings,
},
});
}
return { success: true };
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Update Notification Preferences',
userId
});
if (error instanceof z.ZodError) {
return {
success: false,
error: `Invalid preferences: ${error.issues.map(i => i.message).join(', ')}`
};
}
return {
success: false,
error: 'Failed to update notification preferences'
};
}
}
/**
* Get user's notification preferences with proper typing
*/
async getPreferences(userId: string): Promise<NotificationPreferences | null> {
try {
const { data, error } = await supabase
.from('user_notification_preferences')
.select('channel_preferences, workflow_preferences, frequency_settings')
.eq('user_id', userId)
.maybeSingle();
if (error && error.code !== 'PGRST116') {
handleNonCriticalError(error, {
action: 'Fetch Notification Preferences',
userId
});
throw error;
}
if (!data) {
return DEFAULT_NOTIFICATION_PREFERENCES;
}
// Validate the data from database
return notificationPreferencesSchema.parse({
channelPreferences: data.channel_preferences,
workflowPreferences: data.workflow_preferences,
frequencySettings: data.frequency_settings
});
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Get Notification Preferences',
userId
});
return null;
}
}
/**
* Get notification templates
*/
async getTemplates(): Promise<NotificationTemplate[]> {
try {
const { data, error } = await supabase
.from('notification_templates')
.select('*')
.eq('is_active', true)
.order('category', { ascending: true });
if (error) {
handleNonCriticalError(error, {
action: 'Fetch Notification Templates'
});
throw error;
}
return (data || []).map(t => ({
...t,
is_active: t.is_active ?? true,
description: t.description || null,
novu_workflow_id: t.novu_workflow_id || null,
}));
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Get Notification Templates'
});
return [];
}
}
/**
* Trigger a notification workflow
*/
async trigger(payload: NotificationPayload): Promise<{ success: boolean; error?: string }> {
try {
const novuEnabled = await this.isNovuEnabled();
if (!novuEnabled) {
return { success: false, error: 'Novu not configured' };
}
const { data, error, requestId } = await invokeWithTracking(
'trigger-notification',
payload
);
if (error) {
handleNonCriticalError(error, {
action: 'Trigger Notification',
metadata: { workflowId: payload.workflowId, subscriberId: payload.subscriberId, requestId }
});
throw error;
}
return { success: true };
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Trigger Notification',
metadata: { workflowId: payload.workflowId, subscriberId: payload.subscriberId }
});
return {
success: false,
error: 'Failed to trigger notification'
};
}
}
/**
* Notify moderators (legacy method for backward compatibility)
*/
async notifyModerators(payload: {
submission_id: string;
submission_type: string;
submitter_name: string;
action: string;
}): Promise<void> {
try {
const { error, requestId } = await invokeWithTracking(
'notify-moderators-submission',
payload
);
if (error) {
handleNonCriticalError(error, {
action: 'Notify Moderators (Submission)',
metadata: { submissionId: payload.submission_id, requestId }
});
throw error;
}
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Notify Moderators (Submission)',
metadata: { submissionId: payload.submission_id }
});
}
}
/**
* Trigger a system announcement to all users via the "users" topic
* Requires admin or superuser role
*/
async sendSystemAnnouncement(payload: {
title: string;
message: string;
severity: 'info' | 'warning' | 'critical';
actionUrl?: string;
}): Promise<{ success: boolean; error?: string; announcementId?: string }> {
try {
const novuEnabled = await this.isNovuEnabled();
if (!novuEnabled) {
return { success: false, error: 'Novu not configured' };
}
const { data, error, requestId } = await invokeWithTracking(
'notify-system-announcement',
payload
);
if (error) {
handleNonCriticalError(error, {
action: 'Send System Announcement',
metadata: { title: payload.title, requestId }
});
throw error;
}
return {
success: true,
announcementId: data?.announcementId
};
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Send System Announcement',
metadata: { title: payload.title }
});
return {
success: false,
error: error instanceof Error ? error.message : 'Failed to send system announcement'
};
}
}
/**
* Notify moderators about a new report via the "moderation-reports" topic
*/
async notifyModeratorsReport(payload: {
reportId: string;
reportType: string;
reportedEntityType: string;
reportedEntityId: string;
reporterName: string;
reason: string;
entityPreview: string;
reportedAt: string;
}): Promise<{ success: boolean; error?: string }> {
try {
const novuEnabled = await this.isNovuEnabled();
if (!novuEnabled) {
return { success: false, error: 'Novu not configured' };
}
const { data, error, requestId } = await invokeWithTracking(
'notify-moderators-report',
payload
);
if (error) {
handleNonCriticalError(error, {
action: 'Notify Moderators (Report)',
metadata: { reportId: payload.reportId, requestId }
});
throw error;
}
return { success: true };
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Notify Moderators (Report)',
metadata: { reportId: payload.reportId }
});
return {
success: false,
error: 'Failed to notify moderators about report'
};
}
}
/**
* Check if notifications are enabled
*/
isEnabled(): boolean {
return true; // Always return true, actual check happens in isNovuEnabled()
}
}
export const notificationService = new NotificationService();

View File

@@ -0,0 +1,64 @@
import { z } from 'zod';
import type { NotificationPreferences } from '@/types/notifications';
/**
* Schema for channel preferences
*/
export const channelPreferencesSchema = z.object({
in_app: z.boolean(),
email: z.boolean(),
push: z.boolean(),
sms: z.boolean()
});
/**
* Schema for workflow preferences (dynamic keys)
*/
export const workflowPreferencesSchema = z.record(z.string(), z.boolean());
/**
* Schema for frequency settings
*/
export const frequencySettingsSchema = z.object({
digest: z.enum(['realtime', 'hourly', 'daily', 'weekly'] as const),
max_per_hour: z.number().int().min(1).max(999)
});
/**
* Complete notification preferences schema
*/
export const notificationPreferencesSchema = z.object({
channelPreferences: channelPreferencesSchema,
workflowPreferences: workflowPreferencesSchema,
frequencySettings: frequencySettingsSchema
});
/**
* Schema for subscriber data
*/
export const subscriberDataSchema = z.object({
subscriberId: z.string().uuid('Invalid subscriber ID'),
email: z.string().email('Invalid email').optional(),
firstName: z.string().max(100).optional(),
lastName: z.string().max(100).optional(),
phone: z.string().max(20).optional(),
avatar: z.string().url('Invalid avatar URL').optional(),
data: z.record(z.string(), z.any()).optional()
});
/**
* Default notification preferences for new users
*/
export const DEFAULT_NOTIFICATION_PREFERENCES: NotificationPreferences = {
channelPreferences: {
in_app: true,
email: true,
push: false,
sms: false
},
workflowPreferences: {},
frequencySettings: {
digest: 'daily',
max_per_hour: 10
}
};

115
src-old/lib/photoHelpers.ts Normal file
View File

@@ -0,0 +1,115 @@
/**
* Photo Helpers
* Utilities for normalizing and validating photo data from different sources
*/
import type { PhotoItem, NormalizedPhoto, PhotoDataSource } from '@/types/photos';
import type { PhotoSubmissionItem } from '@/types/photo-submissions';
/**
* Type guard: Check if data is a photo submission item
*/
export function isPhotoSubmissionItem(data: any): data is PhotoSubmissionItem {
return (
data &&
typeof data === 'object' &&
'cloudflare_image_id' in data &&
'cloudflare_image_url' in data &&
'order_index' in data
);
}
/**
* Type guard: Check if content is a review with photos
*/
export function isReviewWithPhotos(content: any): boolean {
return (
content &&
typeof content === 'object' &&
Array.isArray(content.photos) &&
content.photos.length > 0 &&
content.photos[0]?.url
);
}
/**
* Normalize photo data from any source to PhotoItem[]
*/
export function normalizePhotoData(source: PhotoDataSource): PhotoItem[] {
switch (source.type) {
case 'review':
return source.photos.map((photo, index) => ({
id: `review-${index}`,
url: photo.url,
filename: photo.filename || `Review photo ${index + 1}`,
caption: photo.caption,
size: photo.size,
type: photo.type,
}));
case 'submission_jsonb':
return source.photos.map((photo, index) => ({
id: `jsonb-${index}`,
url: photo.url,
filename: photo.filename || `Photo ${index + 1}`,
caption: photo.caption,
title: photo.title,
size: photo.size,
type: photo.type,
}));
case 'submission_items':
return source.items.map((item) => ({
id: item.id,
url: item.cloudflare_image_url,
filename: item.filename || `Photo ${item.order_index + 1}`,
caption: item.caption,
title: item.title,
date_taken: item.date_taken,
}));
default:
return [];
}
}
/**
* Convert PhotoSubmissionItem[] to NormalizedPhoto[]
*/
export function normalizePhotoSubmissionItems(
items: PhotoSubmissionItem[]
): NormalizedPhoto[] {
return items.map((item) => ({
id: item.id,
url: item.cloudflare_image_url,
filename: item.filename || `Photo ${item.order_index + 1}`,
caption: item.caption || undefined,
title: item.title || undefined,
date_taken: item.date_taken || undefined,
order_index: item.order_index,
}));
}
/**
* Validate photo URL is from Cloudflare Images
* Supports both old imagedelivery.net and new CDN URLs
*/
export function isValidCloudflareUrl(url: string): boolean {
try {
const urlObj = new URL(url);
return urlObj.hostname.includes('imagedelivery.net') ||
urlObj.hostname === 'cdn.thrillwiki.com';
} catch {
return false;
}
}
/**
* Generate photo alt text from available metadata
*/
export function generatePhotoAlt(photo: PhotoItem | NormalizedPhoto): string {
if (photo.title) return photo.title;
if (photo.caption) return photo.caption;
return photo.filename || 'Photo';
}

View File

@@ -0,0 +1,138 @@
/**
* Pipeline Alert Reporting
*
* Client-side utilities for reporting critical pipeline issues to system alerts.
* Non-blocking operations that enhance monitoring without disrupting user flows.
*/
import { supabase } from '@/lib/supabaseClient';
import { handleNonCriticalError } from '@/lib/errorHandler';
/**
* Report temp ref validation errors to system alerts
* Called when validateTempRefs() fails in entitySubmissionHelpers
*/
export async function reportTempRefError(
entityType: 'park' | 'ride',
errors: string[],
userId: string
): Promise<void> {
try {
await supabase.rpc('create_system_alert', {
p_alert_type: 'temp_ref_error',
p_severity: 'high',
p_message: `Temp reference validation failed for ${entityType}: ${errors.join(', ')}`,
p_metadata: {
entity_type: entityType,
errors,
user_id: userId,
timestamp: new Date().toISOString()
}
});
} catch (error) {
handleNonCriticalError(error, {
action: 'Report temp ref error to alerts'
});
}
}
/**
* Report submission queue backlog
* Called when IndexedDB queue exceeds threshold
*/
export async function reportQueueBacklog(
pendingCount: number,
userId?: string
): Promise<void> {
// Only report if backlog > 10
if (pendingCount <= 10) return;
try {
await supabase.rpc('create_system_alert', {
p_alert_type: 'submission_queue_backlog',
p_severity: pendingCount > 50 ? 'high' : 'medium',
p_message: `Submission queue backlog: ${pendingCount} pending submissions`,
p_metadata: {
pending_count: pendingCount,
user_id: userId,
timestamp: new Date().toISOString()
}
});
} catch (error) {
handleNonCriticalError(error, {
action: 'Report queue backlog to alerts'
});
}
}
/**
* Check queue status and report if needed
* Called on app startup and periodically
*/
export async function checkAndReportQueueStatus(userId?: string): Promise<void> {
try {
const { getPendingCount } = await import('./submissionQueue');
const pendingCount = await getPendingCount();
await reportQueueBacklog(pendingCount, userId);
} catch (error) {
handleNonCriticalError(error, {
action: 'Check queue status'
});
}
}
/**
* Report rate limit violations to system alerts
* Called when checkSubmissionRateLimit() blocks a user
*/
export async function reportRateLimitViolation(
userId: string,
action: string,
retryAfter: number
): Promise<void> {
try {
await supabase.rpc('create_system_alert', {
p_alert_type: 'rate_limit_violation',
p_severity: 'medium',
p_message: `Rate limit exceeded: ${action} (retry after ${retryAfter}s)`,
p_metadata: {
user_id: userId,
action,
retry_after_seconds: retryAfter,
timestamp: new Date().toISOString()
}
});
} catch (error) {
handleNonCriticalError(error, {
action: 'Report rate limit violation to alerts'
});
}
}
/**
* Report ban evasion attempts to system alerts
* Called when banned users attempt to submit content
*/
export async function reportBanEvasionAttempt(
userId: string,
action: string,
username?: string
): Promise<void> {
try {
await supabase.rpc('create_system_alert', {
p_alert_type: 'ban_attempt',
p_severity: 'high',
p_message: `Banned user attempted submission: ${action}${username ? ` (${username})` : ''}`,
p_metadata: {
user_id: userId,
action,
username: username || 'unknown',
timestamp: new Date().toISOString()
}
});
} catch (error) {
handleNonCriticalError(error, {
action: 'Report ban evasion attempt to alerts'
});
}
}

View File

@@ -0,0 +1,67 @@
/**
* Privacy Settings Validation
*
* Provides Zod schemas for runtime validation of privacy settings.
*
* Usage:
* ```typescript
* const validated = privacyFormSchema.parse(userInput);
* ```
*
* Security:
* - All user inputs must be validated before database writes
* - Prevents injection attacks and data corruption
* - Ensures data integrity with type-safe validation
*/
import { z } from 'zod';
/**
* Schema for privacy settings in user_preferences
* Uses defaults for backward compatibility with incomplete data
*/
export const privacySettingsSchema = z.object({
activity_visibility: z.enum(['public', 'private'] as const).default('public'),
search_visibility: z.boolean().default(true),
show_location: z.boolean().default(false),
show_age: z.boolean().default(false),
show_avatar: z.boolean().default(true),
show_bio: z.boolean().default(true),
show_activity_stats: z.boolean().default(true),
show_home_park: z.boolean().default(false)
}).passthrough();
/**
* Schema for profile privacy settings
*/
export const profilePrivacySchema = z.object({
privacy_level: z.enum(['public', 'private'] as const),
show_pronouns: z.boolean()
});
/**
* Combined schema for privacy form
*/
export const privacyFormSchema = privacySettingsSchema.merge(profilePrivacySchema);
/**
* Schema for blocking a user
*/
export const blockUserSchema = z.object({
blocked_id: z.string().uuid('Invalid user ID'),
reason: z.string().max(500, 'Reason must be 500 characters or less').optional()
});
/**
* Default privacy settings for new users
*/
export const DEFAULT_PRIVACY_SETTINGS = {
activity_visibility: 'public' as const,
search_visibility: true,
show_location: false,
show_age: false,
show_avatar: true,
show_bio: true,
show_activity_stats: true,
show_home_park: false
};

View File

@@ -0,0 +1,190 @@
/**
* Query invalidation helpers for TanStack Query
*
* Use these helpers to invalidate cached queries when data changes.
* This ensures UI stays in sync with backend state.
*/
import { useQueryClient } from '@tanstack/react-query';
import { queryKeys } from './queryKeys';
/**
* Hook providing query invalidation helpers
*/
export function useQueryInvalidation() {
const queryClient = useQueryClient();
return {
/**
* Invalidate user roles cache
* Call this after assigning/revoking roles
*/
invalidateUserRoles: (userId?: string) => {
if (userId) {
queryClient.invalidateQueries({ queryKey: queryKeys.userRoles(userId) });
} else {
queryClient.invalidateQueries({ queryKey: ['user-roles'] });
}
},
/**
* Invalidate user permissions cache
* Call this after role changes that affect permissions
*/
invalidateUserPermissions: (userId?: string) => {
if (userId) {
queryClient.invalidateQueries({ queryKey: queryKeys.userPermissions(userId) });
} else {
queryClient.invalidateQueries({ queryKey: ['user-permissions'] });
}
},
/**
* Invalidate both roles and permissions for a user
* Use this as a convenience method after role updates
*/
invalidateUserAuth: (userId?: string) => {
if (userId) {
queryClient.invalidateQueries({ queryKey: queryKeys.userRoles(userId) });
queryClient.invalidateQueries({ queryKey: queryKeys.userPermissions(userId) });
} else {
queryClient.invalidateQueries({ queryKey: ['user-roles'] });
queryClient.invalidateQueries({ queryKey: ['user-permissions'] });
}
},
/**
* Invalidate moderation queue
* Call this after moderation actions
*/
invalidateModerationQueue: () => {
queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
},
/**
* Invalidate moderation stats
* Call this after queue changes
*/
invalidateModerationStats: () => {
queryClient.invalidateQueries({ queryKey: queryKeys.moderationStats() });
},
/**
* Invalidate homepage data
* Call this after creating/updating parks or rides
*/
invalidateHomepageData: (entityType?: 'parks' | 'rides' | 'all') => {
if (!entityType || entityType === 'all') {
queryClient.invalidateQueries({ queryKey: ['homepage'] });
} else if (entityType === 'parks') {
queryClient.invalidateQueries({
queryKey: ['homepage'],
predicate: (query) => {
const key = query.queryKey[1] as string;
return typeof key === 'string' && key.includes('parks');
}
});
} else if (entityType === 'rides') {
queryClient.invalidateQueries({
queryKey: ['homepage'],
predicate: (query) => {
const key = query.queryKey[1] as string;
return typeof key === 'string' && key.includes('rides');
}
});
}
},
/**
* Invalidate parks listing cache
* Call this after creating/updating/deleting parks
*/
invalidateParks: () => {
queryClient.invalidateQueries({ queryKey: ['parks'] });
},
/**
* Invalidate rides listing cache
* Call this after creating/updating/deleting rides
*/
invalidateRides: () => {
queryClient.invalidateQueries({ queryKey: ['rides'] });
},
/**
* Invalidate park detail cache
* Call this after updating a park
*/
invalidateParkDetail: (slug: string) => {
queryClient.invalidateQueries({ queryKey: queryKeys.parks.detail(slug) });
},
/**
* Invalidate ride detail cache
* Call this after updating a ride
*/
invalidateRideDetail: (parkSlug: string, rideSlug: string) => {
queryClient.invalidateQueries({ queryKey: queryKeys.rides.detail(parkSlug, rideSlug) });
},
/**
* Invalidate entity reviews
* Call this after adding/updating/deleting reviews
*/
invalidateEntityReviews: (entityType: 'park' | 'ride', entityId: string) => {
queryClient.invalidateQueries({ queryKey: queryKeys.reviews.entity(entityType, entityId) });
},
/**
* Invalidate user reviews
* Call this after a user adds/updates/deletes their reviews
*/
invalidateUserReviews: (userId: string) => {
queryClient.invalidateQueries({ queryKey: ['reviews', 'user', userId] });
},
/**
* Invalidate entity photos
* Call this after uploading/deleting photos
*/
invalidateEntityPhotos: (entityType: string, entityId: string) => {
queryClient.invalidateQueries({ queryKey: queryKeys.photos.entity(entityType, entityId) });
},
/**
* Invalidate photo count
* Call this after photo changes
*/
invalidatePhotoCount: (entityType: string, entityId: string) => {
queryClient.invalidateQueries({ queryKey: queryKeys.photos.count(entityType, entityId) });
},
/**
* Invalidate search results
* Call this after major data changes
*/
invalidateSearchResults: () => {
queryClient.invalidateQueries({ queryKey: ['search'] });
},
/**
* Invalidate similar rides
* Call this after ride updates
*/
invalidateSimilarRides: (parkId: string, category: string) => {
queryClient.invalidateQueries({
queryKey: ['rides', 'similar', parkId, category]
});
},
/**
* Invalidate featured parks
* Call this after park updates that affect featured status
*/
invalidateFeaturedParks: () => {
queryClient.invalidateQueries({
queryKey: ['homepage', 'featured-parks']
});
},
};
}

80
src-old/lib/queryKeys.ts Normal file
View File

@@ -0,0 +1,80 @@
/**
* Centralized query key definitions for TanStack Query
*
* This ensures consistent query keys across the application
* and makes cache invalidation easier to manage.
*/
export const queryKeys = {
// User-related queries
userRoles: (userId?: string) => ['user-roles', userId] as const,
userPermissions: (userId?: string) => ['user-permissions', userId] as const,
// Moderation queue queries
moderationQueue: (config: Record<string, any>) => ['moderation-queue', config] as const,
moderationStats: () => ['moderation-stats'] as const,
// Homepage queries
homepage: {
trendingParks: () => ['homepage', 'trending-parks'] as const,
trendingRides: () => ['homepage', 'trending-rides'] as const,
recentParks: () => ['homepage', 'recent-parks'] as const,
recentRides: () => ['homepage', 'recent-rides'] as const,
recentChanges: () => ['homepage', 'recent-changes'] as const,
recentlyOpenedParks: () => ['homepage', 'recently-opened-parks'] as const,
recentlyOpenedRides: () => ['homepage', 'recently-opened-rides'] as const,
highestRatedParks: () => ['homepage', 'highest-rated-parks'] as const,
highestRatedRides: () => ['homepage', 'highest-rated-rides'] as const,
openingSoonParks: () => ['homepage', 'opening-soon-parks'] as const,
openingSoonRides: () => ['homepage', 'opening-soon-rides'] as const,
closingSoonParks: () => ['homepage', 'closing-soon-parks'] as const,
closingSoonRides: () => ['homepage', 'closing-soon-rides'] as const,
recentlyClosedParks: () => ['homepage', 'recently-closed-parks'] as const,
recentlyClosedRides: () => ['homepage', 'recently-closed-rides'] as const,
featuredParks: {
topRated: () => ['homepage', 'featured-parks', 'top-rated'] as const,
mostRides: () => ['homepage', 'featured-parks', 'most-rides'] as const,
},
},
// Parks queries
parks: {
all: () => ['parks', 'all'] as const,
detail: (slug: string) => ['parks', 'detail', slug] as const,
rides: (parkId: string) => ['parks', 'rides', parkId] as const,
},
// Rides queries
rides: {
all: () => ['rides', 'all'] as const,
detail: (parkSlug: string, rideSlug: string) => ['rides', 'detail', parkSlug, rideSlug] as const,
similar: (parkId: string, category: string, currentId: string) =>
['rides', 'similar', parkId, category, currentId] as const,
},
// Reviews queries
reviews: {
entity: (entityType: 'park' | 'ride', entityId: string) =>
['reviews', entityType, entityId] as const,
user: (userId: string, filter: string, sortBy: string) =>
['reviews', 'user', userId, filter, sortBy] as const,
},
// Photos queries
photos: {
entity: (entityType: string, entityId: string) =>
['photos', entityType, entityId] as const,
count: (entityType: string, entityId: string) =>
['photos', 'count', entityType, entityId] as const,
},
// Search queries
search: {
global: (query: string) => ['search', 'global', query] as const,
},
// Lists queries
lists: {
items: (listId: string) => ['list-items', listId] as const,
},
} as const;

View File

@@ -0,0 +1,66 @@
/**
* Request Context Manager
* Provides correlation IDs and metadata for tracking requests across the system
*/
export interface RequestContext {
requestId: string;
userId?: string;
timestamp: string;
userAgent?: string;
clientVersion?: string;
traceId?: string; // For distributed tracing across multiple requests
}
export interface RequestMetadata {
endpoint: string;
method: string;
statusCode?: number;
duration?: number;
error?: {
type: string;
message: string;
};
}
class RequestContextManager {
private contexts = new Map<string, RequestContext>();
private readonly MAX_CONTEXTS = 1000; // Prevent memory leaks
create(userId?: string, traceId?: string): RequestContext {
// Cleanup old contexts if limit reached
if (this.contexts.size >= this.MAX_CONTEXTS) {
const firstKey = this.contexts.keys().next().value;
if (firstKey) this.contexts.delete(firstKey);
}
const context: RequestContext = {
requestId: crypto.randomUUID(),
userId,
timestamp: new Date().toISOString(),
userAgent: typeof navigator !== 'undefined' ? navigator.userAgent : undefined,
clientVersion: import.meta.env.VITE_APP_VERSION || '1.0.0',
traceId: traceId || crypto.randomUUID(),
};
this.contexts.set(context.requestId, context);
return context;
}
get(requestId: string): RequestContext | undefined {
return this.contexts.get(requestId);
}
cleanup(requestId: string): void {
this.contexts.delete(requestId);
}
// Get current context from thread-local storage pattern
getCurrentContext(): RequestContext | undefined {
// This is a simplified version - in production you'd use AsyncLocalStorage equivalent
const keys = Array.from(this.contexts.keys());
return keys.length > 0 ? this.contexts.get(keys[keys.length - 1]) : undefined;
}
}
export const requestContext = new RequestContextManager();

View File

@@ -0,0 +1,190 @@
/**
* Request Tracking Service
* Tracks API requests with correlation IDs and stores metadata for monitoring
*/
import { supabase } from '@/integrations/supabase/client';
import { requestContext, type RequestContext } from './requestContext';
import { breadcrumbManager } from './errorBreadcrumbs';
import { captureEnvironmentContext } from './environmentContext';
import { handleNonCriticalError } from './errorHandler';
import { logger } from './logger';
export interface RequestTrackingOptions {
endpoint: string;
method: string;
userId?: string;
parentRequestId?: string;
traceId?: string;
}
export interface RequestResult {
requestId: string;
statusCode: number;
duration: number;
error?: {
type: string;
message: string;
};
}
/**
* Track a request and store metadata
* Returns requestId for correlation and support
*/
export async function trackRequest<T>(
options: RequestTrackingOptions,
fn: (context: RequestContext) => Promise<T>
): Promise<{ result: T; requestId: string; duration: number }> {
const context = requestContext.create(options.userId, options.traceId);
const start = Date.now();
try {
const result = await fn(context);
const duration = Date.now() - start;
// Log to database (fire and forget - don't block response)
logRequestMetadata({
requestId: context.requestId,
userId: options.userId,
endpoint: options.endpoint,
method: options.method,
statusCode: 200,
duration,
userAgent: context.userAgent,
clientVersion: context.clientVersion,
parentRequestId: options.parentRequestId,
traceId: context.traceId,
}).catch(err => {
handleNonCriticalError(err, {
action: 'Log request metadata (success)',
userId: options.userId,
metadata: {
endpoint: options.endpoint,
method: options.method,
statusCode: 200,
requestId: context.requestId
}
});
});
// Cleanup context
requestContext.cleanup(context.requestId);
return { result, requestId: context.requestId, duration };
} catch (error: unknown) {
const duration = Date.now() - start;
const errorInfo = error instanceof Error
? {
type: error.name,
message: error.message,
stack: error.stack ? error.stack.slice(0, 5000) : undefined // Limit to 5000 chars
}
: { type: 'UnknownError', message: String(error), stack: undefined };
// Capture environment context and breadcrumbs
const envContext = captureEnvironmentContext();
const breadcrumbs = breadcrumbManager.getAll();
// Log error to database (fire and forget)
logRequestMetadata({
requestId: context.requestId,
userId: options.userId,
endpoint: options.endpoint,
method: options.method,
statusCode: 500,
duration,
errorType: errorInfo.type,
errorMessage: errorInfo.message,
errorStack: errorInfo.stack,
breadcrumbs,
userAgent: context.userAgent,
clientVersion: context.clientVersion,
parentRequestId: options.parentRequestId,
traceId: context.traceId,
timezone: envContext.timezone,
referrer: typeof document !== 'undefined' ? document.referrer : undefined,
}).catch(err => {
handleNonCriticalError(err, {
action: 'Log request metadata (error)',
userId: options.userId,
metadata: {
endpoint: options.endpoint,
method: options.method,
statusCode: 500,
requestId: context.requestId,
errorType: errorInfo.type
}
});
});
// Cleanup context
requestContext.cleanup(context.requestId);
throw error;
}
}
interface RequestMetadata {
requestId: string;
userId?: string;
endpoint: string;
method: string;
statusCode: number;
duration: number;
errorType?: string;
errorMessage?: string;
errorStack?: string;
breadcrumbs?: any[];
userAgent?: string;
clientVersion?: string;
parentRequestId?: string;
traceId?: string;
timezone?: string;
referrer?: string;
}
async function logRequestMetadata(metadata: RequestMetadata): Promise<void> {
// Safe cast - RPC function exists in database
const { error } = await supabase.rpc('log_request_metadata' as 'log_request_metadata', {
p_request_id: metadata.requestId,
p_user_id: metadata.userId ?? undefined,
p_endpoint: metadata.endpoint,
p_method: metadata.method,
p_status_code: metadata.statusCode,
p_duration_ms: metadata.duration,
p_error_type: metadata.errorType ?? undefined,
p_error_message: metadata.errorMessage ?? undefined,
p_error_stack: metadata.errorStack ?? undefined,
p_breadcrumbs: metadata.breadcrumbs ? JSON.stringify(metadata.breadcrumbs) : '[]',
p_environment_context: '{}', // Legacy parameter - no longer used
p_user_agent: metadata.userAgent ?? undefined,
p_client_version: metadata.clientVersion ?? undefined,
p_parent_request_id: metadata.parentRequestId ?? undefined,
p_trace_id: metadata.traceId ?? undefined,
p_timezone: metadata.timezone ?? undefined,
p_referrer: metadata.referrer ?? undefined,
});
if (error) {
// Already logged by handleNonCriticalError in requestTracking
}
}
/**
* Simple wrapper for tracking without async operations
*/
export function createRequestContext(
userId?: string,
traceId?: string
): RequestContext {
return requestContext.create(userId, traceId);
}
/**
* Get request context for current operation
*/
export function getCurrentRequestContext(): RequestContext | undefined {
return requestContext.getCurrentContext();
}

270
src-old/lib/retryHelpers.ts Normal file
View File

@@ -0,0 +1,270 @@
/**
* Retry utility with exponential backoff
* Handles transient failures gracefully with configurable retry logic
*/
import { logger } from './logger';
import { supabase } from './supabaseClient';
export interface RetryOptions {
/** Maximum number of attempts (default: 3) */
maxAttempts?: number;
/** Base delay in milliseconds (default: 1000) */
baseDelay?: number;
/** Maximum delay in milliseconds (default: 10000) */
maxDelay?: number;
/** Multiplier for exponential backoff (default: 2) */
backoffMultiplier?: number;
/** Add jitter to prevent thundering herd (default: true) */
jitter?: boolean;
/** Callback invoked before each retry attempt */
onRetry?: (attempt: number, error: unknown, delay: number) => void;
/** Custom function to determine if error is retryable (default: isRetryableError) */
shouldRetry?: (error: unknown) => boolean;
}
/**
* Determines if an error is transient and retryable
* @param error - The error to check
* @returns true if error is retryable, false otherwise
*/
export function isRetryableError(error: unknown): boolean {
// Network/timeout errors from fetch
if (error instanceof TypeError && error.message.includes('fetch')) {
return true;
}
// Network/timeout errors
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('network') ||
message.includes('timeout') ||
message.includes('connection') ||
message.includes('econnrefused') ||
message.includes('enotfound')) {
return true;
}
}
// Supabase/PostgreSQL errors
if (error && typeof error === 'object') {
const supabaseError = error as { code?: string; status?: number };
// Connection/timeout errors
if (supabaseError.code === 'PGRST301') return true; // Connection timeout
if (supabaseError.code === 'PGRST204') return true; // Temporary failure
if (supabaseError.code === 'PGRST000') return true; // Connection error
// HTTP status codes indicating transient failures
if (supabaseError.status === 429) return true; // Rate limit
if (supabaseError.status === 503) return true; // Service unavailable
if (supabaseError.status === 504) return true; // Gateway timeout
if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
return true; // Server errors (5xx)
}
// Database-level transient errors
if (supabaseError.code === '40001') return true; // Serialization failure
if (supabaseError.code === '40P01') return true; // Deadlock detected
if (supabaseError.code === '57014') return true; // Query cancelled
if (supabaseError.code === '08000') return true; // Connection exception
if (supabaseError.code === '08003') return true; // Connection does not exist
if (supabaseError.code === '08006') return true; // Connection failure
if (supabaseError.code === '08001') return true; // Unable to connect
if (supabaseError.code === '08004') return true; // Server rejected connection
}
return false;
}
/**
* Calculates delay for next retry attempt using exponential backoff
* @param attempt - Current attempt number (0-indexed)
* @param options - Retry configuration
* @returns Delay in milliseconds
*/
function calculateBackoffDelay(attempt: number, options: Required<RetryOptions>): number {
const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
const cappedDelay = Math.min(exponentialDelay, options.maxDelay);
// Add jitter (randomness) to prevent thundering herd
if (options.jitter) {
const jitterAmount = cappedDelay * 0.3; // ±30% jitter
const jitterOffset = (Math.random() * 2 - 1) * jitterAmount;
return Math.max(0, cappedDelay + jitterOffset);
}
return cappedDelay;
}
/**
* Load retry configuration from admin settings
*/
export async function loadRetryConfig(): Promise<Required<RetryOptions>> {
try {
const { data: settings } = await supabase
.from('admin_settings')
.select('setting_key, setting_value')
.in('setting_key', [
'retry.max_attempts',
'retry.base_delay',
'retry.max_delay',
'retry.backoff_multiplier'
]);
if (!settings || settings.length === 0) {
return getDefaultRetryConfig();
}
const config: any = {};
settings.forEach(s => {
const key = s.setting_key.replace('retry.', '');
const camelKey = key.replace(/_([a-z])/g, (g) => g[1].toUpperCase());
if (key === 'backoff_multiplier') {
config[camelKey] = parseFloat(String(s.setting_value));
} else {
config[camelKey] = parseInt(String(s.setting_value));
}
});
return {
maxAttempts: config.maxAttempts ?? 3,
baseDelay: config.baseDelay ?? 1000,
maxDelay: config.maxDelay ?? 10000,
backoffMultiplier: config.backoffMultiplier ?? 2,
jitter: true,
onRetry: () => {},
shouldRetry: isRetryableError
};
} catch (error) {
logger.error('Failed to load retry config', { error });
return getDefaultRetryConfig();
}
}
function getDefaultRetryConfig(): Required<RetryOptions> {
return {
maxAttempts: 3,
baseDelay: 1000,
maxDelay: 10000,
backoffMultiplier: 2,
jitter: true,
onRetry: () => {},
shouldRetry: isRetryableError
};
}
// Cache admin config for 5 minutes
let cachedRetryConfig: Required<RetryOptions> | null = null;
let configCacheTime: number = 0;
const CONFIG_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
async function getCachedRetryConfig(): Promise<Required<RetryOptions>> {
const now = Date.now();
if (cachedRetryConfig && (now - configCacheTime < CONFIG_CACHE_TTL)) {
return cachedRetryConfig;
}
cachedRetryConfig = await loadRetryConfig();
configCacheTime = now;
return cachedRetryConfig;
}
/**
* Executes a function with retry logic and exponential backoff
*
* @param fn - Async function to execute
* @param options - Retry configuration options
* @returns Result of the function execution
* @throws Last error if all retry attempts fail
*
* @example
* ```typescript
* const result = await withRetry(
* async () => await supabase.rpc('my_function', { data }),
* {
* maxAttempts: 3,
* onRetry: (attempt, error, delay) => {
* toast.info(`Retrying... (${attempt}/3)`);
* }
* }
* );
* ```
*/
export async function withRetry<T>(
fn: () => Promise<T>,
options?: RetryOptions
): Promise<T> {
// Load config from admin settings
const adminConfig = await getCachedRetryConfig();
// Merge: options override admin settings
const config: Required<RetryOptions> = {
maxAttempts: options?.maxAttempts ?? adminConfig.maxAttempts,
baseDelay: options?.baseDelay ?? adminConfig.baseDelay,
maxDelay: options?.maxDelay ?? adminConfig.maxDelay,
backoffMultiplier: options?.backoffMultiplier ?? adminConfig.backoffMultiplier,
jitter: options?.jitter ?? adminConfig.jitter,
onRetry: options?.onRetry ?? adminConfig.onRetry,
shouldRetry: options?.shouldRetry ?? adminConfig.shouldRetry,
};
let lastError: unknown;
for (let attempt = 0; attempt < config.maxAttempts; attempt++) {
try {
// Execute the function directly
const result = await fn();
// Log successful retry if not first attempt
if (attempt > 0) {
logger.info('Retry succeeded', {
attempt: attempt + 1,
totalAttempts: config.maxAttempts
});
}
return result;
} catch (error) {
lastError = error;
// Check if we should retry
const isLastAttempt = attempt === config.maxAttempts - 1;
const shouldRetry = config.shouldRetry(error);
if (isLastAttempt || !shouldRetry) {
// Log final failure
logger.error('Retry exhausted or non-retryable error', {
attempt: attempt + 1,
maxAttempts: config.maxAttempts,
isRetryable: shouldRetry,
error: error instanceof Error ? error.message : String(error)
});
throw error;
}
// Calculate delay for next attempt
const delay = calculateBackoffDelay(attempt, config);
// Log retry attempt
logger.warn('Retrying after error', {
attempt: attempt + 1,
maxAttempts: config.maxAttempts,
delay,
error: error instanceof Error ? error.message : String(error)
});
// Invoke callback
config.onRetry(attempt + 1, error, delay);
// Wait before retrying
await new Promise(resolve => setTimeout(resolve, delay));
}
}
// This should never be reached, but TypeScript requires it
throw lastError;
}

View File

@@ -0,0 +1,240 @@
/**
* Runtime Type Validation
*
* Validates data from Supabase queries at runtime to ensure type safety.
* Uses Zod schemas to parse and validate database responses.
*/
import { z } from 'zod';
import type { Park, Ride, Company, RideModel } from '@/types/database';
// ============================================
// RUNTIME SCHEMAS (Mirror Database Types)
// ============================================
export const parkRuntimeSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
description: z.string().nullable().optional(),
park_type: z.string(),
status: z.enum(['operating', 'closed_permanently', 'closed_temporarily', 'under_construction', 'planned', 'abandoned']),
opening_date: z.string().nullable().optional(),
opening_date_precision: z.string().nullable().optional(),
closing_date: z.string().nullable().optional(),
closing_date_precision: z.string().nullable().optional(),
location_id: z.string().uuid().nullable().optional(),
operator_id: z.string().uuid().nullable().optional(),
property_owner_id: z.string().uuid().nullable().optional(),
website_url: z.string().nullable().optional(),
phone: z.string().nullable().optional(),
email: z.string().nullable().optional(),
banner_image_url: z.string().nullable().optional(),
banner_image_id: z.string().nullable().optional(),
card_image_url: z.string().nullable().optional(),
card_image_id: z.string().nullable().optional(),
ride_count: z.number().optional(),
coaster_count: z.number().optional(),
average_rating: z.number().nullable().optional(),
review_count: z.number().optional(),
view_count_7d: z.number().optional(),
view_count_30d: z.number().optional(),
view_count_all: z.number().optional(),
created_at: z.string().optional(),
updated_at: z.string().optional(),
}).passthrough(); // Allow additional fields from joins
export const rideRuntimeSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
description: z.string().nullable().optional(),
category: z.string(),
ride_sub_type: z.string().nullable().optional(),
status: z.enum(['operating', 'closed_permanently', 'closed_temporarily', 'under_construction', 'relocated', 'stored', 'demolished']),
park_id: z.string().uuid().nullable().optional(),
manufacturer_id: z.string().uuid().nullable().optional(),
designer_id: z.string().uuid().nullable().optional(),
ride_model_id: z.string().uuid().nullable().optional(),
opening_date: z.string().nullable().optional(),
opening_date_precision: z.string().nullable().optional(),
closing_date: z.string().nullable().optional(),
closing_date_precision: z.string().nullable().optional(),
height_requirement_cm: z.number().nullable().optional(),
age_requirement: z.number().nullable().optional(),
max_speed_kmh: z.number().nullable().optional(),
duration_seconds: z.number().nullable().optional(),
capacity_per_hour: z.number().nullable().optional(),
gforce_max: z.number().nullable().optional(),
inversions_count: z.number().nullable().optional(),
length_meters: z.number().nullable().optional(),
height_meters: z.number().nullable().optional(),
drop_meters: z.number().nullable().optional(),
angle_degrees: z.number().nullable().optional(),
coaster_type: z.string().nullable().optional(),
seating_type: z.string().nullable().optional(),
intensity_level: z.string().nullable().optional(),
former_names: z.array(z.unknown()).nullable().optional(),
banner_image_url: z.string().nullable().optional(),
banner_image_id: z.string().nullable().optional(),
card_image_url: z.string().nullable().optional(),
card_image_id: z.string().nullable().optional(),
average_rating: z.number().nullable().optional(),
review_count: z.number().optional(),
view_count_7d: z.number().optional(),
view_count_30d: z.number().optional(),
view_count_all: z.number().optional(),
created_at: z.string().optional(),
updated_at: z.string().optional(),
}).passthrough();
export const companyRuntimeSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
description: z.string().nullable().optional(),
company_type: z.string(),
person_type: z.string().nullable().optional(),
founded_year: z.number().nullable().optional(),
founded_date: z.string().nullable().optional(),
founded_date_precision: z.string().nullable().optional(),
headquarters_location: z.string().nullable().optional(),
website_url: z.string().nullable().optional(),
logo_url: z.string().nullable().optional(),
banner_image_url: z.string().nullable().optional(),
banner_image_id: z.string().nullable().optional(),
card_image_url: z.string().nullable().optional(),
card_image_id: z.string().nullable().optional(),
average_rating: z.number().nullable().optional(),
review_count: z.number().optional(),
view_count_7d: z.number().optional(),
view_count_30d: z.number().optional(),
view_count_all: z.number().optional(),
created_at: z.string().optional(),
updated_at: z.string().optional(),
}).passthrough();
export const rideModelRuntimeSchema = z.object({
id: z.string().uuid(),
name: z.string(),
slug: z.string(),
manufacturer_id: z.string().uuid().nullable().optional(),
category: z.string(),
description: z.string().nullable().optional(),
// Note: technical_specs deprecated - use ride_model_technical_specifications table
created_at: z.string().optional(),
updated_at: z.string().optional(),
}).passthrough();
// ============================================
// VALIDATION HELPERS
// ============================================
/**
* Validate a single park record
*/
export function validatePark(data: unknown): Park {
return parkRuntimeSchema.parse(data) as Park;
}
/**
* Validate an array of park records
*/
export function validateParks(data: unknown): Park[] {
return z.array(parkRuntimeSchema).parse(data) as Park[];
}
/**
* Safely validate parks (returns null on error)
*/
export function safeValidateParks(data: unknown): Park[] | null {
const result = z.array(parkRuntimeSchema).safeParse(data);
return result.success ? (result.data as Park[]) : null;
}
/**
* Validate a single ride record
*/
export function validateRide(data: unknown): Ride {
return rideRuntimeSchema.parse(data) as Ride;
}
/**
* Validate an array of ride records
*/
export function validateRides(data: unknown): Ride[] {
return z.array(rideRuntimeSchema).parse(data) as Ride[];
}
/**
* Safely validate rides (returns null on error)
*/
export function safeValidateRides(data: unknown): Ride[] | null {
const result = z.array(rideRuntimeSchema).safeParse(data);
return result.success ? (result.data as Ride[]) : null;
}
/**
* Validate a single company record
*/
export function validateCompany(data: unknown): Company {
return companyRuntimeSchema.parse(data) as Company;
}
/**
* Validate an array of company records
*/
export function validateCompanies(data: unknown): Company[] {
return z.array(companyRuntimeSchema).parse(data) as Company[];
}
/**
* Safely validate companies (returns null on error)
*/
export function safeValidateCompanies(data: unknown): Company[] | null {
const result = z.array(companyRuntimeSchema).safeParse(data);
return result.success ? (result.data as Company[]) : null;
}
/**
* Validate a single ride model record
*/
export function validateRideModel(data: unknown): RideModel {
return rideModelRuntimeSchema.parse(data) as RideModel;
}
/**
* Validate an array of ride model records
*/
export function validateRideModels(data: unknown): RideModel[] {
return z.array(rideModelRuntimeSchema).parse(data) as RideModel[];
}
/**
* Safely validate ride models (returns null on error)
*/
export function safeValidateRideModels(data: unknown): RideModel[] | null {
const result = z.array(rideModelRuntimeSchema).safeParse(data);
return result.success ? (result.data as RideModel[]) : null;
}
/**
* Generic validator for any entity type
*/
export function validateEntity<T>(
data: unknown,
schema: z.ZodSchema<T>
): T {
return schema.parse(data);
}
/**
* Safe generic validator (returns null on error)
*/
export function safeValidateEntity<T>(
data: unknown,
schema: z.ZodSchema<T>
): T | null {
const result = schema.safeParse(data);
return result.success ? result.data : null;
}

99
src-old/lib/sanitize.ts Normal file
View File

@@ -0,0 +1,99 @@
/**
* Input Sanitization Utilities
*
* Provides XSS protection for user-generated content.
* All user input should be sanitized before rendering to prevent injection attacks.
*/
import DOMPurify from 'dompurify';
import { logger } from './logger';
/**
* Sanitize HTML content to prevent XSS attacks
*
* @param html - Raw HTML string from user input
* @returns Sanitized HTML safe for rendering
*/
export function sanitizeHTML(html: string): string {
return DOMPurify.sanitize(html, {
ALLOWED_TAGS: ['p', 'br', 'strong', 'em', 'u', 'a', 'ul', 'ol', 'li'],
ALLOWED_ATTR: ['href', 'target', 'rel'],
ALLOW_DATA_ATTR: false,
});
}
/**
* Sanitize URL to prevent javascript: and data: protocol injection
*
* @param url - URL from user input
* @returns Sanitized URL or '#' if invalid
*/
export function sanitizeURL(url: string): string {
if (!url || typeof url !== 'string') {
return '#';
}
try {
const parsed = new URL(url);
// Only allow http, https, and mailto protocols
const allowedProtocols = ['http:', 'https:', 'mailto:'];
if (!allowedProtocols.includes(parsed.protocol)) {
logger.warn('Blocked potentially dangerous URL protocol', { protocol: parsed.protocol });
return '#';
}
return url;
} catch {
// Invalid URL format
logger.warn('Invalid URL format', { url });
return '#';
}
}
/**
* Sanitize plain text to prevent any HTML rendering
* Escapes all HTML entities
*
* @param text - Plain text from user input
* @returns Escaped text safe for rendering
*/
export function sanitizePlainText(text: string): string {
if (!text || typeof text !== 'string') {
return '';
}
return text
.replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&#x27;')
.replace(/\//g, '&#x2F;');
}
/**
* Check if a string contains potentially dangerous content
* Used for validation before sanitization
*
* @param input - User input to check
* @returns true if input contains suspicious patterns
*/
export function containsSuspiciousContent(input: string): boolean {
if (!input || typeof input !== 'string') {
return false;
}
const suspiciousPatterns = [
/<script/i,
/javascript:/i,
/on\w+\s*=/i, // Event handlers like onclick=
/<iframe/i,
/<object/i,
/<embed/i,
/data:text\/html/i,
];
return suspiciousPatterns.some(pattern => pattern.test(input));
}

View File

@@ -0,0 +1,92 @@
import { z } from 'zod';
import type { SecurityOperation } from '@/types/auth';
import type { UserRole } from '@/hooks/useUserRole';
/**
* Validation schemas for security operations
*/
export const sessionRevocationSchema = z.object({
sessionId: z.string().uuid('Invalid session ID'),
requiresConfirmation: z.boolean().default(true),
});
export const identityOperationSchema = z.object({
provider: z.enum(['google', 'discord']),
redirectTo: z.string().url().optional(),
});
export const mfaOperationSchema = z.object({
factorId: z.string().uuid('Invalid factor ID').optional(),
code: z.string().length(6, 'Code must be 6 digits').regex(/^\d+$/, 'Code must be numeric').optional(),
});
export const passwordChangeSchema = z.object({
currentPassword: z.string().min(1, 'Current password required'),
newPassword: z.string()
.min(8, 'Must be at least 8 characters')
.max(128, 'Must be less than 128 characters')
.regex(/[A-Z]/, 'Must contain uppercase letter')
.regex(/[a-z]/, 'Must contain lowercase letter')
.regex(/[0-9]/, 'Must contain number')
.regex(/[^A-Za-z0-9]/, 'Must contain special character'),
confirmPassword: z.string(),
captchaToken: z.string().min(1, 'CAPTCHA verification required'),
}).refine(data => data.newPassword === data.confirmPassword, {
message: "Passwords don't match",
path: ["confirmPassword"]
});
/**
* Determines if an operation requires CAPTCHA verification
*/
export function requiresCaptcha(operation: SecurityOperation): boolean {
const captchaOperations: SecurityOperation[] = [
'password_change',
'identity_disconnect',
'mfa_unenroll'
];
return captchaOperations.includes(operation);
}
/**
* Determines if an operation requires MFA verification for the user's role
*/
export function requiresMFA(
operation: SecurityOperation,
userRoles: UserRole[]
): boolean {
const privilegedRoles: UserRole[] = ['moderator', 'admin', 'superuser'];
const hasPrivilegedRole = userRoles.some(role => privilegedRoles.includes(role));
if (!hasPrivilegedRole) return false;
// MFA required for these operations if user is privileged
const mfaOperations: SecurityOperation[] = [
'password_change',
'session_revoke',
'mfa_unenroll'
];
return mfaOperations.includes(operation);
}
/**
* Get rate limit parameters for a security operation
*/
export function getRateLimitParams(operation: SecurityOperation): {
action: string;
maxAttempts: number;
windowMinutes: number;
} {
const limits: Record<SecurityOperation, { action: string; maxAttempts: number; windowMinutes: number }> = {
password_change: { action: 'password_change', maxAttempts: 3, windowMinutes: 60 },
identity_disconnect: { action: 'identity_disconnect', maxAttempts: 3, windowMinutes: 60 },
identity_connect: { action: 'identity_connect', maxAttempts: 5, windowMinutes: 60 },
session_revoke: { action: 'session_revoke', maxAttempts: 10, windowMinutes: 60 },
mfa_enroll: { action: 'mfa_enroll', maxAttempts: 3, windowMinutes: 60 },
mfa_unenroll: { action: 'mfa_unenroll', maxAttempts: 2, windowMinutes: 1440 }, // Phase 4: 2 per day
};
return limits[operation] || { action: operation, maxAttempts: 5, windowMinutes: 60 };
}

View File

@@ -0,0 +1,83 @@
/**
* Type-safe session storage management for authentication flows
*/
export const SessionFlags = {
MFA_STEP_UP_REQUIRED: 'mfa_step_up_required',
MFA_INTENDED_PATH: 'mfa_intended_path',
MFA_CHALLENGE_ID: 'mfa_challenge_id',
AUTH_METHOD: 'auth_method',
} as const;
export type SessionFlagKey = typeof SessionFlags[keyof typeof SessionFlags];
/**
* Set the MFA step-up required flag
*/
export function setStepUpRequired(required: boolean, intendedPath?: string): void {
if (required) {
sessionStorage.setItem(SessionFlags.MFA_STEP_UP_REQUIRED, 'true');
if (intendedPath) {
sessionStorage.setItem(SessionFlags.MFA_INTENDED_PATH, intendedPath);
}
} else {
clearStepUpFlags();
}
}
/**
* Check if MFA step-up is required
*/
export function getStepUpRequired(): boolean {
return sessionStorage.getItem(SessionFlags.MFA_STEP_UP_REQUIRED) === 'true';
}
/**
* Get the intended path after MFA verification
*/
export function getIntendedPath(): string {
return sessionStorage.getItem(SessionFlags.MFA_INTENDED_PATH) || '/';
}
/**
* Clear all MFA step-up flags
*/
export function clearStepUpFlags(): void {
sessionStorage.removeItem(SessionFlags.MFA_STEP_UP_REQUIRED);
sessionStorage.removeItem(SessionFlags.MFA_INTENDED_PATH);
sessionStorage.removeItem(SessionFlags.MFA_CHALLENGE_ID);
}
/**
* Store the authentication method used
*/
export function setAuthMethod(method: 'password' | 'oauth' | 'magiclink'): void {
sessionStorage.setItem(SessionFlags.AUTH_METHOD, method);
}
/**
* Get the authentication method used
*/
export function getAuthMethod(): 'password' | 'oauth' | 'magiclink' | null {
const method = sessionStorage.getItem(SessionFlags.AUTH_METHOD);
if (method === 'password' || method === 'oauth' || method === 'magiclink') {
return method;
}
return null;
}
/**
* Clear the authentication method
*/
export function clearAuthMethod(): void {
sessionStorage.removeItem(SessionFlags.AUTH_METHOD);
}
/**
* Clear all authentication-related session flags
*/
export function clearAllAuthFlags(): void {
Object.values(SessionFlags).forEach(flag => {
sessionStorage.removeItem(flag);
});
}

82
src-old/lib/slugUtils.ts Normal file
View File

@@ -0,0 +1,82 @@
import { supabase } from '@/lib/supabaseClient';
import { handleError } from './errorHandler';
/**
* Generate a URL-safe slug from a name
* This is the canonical slug generation function used throughout the app
*/
export function generateSlugFromName(name: string): string {
if (!name) return '';
return name
.toLowerCase()
.replace(/[^a-z0-9\s-]/g, '') // Remove non-alphanumeric except spaces and hyphens
.replace(/\s+/g, '-') // Replace spaces with hyphens
.replace(/-+/g, '-') // Replace multiple hyphens with single hyphen
.replace(/^-+|-+$/g, '') // Remove leading/trailing hyphens
.trim();
}
/**
* Validate that a user has permission to edit slugs
* Only moderators should be able to manually edit slugs
*/
export function canEditSlug(isModerator: boolean): boolean {
return isModerator;
}
/**
* Ensure slug is unique by checking database and appending number if needed
*/
export async function ensureUniqueSlug(
baseSlug: string,
tableName: 'parks' | 'rides' | 'companies' | 'ride_models',
excludeId?: string
): Promise<string> {
let slug = baseSlug;
let counter = 1;
while (true) {
// Check if slug exists
let query = supabase
.from(tableName)
.select('id')
.eq('slug', slug);
// Exclude current record when editing
if (excludeId) {
query = query.neq('id', excludeId);
}
const { data, error } = await query.limit(1);
if (error) {
handleError(error, {
action: 'Check Slug Uniqueness',
metadata: { tableName, slug }
});
throw error;
}
// If no match found, slug is unique
if (!data || data.length === 0) {
return slug;
}
// Append counter and try again
counter++;
slug = `${baseSlug}-${counter}`;
}
}
/**
* Generate and ensure unique slug in one operation
*/
export async function generateUniqueSlug(
name: string,
tableName: 'parks' | 'rides' | 'companies' | 'ride_models',
excludeId?: string
): Promise<string> {
const baseSlug = generateSlugFromName(name);
return ensureUniqueSlug(baseSlug, tableName, excludeId);
}

View File

@@ -0,0 +1,189 @@
/**
* Utility functions for performing "smart" updates on arrays
*/
/**
* Creates a stable content hash for comparison
*/
function hashContent(obj: any): string {
if (obj === null || obj === undefined) return 'null';
if (typeof obj !== 'object') return String(obj);
// Handle arrays
if (Array.isArray(obj)) {
return `[${obj.map(hashContent).join(',')}]`;
}
// Sort keys for stable hashing (CRITICAL for nested objects!)
const sortedKeys = Object.keys(obj).sort();
const parts = sortedKeys.map(key => `${key}:${hashContent(obj[key])}`);
return parts.join('|');
}
/**
* Checks if content has meaningfully changed (not just object reference)
*/
function hasContentChanged(current: any, next: any): boolean {
return hashContent(current) !== hashContent(next);
}
export interface SmartMergeOptions<T> {
compareFields?: (keyof T)[];
preserveOrder?: boolean;
addToTop?: boolean;
}
export interface MergeChanges<T> {
added: T[];
removed: T[];
updated: T[];
}
export interface SmartMergeResult<T> {
items: T[];
changes: MergeChanges<T>;
hasChanges: boolean;
}
/**
* Performs intelligent array diffing and merging
*
* @param currentItems - The current items in state
* @param newItems - The new items fetched from the server
* @param options - Configuration options
* @returns Merged items with change information
*/
export function smartMergeArray<T extends { id: string }>(
currentItems: T[],
newItems: T[],
options?: SmartMergeOptions<T>
): SmartMergeResult<T> {
const {
compareFields,
preserveOrder = false,
addToTop = true,
} = options || {};
// Create ID maps for quick lookup
const currentMap = new Map(currentItems.map(item => [item.id, item]));
const newMap = new Map(newItems.map(item => [item.id, item]));
// Detect changes
const added: T[] = [];
const removed: T[] = [];
const updated: T[] = [];
// Find added and updated items
for (const newItem of newItems) {
const currentItem = currentMap.get(newItem.id);
if (!currentItem) {
// New item
added.push(newItem);
} else if (hasItemChanged(currentItem, newItem, compareFields)) {
// Item has changed
updated.push(newItem);
}
}
// Find removed items
for (const currentItem of currentItems) {
if (!newMap.has(currentItem.id)) {
removed.push(currentItem);
}
}
const hasChanges = added.length > 0 || removed.length > 0 || updated.length > 0;
// If no changes, return current items (preserves object references)
if (!hasChanges) {
return {
items: currentItems,
changes: { added: [], removed: [], updated: [] },
hasChanges: false,
};
}
// Build merged array
let mergedItems: T[];
if (preserveOrder) {
// Preserve the order of current items
mergedItems = currentItems
.filter(item => !removed.some(r => r.id === item.id))
.map(item => {
const updatedItem = updated.find(u => u.id === item.id);
return updatedItem || item; // Use updated version if available, otherwise keep current
});
// Add new items at top or bottom
if (addToTop) {
mergedItems = [...added, ...mergedItems];
} else {
mergedItems = [...mergedItems, ...added];
}
} else {
// Use the order from newItems
mergedItems = newItems.map(newItem => {
const currentItem = currentMap.get(newItem.id);
// If item exists in current state and hasn't changed, preserve reference
if (currentItem && !updated.some(u => u.id === newItem.id)) {
return currentItem;
}
return newItem;
});
}
return {
items: mergedItems,
changes: { added, removed, updated },
hasChanges: true,
};
}
/**
* Checks if an item has changed by comparing specific fields
*/
function hasItemChanged<T>(
currentItem: T,
newItem: T,
compareFields?: (keyof T)[]
): boolean {
if (!compareFields || compareFields.length === 0) {
// If no fields specified, use content hash comparison
return hasContentChanged(currentItem, newItem);
}
// Compare only specified fields
for (const field of compareFields) {
const currentValue = currentItem[field];
const newValue = newItem[field];
// Handle nested objects/arrays with content hash
if (typeof currentValue === 'object' && typeof newValue === 'object') {
if (hasContentChanged(currentValue, newValue)) {
return true;
}
} else if (currentValue !== newValue) {
return true;
}
}
return false;
}
/**
* Creates a stable ID set for tracking interactions
*/
export function createStableIdSet(ids: string[]): Set<string> {
return new Set(ids);
}
/**
* Checks if an ID is in an interaction set
*/
export function isInteractingWith(id: string, interactionSet: Set<string>): boolean {
return interactionSet.has(id);
}

View File

@@ -0,0 +1,879 @@
import type { SubmissionItemData } from '@/types/submissions';
import type {
ParkSubmissionData,
RideSubmissionData,
CompanySubmissionData,
RideModelSubmissionData
} from '@/types/submission-data';
import { supabase } from '@/lib/supabaseClient';
import { handleNonCriticalError, getErrorMessage } from './errorHandler';
type SubmissionDataTypes =
| ParkSubmissionData
| RideSubmissionData
| CompanySubmissionData
| RideModelSubmissionData;
export interface FieldChange {
field: string;
oldValue: any;
newValue: any;
changeType: 'added' | 'removed' | 'modified';
metadata?: {
isCreatingNewLocation?: boolean;
precision?: 'day' | 'month' | 'year';
oldPrecision?: 'day' | 'month' | 'year';
newPrecision?: 'day' | 'month' | 'year';
};
}
export interface ImageChange {
type: 'banner' | 'card';
oldUrl?: string;
newUrl?: string;
oldId?: string;
newId?: string;
}
export interface PhotoChange {
type: 'added' | 'edited' | 'deleted';
photos?: Array<{ url: string; title?: string; caption?: string }>;
photo?: {
url: string;
title?: string;
caption?: string;
oldCaption?: string;
newCaption?: string;
oldTitle?: string;
newTitle?: string;
entity_type?: string;
entity_name?: string;
deletion_reason?: string;
};
}
export interface ChangesSummary {
action: 'create' | 'edit' | 'delete';
entityType: string;
entityName?: string;
fieldChanges: FieldChange[];
imageChanges: ImageChange[];
photoChanges: PhotoChange[];
hasLocationChange: boolean;
totalChanges: number;
}
/**
* Detects photo changes for a submission
*/
async function detectPhotoChanges(submissionId: string): Promise<PhotoChange[]> {
const changes: PhotoChange[] = [];
try {
// First check for photo submission items (photo additions)
const { data: photoSubmissions, error: photoError } = await supabase
.from('photo_submissions')
.select(`
*,
items:photo_submission_items(*)
`)
.eq('submission_id', submissionId);
if (photoError) {
handleNonCriticalError(photoError, {
action: 'Detect Photo Changes (Fetch Photo Submission)',
metadata: { submissionId }
});
} else {
const photoSubmission = photoSubmissions?.[0];
if (photoSubmission?.items && photoSubmission.items.length > 0) {
changes.push({
type: 'added',
photos: photoSubmission.items.map((item: any) => ({
url: item.cloudflare_image_url,
title: item.title,
caption: item.caption
}))
});
}
}
// Check for photo edits and deletions in submission_items
const { data: submissionItems, error: itemsError } = await supabase
.from('submission_items')
.select('*')
.eq('submission_id', submissionId)
.in('item_type', ['photo_edit', 'photo_delete']);
if (itemsError) {
handleNonCriticalError(itemsError, {
action: 'Detect Photo Changes (Fetch Submission Items)',
metadata: { submissionId }
});
} else if (submissionItems && submissionItems.length > 0) {
for (const item of submissionItems) {
// For photo items, data is stored differently
// Skip for now as photo submissions use separate table
continue;
}
}
} catch (err: unknown) {
handleNonCriticalError(err, {
action: 'Detect Photo Changes',
metadata: { submissionId }
});
}
return changes;
}
/**
* Detects what changed between original_data and item_data
*/
export async function detectChanges(
item: { item_data?: any; original_data?: any; item_type: string; action_type?: string },
submissionId?: string
): Promise<ChangesSummary> {
const itemData = item.item_data || {};
const originalData = item.original_data || {};
// Determine action type - prioritize explicit action_type field to preserve submission intent
let action: 'create' | 'edit' | 'delete' = 'edit';
if (item.item_type === 'photo_delete' || itemData.action === 'delete' || itemData.deleted) {
action = 'delete';
} else if (item.action_type) {
// Use explicit action_type if set (preserves original submission intent even after moderator edits)
action = item.action_type as 'create' | 'edit' | 'delete';
} else if (!originalData || Object.keys(originalData).length === 0) {
// Fall back to inference for backwards compatibility
action = 'create';
}
const fieldChanges: FieldChange[] = [];
const imageChanges: ImageChange[] = [];
let hasLocationChange = false;
if (action === 'create') {
// Check if this creation was edited by a moderator
const hasModeratorEdits = originalData && Object.keys(originalData).length > 0;
if (hasModeratorEdits) {
// Compare item_data with original_data to detect moderator changes
const allKeys = new Set([
...Object.keys(itemData),
...Object.keys(originalData)
]);
allKeys.forEach(key => {
if (!shouldTrackField(key)) return;
const oldValue = originalData[key];
const newValue = itemData[key];
// Skip if both are empty
const oldEmpty = oldValue === null || oldValue === undefined || oldValue === '';
const newEmpty = newValue === null || newValue === undefined || newValue === '';
if (oldEmpty && newEmpty) return;
// Detect the type of change
if (!isEqual(oldValue, newValue)) {
fieldChanges.push({
field: key,
oldValue,
newValue,
changeType: oldEmpty && !newEmpty ? 'added' : // Moderator added new field
newEmpty && !oldEmpty ? 'removed' : // Moderator removed field
'modified', // Moderator changed value
});
} else if (!newEmpty) {
// Field unchanged - show as 'added' (part of original submission)
fieldChanges.push({
field: key,
oldValue: null,
newValue,
changeType: 'added',
});
}
});
} else {
// No moderator edits - show all fields as 'added' (original behavior)
Object.entries(itemData).forEach(([key, value]) => {
const systemFields = ['id', 'created_at', 'updated_at', 'slug', 'images', 'image_assignments'];
const shouldShow = !systemFields.includes(key) && value !== null && value !== undefined && value !== '';
if (shouldShow) {
fieldChanges.push({
field: key,
oldValue: null,
newValue: value,
changeType: 'added',
});
}
});
}
} else if (action === 'edit') {
// Compare each field
const allKeys = new Set([
...Object.keys(itemData),
...Object.keys(originalData)
]);
allKeys.forEach(key => {
if (!shouldTrackField(key)) return;
const oldValue = originalData[key];
const newValue = itemData[key];
// Handle location changes specially - compare objects not IDs
if (key === 'location' || key === 'location_id') {
// Skip location_id if we already have a location object
if (key === 'location_id' && itemData.location) {
return;
}
const oldLoc = originalData.location;
const newLoc = itemData.location;
// Check if new location entity is being created (old has location_id, new has location object)
const isCreatingNewLocation = originalData.location_id && newLoc && typeof newLoc === 'object' && !oldLoc;
// Only compare if we have location objects with actual data
if (newLoc && typeof newLoc === 'object' && oldLoc && typeof oldLoc === 'object') {
// Compare all location data including coordinates
const locChanged =
oldLoc.city !== newLoc.city ||
oldLoc.state_province !== newLoc.state_province ||
oldLoc.country !== newLoc.country ||
oldLoc.postal_code !== newLoc.postal_code ||
Number(oldLoc.latitude) !== Number(newLoc.latitude) ||
Number(oldLoc.longitude) !== Number(newLoc.longitude);
if (locChanged) {
hasLocationChange = true;
fieldChanges.push({
field: 'location',
oldValue: oldLoc,
newValue: newLoc,
changeType: 'modified',
});
}
} else if (isCreatingNewLocation) {
// New location entity is being created - mark as location change
hasLocationChange = true;
fieldChanges.push({
field: 'location',
oldValue: { location_id: originalData.location_id },
newValue: newLoc,
changeType: 'modified',
metadata: { isCreatingNewLocation: true },
});
}
return;
}
// Skip if both are "empty" (null, undefined, or empty string)
const oldEmpty = oldValue === null || oldValue === undefined || oldValue === '';
const newEmpty = newValue === null || newValue === undefined || newValue === '';
if (oldEmpty && newEmpty) {
return; // Both empty, no change
}
// Check for changes
if (!isEqual(oldValue, newValue)) {
const fieldChange: FieldChange = {
field: key,
oldValue,
newValue,
changeType: oldEmpty && !newEmpty ? 'added' :
newEmpty && !oldEmpty ? 'removed' :
'modified',
};
// Add precision metadata for date fields
if (key.endsWith('_date') && !key.endsWith('_precision')) {
const precisionKey = `${key}_precision`;
const newPrecision = itemData[precisionKey];
const oldPrecision = originalData[precisionKey];
if (newPrecision || oldPrecision) {
fieldChange.metadata = {
...fieldChange.metadata,
precision: newPrecision || oldPrecision,
oldPrecision,
newPrecision,
};
}
}
fieldChanges.push(fieldChange);
}
});
// Detect image changes
detectImageChanges(originalData, itemData, imageChanges);
}
// Get entity name - handle different item types
let entityName = 'Unknown';
if (item.item_type === 'photo_delete' || item.item_type === 'photo_edit' || item.item_type === 'photo') {
// For photo operations, prioritize entity_name from item_data
entityName = itemData.entity_name || itemData.caption || itemData.title || 'Photo';
// If we have entity_type and entity_id but no entity_name, fetch it from DB
if (!itemData.entity_name && itemData.entity_type && itemData.entity_id) {
try {
const entityType = itemData.entity_type;
const entityId = itemData.entity_id;
if (entityType === 'park') {
const { data } = await supabase.from('parks').select('name').eq('id', entityId).maybeSingle();
if (data?.name) entityName = `${data.name} (${formatEntityType(entityType)})`;
} else if (entityType === 'ride') {
const { data } = await supabase.from('rides').select('name').eq('id', entityId).maybeSingle();
if (data?.name) entityName = `${data.name} (${formatEntityType(entityType)})`;
} else if (entityType === 'ride_model') {
const { data } = await supabase.from('ride_models').select('name').eq('id', entityId).maybeSingle();
if (data?.name) entityName = `${data.name} (${formatEntityType(entityType)})`;
} else if (['manufacturer', 'operator', 'designer', 'property_owner'].includes(entityType)) {
const { data } = await supabase.from('companies').select('name').eq('id', entityId).maybeSingle();
if (data?.name) entityName = `${data.name} (${formatEntityType(entityType)})`;
}
} catch (err) {
handleNonCriticalError(err, {
action: 'Fetch Entity Name for Photo Operation',
metadata: { entityType: itemData.entity_type, entityId: itemData.entity_id }
});
}
}
} else if (item.item_type === 'milestone') {
// Milestone submissions reference entity_id and entity_type
// Need to fetch the entity name from the database
if (itemData.entity_type && itemData.entity_id) {
try {
const entityType = itemData.entity_type;
const entityId = itemData.entity_id;
if (entityType === 'park') {
const { data } = await supabase.from('parks').select('name').eq('id', entityId).maybeSingle();
if (data?.name) {
entityName = `${data.name} - ${itemData.title || 'Milestone'}`;
}
} else if (entityType === 'ride') {
const { data: rideData } = await supabase
.from('rides')
.select('name, park:parks(name)')
.eq('id', entityId)
.maybeSingle();
if (rideData?.name) {
const parkName = rideData.park?.name;
entityName = parkName
? `${rideData.name} at ${parkName} - ${itemData.title || 'Milestone'}`
: `${rideData.name} - ${itemData.title || 'Milestone'}`;
}
}
// If lookup failed, fall back to title with entity type
if (entityName === 'Unknown' && itemData.title) {
entityName = `${formatEntityType(entityType)} - ${itemData.title}`;
}
} catch (err) {
handleNonCriticalError(err, {
action: 'Fetch Entity Name for Milestone',
metadata: { entityType: itemData.entity_type, entityId: itemData.entity_id }
});
// Fall back to just the title if database lookup fails
if (itemData.title) {
entityName = itemData.title;
}
}
} else if (itemData.title) {
// No entity reference, just use the milestone title
entityName = itemData.title;
}
// Add resolved entity name as an explicit field for milestone submissions
if (itemData.entity_type && itemData.entity_id) {
let resolvedEntityName = 'Unknown Entity';
try {
const entityType = itemData.entity_type;
const entityId = itemData.entity_id;
if (entityType === 'park') {
const { data } = await supabase.from('parks').select('name').eq('id', entityId).maybeSingle();
if (data?.name) {
resolvedEntityName = data.name;
}
} else if (entityType === 'ride') {
const { data: rideData } = await supabase
.from('rides')
.select('name, park:parks(name)')
.eq('id', entityId)
.maybeSingle();
if (rideData?.name) {
const parkName = rideData.park?.name;
resolvedEntityName = parkName
? `${rideData.name} at ${parkName}`
: rideData.name;
}
}
} catch (err) {
handleNonCriticalError(err, {
action: 'Resolve Entity Name for Field Display',
metadata: { entityType: itemData.entity_type, entityId: itemData.entity_id }
});
}
// Add entity name as an explicit field change at the beginning
fieldChanges.unshift({
field: 'entity_name',
oldValue: null,
newValue: resolvedEntityName,
changeType: 'added',
});
}
} else {
// For regular entities, use name field
entityName = itemData.name || originalData?.name || 'Unknown';
}
// Detect photo changes if submissionId provided
const photoChanges = submissionId ? await detectPhotoChanges(submissionId) : [];
return {
action,
entityType: item.item_type,
entityName,
fieldChanges,
imageChanges,
photoChanges,
hasLocationChange,
totalChanges: fieldChanges.length + imageChanges.length + photoChanges.length + (hasLocationChange ? 1 : 0)
};
}
/**
* ═══════════════════════════════════════════════════════════════════
* TYPE-SAFE CHANGE EXTRACTION FOR EDIT SUBMISSIONS
* ═══════════════════════════════════════════════════════════════════
*
* Extracts ONLY changed fields from form data compared to original entity data.
* Critical for edit operations to avoid passing unchanged fields through moderation.
*
* Benefits:
* ✅ Clearer audit trail (only see actual changes)
* ✅ Smaller database writes (no unnecessary updates)
* ✅ Correct validation (unchanged required fields stay in original_data)
* ✅ Type-safe with generics (compiler catches errors)
* ✅ Follows project knowledge: "only funnel through real changes"
*
* @param formData - New form data from user submission
* @param originalData - Original entity data from database
* @returns Object containing ONLY changed fields
*
* @example
* // Edit that only changes description
* extractChangedFields(
* { name: "Cedar Point", description: "New desc" },
* { name: "Cedar Point", description: "Old desc", location_id: "uuid-123" }
* )
* // Returns: { description: "New desc" }
* // ✅ location_id NOT included (unchanged, exists in original_data)
*/
export function extractChangedFields<T extends Record<string, any>>(
formData: T,
originalData: Partial<T>
): Partial<T> {
const changes: Partial<T> = {};
// Critical IDs that MUST always be included for relational integrity
// Even if "unchanged", these maintain foreign key relationships
const alwaysIncludeIds = [
'park_id', // Rides belong to parks
'ride_id', // For ride updates
'company_id', // For company updates
'manufacturer_id', // Rides reference manufacturers
'ride_model_id', // Rides reference models
'operator_id', // Parks reference operators
'property_owner_id', // Parks reference property owners
'designer_id', // Rides reference designers
];
Object.keys(formData).forEach((key) => {
const newValue = formData[key];
const oldValue = originalData[key];
// Always include critical relational IDs (even if unchanged)
if (alwaysIncludeIds.includes(key)) {
if (newValue !== undefined && newValue !== null) {
changes[key as keyof T] = newValue;
}
return;
}
// Skip system fields and fields that shouldn't be tracked
if (!shouldTrackField(key)) {
return;
}
// ═══ SPECIAL HANDLING: LOCATION OBJECTS ═══
// Location can be an object (from form) vs location_id (from DB)
if (key === 'location' && newValue && typeof newValue === 'object') {
const oldLoc = originalData.location;
if (!oldLoc || typeof oldLoc !== 'object' || !isEqual(oldLoc, newValue)) {
changes[key as keyof T] = newValue;
}
return;
}
// ═══ SPECIAL HANDLING: DATE FIELDS WITH PRECISION ═══
// opening_date, closing_date, founded_date, etc.
if (key.endsWith('_date') && !key.endsWith('_precision')) {
const precisionKey = `${key}_precision` as keyof T;
const newDate = newValue;
const oldDate = oldValue;
const newPrecision = formData[precisionKey];
const oldPrecision = originalData[precisionKey];
// Include if EITHER date OR precision changed
if (!isEqual(newDate, oldDate) || !isEqual(newPrecision, oldPrecision)) {
changes[key as keyof T] = newValue;
// Also include precision if it exists
if (newPrecision !== undefined) {
changes[precisionKey] = newPrecision;
}
}
return;
}
// Skip precision fields (they're handled with their date above)
if (key.endsWith('_precision')) {
return;
}
// ═══ SPECIAL HANDLING: IMAGE FIELDS ═══
// Images have their own assignment system and should always be included if present
if (key === 'images' || key.includes('image_')) {
if (!isEqual(newValue, oldValue)) {
changes[key as keyof T] = newValue;
}
return;
}
// ═══ GENERAL FIELD COMPARISON ═══
// Include field if:
// 1. Value changed from something to something else
// 2. Value added (old was empty, new has value)
//
// Do NOT include if:
// 1. Both values are empty (null, undefined, '')
// 2. Values are equal after normalization
const oldEmpty = oldValue === null || oldValue === undefined || oldValue === '';
const newEmpty = newValue === null || newValue === undefined || newValue === '';
// If both empty, don't track (no change)
if (oldEmpty && newEmpty) {
return;
}
// If values differ, include the change
if (!isEqual(oldValue, newValue)) {
changes[key as keyof T] = newValue;
}
});
return changes;
}
/**
* Determines if a field should be tracked for changes
*/
function shouldTrackField(key: string): boolean {
const excludedFields = [
// System fields
'id',
'created_at',
'updated_at',
'slug',
// Image-related (handled separately)
'images',
'image_assignments',
'banner_image_url',
'banner_image_id',
'card_image_url',
'card_image_id',
// Reference IDs (not editable, just for linking)
'park_id',
'ride_id',
'company_id',
'manufacturer_id',
'operator_id',
'designer_id',
'property_owner_id',
'location_id', // Location object is tracked instead
// Computed/aggregated fields (not editable)
'ride_count',
'review_count',
'coaster_count',
'average_rating',
// Analytics fields (auto-updated by system)
'view_count_7d',
'view_count_30d',
'view_count_all',
];
return !excludedFields.includes(key);
}
/**
* Normalizes values for consistent comparison
* Handles enum-like strings (snake_case and Title Case) by ensuring lowercase
*/
function normalizeForComparison(value: any): any {
// Null/undefined pass through
if (value == null) return value;
// Normalize enum-like strings to lowercase for comparison
// Matches patterns like: "operating", "Operating", "amusement_park", "Amusement_Park", "Amusement Park"
if (typeof value === 'string' && /^[a-zA-Z_\s]+$/.test(value)) {
return value
.toLowerCase()
.replace(/_/g, ' ') // Replace underscores with spaces
.replace(/\s+/g, ' ') // Collapse multiple spaces
.trim();
}
// Recursively normalize arrays
if (Array.isArray(value)) {
return value.map(normalizeForComparison);
}
// Recursively normalize objects (but not Date objects)
if (typeof value === 'object' && !(value instanceof Date)) {
const normalized: Record<string, any> = {};
for (const [key, val] of Object.entries(value)) {
normalized[key] = normalizeForComparison(val);
}
return normalized;
}
return value;
}
/**
* Deep equality check for values with normalization
*/
function isEqual(a: any, b: any): boolean {
// Normalize both values before comparison
const normalizedA = normalizeForComparison(a);
const normalizedB = normalizeForComparison(b);
if (normalizedA === normalizedB) return true;
if (normalizedA == null || normalizedB == null) return normalizedA === normalizedB;
if (typeof normalizedA !== typeof normalizedB) return false;
if (typeof normalizedA === 'object') {
if (Array.isArray(normalizedA) && Array.isArray(normalizedB)) {
if (normalizedA.length !== normalizedB.length) return false;
return normalizedA.every((item, i) => isEqual(item, normalizedB[i]));
}
const keysA = Object.keys(normalizedA);
const keysB = Object.keys(normalizedB);
if (keysA.length !== keysB.length) return false;
return keysA.every(key => isEqual(normalizedA[key], normalizedB[key]));
}
return false;
}
/**
* Normalizes image data structures to extract IDs consistently
*/
function getImageIds(data: any): { banner?: string; card?: string } {
const result: { banner?: string; card?: string } = {};
// Handle flat structure (original_data from DB)
if (data.banner_image_id) result.banner = data.banner_image_id;
if (data.card_image_id) result.card = data.card_image_id;
// Handle nested structure (item_data from form)
if (data.images?.uploaded && Array.isArray(data.images.uploaded)) {
const uploaded = data.images.uploaded;
// Handle banner/card assignment mapping (default to indices 0 and 1)
const bannerIdx = data.images.banner_assignment ?? 0;
const cardIdx = data.images.card_assignment ?? 1;
// Try both 'cloudflare_id' and 'id' for compatibility
if (uploaded[bannerIdx]) {
result.banner = uploaded[bannerIdx].cloudflare_id || uploaded[bannerIdx].id;
}
if (uploaded[cardIdx]) {
result.card = uploaded[cardIdx].cloudflare_id || uploaded[cardIdx].id;
}
}
return result;
}
/**
* Detects changes in banner/card images
*/
function detectImageChanges(
originalData: any,
itemData: any,
imageChanges: ImageChange[]
): void {
// Normalize both data structures before comparing
const oldIds = getImageIds(originalData);
const newIds = getImageIds(itemData);
// Check banner image
if (oldIds.banner !== newIds.banner) {
imageChanges.push({
type: 'banner',
oldUrl: originalData.banner_image_url,
newUrl: itemData.banner_image_url || itemData.images?.uploaded?.[0]?.url,
oldId: oldIds.banner,
newId: newIds.banner,
});
}
// Check card image
if (oldIds.card !== newIds.card) {
imageChanges.push({
type: 'card',
oldUrl: originalData.card_image_url,
newUrl: itemData.card_image_url || itemData.images?.uploaded?.[1]?.url,
oldId: oldIds.card,
newId: newIds.card,
});
}
}
/**
* Format field name for display
*/
export function formatFieldName(field: string): string {
return field
.replace(/_/g, ' ')
.replace(/([A-Z])/g, ' $1')
.replace(/^./, str => str.toUpperCase())
.trim();
}
/**
* Get table name for entity type
*/
function getTableNameForEntityType(entityType: string): string | null {
const mapping: Record<string, string> = {
'park': 'parks',
'ride': 'rides',
'manufacturer': 'companies',
'operator': 'companies',
'designer': 'companies',
'property_owner': 'companies',
'ride_model': 'ride_models'
};
return mapping[entityType] || null;
}
/**
* Format entity type for display
*/
function formatEntityType(entityType: string): string {
return entityType
.split('_')
.map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
.join(' ');
}
/**
* Format field value for display
*/
export function formatFieldValue(value: any, precision?: 'day' | 'month' | 'year'): string {
if (value === null || value === undefined) return 'None';
if (typeof value === 'boolean') return value ? 'Yes' : 'No';
// Handle dates with precision support
if (value instanceof Date || (typeof value === 'string' && /^\d{4}-\d{2}-\d{2}/.test(value))) {
try {
const date = new Date(value);
// Apply precision if provided
if (precision === 'year') {
return date.getFullYear().toString();
} else if (precision === 'month') {
return date.toLocaleDateString('en-US', { year: 'numeric', month: 'long' });
}
// Default: full date
return date.toLocaleDateString('en-US', { year: 'numeric', month: 'long', day: 'numeric' });
} catch {
return String(value);
}
}
// Handle arrays - show actual items
if (Array.isArray(value)) {
if (value.length === 0) return 'None';
if (value.length <= 3) return value.map(v => String(v)).join(', ');
return `${value.slice(0, 3).map(v => String(v)).join(', ')}... +${value.length - 3} more`;
}
// Handle objects - create readable summary
if (typeof value === 'object') {
// Location object
if (value.city || value.state_province || value.country) {
const parts = [value.city, value.state_province, value.country].filter(Boolean);
return parts.join(', ');
}
// Generic object - show key-value pairs
const entries = Object.entries(value).slice(0, 3);
if (entries.length === 0) return 'Empty';
return entries.map(([k, v]) => `${k}: ${v}`).join(', ');
}
// Handle year-like numbers (prevent comma formatting for founded_year)
if (typeof value === 'number') {
const currentYear = new Date().getFullYear();
if (value >= 1800 && value <= currentYear + 10) {
return value.toString(); // Don't add commas for year values
}
return value.toLocaleString(); // Add commas for other numbers
}
// Handle URLs
if (typeof value === 'string' && value.startsWith('http')) {
try {
const url = new URL(value);
return url.hostname + (url.pathname !== '/' ? url.pathname.slice(0, 30) : '');
} catch {
return value;
}
}
// Handle enum-like strings (snake_case or kebab-case) - capitalize and replace separators
if (typeof value === 'string' && (value.includes('_') || value.includes('-'))) {
return value
.split(/[_-]/)
.map(word => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
.join(' ');
}
if (typeof value === 'number') return value.toLocaleString();
return String(value);
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,87 @@
/**
* Submission Metadata Service
* Handles reading/writing submission metadata to relational tables
* Replaces content_submissions.content JSONB column
*/
import { supabase } from '@/lib/supabaseClient';
import { handleError, handleNonCriticalError } from './errorHandler';
export interface SubmissionMetadataInsert {
submission_id: string;
metadata_key: string;
metadata_value: string;
value_type?: 'string' | 'number' | 'boolean' | 'date' | 'url' | 'json';
display_order?: number;
}
/**
* Write submission metadata to relational table
*/
export async function writeSubmissionMetadata(
submissionId: string,
metadata: Record<string, unknown>
): Promise<void> {
if (!metadata || Object.keys(metadata).length === 0) return;
const entries: SubmissionMetadataInsert[] = Object.entries(metadata).map(([key, value], index) => ({
submission_id: submissionId,
metadata_key: key,
metadata_value: typeof value === 'object' ? JSON.stringify(value) : String(value),
value_type: inferValueType(value),
display_order: index,
}));
const { error } = await supabase
.from('submission_metadata')
.insert(entries);
if (error) {
handleError(error, {
action: 'Write submission metadata',
metadata: { submissionId }
});
throw error;
}
}
/**
* Read submission metadata from relational table
* Returns as key-value object for backward compatibility
*/
export async function readSubmissionMetadata(
submissionId: string
): Promise<Record<string, string>> {
const { data, error } = await supabase
.from('submission_metadata')
.select('metadata_key, metadata_value')
.eq('submission_id', submissionId)
.order('display_order');
if (error) {
handleNonCriticalError(error, {
action: 'Read submission metadata',
metadata: { submissionId }
});
return {};
}
return data.reduce((acc, row) => {
acc[row.metadata_key] = row.metadata_value;
return acc;
}, {} as Record<string, string>);
}
/**
* Infer value type for metadata storage
*/
function inferValueType(value: unknown): 'string' | 'number' | 'boolean' | 'date' | 'url' | 'json' {
if (typeof value === 'number') return 'number';
if (typeof value === 'boolean') return 'boolean';
if (typeof value === 'object') return 'json';
if (typeof value === 'string') {
if (value.startsWith('http://') || value.startsWith('https://')) return 'url';
if (/^\d{4}-\d{2}-\d{2}/.test(value)) return 'date';
}
return 'string';
}

View File

@@ -0,0 +1,192 @@
/**
* Submission Queue with IndexedDB Fallback
*
* Provides resilience when edge functions are unavailable by queuing
* submissions locally and retrying when connectivity is restored.
*
* Part of Sacred Pipeline Phase 3: Fortify Defenses
*/
import { openDB, DBSchema, IDBPDatabase } from 'idb';
interface SubmissionQueueDB extends DBSchema {
submissions: {
key: string;
value: {
id: string;
type: string;
data: any;
timestamp: number;
retries: number;
lastAttempt: number | null;
error: string | null;
};
};
}
const DB_NAME = 'thrillwiki-submission-queue';
const DB_VERSION = 1;
const STORE_NAME = 'submissions';
const MAX_RETRIES = 3;
let dbInstance: IDBPDatabase<SubmissionQueueDB> | null = null;
async function getDB(): Promise<IDBPDatabase<SubmissionQueueDB>> {
if (dbInstance) return dbInstance;
dbInstance = await openDB<SubmissionQueueDB>(DB_NAME, DB_VERSION, {
upgrade(db) {
if (!db.objectStoreNames.contains(STORE_NAME)) {
db.createObjectStore(STORE_NAME, { keyPath: 'id' });
}
},
});
return dbInstance;
}
/**
* Queue a submission for later processing
*/
export async function queueSubmission(type: string, data: any): Promise<string> {
const db = await getDB();
const id = crypto.randomUUID();
await db.add(STORE_NAME, {
id,
type,
data,
timestamp: Date.now(),
retries: 0,
lastAttempt: null,
error: null,
});
console.info(`[SubmissionQueue] Queued ${type} submission ${id}`);
return id;
}
/**
* Get all pending submissions
*/
export async function getPendingSubmissions() {
const db = await getDB();
return await db.getAll(STORE_NAME);
}
/**
* Get count of pending submissions
*/
export async function getPendingCount(): Promise<number> {
const db = await getDB();
const all = await db.getAll(STORE_NAME);
return all.length;
}
/**
* Remove a submission from the queue
*/
export async function removeFromQueue(id: string): Promise<void> {
const db = await getDB();
await db.delete(STORE_NAME, id);
console.info(`[SubmissionQueue] Removed submission ${id}`);
}
/**
* Update submission retry count and error
*/
export async function updateSubmissionRetry(
id: string,
error: string
): Promise<void> {
const db = await getDB();
const item = await db.get(STORE_NAME, id);
if (!item) return;
item.retries += 1;
item.lastAttempt = Date.now();
item.error = error;
await db.put(STORE_NAME, item);
}
/**
* Process all queued submissions
* Called when connectivity is restored or on app startup
*/
export async function processQueue(
submitFn: (type: string, data: any) => Promise<void>
): Promise<{ processed: number; failed: number }> {
const db = await getDB();
const pending = await db.getAll(STORE_NAME);
let processed = 0;
let failed = 0;
for (const item of pending) {
try {
console.info(`[SubmissionQueue] Processing ${item.type} submission ${item.id} (attempt ${item.retries + 1})`);
await submitFn(item.type, item.data);
await db.delete(STORE_NAME, item.id);
processed++;
console.info(`[SubmissionQueue] Successfully processed ${item.id}`);
} catch (error) {
const errorMsg = error instanceof Error ? error.message : String(error);
if (item.retries >= MAX_RETRIES - 1) {
// Max retries exceeded, remove from queue
await db.delete(STORE_NAME, item.id);
failed++;
console.error(`[SubmissionQueue] Max retries exceeded for ${item.id}:`, errorMsg);
} else {
// Update retry count
await updateSubmissionRetry(item.id, errorMsg);
console.warn(`[SubmissionQueue] Retry ${item.retries + 1}/${MAX_RETRIES} failed for ${item.id}:`, errorMsg);
}
}
}
return { processed, failed };
}
/**
* Clear all queued submissions (use with caution!)
*/
export async function clearQueue(): Promise<number> {
const db = await getDB();
const tx = db.transaction(STORE_NAME, 'readwrite');
const store = tx.objectStore(STORE_NAME);
const all = await store.getAll();
await store.clear();
await tx.done;
console.warn(`[SubmissionQueue] Cleared ${all.length} submissions from queue`);
return all.length;
}
/**
* Check if edge function is available
*/
export async function checkEdgeFunctionHealth(
functionUrl: string
): Promise<boolean> {
try {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 5000);
const response = await fetch(functionUrl, {
method: 'HEAD',
signal: controller.signal,
});
clearTimeout(timeout);
return response.ok || response.status === 405; // 405 = Method Not Allowed is OK
} catch (error) {
console.error('[SubmissionQueue] Health check failed:', error);
return false;
}
}

View File

@@ -0,0 +1,204 @@
/**
* Submission Rate Limiter
*
* Client-side rate limiting for submission creation to prevent
* abuse and accidental duplicate submissions.
*
* Part of Sacred Pipeline Phase 3: Enhanced Error Handling
*/
import { logger } from './logger';
interface RateLimitConfig {
maxSubmissionsPerMinute: number;
maxSubmissionsPerHour: number;
cooldownAfterLimit: number; // milliseconds
}
interface RateLimitRecord {
timestamps: number[];
lastAttempt: number;
blockedUntil?: number;
}
const DEFAULT_CONFIG: RateLimitConfig = {
maxSubmissionsPerMinute: 5,
maxSubmissionsPerHour: 20,
cooldownAfterLimit: 60000, // 1 minute
};
// Store rate limit data in memory (per session)
const rateLimitStore = new Map<string, RateLimitRecord>();
/**
* Clean up old timestamps from rate limit record
*/
function cleanupTimestamps(record: RateLimitRecord, now: number): void {
const oneHourAgo = now - 60 * 60 * 1000;
record.timestamps = record.timestamps.filter(ts => ts > oneHourAgo);
}
/**
* Get or create rate limit record for user
*/
function getRateLimitRecord(userId: string): RateLimitRecord {
if (!rateLimitStore.has(userId)) {
rateLimitStore.set(userId, {
timestamps: [],
lastAttempt: 0,
});
}
return rateLimitStore.get(userId)!;
}
/**
* Check if user can submit based on rate limits
*
* @param userId - User ID to check
* @param config - Optional rate limit configuration
* @returns Object indicating if allowed and retry information
*/
export function checkSubmissionRateLimit(
userId: string,
config: Partial<RateLimitConfig> = {}
): {
allowed: boolean;
reason?: string;
retryAfter?: number; // seconds
remaining?: number;
} {
const cfg = { ...DEFAULT_CONFIG, ...config };
const now = Date.now();
const record = getRateLimitRecord(userId);
// Clean up old timestamps
cleanupTimestamps(record, now);
// Check if user is currently blocked
if (record.blockedUntil && now < record.blockedUntil) {
const retryAfter = Math.ceil((record.blockedUntil - now) / 1000);
logger.warn('[SubmissionRateLimiter] User blocked', {
userId,
retryAfter,
});
return {
allowed: false,
reason: `Rate limit exceeded. Please wait ${retryAfter} seconds before submitting again`,
retryAfter,
};
}
// Check per-minute limit
const oneMinuteAgo = now - 60 * 1000;
const submissionsLastMinute = record.timestamps.filter(ts => ts > oneMinuteAgo).length;
if (submissionsLastMinute >= cfg.maxSubmissionsPerMinute) {
record.blockedUntil = now + cfg.cooldownAfterLimit;
const retryAfter = Math.ceil(cfg.cooldownAfterLimit / 1000);
logger.warn('[SubmissionRateLimiter] Per-minute limit exceeded', {
userId,
submissionsLastMinute,
limit: cfg.maxSubmissionsPerMinute,
retryAfter,
});
return {
allowed: false,
reason: `Too many submissions in a short time. Please wait ${retryAfter} seconds`,
retryAfter,
};
}
// Check per-hour limit
const submissionsLastHour = record.timestamps.length;
if (submissionsLastHour >= cfg.maxSubmissionsPerHour) {
record.blockedUntil = now + cfg.cooldownAfterLimit;
const retryAfter = Math.ceil(cfg.cooldownAfterLimit / 1000);
logger.warn('[SubmissionRateLimiter] Per-hour limit exceeded', {
userId,
submissionsLastHour,
limit: cfg.maxSubmissionsPerHour,
retryAfter,
});
return {
allowed: false,
reason: `Hourly submission limit reached. Please wait ${retryAfter} seconds`,
retryAfter,
};
}
// Calculate remaining submissions
const remainingMinute = cfg.maxSubmissionsPerMinute - submissionsLastMinute;
const remainingHour = cfg.maxSubmissionsPerHour - submissionsLastHour;
const remaining = Math.min(remainingMinute, remainingHour);
return {
allowed: true,
remaining,
};
}
/**
* Record a submission attempt
*
* @param userId - User ID
*/
export function recordSubmissionAttempt(userId: string): void {
const now = Date.now();
const record = getRateLimitRecord(userId);
record.timestamps.push(now);
record.lastAttempt = now;
// Clean up immediately to maintain accurate counts
cleanupTimestamps(record, now);
logger.info('[SubmissionRateLimiter] Recorded submission', {
userId,
totalLastHour: record.timestamps.length,
});
}
/**
* Clear rate limit for user (useful for testing or admin override)
*
* @param userId - User ID to clear
*/
export function clearUserRateLimit(userId: string): void {
rateLimitStore.delete(userId);
logger.info('[SubmissionRateLimiter] Cleared rate limit', { userId });
}
/**
* Get current rate limit status for user
*
* @param userId - User ID
* @returns Current status information
*/
export function getRateLimitStatus(userId: string): {
submissionsLastMinute: number;
submissionsLastHour: number;
isBlocked: boolean;
blockedUntil?: Date;
} {
const now = Date.now();
const record = getRateLimitRecord(userId);
cleanupTimestamps(record, now);
const oneMinuteAgo = now - 60 * 1000;
const submissionsLastMinute = record.timestamps.filter(ts => ts > oneMinuteAgo).length;
return {
submissionsLastMinute,
submissionsLastHour: record.timestamps.length,
isBlocked: !!(record.blockedUntil && now < record.blockedUntil),
blockedUntil: record.blockedUntil ? new Date(record.blockedUntil) : undefined,
};
}

View File

@@ -0,0 +1,207 @@
/**
* Client-side validation for entity submissions
* Prevents missing required fields before database calls
*/
export interface ValidationResult {
valid: boolean;
missingFields: string[];
errorMessage?: string;
}
export interface SlugValidationResult extends ValidationResult {
suggestedSlug?: string;
}
/**
* Validates slug format matching database constraints
* Pattern: lowercase alphanumeric with hyphens only
* No consecutive hyphens, no leading/trailing hyphens
*/
export function validateSlugFormat(slug: string): SlugValidationResult {
if (!slug) {
return {
valid: false,
missingFields: ['slug'],
errorMessage: 'Slug is required'
};
}
// Must match DB regex: ^[a-z0-9]+(-[a-z0-9]+)*$
const slugRegex = /^[a-z0-9]+(-[a-z0-9]+)*$/;
if (!slugRegex.test(slug)) {
const suggested = slug
.toLowerCase()
.replace(/[^a-z0-9-]/g, '-')
.replace(/-+/g, '-')
.replace(/^-|-$/g, '');
return {
valid: false,
missingFields: ['slug'],
errorMessage: 'Slug must be lowercase alphanumeric with hyphens only (no spaces or special characters)',
suggestedSlug: suggested
};
}
// Length constraints
if (slug.length < 2) {
return {
valid: false,
missingFields: ['slug'],
errorMessage: 'Slug too short (minimum 2 characters)'
};
}
if (slug.length > 100) {
return {
valid: false,
missingFields: ['slug'],
errorMessage: 'Slug too long (maximum 100 characters)'
};
}
// Reserved slugs that could conflict with routes
const reserved = [
'admin', 'api', 'auth', 'new', 'edit', 'delete', 'create',
'update', 'null', 'undefined', 'settings', 'profile', 'login',
'logout', 'signup', 'dashboard', 'moderator', 'moderation'
];
if (reserved.includes(slug)) {
return {
valid: false,
missingFields: ['slug'],
errorMessage: `'${slug}' is a reserved slug and cannot be used`,
suggestedSlug: `${slug}-1`
};
}
return { valid: true, missingFields: [] };
}
/**
* Validates required fields for park creation
*/
export function validateParkCreateFields(data: any): ValidationResult {
const missingFields: string[] = [];
if (!data.name?.trim()) missingFields.push('name');
if (!data.slug?.trim()) missingFields.push('slug');
if (!data.park_type) missingFields.push('park_type');
if (!data.status) missingFields.push('status');
if (missingFields.length > 0) {
return {
valid: false,
missingFields,
errorMessage: `Missing required fields for park creation: ${missingFields.join(', ')}`
};
}
// Validate slug format
if (data.slug?.trim()) {
const slugValidation = validateSlugFormat(data.slug.trim());
if (!slugValidation.valid) {
return slugValidation;
}
}
return { valid: true, missingFields: [] };
}
/**
* Validates required fields for ride creation
*/
export function validateRideCreateFields(data: any): ValidationResult {
const missingFields: string[] = [];
if (!data.name?.trim()) missingFields.push('name');
if (!data.slug?.trim()) missingFields.push('slug');
if (!data.category) missingFields.push('category');
if (!data.status) missingFields.push('status');
if (missingFields.length > 0) {
return {
valid: false,
missingFields,
errorMessage: `Missing required fields for ride creation: ${missingFields.join(', ')}`
};
}
// Validate slug format
if (data.slug?.trim()) {
const slugValidation = validateSlugFormat(data.slug.trim());
if (!slugValidation.valid) {
return slugValidation;
}
}
return { valid: true, missingFields: [] };
}
/**
* Validates required fields for company creation
*/
export function validateCompanyCreateFields(data: any): ValidationResult {
const missingFields: string[] = [];
if (!data.name?.trim()) missingFields.push('name');
if (!data.slug?.trim()) missingFields.push('slug');
if (!data.company_type) missingFields.push('company_type');
if (missingFields.length > 0) {
return {
valid: false,
missingFields,
errorMessage: `Missing required fields for company creation: ${missingFields.join(', ')}`
};
}
// Validate slug format
if (data.slug?.trim()) {
const slugValidation = validateSlugFormat(data.slug.trim());
if (!slugValidation.valid) {
return slugValidation;
}
}
return { valid: true, missingFields: [] };
}
/**
* Validates required fields for ride model creation
*/
export function validateRideModelCreateFields(data: any): ValidationResult {
const missingFields: string[] = [];
if (!data.name?.trim()) missingFields.push('name');
if (!data.slug?.trim()) missingFields.push('slug');
if (!data.manufacturer_id) missingFields.push('manufacturer_id');
if (!data.category) missingFields.push('category');
if (missingFields.length > 0) {
return {
valid: false,
missingFields,
errorMessage: `Missing required fields for ride model creation: ${missingFields.join(', ')}`
};
}
// Validate slug format
if (data.slug?.trim()) {
const slugValidation = validateSlugFormat(data.slug.trim());
if (!slugValidation.valid) {
return slugValidation;
}
}
return { valid: true, missingFields: [] };
}
/**
* Helper to throw validation error if validation fails
*/
export function assertValid(result: ValidationResult): void {
if (!result.valid) {
throw new Error(result.errorMessage);
}
}

View File

@@ -0,0 +1,99 @@
/**
* Central Supabase Client Export with Automatic Breadcrumb Tracking
*
* All application code should import from this file instead of the base client.
* This wrapper automatically tracks all database operations as breadcrumbs for error debugging.
*/
import { supabase as baseClient } from '@/integrations/supabase/client';
import { breadcrumb } from './errorBreadcrumbs';
type SupabaseClient = typeof baseClient;
/**
* Create a recursive proxy for query builders that tracks terminal method calls
*/
function createQueryProxy(queryBuilder: any, endpoint: string, operations: string[] = []): any {
return new Proxy(queryBuilder, {
get(target, prop: string | symbol) {
const value = target[prop];
if (typeof value !== 'function') {
return value;
}
// Terminal methods that execute queries and return promises
const terminalMethods = ['then', 'single', 'maybeSingle'];
if (terminalMethods.includes(String(prop))) {
// Wrap terminal method to log breadcrumb when promise resolves
return function(...args: any[]) {
const result = value.apply(target, args);
const fullOperation = operations.join('.');
// Intercept promise resolution to log breadcrumb
if (result && typeof result.then === 'function') {
return result.then(
(response: any) => {
// Log successful API call
breadcrumb.apiCall(
endpoint,
fullOperation || 'query',
response?.error ? 400 : 200
);
// Dispatch API connectivity up event on successful requests
if (!response?.error) {
window.dispatchEvent(new CustomEvent('api-connectivity-up'));
}
return response;
},
(error: any) => {
// Log failed API call
breadcrumb.apiCall(endpoint, fullOperation || 'query', 500);
throw error;
}
);
}
return result;
};
}
// Builder methods - pass through synchronously and continue proxying
return function(...args: any[]) {
const result = value.apply(target, args);
// Continue proxying the returned builder with accumulated operations
return createQueryProxy(result, endpoint, [...operations, String(prop)]);
};
}
});
}
/**
* Wrap Supabase client to automatically track API calls as breadcrumbs
*/
function wrapSupabaseClient(client: SupabaseClient): SupabaseClient {
return new Proxy(client, {
get(target, prop: string | symbol) {
const value = target[prop as keyof typeof target];
// Only wrap 'from' and 'rpc' methods for database operations
if ((prop === 'from' || prop === 'rpc') && typeof value === 'function') {
return (...args: any[]) => {
const result = (value as any).apply(target, args);
const endpoint = prop === 'from' ? `/table/${args[0]}` : `/rpc/${args[0]}`;
// Return a recursive proxy that tracks the query chain
return createQueryProxy(result, endpoint, []);
};
}
return value;
}
}) as SupabaseClient;
}
export const supabase = wrapSupabaseClient(baseClient);

View File

@@ -0,0 +1,55 @@
/**
* Type-safe Supabase query helpers
*
* Provides type-safe table query builders to eliminate `as any` assertions.
*/
import { supabase } from '@/lib/supabaseClient';
import type { Database } from '@/integrations/supabase/types';
// Define valid table names from the database schema
export type TableName = keyof Database['public']['Tables'];
/**
* Create a type-safe query builder for a specific table
*
* @example
* ```typescript
* const query = createTableQuery('parks').select('*').eq('slug', 'disneyland');
* const query2 = createTableQuery('rides').select('id, name').eq('status', 'operating');
* ```
*/
export function createTableQuery<T extends TableName>(tableName: T) {
return supabase.from(tableName);
}
/**
* Dynamically query a table by name with type safety
*
* Use this when the table name is determined at runtime (e.g., version tables).
*
* @example
* ```typescript
* const versions = await queryTable('park_versions', (q) =>
* q.select('*').eq('park_id', parkId)
* );
* ```
*/
export async function queryTable<T extends TableName>(
tableName: T,
queryBuilder: (query: ReturnType<typeof createTableQuery<T>>) => any
) {
const query = createTableQuery(tableName);
return queryBuilder(query);
}
/**
* Type-safe interface for processed uploaded images
*/
export interface ProcessedImage {
url: string;
cloudflare_id: string;
order: number;
title?: string;
caption?: string;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,430 @@
import { supabase } from '@/lib/supabaseClient';
import type { ParkSubmissionData, RideSubmissionData, CompanySubmissionData, RideModelSubmissionData } from '@/types/submission-data';
import { handleNonCriticalError } from './errorHandler';
import {
randomInt,
randomFloat,
randomItem,
randomDate,
randomDatePrecision,
shouldPopulateField,
generateWaterRideFields,
generateDarkRideFields,
generateFlatRideFields,
generateKiddieRideFields,
generateTransportationRideFields,
generateRideMaterials,
generateSourceUrl,
generateSubmissionNotes,
type FieldDensity
} from './testDataGeneratorHelpers';
// Preset configurations
export const PRESETS = {
small: { parks: 5, rides: 10, companies: 3, rideModels: 2, photos: 5 },
medium: { parks: 20, rides: 50, companies: 20, rideModels: 10, photos: 25 },
large: { parks: 100, rides: 250, companies: 100, rideModels: 50, photos: 100 },
stress: { parks: 400, rides: 1000, companies: 400, rideModels: 200, photos: 500 }
} as const;
// Word lists for realistic names
const PARK_ADJECTIVES = ['Adventure', 'Fantasy', 'Wonder', 'Magic', 'Dream', 'Thrill', 'Fun', 'Happy', 'Paradise', 'Epic'];
const PARK_NOUNS = ['World', 'Land', 'Park', 'Gardens', 'Kingdom', 'Realm', 'Island', 'Bay', 'Point', 'Valley'];
const RIDE_PREFIXES = ['Super', 'Mega', 'Ultra', 'Extreme', 'Wild', 'Crazy', 'Thunder', 'Lightning', 'Dragon', 'Phoenix'];
const RIDE_TYPES = ['Coaster', 'Drop', 'Spinner', 'Flyer', 'Racer', 'Twister', 'Loop', 'Screamer', 'Rush', 'Blast'];
const COMPANY_PREFIXES = ['Ace', 'Premier', 'Advanced', 'Dynamic', 'Innovative', 'Global', 'United', 'International'];
const COMPANY_SUFFIXES = ['Industries', 'Manufacturing', 'Enterprises', 'Solutions', 'Systems', 'Designs', 'Works', 'Creations'];
const CITIES = ['New York', 'Los Angeles', 'Chicago', 'London', 'Paris', 'Tokyo', 'Sydney', 'Toronto', 'Berlin', 'Madrid'];
const COUNTRIES = ['USA', 'UK', 'France', 'Japan', 'Australia', 'Canada', 'Germany', 'Spain', 'Italy', 'Netherlands'];
function generateSlug(name: string, counter?: number): string {
let slug = name
.toLowerCase()
.replace(/[^a-z0-9\s-]/g, '')
.replace(/\s+/g, '-')
.replace(/-+/g, '-')
.replace(/^-+|-+$/g, '');
if (counter) {
slug += `-${counter}`;
}
return slug;
}
// Test data context for tracking created entities
export class TestDataContext {
createdParks: Array<{ id: string; name: string }> = [];
createdCompanies: Array<{ id: string; name: string; type: string }> = [];
createdRideModels: Array<{ id: string; name: string; manufacturer_id: string }> = [];
stats = {
parks: 0,
rides: 0,
companies: 0,
rideModels: 0,
photos: 0,
conflicts: 0,
versionChains: 0
};
addPark(id: string, name: string) {
this.createdParks.push({ id, name });
this.stats.parks++;
}
addCompany(id: string, name: string, type: string) {
this.createdCompanies.push({ id, name, type });
this.stats.companies++;
}
addRideModel(id: string, name: string, manufacturer_id: string) {
this.createdRideModels.push({ id, name, manufacturer_id });
this.stats.rideModels++;
}
getRandomPark() {
return this.createdParks.length > 0 ? randomItem(this.createdParks) : null;
}
getRandomCompany(type?: string) {
const filtered = type
? this.createdCompanies.filter(c => c.type === type)
: this.createdCompanies;
return filtered.length > 0 ? randomItem(filtered) : null;
}
getRandomRideModel() {
return this.createdRideModels.length > 0 ? randomItem(this.createdRideModels) : null;
}
getSummary() {
return this.stats;
}
}
// Random data generators
export function generateRandomPark(counter: number, density: FieldDensity = 'mixed'): ParkSubmissionData {
const name = `${randomItem(PARK_ADJECTIVES)} ${randomItem(PARK_NOUNS)}`;
const slug = generateSlug(name, counter);
const openingDate = randomDate(1950, 2024);
const status = randomItem(['operating', 'operating', 'operating', 'seasonal']); // More likely to be operating
const parkData: ParkSubmissionData = {
name,
slug,
description: `A thrilling amusement park featuring world-class attractions and entertainment.`,
park_type: randomItem(['theme_park', 'amusement_park', 'water_park', 'adventure_park']),
status,
opening_date: openingDate,
opening_date_precision: randomDatePrecision(),
website_url: `https://${slug}.example.com`,
phone: `+1-555-${randomInt(100, 999)}-${randomInt(1000, 9999)}`,
email: `info@${slug}.example.com`,
operator_id: null,
property_owner_id: null,
location_id: null,
banner_image_url: null,
banner_image_id: null,
card_image_url: null,
card_image_id: null,
is_test_data: true
};
// Add closing date for closed/seasonal parks
if ((status === 'closed' || status === 'seasonal') && shouldPopulateField(density, counter, 'high')) {
const openYear = parseInt(openingDate.split('-')[0]);
parkData.closing_date = randomDate(openYear + 5, 2024);
parkData.closing_date_precision = randomDatePrecision();
}
// Add source URL
if (shouldPopulateField(density, counter, 'low')) {
parkData.source_url = generateSourceUrl('park', counter);
}
// Add submission notes
const notes = generateSubmissionNotes(density, counter, 'park');
if (notes) {
parkData.submission_notes = notes;
}
return parkData;
}
export function generateRandomRide(parkId: string, counter: number, density: FieldDensity = 'mixed'): RideSubmissionData {
const name = `${randomItem(RIDE_PREFIXES)} ${randomItem(RIDE_TYPES)}`;
const slug = generateSlug(name, counter);
const category = randomItem(['roller_coaster', 'flat_ride', 'water_ride', 'dark_ride', 'family_ride', 'transport_ride', 'kiddie_ride']);
const openingDate = randomDate(1980, 2024);
const status = randomItem(['operating', 'operating', 'operating', 'seasonal']);
const rideData: RideSubmissionData = {
name,
slug,
description: `An exciting ${category.replace('_', ' ')} experience for all ages.`,
category,
ride_sub_type: null,
status,
park_id: parkId,
ride_model_id: null,
manufacturer_id: null,
designer_id: null,
opening_date: openingDate,
opening_date_precision: randomDatePrecision(),
closing_date: Math.random() > 0.95 ? randomDate(2010, 2024) : null,
height_requirement: randomInt(90, 140),
age_requirement: null,
capacity_per_hour: randomInt(500, 2000),
duration_seconds: randomInt(60, 300),
max_speed_kmh: category === 'roller_coaster' ? randomFloat(40, 150, 1) : randomFloat(10, 60, 1),
max_height_meters: category === 'roller_coaster' ? randomFloat(20, 100, 1) : randomFloat(5, 30, 1),
length_meters: category === 'roller_coaster' ? randomFloat(500, 2500, 1) : null,
drop_height_meters: category === 'roller_coaster' ? randomFloat(15, 80, 1) : null,
inversions: category === 'roller_coaster' && Math.random() > 0.5 ? randomInt(1, 7) : 0,
max_g_force: category === 'roller_coaster' ? randomFloat(2, 5, 1) : null,
coaster_type: category === 'roller_coaster' ? randomItem(['steel', 'wooden', 'hybrid']) : null,
seating_type: category === 'roller_coaster' ? randomItem(['sit_down', 'inverted', 'floorless', 'suspended']) : null,
intensity_level: randomItem(['family', 'moderate', 'thrill', 'extreme']),
banner_image_url: null,
banner_image_id: null,
card_image_url: null,
card_image_id: null,
image_url: null,
is_test_data: true
};
// Add closing date precision if closing date exists
if (rideData.closing_date && shouldPopulateField(density, counter, 'medium')) {
rideData.closing_date_precision = randomDatePrecision();
}
// Add material arrays for appropriate rides
if (category === 'roller_coaster' && shouldPopulateField(density, counter, 'medium')) {
const materials = generateRideMaterials(category);
Object.assign(rideData, materials);
}
// Add category-specific fields
if (category === 'water_ride') {
Object.assign(rideData, generateWaterRideFields(density, counter));
} else if (category === 'dark_ride') {
Object.assign(rideData, generateDarkRideFields(density, counter));
} else if (category === 'flat_ride') {
Object.assign(rideData, generateFlatRideFields(density, counter));
} else if (category === 'kiddie_ride') {
Object.assign(rideData, generateKiddieRideFields(density, counter));
} else if (category === 'transport_ride') {
Object.assign(rideData, generateTransportationRideFields(density, counter));
}
// Add source URL
if (shouldPopulateField(density, counter, 'low')) {
rideData.source_url = generateSourceUrl('ride', counter);
}
// Add submission notes
const notes = generateSubmissionNotes(density, counter, 'ride');
if (notes) {
rideData.submission_notes = notes;
}
return rideData;
}
export function generateRandomCompany(type: 'manufacturer' | 'operator' | 'designer' | 'property_owner', counter: number, density: FieldDensity = 'mixed'): CompanySubmissionData {
const name = `${randomItem(COMPANY_PREFIXES)} ${randomItem(COMPANY_SUFFIXES)}`;
const slug = generateSlug(name, counter);
const foundedYear = randomInt(1950, 2020);
const companyData: CompanySubmissionData = {
name,
slug,
company_type: type,
description: `A leading ${type.replace('_', ' ')} in the amusement industry.`,
person_type: Math.random() > 0.9 ? 'individual' : 'company',
founded_year: foundedYear,
headquarters_location: `${randomItem(CITIES)}, ${randomItem(COUNTRIES)}`,
website_url: `https://${slug}.example.com`,
logo_url: null,
banner_image_url: null,
banner_image_id: null,
card_image_url: null,
card_image_id: null,
is_test_data: true
};
// Add full founded date with precision
if (shouldPopulateField(density, counter, 'medium')) {
companyData.founded_date = `${foundedYear}-01-01`;
companyData.founded_date_precision = randomItem(['year', 'month', 'day']);
}
// Add defunct date for some companies
if (shouldPopulateField(density, counter, 'low') && Math.random() > 0.85) {
const defunctYear = randomInt(foundedYear + 10, 2024);
companyData.defunct_date = `${defunctYear}-12-31`;
companyData.defunct_date_precision = randomItem(['year', 'month', 'day']);
}
// Add source URL
if (shouldPopulateField(density, counter, 'low')) {
companyData.source_url = generateSourceUrl('company', counter);
}
// Add submission notes
const notes = generateSubmissionNotes(density, counter, 'company');
if (notes) {
companyData.submission_notes = notes;
}
return companyData;
}
export function generateRandomRideModel(manufacturerId: string, counter: number, density: FieldDensity = 'mixed'): RideModelSubmissionData {
const name = `${randomItem(RIDE_PREFIXES)} Model ${randomInt(100, 999)}`;
const slug = generateSlug(name, counter);
const category = randomItem(['roller_coaster', 'flat_ride', 'water_ride', 'dark_ride']);
const modelData: RideModelSubmissionData = {
name,
slug,
manufacturer_id: manufacturerId,
category,
ride_type: randomItem(['spinning', 'launch', 'inverted', 'suspended', 'floorless']),
description: `A state-of-the-art ${category.replace('_', ' ')} model.`,
banner_image_url: null,
banner_image_id: null,
card_image_url: null,
card_image_id: null,
is_test_data: true
};
// Add source URL
if (shouldPopulateField(density, counter, 'low')) {
modelData.source_url = generateSourceUrl('ride-model', counter);
}
// Add submission notes
const notes = generateSubmissionNotes(density, counter, 'ride model');
if (notes) {
modelData.submission_notes = notes;
}
return modelData;
}
// Cleanup utilities
export async function clearTestData(): Promise<{ deleted: number }> {
try {
// Find all test submissions by querying submission_metadata
const { data: testMetadata, error: metadataError } = await supabase
.from('submission_metadata')
.select('submission_id')
.eq('metadata_key', 'is_test_data')
.eq('metadata_value', 'true');
if (metadataError) throw metadataError;
const submissionIds = testMetadata?.map(m => m.submission_id) || [];
const submissionCount = submissionIds.length;
// Delete submissions if found
if (submissionCount > 0) {
const batchSize = 100;
for (let i = 0; i < submissionIds.length; i += batchSize) {
const batch = submissionIds.slice(i, i + batchSize);
const { error: deleteError } = await supabase
.from('content_submissions')
.delete()
.in('id', batch);
if (deleteError) throw deleteError;
}
}
// Clear the test data registry
const { error: registryError } = await supabase
.from('test_data_registry')
.delete()
.neq('id', '00000000-0000-0000-0000-000000000000'); // Delete all records
if (registryError) {
handleNonCriticalError(registryError, {
action: 'Clear test data registry',
metadata: { operation: 'clearTestData' }
});
}
return { deleted: submissionCount };
} catch (error: unknown) {
handleNonCriticalError(error, {
action: 'Clear test data',
metadata: { operation: 'clearTestData' }
});
throw error;
}
}
export async function getTestDataStats(): Promise<{
total: number;
pending: number;
approved: number;
operators: number;
property_owners: number;
manufacturers: number;
designers: number;
parks: number;
rides: number;
ride_models: number;
}> {
// Query submission_metadata to find test submissions
const { data: testMetadata, error: metadataError } = await supabase
.from('submission_metadata')
.select('submission_id')
.eq('metadata_key', 'is_test_data')
.eq('metadata_value', 'true');
if (metadataError) throw metadataError;
const submissionIds = testMetadata?.map(m => m.submission_id) || [];
// Get statuses for test submissions
let data: Array<{ status: string }> = [];
if (submissionIds.length > 0) {
const { data: submissions, error } = await supabase
.from('content_submissions')
.select('status')
.in('id', submissionIds);
if (error) throw error;
data = submissions || [];
}
// Get registry counts for available dependencies
const { data: registryData } = await supabase
.from('test_data_registry')
.select('entity_type');
const registryCounts = registryData?.reduce((acc, row) => {
acc[row.entity_type] = (acc[row.entity_type] || 0) + 1;
return acc;
}, {} as Record<string, number>) || {};
const stats = {
total: data.length,
pending: data.filter(s => s.status === 'pending').length,
approved: data.filter(s => s.status === 'approved').length,
operators: registryCounts['operator'] || 0,
property_owners: registryCounts['property_owner'] || 0,
manufacturers: registryCounts['manufacturer'] || 0,
designers: registryCounts['designer'] || 0,
parks: registryCounts['park'] || 0,
rides: registryCounts['ride'] || 0,
ride_models: registryCounts['ride_model'] || 0
};
return stats;
}

View File

@@ -0,0 +1,191 @@
/**
* Test Data Generator Helpers
*
* Reusable utilities for generating category-specific fields and managing field density.
*/
// ============================================================================
// Field Density Logic
// ============================================================================
export type FieldDensity = 'minimal' | 'standard' | 'maximum' | 'mixed';
export type FieldImportance = 'low' | 'medium' | 'high';
/**
* Determine if a field should be populated based on density and importance
*/
export function shouldPopulateField(
density: FieldDensity,
index: number,
fieldImportance: FieldImportance
): boolean {
const rand = Math.random();
if (density === 'minimal') {
return fieldImportance === 'high' ? rand < 0.5 : rand < 0.1;
} else if (density === 'standard') {
return fieldImportance === 'high' ? rand < 0.8 : rand < 0.5;
} else if (density === 'maximum') {
return rand < 0.95; // Almost everything
} else if (density === 'mixed') {
// Vary by entity index
if (index % 3 === 0) return shouldPopulateField('minimal', index, fieldImportance);
if (index % 3 === 1) return shouldPopulateField('standard', index, fieldImportance);
return shouldPopulateField('maximum', index, fieldImportance);
}
return false;
}
// ============================================================================
// Random Data Generators
// ============================================================================
export function randomInt(min: number, max: number): number {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
export function randomFloat(min: number, max: number, decimals = 2): number {
const value = Math.random() * (max - min) + min;
return parseFloat(value.toFixed(decimals));
}
export function randomItem<T>(array: T[]): T {
return array[randomInt(0, array.length - 1)];
}
export function randomDate(startYear: number, endYear: number): string {
const year = randomInt(startYear, endYear);
const month = randomInt(1, 12);
const day = randomInt(1, 28);
return `${year}-${String(month).padStart(2, '0')}-${String(day).padStart(2, '0')}`;
}
export function randomDatePrecision(): 'day' | 'month' | 'year' {
return randomItem(['day', 'month', 'year']);
}
// ============================================================================
// Category-Specific Field Generators
// ============================================================================
/**
* Generate water ride specific fields
*/
export function generateWaterRideFields(density: FieldDensity, index: number) {
if (!shouldPopulateField(density, index, 'medium')) return {};
return {
water_depth_cm: randomInt(30, 300),
splash_height_meters: randomFloat(1, 20, 1),
wetness_level: randomItem(['dry', 'light', 'moderate', 'soaked']),
flume_type: randomItem(['log', 'tube', 'raft', 'boat']),
boat_capacity: randomInt(2, 20)
};
}
/**
* Generate dark ride specific fields
*/
export function generateDarkRideFields(density: FieldDensity, index: number) {
if (!shouldPopulateField(density, index, 'medium')) return {};
return {
theme_name: randomItem(['Space Adventure', 'Haunted Mansion', 'Underwater Journey', 'Fantasy Quest']),
story_description: 'An immersive journey through a themed environment with exciting scenes.',
show_duration_seconds: randomInt(180, 600),
animatronics_count: randomInt(5, 50),
projection_type: randomItem(['2d', '3d', 'holographic', 'mixed']),
ride_system: randomItem(['omnimover', 'tracked', 'trackless', 'boat']),
scenes_count: randomInt(5, 20)
};
}
/**
* Generate flat ride specific fields
*/
export function generateFlatRideFields(density: FieldDensity, index: number) {
if (!shouldPopulateField(density, index, 'medium')) return {};
return {
rotation_type: randomItem(['horizontal', 'vertical', 'both', 'none']),
motion_pattern: randomItem(['circular', 'pendulum', 'spinning', 'wave', 'random']),
platform_count: randomInt(1, 8),
swing_angle_degrees: randomInt(45, 180),
rotation_speed_rpm: randomInt(5, 30),
arm_length_meters: randomFloat(5, 25, 1),
max_height_reached_meters: randomFloat(10, 80, 1)
};
}
/**
* Generate kiddie ride specific fields
*/
export function generateKiddieRideFields(density: FieldDensity, index: number) {
if (!shouldPopulateField(density, index, 'medium')) return {};
return {
min_age: randomInt(2, 5),
max_age: randomInt(8, 12),
educational_theme: randomItem(['counting', 'colors', 'animals', 'shapes', 'letters']),
character_theme: randomItem(['dinosaurs', 'princesses', 'superheroes', 'animals', 'vehicles'])
};
}
/**
* Generate transportation ride specific fields
*/
export function generateTransportationRideFields(density: FieldDensity, index: number) {
if (!shouldPopulateField(density, index, 'medium')) return {};
return {
transport_type: randomItem(['monorail', 'train', 'skyway', 'gondola', 'ferry']),
route_length_meters: randomInt(500, 5000),
stations_count: randomInt(2, 8),
vehicle_capacity: randomInt(20, 200),
vehicles_count: randomInt(2, 12),
round_trip_duration_seconds: randomInt(300, 1800)
};
}
/**
* Generate material arrays for rides
*/
export function generateRideMaterials(category: string) {
if (category === 'roller_coaster') {
return {
track_material: [randomItem(['steel', 'wood', 'hybrid'])],
support_material: [randomItem(['steel', 'wood', 'concrete'])],
propulsion_method: [randomItem(['chain_lift', 'cable_lift', 'launch', 'gravity', 'tire_drive'])]
};
}
return {};
}
/**
* Generate source URL for testing
*/
export function generateSourceUrl(entityType: string, index: number): string {
return `https://example.com/source/${entityType}/${index + 1}`;
}
/**
* Generate submission notes based on field density
*/
export function generateSubmissionNotes(
density: FieldDensity,
index: number,
entityType: string
): string | undefined {
if (!shouldPopulateField(density, index, 'low')) return undefined;
const notes = [
`Updated ${entityType} information from official source`,
`Added comprehensive data for testing purposes`,
`Verified information through multiple sources`,
`Historical data added for completeness`,
`Technical specifications verified with manufacturer`
];
return randomItem(notes);
}

View File

@@ -0,0 +1,216 @@
/**
* Timeout Detection & Recovery
*
* Detects timeout errors from various sources (fetch, Supabase, edge functions)
* and provides recovery strategies.
*
* Part of Sacred Pipeline Phase 4: Transaction Resilience
*/
import { logger } from './logger';
export interface TimeoutError extends Error {
isTimeout: true;
source: 'fetch' | 'supabase' | 'edge-function' | 'database' | 'unknown';
originalError?: unknown;
duration?: number;
}
/**
* Check if an error is a timeout error
*/
export function isTimeoutError(error: unknown): boolean {
if (!error) return false;
// Check for AbortController timeout
if (error instanceof DOMException && error.name === 'AbortError') {
return true;
}
// Check for fetch timeout
if (error instanceof TypeError && error.message.includes('aborted')) {
return true;
}
// Check error message for timeout keywords
if (error instanceof Error) {
const message = error.message.toLowerCase();
return (
message.includes('timeout') ||
message.includes('timed out') ||
message.includes('deadline exceeded') ||
message.includes('request aborted') ||
message.includes('etimedout')
);
}
// Check Supabase/HTTP timeout status codes
if (error && typeof error === 'object') {
const errorObj = error as { status?: number; code?: string; message?: string };
// HTTP 408 Request Timeout
if (errorObj.status === 408) return true;
// HTTP 504 Gateway Timeout
if (errorObj.status === 504) return true;
// Supabase timeout codes
if (errorObj.code === 'PGRST301') return true; // Connection timeout
if (errorObj.code === '57014') return true; // PostgreSQL query cancelled
// Check message
if (errorObj.message?.toLowerCase().includes('timeout')) return true;
}
return false;
}
/**
* Wrap an error as a TimeoutError with source information
*/
export function wrapAsTimeoutError(
error: unknown,
source: TimeoutError['source'],
duration?: number
): TimeoutError {
const message = error instanceof Error ? error.message : 'Operation timed out';
const timeoutError = new Error(message) as TimeoutError;
timeoutError.name = 'TimeoutError';
timeoutError.isTimeout = true;
timeoutError.source = source;
timeoutError.originalError = error;
timeoutError.duration = duration;
return timeoutError;
}
/**
* Execute a function with a timeout wrapper
*
* @param fn - Function to execute
* @param timeoutMs - Timeout in milliseconds
* @param source - Source identifier for error tracking
* @returns Promise that resolves or rejects with timeout
*/
export async function withTimeout<T>(
fn: () => Promise<T>,
timeoutMs: number,
source: TimeoutError['source'] = 'unknown'
): Promise<T> {
const startTime = Date.now();
const controller = new AbortController();
const timeoutId = setTimeout(() => {
controller.abort();
}, timeoutMs);
try {
// Execute the function with abort signal if supported
const result = await fn();
clearTimeout(timeoutId);
return result;
} catch (error) {
clearTimeout(timeoutId);
const duration = Date.now() - startTime;
// Check if error is timeout-related
if (isTimeoutError(error) || controller.signal.aborted) {
const timeoutError = wrapAsTimeoutError(error, source, duration);
logger.error('Operation timed out', {
source,
duration,
timeoutMs,
originalError: error instanceof Error ? error.message : String(error)
});
throw timeoutError;
}
// Re-throw non-timeout errors
throw error;
}
}
/**
* Categorize timeout severity for recovery strategy
*/
export function getTimeoutSeverity(error: TimeoutError): 'minor' | 'moderate' | 'critical' {
const { duration, source } = error;
// No duration means immediate abort - likely user action or critical failure
if (!duration) return 'critical';
// Database/edge function timeouts are more critical
if (source === 'database' || source === 'edge-function') {
if (duration > 30000) return 'critical'; // >30s
if (duration > 10000) return 'moderate'; // >10s
return 'minor';
}
// Fetch timeouts
if (source === 'fetch') {
if (duration > 60000) return 'critical'; // >60s
if (duration > 20000) return 'moderate'; // >20s
return 'minor';
}
return 'moderate';
}
/**
* Get recommended retry strategy based on timeout error
*/
export function getTimeoutRetryStrategy(error: TimeoutError): {
shouldRetry: boolean;
delayMs: number;
maxAttempts: number;
increaseTimeout: boolean;
} {
const severity = getTimeoutSeverity(error);
switch (severity) {
case 'minor':
return {
shouldRetry: true,
delayMs: 1000,
maxAttempts: 3,
increaseTimeout: false,
};
case 'moderate':
return {
shouldRetry: true,
delayMs: 3000,
maxAttempts: 2,
increaseTimeout: true, // Increase timeout by 50%
};
case 'critical':
return {
shouldRetry: false, // Don't auto-retry critical timeouts
delayMs: 5000,
maxAttempts: 1,
increaseTimeout: true,
};
}
}
/**
* User-friendly timeout error message
*/
export function getTimeoutErrorMessage(error: TimeoutError): string {
const severity = getTimeoutSeverity(error);
switch (severity) {
case 'minor':
return 'The request took longer than expected. Retrying...';
case 'moderate':
return 'The server is taking longer than usual to respond. Please wait while we retry.';
case 'critical':
return 'The operation timed out. Please check your connection and try again.';
}
}

View File

@@ -0,0 +1,22 @@
/**
* Type assertion helpers for TypeScript strict mode compatibility
* These help bridge database types (null) with application types (undefined)
*/
export function nullToUndefined<T>(value: T | null): T | undefined {
return value === null ? undefined : value;
}
export function convertNullsToUndefined<T extends Record<string, any>>(obj: T): { [K in keyof T]: T[K] extends (infer U | null) ? (U | undefined) : T[K] } {
const result: any = {};
for (const key in obj) {
const value = obj[key];
result[key] = value === null ? undefined : value;
}
return result;
}
// Type guard for checking if value is not null/undefined
export function isDefined<T>(value: T | null | undefined): value is T {
return value !== null && value !== undefined;
}

View File

@@ -0,0 +1,149 @@
/**
* Type Conversion Utilities
*
* This module provides type-safe conversion functions for handling:
* - Database nulls → Application undefined
* - Json types → Form data types
* - Type guards for runtime validation
*
* These utilities eliminate the need for `as any` casts by providing
* explicit, type-safe conversions at data boundaries.
*/
import type { Json } from '@/integrations/supabase/types';
/**
* Convert database null to application undefined
* Database returns null, but forms expect undefined for optional fields
*/
export function nullToUndefined<T>(value: T | null): T | undefined {
return value === null ? undefined : value;
}
/**
* Convert all null values in an object to undefined
* Use at database boundaries when fetching data for forms
*/
export function convertNullsToUndefined<T extends Record<string, unknown>>(
obj: T
): { [K in keyof T]: T[K] extends (infer U | null) ? (U | undefined) : T[K] } {
const result: Record<string, unknown> = {};
for (const key in obj) {
const value = obj[key];
result[key] = value === null ? undefined : value;
}
return result as { [K in keyof T]: T[K] extends (infer U | null) ? (U | undefined) : T[K] };
}
/**
* Photo item interface for type-safe photo handling
*/
export interface PhotoItem {
url: string;
caption?: string;
title?: string;
cloudflare_id?: string;
order_index?: number;
}
/**
* Type guard: Check if value is a valid photo array
*/
export function isPhotoArray(data: unknown): data is PhotoItem[] {
return (
Array.isArray(data) &&
data.every(
(item) =>
typeof item === 'object' &&
item !== null &&
'url' in item &&
typeof (item as Record<string, unknown>).url === 'string'
)
);
}
/**
* Safely extract photo array from Json data
* Handles both direct arrays and nested { photos: [...] } structures
*/
export function extractPhotoArray(data: Json): PhotoItem[] {
// Direct array
if (isPhotoArray(data)) {
return data;
}
// Nested photos property
if (typeof data === 'object' && data !== null && !Array.isArray(data)) {
const obj = data as Record<string, Json>;
if ('photos' in obj && isPhotoArray(obj.photos)) {
return obj.photos;
}
}
return [];
}
/**
* Type guard: Check if Json is a plain object (not array or null)
*/
export function isJsonObject(data: Json): data is Record<string, Json> {
return typeof data === 'object' && data !== null && !Array.isArray(data);
}
/**
* Convert Json to a form-compatible object
* Converts all null values to undefined for form compatibility
*/
export function jsonToFormData(data: Json | unknown): Record<string, unknown> {
if (!isJsonObject(data as Json)) {
return {};
}
const result: Record<string, unknown> = {};
for (const [key, value] of Object.entries(data as Record<string, Json>)) {
result[key] = value === null ? undefined : value;
}
return result;
}
/**
* Type guard: Check if object has a specific property with a given type
*/
export function hasProperty<T>(
obj: unknown,
key: string,
typeCheck: (value: unknown) => value is T
): obj is Record<string, unknown> & { [K in typeof key]: T } {
return (
typeof obj === 'object' &&
obj !== null &&
key in obj &&
typeCheck((obj as Record<string, unknown>)[key])
);
}
/**
* Safely get a property from Json with type assertion
*/
export function getProperty<T = unknown>(
data: Json,
key: string,
defaultValue?: T
): T | undefined {
if (isJsonObject(data) && key in data) {
const value = data[key];
return value === null ? defaultValue : (value as T);
}
return defaultValue;
}
/**
* Convert array of database records with nulls to form-compatible records
* Useful when fetching lists of entities for display/editing
*/
export function convertArrayNullsToUndefined<T extends Record<string, unknown>>(
arr: T[]
): Array<{ [K in keyof T]: T[K] extends (infer U | null) ? (U | undefined) : T[K] }> {
return arr.map((item) => convertNullsToUndefined(item));
}

View File

@@ -0,0 +1,97 @@
/**
* Unit Validation Utilities
* Ensures all stored units comply with metric-only storage rule
*
* Custom Knowledge Requirement:
* "Unit Conversion Rules: Storage: Always metric in DB (km/h, m, cm, kg)"
*/
import { convertValueToMetric, getMetricUnit } from './units';
export const METRIC_UNITS = [
'km/h', // Speed
'm', // Distance (large)
'cm', // Distance (small)
'kg', // Weight
'g', // Weight (small)
'G', // G-force
'celsius', // Temperature
'seconds', // Time
'minutes', // Time
'hours', // Time
'count', // Dimensionless
'%', // Percentage
] as const;
export const IMPERIAL_UNITS = [
'mph', // Speed
'ft', // Distance
'in', // Distance
'lbs', // Weight
'fahrenheit', // Temperature
] as const;
export type MetricUnit = typeof METRIC_UNITS[number];
export type ImperialUnit = typeof IMPERIAL_UNITS[number];
/**
* Check if a unit is metric
*/
export function isMetricUnit(unit: string): unit is MetricUnit {
return METRIC_UNITS.includes(unit as MetricUnit);
}
/**
* Validate that a unit is metric (throws if not)
*/
export function validateMetricUnit(unit: string, fieldName: string = 'unit'): void {
if (!isMetricUnit(unit)) {
throw new Error(
`${fieldName} must be metric. Received "${unit}", expected one of: ${METRIC_UNITS.join(', ')}`
);
}
}
/**
* Ensure value is in metric units, converting if necessary
*
* @example
* ```typescript
* const { value, unit } = ensureMetricUnit(60, 'mph');
* // Returns: { value: 96.56, unit: 'km/h' }
* ```
*/
export function ensureMetricUnit(
value: number,
unit: string
): { value: number; unit: MetricUnit } {
if (isMetricUnit(unit)) {
return { value, unit };
}
// Convert imperial to metric
const metricValue = convertValueToMetric(value, unit);
const metricUnit = getMetricUnit(unit) as MetricUnit;
return { value: metricValue, unit: metricUnit };
}
/**
* Batch validate an array of measurements
*/
export function validateMetricUnits(
measurements: Array<{ value: number; unit: string; name: string }>
): { valid: boolean; errors: string[] } {
const errors: string[] = [];
measurements.forEach(({ unit, name }) => {
if (!isMetricUnit(unit)) {
errors.push(`${name}: "${unit}" is not a valid metric unit`);
}
});
return {
valid: errors.length === 0,
errors
};
}

179
src-old/lib/units.ts Normal file
View File

@@ -0,0 +1,179 @@
export type MeasurementSystem = 'metric' | 'imperial';
export interface UnitPreferences {
measurement_system: MeasurementSystem;
temperature: 'celsius' | 'fahrenheit';
auto_detect: boolean;
}
// Get unit labels (helper functions - use getDisplayUnit for conversion logic)
export function getSpeedUnit(system: MeasurementSystem): string {
return system === 'imperial' ? 'mph' : 'km/h';
}
export function getDistanceUnit(system: MeasurementSystem): string {
return system === 'imperial' ? 'ft' : 'm';
}
export function getHeightUnit(system: MeasurementSystem): string {
return system === 'imperial' ? 'in' : 'cm';
}
export function getShortDistanceUnit(system: MeasurementSystem): string {
return system === 'imperial' ? 'ft' : 'm';
}
// Countries that primarily use imperial system
export const IMPERIAL_COUNTRIES = ['US', 'LR', 'MM'];
// Detect measurement system from country code
export function getMeasurementSystemFromCountry(countryCode: string): MeasurementSystem {
return IMPERIAL_COUNTRIES.includes(countryCode.toUpperCase()) ? 'imperial' : 'metric';
}
// Unit type detection
export type UnitType = 'speed' | 'distance' | 'height' | 'weight' | 'unknown';
export function detectUnitType(unit: string): UnitType {
const normalized = unit.toLowerCase().trim();
// Speed units
if (['km/h', 'kmh', 'kph', 'mph', 'm/s', 'ms'].includes(normalized)) {
return 'speed';
}
// Distance units (meters/feet)
if (['m', 'meter', 'meters', 'metre', 'metres', 'ft', 'feet', 'foot'].includes(normalized)) {
return 'distance';
}
// Height units (cm/inches)
if (['cm', 'centimeter', 'centimeters', 'in', 'inch', 'inches'].includes(normalized)) {
return 'height';
}
// Weight units
if (['kg', 'kilogram', 'kilograms', 'lb', 'lbs', 'pound', 'pounds'].includes(normalized)) {
return 'weight';
}
return 'unknown';
}
// Convert any value to metric based on its unit
export function convertValueToMetric(value: number, unit: string): number {
const normalized = unit.toLowerCase().trim();
// Speed conversions to km/h
if (normalized === 'mph') {
return Math.round(value / 0.621371);
}
if (['m/s', 'ms'].includes(normalized)) {
return Math.round(value * 3.6);
}
if (['km/h', 'kmh', 'kph'].includes(normalized)) {
return Math.round(value);
}
// Distance conversions to meters
if (['ft', 'feet', 'foot'].includes(normalized)) {
return Math.round(value / 3.28084);
}
if (['m', 'meter', 'meters', 'metre', 'metres'].includes(normalized)) {
return Math.round(value);
}
// Height conversions to cm
if (['in', 'inch', 'inches'].includes(normalized)) {
return Math.round(value / 0.393701);
}
if (['cm', 'centimeter', 'centimeters'].includes(normalized)) {
return Math.round(value);
}
// Weight conversions to kg
if (['lb', 'lbs', 'pound', 'pounds'].includes(normalized)) {
return Math.round(value / 2.20462);
}
if (['kg', 'kilogram', 'kilograms'].includes(normalized)) {
return Math.round(value);
}
// Unknown unit, return as-is
return value;
}
// Convert metric value to target unit
export function convertValueFromMetric(value: number, targetUnit: string, metricUnit: string): number {
const normalized = targetUnit.toLowerCase().trim();
const metricNormalized = metricUnit.toLowerCase().trim();
// Speed conversions from km/h
if (metricNormalized === 'km/h' || metricNormalized === 'kmh' || metricNormalized === 'kph') {
if (normalized === 'mph') {
return Math.round(value * 0.621371);
}
if (normalized === 'm/s' || normalized === 'ms') {
return Math.round(value / 3.6);
}
}
// Distance conversions from meters
if (metricNormalized === 'm' || metricNormalized === 'meter' || metricNormalized === 'meters') {
if (['ft', 'feet', 'foot'].includes(normalized)) {
return Math.round(value * 3.28084);
}
}
// Height conversions from cm
if (metricNormalized === 'cm' || metricNormalized === 'centimeter' || metricNormalized === 'centimeters') {
if (['in', 'inch', 'inches'].includes(normalized)) {
return Math.round(value * 0.393701);
}
}
// Weight conversions from kg
if (metricNormalized === 'kg' || metricNormalized === 'kilogram' || metricNormalized === 'kilograms') {
if (['lb', 'lbs', 'pound', 'pounds'].includes(normalized)) {
return Math.round(value * 2.20462);
}
}
return value;
}
// Get metric unit for a given unit type
export function getMetricUnit(unit: string): string {
const unitType = detectUnitType(unit);
switch (unitType) {
case 'speed':
return 'km/h';
case 'distance':
return 'm';
case 'height':
return 'cm';
case 'weight':
return 'kg';
default:
return unit;
}
}
// Get display unit based on unit type and measurement system
export function getDisplayUnit(metricUnit: string, system: MeasurementSystem): string {
const unitType = detectUnitType(metricUnit);
switch (unitType) {
case 'speed':
return system === 'imperial' ? 'mph' : 'km/h';
case 'distance':
return system === 'imperial' ? 'ft' : 'm';
case 'height':
return system === 'imperial' ? 'in' : 'cm';
case 'weight':
return system === 'imperial' ? 'lbs' : 'kg';
default:
return metricUnit;
}
}

13
src-old/lib/utils.ts Normal file
View File

@@ -0,0 +1,13 @@
import { clsx, type ClassValue } from "clsx";
import { twMerge } from "tailwind-merge";
/**
* Combines multiple class names into a single string, merging Tailwind CSS classes intelligently.
* Uses clsx for conditional class handling and tailwind-merge to resolve conflicts.
*
* @param inputs - Class values to combine (strings, objects, arrays)
* @returns Merged class string with Tailwind conflicts resolved
*/
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs));
}

108
src-old/lib/validation.ts Normal file
View File

@@ -0,0 +1,108 @@
import { z } from 'zod';
// Reserved usernames for security and system purposes
const FORBIDDEN_USERNAMES = new Set([
// System/Admin accounts
'admin', 'administrator', 'moderator', 'mod', 'owner', 'root', 'system', 'support',
'staff', 'team', 'official', 'verified', 'bot', 'api', 'service',
// Company/Brand protection
'thrillwiki', 'lovable', 'supabase', 'cloudflare',
// Common system routes/pages
'www', 'mail', 'email', 'ftp', 'blog', 'forum', 'shop', 'store', 'app', 'mobile',
'help', 'support', 'contact', 'about', 'terms', 'privacy', 'security', 'legal',
'login', 'signup', 'register', 'signin', 'signout', 'logout', 'auth', 'oauth',
'profile', 'profiles', 'user', 'users', 'account', 'accounts', 'settings',
'dashboard', 'console', 'panel', 'manage', 'management',
// Technical terms
'null', 'undefined', 'true', 'false', 'delete', 'remove', 'test', 'demo',
'localhost', 'example', 'temp', 'temporary', 'guest', 'anonymous', 'anon',
// Offensive prevention (basic)
'fuck', 'shit', 'damn', 'hell', 'ass', 'bitch', 'bastard', 'crap',
'nazi', 'hitler', 'stalin', 'terrorist', 'kill', 'death', 'murder',
// Impersonation prevention
'ceo', 'president', 'manager', 'director', 'executive', 'founder'
]);
export const usernameSchema = z
.string()
.min(3, 'Username must be at least 3 characters')
.max(30, 'Username must be less than 30 characters')
.regex(
/^[a-zA-Z0-9]([a-zA-Z0-9_-]*[a-zA-Z0-9])?$/,
'Username must start and end with letters/numbers, and can only contain letters, numbers, hyphens, and underscores'
)
.refine(
(val) => !/[-_]{2,}/.test(val),
'Username cannot contain consecutive hyphens or underscores'
)
.transform(val => val.toLowerCase())
.refine(val => !FORBIDDEN_USERNAMES.has(val), 'This username is not allowed');
// Display name validation with content filtering
export const displayNameSchema = z
.string()
.max(100, 'Display name must be less than 100 characters')
.refine(val => {
if (!val) return true;
const lowerVal = val.toLowerCase();
// Check for basic offensive content in display names
const offensiveTerms = ['nazi', 'hitler', 'terrorist', 'kill', 'murder', 'fuck', 'shit'];
return !offensiveTerms.some(term => lowerVal.includes(term));
}, 'Display name contains inappropriate content')
.optional();
// Password validation schema with complexity requirements
export const passwordSchema = z.object({
currentPassword: z.string().min(1, 'Current password is required'),
newPassword: z.string()
.min(8, 'Password must be at least 8 characters')
.max(128, 'Password must be less than 128 characters')
.regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
.regex(/[a-z]/, 'Password must contain at least one lowercase letter')
.regex(/[0-9]/, 'Password must contain at least one number')
.regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character'),
confirmPassword: z.string()
}).refine(data => data.newPassword === data.confirmPassword, {
message: "Passwords don't match",
path: ["confirmPassword"]
});
// Bio field validation with sanitization
export const bioSchema = z.string()
.max(500, 'Bio must be less than 500 characters')
.transform(val => val?.trim())
.refine(
val => !val || !/[<>]/.test(val),
'Bio cannot contain HTML tags'
)
.optional();
// Personal location field validation with sanitization
export const personalLocationSchema = z.string()
.max(100, 'Location must be less than 100 characters')
.transform(val => val?.trim())
.refine(
val => !val || !/[<>{}]/.test(val),
'Location cannot contain special characters'
)
.optional();
// Preferred pronouns validation
export const preferredPronounsSchema = z
.string()
.trim()
.max(20, { message: "Pronouns must be less than 20 characters" })
.optional();
export const profileEditSchema = z.object({
username: usernameSchema,
display_name: displayNameSchema,
bio: bioSchema,
});
export type ProfileEditForm = z.infer<typeof profileEditSchema>;

View File

@@ -0,0 +1,220 @@
/**
* Modern versioning utilities for relational version tables
*
* These functions work with the new trigger-based versioning system.
* All version creation is automatic via database triggers - no manual calls needed.
*
* @see docs/versioning/ARCHITECTURE.md for system design
* @see docs/versioning/API.md for complete API reference
*/
import { supabase } from '@/lib/supabaseClient';
import type { EntityType } from '@/types/versioning';
import { createTableQuery } from './supabaseHelpers';
import { handleNonCriticalError } from './errorHandler';
/**
* Manually trigger cleanup of old versions for a specific entity type
*
* Note: This should normally run automatically via pg_cron, but can be called manually.
*
* @param entityType - The entity type to clean up ('park', 'ride', 'company', 'ride_model')
* @param keepCount - Number of most recent versions to keep per entity (default: 50)
* @returns Number of versions deleted
*
* @example
* ```typescript
* const deleted = await cleanupVersions('park', 50);
* console.log(`Deleted ${deleted} old park versions`);
* ```
*/
export async function cleanupVersions(
entityType: EntityType,
keepCount: number = 50
): Promise<number> {
const { data, error } = await supabase.rpc('cleanup_old_versions', {
entity_type: entityType,
keep_versions: keepCount
});
if (error) {
handleNonCriticalError(error, {
action: 'Version cleanup',
metadata: { entityType, keepCount }
});
return 0;
}
return data as number;
}
/**
* Get statistics about versions for a specific entity
*
* @param entityType - The entity type ('park', 'ride', 'company', 'ride_model')
* @param entityId - The UUID of the entity
* @returns Version statistics including total count, date range, and change type distribution
*
* @example
* ```typescript
* const stats = await getVersionStats('park', 'uuid-here');
* console.log(`Total versions: ${stats.totalVersions}`);
* console.log(`Change types:`, stats.changeTypes);
* ```
*/
export async function getVersionStats(
entityType: EntityType,
entityId: string
) {
const entityIdCol = `${entityType}_id`;
// Directly query the version table based on entity type
// Use simpler type inference to avoid TypeScript deep instantiation issues
let result;
if (entityType === 'park') {
result = await supabase
.from('park_versions')
.select('version_number, created_at, change_type', { count: 'exact' })
.eq('park_id', entityId)
.order('version_number', { ascending: true });
} else if (entityType === 'ride') {
result = await supabase
.from('ride_versions')
.select('version_number, created_at, change_type', { count: 'exact' })
.eq('ride_id', entityId)
.order('version_number', { ascending: true });
} else if (entityType === 'company') {
result = await supabase
.from('company_versions')
.select('version_number, created_at, change_type', { count: 'exact' })
.eq('company_id', entityId)
.order('version_number', { ascending: true });
} else {
result = await supabase
.from('ride_model_versions')
.select('version_number, created_at, change_type', { count: 'exact' })
.eq('ride_model_id', entityId)
.order('version_number', { ascending: true });
}
const { data, error } = result;
if (error || !data) {
handleNonCriticalError(error || new Error('No data returned'), {
action: 'Fetch version stats',
metadata: { entityType, entityId }
});
return null;
}
if (data.length === 0) {
return {
totalVersions: 0,
oldestVersion: null,
newestVersion: null,
changeTypes: {}
};
}
// Type-safe access to version data
const versions = data as unknown as Array<{
version_number: number;
created_at: string;
change_type: string;
}>;
return {
totalVersions: versions.length,
oldestVersion: versions[0]?.created_at || null,
newestVersion: versions[versions.length - 1]?.created_at || null,
changeTypes: versions.reduce((acc, v) => {
acc[v.change_type] = (acc[v.change_type] || 0) + 1;
return acc;
}, {} as Record<string, number>)
};
}
/**
* Get total version counts across all entity types
*
* Useful for monitoring storage usage and cleanup effectiveness.
*
* @returns Total version counts for each entity type
*
* @example
* ```typescript
* const counts = await getAllVersionCounts();
* console.log('Park versions:', counts.park);
* console.log('Ride versions:', counts.ride);
* ```
*/
export async function getAllVersionCounts() {
const counts = {
park: 0,
ride: 0,
company: 0,
ride_model: 0,
};
const parkCount = await supabase
.from('park_versions')
.select('*', { count: 'exact', head: true });
counts.park = parkCount.count || 0;
const rideCount = await supabase
.from('ride_versions')
.select('*', { count: 'exact', head: true });
counts.ride = rideCount.count || 0;
const companyCount = await supabase
.from('company_versions')
.select('*', { count: 'exact', head: true });
counts.company = companyCount.count || 0;
const modelCount = await supabase
.from('ride_model_versions')
.select('*', { count: 'exact', head: true });
counts.ride_model = modelCount.count || 0;
return counts;
}
/**
* Check if an entity has any versions
*
* @param entityType - The entity type
* @param entityId - The UUID of the entity
* @returns True if entity has at least one version
*/
export async function hasVersions(
entityType: EntityType,
entityId: string
): Promise<boolean> {
// Directly query the version table based on entity type with explicit column names
let result;
if (entityType === 'park') {
result = await supabase
.from('park_versions')
.select('*', { count: 'exact', head: true })
.eq('park_id', entityId);
} else if (entityType === 'ride') {
result = await supabase
.from('ride_versions')
.select('*', { count: 'exact', head: true })
.eq('ride_id', entityId);
} else if (entityType === 'company') {
result = await supabase
.from('company_versions')
.select('*', { count: 'exact', head: true })
.eq('company_id', entityId);
} else {
result = await supabase
.from('ride_model_versions')
.select('*', { count: 'exact', head: true })
.eq('ride_model_id', entityId);
}
return (result.count || 0) > 0;
}

View File

@@ -0,0 +1,49 @@
import { supabase } from '@/lib/supabaseClient';
import { handleNonCriticalError } from './errorHandler';
// Generate anonymous session hash (no PII)
function getSessionHash(): string {
// Check if we have a session hash in sessionStorage
let sessionHash = sessionStorage.getItem('session_hash');
if (!sessionHash) {
// Create a random hash for this session (no user data)
sessionHash = `session_${Math.random().toString(36).substring(2, 15)}`;
sessionStorage.setItem('session_hash', sessionHash);
}
return sessionHash;
}
// Debounce tracking to avoid rapid-fire views
const trackedViews = new Set<string>();
export async function trackPageView(
entityType: 'park' | 'ride' | 'company',
entityId: string
) {
// Create unique key for this view
const viewKey = `${entityType}:${entityId}`;
// Don't track the same entity twice in the same session
if (trackedViews.has(viewKey)) {
return;
}
trackedViews.add(viewKey);
try {
// Track view asynchronously (fire and forget)
await supabase.from('entity_page_views').insert({
entity_type: entityType,
entity_id: entityId,
session_hash: getSessionHash()
});
} catch (error: unknown) {
// Fail silently - don't break the page if tracking fails
handleNonCriticalError(error, {
action: 'Track page view',
metadata: { entityType, entityId }
});
}
}