Refactor code structure and remove redundant changes

pacnpal
2025-11-09 16:31:34 -05:00
parent 2884bc23ce
commit eb68cf40c6
1080 changed files with 27361 additions and 56687 deletions

View File

@@ -0,0 +1,31 @@
/**
* Moderation Hooks
*
* Centralized exports for all moderation-related hooks.
* These hooks are designed to support the moderation queue system.
*/
export { useEntityCache } from './useEntityCache';
export { useProfileCache } from './useProfileCache';
export type { CachedProfile } from './useProfileCache';
export { useModerationFilters } from './useModerationFilters';
export type { ModerationFilters, ModerationFiltersConfig } from './useModerationFilters';
export { usePagination } from './usePagination';
export type { PaginationState, PaginationConfig } from './usePagination';
export { useRealtimeSubscriptions } from './useRealtimeSubscriptions';
export type {
RealtimeSubscriptionConfig,
UseRealtimeSubscriptionsReturn
} from './useRealtimeSubscriptions';
export { useQueueQuery } from './useQueueQuery';
export type { UseQueueQueryConfig, UseQueueQueryReturn } from './useQueueQuery';
export { useModerationQueueManager } from './useModerationQueueManager';
export type {
ModerationQueueManager,
ModerationQueueManagerConfig
} from './useModerationQueueManager';

View File

@@ -0,0 +1,293 @@
import { useRef, useCallback } from 'react';
import { supabase } from '@/lib/supabaseClient';
import { createTableQuery } from '@/lib/supabaseHelpers';
import { logger } from '@/lib/logger';
import { getErrorMessage } from '@/lib/errorHandler';
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
import type { Database } from '@/integrations/supabase/types';
/**
* Entity types supported by the cache
*/
type EntityType = 'rides' | 'parks' | 'companies';
/**
* Type definitions for cached entities (can be partial)
*/
type Ride = Database['public']['Tables']['rides']['Row'];
type Park = Database['public']['Tables']['parks']['Row'];
type Company = Database['public']['Tables']['companies']['Row'];
/**
* Discriminated union for all cached entity types
*/
type CachedEntity = Ride | Park | Company;
/**
* Map entity type strings to their corresponding types
* Cache stores partial entities with at least id and name
*/
interface EntityTypeMap {
rides: Partial<Ride> & { id: string; name: string };
parks: Partial<Park> & { id: string; name: string };
companies: Partial<Company> & { id: string; name: string };
}
/**
* Cache structure for entities with flexible typing
*/
interface EntityCacheStructure {
rides: Map<string, Partial<Ride> & { id: string; name: string }>;
parks: Map<string, Partial<Park> & { id: string; name: string }>;
companies: Map<string, Partial<Company> & { id: string; name: string }>;
}
/**
* Hook for managing entity name caching (rides, parks, companies)
*
* Uses ref-based storage to avoid triggering re-renders while providing
* efficient caching for entity lookups during moderation.
*
* @example
* ```tsx
* const entityCache = useEntityCache();
*
* // Get cached entity
* const ride = entityCache.getCached('rides', rideId);
*
* // Bulk fetch and cache entities
* await entityCache.bulkFetch('rides', [id1, id2, id3]);
*
* // Clear specific cache
* entityCache.clear('rides');
*
* // Clear all caches
* entityCache.clearAll();
* ```
*/
export function useEntityCache() {
// Use ref to prevent re-renders on cache updates
const cacheRef = useRef<EntityCacheStructure>({
rides: new Map(),
parks: new Map(),
companies: new Map(),
});
/**
* Get a cached entity by ID with type safety
*/
const getCached = useCallback(<T extends EntityType>(
type: T,
id: string
): EntityTypeMap[T] | undefined => {
return cacheRef.current[type].get(id) as EntityTypeMap[T] | undefined;
}, []);
/**
* Check if an entity is cached
*/
const has = useCallback((type: EntityType, id: string): boolean => {
return cacheRef.current[type].has(id);
}, []);
/**
* Set a cached entity with LRU eviction and type safety
*/
const setCached = useCallback(<T extends EntityType>(
type: T,
id: string,
data: EntityTypeMap[T]
): void => {
const cache = cacheRef.current[type];
// LRU eviction: remove oldest entry if cache is full
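// (Map preserves insertion order, so the first key is the oldest inserted entry; reads do not refresh recency)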
if (cache.size >= MODERATION_CONSTANTS.MAX_ENTITY_CACHE_SIZE) {
const firstKey = cache.keys().next().value;
if (firstKey) {
cache.delete(firstKey);
logger.log(`♻️ [EntityCache] Evicted ${type}/${firstKey} (LRU)`);
}
}
cache.set(id, data);
}, []);
/**
* Get uncached IDs from a list
*/
const getUncachedIds = useCallback((type: EntityType, ids: string[]): string[] => {
return ids.filter(id => !cacheRef.current[type].has(id));
}, []);
/**
* Bulk fetch entities from the database and cache them
* Only fetches entities that aren't already cached
*/
const bulkFetch = useCallback(async <T extends EntityType>(
type: T,
ids: string[]
): Promise<EntityTypeMap[T][]> => {
if (ids.length === 0) return [];
// Filter to only uncached IDs
const uncachedIds = getUncachedIds(type, ids);
if (uncachedIds.length === 0) {
// All entities are cached, return them
return ids.map(id => getCached(type, id)).filter((item): item is EntityTypeMap[T] => item !== undefined);
}
try {
let data: unknown[] | null = null;
let error: unknown = null;
// Use type-safe table queries
switch (type) {
case 'rides': {
const ridesResult = await createTableQuery('rides')
.select('id, name, slug, park_id')
.in('id', uncachedIds);
data = ridesResult.data;
error = ridesResult.error;
break;
}
case 'parks': {
const parksResult = await createTableQuery('parks')
.select('id, name, slug')
.in('id', uncachedIds);
data = parksResult.data;
error = parksResult.error;
break;
}
case 'companies': {
const companiesResult = await createTableQuery('companies')
.select('id, name, slug, company_type')
.in('id', uncachedIds);
data = companiesResult.data;
error = companiesResult.error;
break;
}
default:
// Unknown entity type - skip
return [];
}
if (error) {
// Silent - cache miss is acceptable
return [];
}
// Cache the fetched entities
if (data) {
(data as Array<Record<string, unknown>>).forEach((entity) => {
if (entity && typeof entity === 'object' && 'id' in entity && 'name' in entity) {
setCached(type, entity.id as string, entity as EntityTypeMap[T]);
}
});
}
return (data as EntityTypeMap[T][]) || [];
} catch (error: unknown) {
// Silent - cache operations are non-critical
return [];
}
}, [getCached, setCached, getUncachedIds]);
/**
* Fetch and cache related entities based on submission content
* Automatically determines which entities to fetch from submission data
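*
* @example
* ```tsx
* // Illustrative sketch; the IDs are placeholders and the shapes follow the parameter type below.
* await fetchRelatedEntities([
* { content: { ride_id: 'ride-uuid' }, submission_type: 'ride' },
* { content: { entity_id: 'park-uuid' }, submission_type: 'park' },
* ]);
* // Collects the referenced IDs and bulk-fetches rides/parks/companies in parallel.
* ```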
*/
const fetchRelatedEntities = useCallback(async (submissions: Array<{ content?: Record<string, string | number>; submission_type?: string }>): Promise<void> => {
const rideIds = new Set<string>();
const parkIds = new Set<string>();
const companyIds = new Set<string>();
// Collect all entity IDs from submissions
submissions.forEach(submission => {
const content = submission.content;
if (content && typeof content === 'object') {
if (typeof content.ride_id === 'string') rideIds.add(content.ride_id);
if (typeof content.park_id === 'string') parkIds.add(content.park_id);
if (typeof content.company_id === 'string') companyIds.add(content.company_id);
if (typeof content.entity_id === 'string') {
if (submission.submission_type === 'ride') rideIds.add(content.entity_id);
if (submission.submission_type === 'park') parkIds.add(content.entity_id);
if (['manufacturer', 'operator', 'designer', 'property_owner'].includes(submission.submission_type || '')) {
companyIds.add(content.entity_id);
}
}
if (typeof content.manufacturer_id === 'string') companyIds.add(content.manufacturer_id);
if (typeof content.designer_id === 'string') companyIds.add(content.designer_id);
if (typeof content.operator_id === 'string') companyIds.add(content.operator_id);
if (typeof content.property_owner_id === 'string') companyIds.add(content.property_owner_id);
}
});
// Fetch all entities in parallel
const fetchPromises: Promise<any[]>[] = [];
if (rideIds.size > 0) {
fetchPromises.push(bulkFetch('rides', Array.from(rideIds)));
}
if (parkIds.size > 0) {
fetchPromises.push(bulkFetch('parks', Array.from(parkIds)));
}
if (companyIds.size > 0) {
fetchPromises.push(bulkFetch('companies', Array.from(companyIds)));
}
await Promise.all(fetchPromises);
}, [bulkFetch]);
/**
* Clear a specific entity type cache
*/
const clear = useCallback((type: EntityType): void => {
cacheRef.current[type].clear();
}, []);
/**
* Clear all entity caches
*/
const clearAll = useCallback((): void => {
cacheRef.current.rides.clear();
cacheRef.current.parks.clear();
cacheRef.current.companies.clear();
}, []);
/**
* Get cache size for a specific type
*/
const getSize = useCallback((type: EntityType): number => {
return cacheRef.current[type].size;
}, []);
/**
* Get total cache size across all entity types
*/
const getTotalSize = useCallback((): number => {
return cacheRef.current.rides.size +
cacheRef.current.parks.size +
cacheRef.current.companies.size;
}, []);
/**
* Get direct access to cache ref (for advanced use cases)
* Use with caution - prefer using the provided methods
*/
const getCacheRef = useCallback(() => cacheRef.current, []);
// Return without useMemo wrapper (OPTIMIZED)
return {
getCached,
has,
setCached,
getUncachedIds,
bulkFetch,
fetchRelatedEntities,
clear,
clearAll,
getSize,
getTotalSize,
getCacheRef,
};
}

View File

@@ -0,0 +1,978 @@
import { useCallback } from 'react';
import { useMutation, useQueryClient } from '@tanstack/react-query';
import { supabase } from '@/lib/supabaseClient';
import { useToast } from '@/hooks/use-toast';
import { logger } from '@/lib/logger';
import { getErrorMessage, handleError, isSupabaseConnectionError } from '@/lib/errorHandler';
// Validation removed from client - edge function is single source of truth
import { invokeWithTracking } from '@/lib/edgeFunctionTracking';
import {
generateIdempotencyKey,
is409Conflict,
getRetryAfter,
sleep,
generateAndRegisterKey,
validateAndStartProcessing,
markKeyCompleted,
markKeyFailed,
} from '@/lib/idempotencyHelpers';
import {
withTimeout,
isTimeoutError,
getTimeoutErrorMessage,
type TimeoutError,
} from '@/lib/timeoutDetection';
import {
autoReleaseLockOnError,
} from '@/lib/moderation/lockAutoRelease';
import type { User } from '@supabase/supabase-js';
import type { ModerationItem } from '@/types/moderation';
/**
* Configuration for moderation actions
*/
export interface ModerationActionsConfig {
user: User | null;
onActionStart: (itemId: string) => void;
onActionComplete: () => void;
currentLockSubmissionId?: string | null;
}
/**
* Return type for useModerationActions
*/
export interface ModerationActions {
performAction: (item: ModerationItem, action: 'approved' | 'rejected', moderatorNotes?: string) => Promise<void>;
deleteSubmission: (item: ModerationItem) => Promise<void>;
resetToPending: (item: ModerationItem) => Promise<void>;
retryFailedItems: (item: ModerationItem) => Promise<void>;
escalateSubmission: (item: ModerationItem, reason: string) => Promise<void>;
}
/**
* Hook for moderation action handlers
* Extracted from useModerationQueueManager for better separation of concerns
*
* @param config - Configuration object with user, callbacks, and dependencies
* @returns Object with action handler functions
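*
* @example
* ```tsx
* // Illustrative sketch; setActionLoading stands in for the caller's own loading-state setter.
* const actions = useModerationActions({
* user,
* onActionStart: (itemId) => setActionLoading(itemId),
* onActionComplete: () => setActionLoading(null),
* });
* await actions.performAction(item, 'approved', 'Looks good');
* ```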
*/
export function useModerationActions(config: ModerationActionsConfig): ModerationActions {
const { user, onActionStart, onActionComplete } = config;
const { toast } = useToast();
const queryClient = useQueryClient();
/**
* Invoke edge function with full transaction resilience
*
* Provides:
* - Timeout detection with automatic recovery
* - Lock auto-release on error/timeout
* - Idempotency key lifecycle management
* - 409 Conflict handling with automatic retries honoring the server's Retry-After delay
*
* @param functionName - Edge function to invoke
* @param payload - Request payload with submissionId
* @param action - Action type for idempotency key generation
* @param itemIds - Item IDs being processed
* @param userId - User ID for tracking
* @param maxConflictRetries - Max retries for 409 responses (default: 3)
* @param timeoutMs - Timeout in milliseconds (default: 30000)
* @returns Result with data, error, requestId, etc.
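*
* @example
* ```tsx
* // Illustrative sketch mirroring the approval call further down in this hook;
* // itemIds and item.id come from the submission being processed.
* const { data, error, cached, conflictRetries } = await invokeWithResilience(
* 'process-selective-approval',
* { itemIds, submissionId: item.id },
* 'approval',
* itemIds,
* config.user?.id,
* 3, // max 409-conflict retries
* 30000 // 30s timeout
* );
* ```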
*/
async function invokeWithResilience<T = any>(
functionName: string,
payload: any,
action: 'approval' | 'rejection' | 'retry',
itemIds: string[],
userId?: string,
maxConflictRetries: number = 3,
timeoutMs: number = 30000
): Promise<{
data: T | null;
error: any;
requestId: string;
duration: number;
attempts?: number;
cached?: boolean;
conflictRetries?: number;
}> {
if (!userId) {
return {
data: null,
error: { message: 'User not authenticated' },
requestId: 'auth-error',
duration: 0,
};
}
const submissionId = payload.submissionId;
if (!submissionId) {
return {
data: null,
error: { message: 'Missing submissionId in payload' },
requestId: 'validation-error',
duration: 0,
};
}
// Generate and register idempotency key
const { key: idempotencyKey } = await generateAndRegisterKey(
action,
submissionId,
itemIds,
userId
);
logger.info('[ModerationResilience] Starting transaction', {
action,
submissionId,
itemIds,
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
});
let conflictRetries = 0;
let lastError: any = null;
try {
// Validate key and mark as processing
const isValid = await validateAndStartProcessing(idempotencyKey);
if (!isValid) {
const error = new Error('Idempotency key validation failed - possible duplicate request');
await markKeyFailed(idempotencyKey, error.message);
return {
data: null,
error,
requestId: 'idempotency-validation-failed',
duration: 0,
};
}
// Retry loop for 409 conflicts
while (conflictRetries <= maxConflictRetries) {
try {
// Execute with timeout detection
const result = await withTimeout(
async () => {
return await invokeWithTracking<T>(
functionName,
payload,
userId,
undefined,
undefined,
timeoutMs,
{ maxAttempts: 3, baseDelay: 1500 },
{ 'X-Idempotency-Key': idempotencyKey }
);
},
timeoutMs,
'edge-function'
);
// Success or non-409 error
if (!result.error || !is409Conflict(result.error)) {
const isCached = result.data && typeof result.data === 'object' && 'cached' in result.data
? (result.data as any).cached
: false;
// Mark key as completed on success
if (!result.error) {
await markKeyCompleted(idempotencyKey);
} else {
await markKeyFailed(idempotencyKey, getErrorMessage(result.error));
}
logger.info('[ModerationResilience] Transaction completed', {
action,
submissionId,
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
success: !result.error,
cached: isCached,
conflictRetries,
});
return {
...result,
cached: isCached,
conflictRetries,
};
}
// 409 Conflict detected
lastError = result.error;
conflictRetries++;
if (conflictRetries > maxConflictRetries) {
logger.error('Max 409 conflict retries exceeded', {
functionName,
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
conflictRetries,
submissionId,
});
break;
}
// Wait before retry
const retryAfterSeconds = getRetryAfter(result.error);
const retryDelayMs = retryAfterSeconds * 1000;
logger.log(`409 Conflict detected, retrying after ${retryAfterSeconds}s (attempt ${conflictRetries}/${maxConflictRetries})`, {
functionName,
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
retryAfterSeconds,
});
await sleep(retryDelayMs);
} catch (innerError) {
// Handle timeout errors specifically
if (isTimeoutError(innerError)) {
const timeoutError = innerError as TimeoutError;
const message = getTimeoutErrorMessage(timeoutError);
logger.error('[ModerationResilience] Transaction timed out', {
action,
submissionId,
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
duration: timeoutError.duration,
});
// Auto-release lock on timeout
await autoReleaseLockOnError(submissionId, userId, timeoutError);
// Mark key as failed
await markKeyFailed(idempotencyKey, message);
return {
data: null,
error: timeoutError,
requestId: 'timeout-error',
duration: timeoutError.duration || 0,
conflictRetries,
};
}
// Re-throw non-timeout errors to outer catch
throw innerError;
}
}
// All conflict retries exhausted
await markKeyFailed(idempotencyKey, 'Max 409 conflict retries exceeded');
return {
data: null,
error: lastError || { message: 'Unknown conflict retry error' },
requestId: 'conflict-retry-failed',
duration: 0,
attempts: 0,
conflictRetries,
};
} catch (error) {
// Generic error handling
const errorMessage = getErrorMessage(error);
logger.error('[ModerationResilience] Transaction failed', {
action,
submissionId,
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
error: errorMessage,
});
// Auto-release lock on error
await autoReleaseLockOnError(submissionId, userId, error);
// Mark key as failed
await markKeyFailed(idempotencyKey, errorMessage);
return {
data: null,
error,
requestId: 'error',
duration: 0,
conflictRetries,
};
}
}
/**
* Perform moderation action (approve/reject) with optimistic updates
*/
const performActionMutation = useMutation({
mutationFn: async ({
item,
action,
moderatorNotes
}: {
item: ModerationItem;
action: 'approved' | 'rejected';
moderatorNotes?: string;
}) => {
// Handle photo submissions
if (action === 'approved' && item.submission_type === 'photo') {
const { data: photoSubmission, error: fetchError } = await supabase
.from('photo_submissions')
.select(`
*,
items:photo_submission_items(*),
submission:content_submissions!inner(user_id)
`)
.eq('submission_id', item.id)
.single();
// Add explicit error handling
if (fetchError) {
throw new Error(`Failed to fetch photo submission: ${fetchError.message}`);
}
if (!photoSubmission) {
throw new Error('Photo submission not found');
}
// Type assertion with validation
const typedPhotoSubmission = photoSubmission as {
id: string;
entity_id: string;
entity_type: string;
items: Array<{
id: string;
cloudflare_image_id: string;
cloudflare_image_url: string;
caption?: string;
title?: string;
date_taken?: string;
date_taken_precision?: string;
order_index: number;
}>;
submission: { user_id: string };
};
// Validate required fields
if (!typedPhotoSubmission.items || typedPhotoSubmission.items.length === 0) {
throw new Error('No photo items found in submission');
}
const { data: existingPhotos } = await supabase
.from('photos')
.select('id')
.eq('submission_id', item.id);
if (!existingPhotos || existingPhotos.length === 0) {
const photoRecords = typedPhotoSubmission.items.map((photoItem) => ({
entity_id: typedPhotoSubmission.entity_id,
entity_type: typedPhotoSubmission.entity_type,
cloudflare_image_id: photoItem.cloudflare_image_id,
cloudflare_image_url: photoItem.cloudflare_image_url,
title: photoItem.title || null,
caption: photoItem.caption || null,
date_taken: photoItem.date_taken || null,
order_index: photoItem.order_index,
submission_id: item.id,
submitted_by: typedPhotoSubmission.submission?.user_id,
approved_by: user?.id,
approved_at: new Date().toISOString(),
}));
const { error: insertError } = await supabase.from('photos').insert(photoRecords);
if (insertError) {
throw new Error(`Failed to create photo records: ${insertError.message}`);
}
}
}
// Check for submission items
const { data: submissionItems } = await supabase
.from('submission_items')
.select('id, status')
.eq('submission_id', item.id)
.in('status', ['pending', 'rejected']);
if (submissionItems && submissionItems.length > 0) {
if (action === 'approved') {
// ⚠️ VALIDATION CENTRALIZED IN EDGE FUNCTION
// All business logic validation happens in process-selective-approval edge function.
// Client-side only performs basic UX validation (non-empty, format) in forms.
// If server-side validation fails, the edge function returns detailed 400/500 errors.
const {
data,
error,
requestId,
attempts,
cached,
conflictRetries
} = await invokeWithResilience(
'process-selective-approval',
{
itemIds: submissionItems.map((i) => i.id),
submissionId: item.id,
},
'approval',
submissionItems.map((i) => i.id),
config.user?.id,
3, // Max 3 conflict retries
30000 // 30s timeout
);
// Log retry attempts
if (attempts && attempts > 1) {
logger.log(`Approval succeeded after ${attempts} network retries`, {
submissionId: item.id,
requestId,
});
}
if (conflictRetries && conflictRetries > 0) {
logger.log(`Resolved 409 conflict after ${conflictRetries} retries`, {
submissionId: item.id,
requestId,
cached: !!cached,
});
}
if (error) {
// Enhance error with context for better UI feedback
if (is409Conflict(error)) {
throw new Error(
'This approval is being processed by another request. Please wait and try again if it does not complete.'
);
}
throw error;
}
toast({
title: cached ? 'Cached Result' : 'Submission Approved',
description: cached
? `Returned cached result for ${submissionItems.length} item(s)`
: `Successfully processed ${submissionItems.length} item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
});
return;
} else if (action === 'rejected') {
await supabase
.from('submission_items')
.update({
status: 'rejected',
rejection_reason: moderatorNotes || 'Parent submission rejected',
updated_at: new Date().toISOString(),
})
.eq('submission_id', item.id)
.eq('status', 'pending');
}
}
// Standard update
const table = item.type === 'review' ? 'reviews' : 'content_submissions';
const statusField = item.type === 'review' ? 'moderation_status' : 'status';
const timestampField = item.type === 'review' ? 'moderated_at' : 'reviewed_at';
const reviewerField = item.type === 'review' ? 'moderated_by' : 'reviewer_id';
const updateData: any = {
[statusField]: action,
[timestampField]: new Date().toISOString(),
};
if (user) {
updateData[reviewerField] = user.id;
}
if (moderatorNotes) {
updateData.reviewer_notes = moderatorNotes;
}
const { error } = await supabase.from(table).update(updateData).eq('id', item.id);
if (error) throw error;
// Log audit trail for review moderation
if (table === 'reviews' && user) {
try {
// Extract entity information from item content
const entityType = item.content?.ride_id ? 'ride' : item.content?.park_id ? 'park' : 'unknown';
const entityId = item.content?.ride_id || item.content?.park_id || null;
await supabase.rpc('log_admin_action', {
_admin_user_id: user.id,
_target_user_id: item.user_id,
_action: `review_${action}`,
_details: {
review_id: item.id,
entity_type: entityType,
entity_id: entityId,
moderator_notes: moderatorNotes
}
});
} catch (auditError) {
// Silent - audit logging is non-critical
}
}
toast({
title: `Content ${action}`,
description: `The ${item.type} has been ${action}`,
});
logger.log(`✅ Action ${action} completed for ${item.id}`);
return { item, action };
},
onMutate: async ({ item, action }) => {
// Cancel outgoing refetches
await queryClient.cancelQueries({ queryKey: ['moderation-queue'] });
// Snapshot previous value
const previousData = queryClient.getQueryData(['moderation-queue']);
// Optimistically update cache
queryClient.setQueriesData({ queryKey: ['moderation-queue'] }, (old: any) => {
if (!old?.submissions) return old;
return {
...old,
submissions: old.submissions.map((i: ModerationItem) =>
i.id === item.id
? {
...i,
status: action,
_optimistic: true,
reviewed_at: new Date().toISOString(),
reviewer_id: user?.id,
}
: i
),
};
});
return { previousData };
},
onError: (error: any, variables, context) => {
// Rollback optimistic update
if (context?.previousData) {
queryClient.setQueryData(['moderation-queue'], context.previousData);
}
// Enhanced error handling with timeout, conflict, and network detection
const isNetworkError = isSupabaseConnectionError(error);
const isConflict = is409Conflict(error);
const isTimeout = isTimeoutError(error);
const errorMessage = getErrorMessage(error) || `Failed to ${variables.action} content`;
// Check if this is a validation error from edge function
const isValidationError = errorMessage.includes('Validation failed') ||
errorMessage.includes('blocking errors') ||
errorMessage.includes('blockingErrors');
toast({
title: isNetworkError ? 'Connection Error' :
isValidationError ? 'Validation Failed' :
isConflict ? 'Duplicate Request' :
isTimeout ? 'Transaction Timeout' :
'Action Failed',
description: isTimeout
? getTimeoutErrorMessage(error as TimeoutError)
: isConflict
? 'This action is already being processed. Please wait for it to complete.'
: errorMessage,
variant: 'destructive',
});
logger.error('Moderation action failed', {
itemId: variables.item.id,
action: variables.action,
error: errorMessage,
errorId: error.errorId,
isNetworkError,
isValidationError,
isConflict,
isTimeout,
});
},
// Success toasts are shown inside mutationFn (both the selective-approval and standard paths),
// so no onSuccess handler is needed - it would duplicate the toast.
onSettled: () => {
// Always refetch to ensure consistency
queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
onActionComplete();
},
});
/**
* Wrapper function that handles loading states and error tracking
*/
const performAction = useCallback(
async (item: ModerationItem, action: 'approved' | 'rejected', moderatorNotes?: string) => {
onActionStart(item.id);
try {
await performActionMutation.mutateAsync({ item, action, moderatorNotes });
} catch (error) {
const errorId = handleError(error, {
action: `Moderation ${action}`,
userId: user?.id,
metadata: {
submissionId: item.id,
submissionType: item.submission_type,
itemType: item.type,
hasSubmissionItems: item.submission_items?.length ?? 0,
moderatorNotes: moderatorNotes?.substring(0, 100),
},
});
// Attach error ID for UI display
const enhancedError = error instanceof Error
? Object.assign(error, { errorId })
: { message: getErrorMessage(error), errorId };
throw enhancedError;
}
},
[onActionStart, performActionMutation, user]
);
/**
* Delete a submission permanently
*/
const deleteSubmission = useCallback(
async (item: ModerationItem) => {
if (item.type !== 'content_submission') return;
onActionStart(item.id);
try {
// Fetch submission details for audit log
const { data: submission } = await supabase
.from('content_submissions')
.select('user_id, submission_type, status')
.eq('id', item.id)
.single();
const { error } = await supabase.from('content_submissions').delete().eq('id', item.id);
if (error) throw error;
// Log audit trail for deletion
if (user && submission) {
try {
await supabase.rpc('log_admin_action', {
_admin_user_id: user.id,
_target_user_id: submission.user_id,
_action: 'submission_deleted',
_details: {
submission_id: item.id,
submission_type: submission.submission_type,
status_when_deleted: submission.status
}
});
} catch (auditError) {
// Silent - audit logging is non-critical
}
}
toast({
title: 'Submission deleted',
description: 'The submission has been permanently deleted',
});
logger.log(`✅ Submission ${item.id} deleted`);
} catch (error: unknown) {
const errorId = handleError(error, {
action: 'Delete Submission',
userId: user?.id,
metadata: {
submissionId: item.id,
submissionType: item.submission_type,
},
});
logger.error('Failed to delete submission', {
submissionId: item.id,
errorId,
});
const enhancedError = error instanceof Error
? Object.assign(error, { errorId })
: { message: getErrorMessage(error), errorId };
throw enhancedError;
} finally {
onActionComplete();
}
},
[toast, onActionStart, onActionComplete, user]
);
/**
* Reset submission to pending status
*/
const resetToPending = useCallback(
async (item: ModerationItem) => {
onActionStart(item.id);
try {
const { resetRejectedItemsToPending } = await import('@/lib/submissionItemsService');
await resetRejectedItemsToPending(item.id);
// Log audit trail for reset
if (user) {
try {
await supabase.rpc('log_admin_action', {
_admin_user_id: user.id,
_target_user_id: item.user_id,
_action: 'submission_reset',
_details: {
submission_id: item.id,
submission_type: item.submission_type
}
});
} catch (auditError) {
// Silent - audit logging is non-critical
}
}
toast({
title: 'Reset Complete',
description: 'Submission and all items have been reset to pending status',
});
logger.log(`✅ Submission ${item.id} reset to pending`);
} catch (error: unknown) {
const errorId = handleError(error, {
action: 'Reset to Pending',
userId: user?.id,
metadata: {
submissionId: item.id,
submissionType: item.submission_type,
},
});
logger.error('Failed to reset status', {
submissionId: item.id,
errorId,
});
const enhancedError = error instanceof Error
? Object.assign(error, { errorId })
: { message: getErrorMessage(error), errorId };
throw enhancedError;
} finally {
onActionComplete();
}
},
[toast, onActionStart, onActionComplete, user]
);
/**
* Retry failed items in a submission
*/
const retryFailedItems = useCallback(
async (item: ModerationItem) => {
onActionStart(item.id);
let failedItemsCount = 0;
try {
const { data: failedItems } = await supabase
.from('submission_items')
.select('id')
.eq('submission_id', item.id)
.eq('status', 'rejected');
if (!failedItems || failedItems.length === 0) {
toast({
title: 'No Failed Items',
description: 'All items have been processed successfully',
});
return;
}
failedItemsCount = failedItems.length;
const {
data,
error,
requestId,
attempts,
cached,
conflictRetries
} = await invokeWithResilience(
'process-selective-approval',
{
itemIds: failedItems.map((i) => i.id),
submissionId: item.id,
},
'retry',
failedItems.map((i) => i.id),
config.user?.id,
3, // Max 3 conflict retries
30000 // 30s timeout
);
if (attempts && attempts > 1) {
logger.log(`Retry succeeded after ${attempts} network retries`, {
submissionId: item.id,
requestId,
});
}
if (conflictRetries && conflictRetries > 0) {
logger.log(`Retry resolved 409 conflict after ${conflictRetries} retries`, {
submissionId: item.id,
requestId,
cached: !!cached,
});
}
if (error) {
if (is409Conflict(error)) {
throw new Error(
'This retry is being processed by another request. Please wait and try again if it does not complete.'
);
}
throw error;
}
// Log audit trail for retry
if (user) {
try {
await supabase.rpc('log_admin_action', {
_admin_user_id: user.id,
_target_user_id: item.user_id,
_action: 'submission_retried',
_details: {
submission_id: item.id,
submission_type: item.submission_type,
items_retried: failedItems.length,
request_id: requestId
}
});
} catch (auditError) {
// Silent - audit logging is non-critical
}
}
toast({
title: cached ? 'Cached Retry Result' : 'Items Retried',
description: cached
? `Returned cached result for ${failedItems.length} item(s)`
: `Successfully retried ${failedItems.length} failed item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
});
logger.log(`✅ Retried ${failedItems.length} failed items for ${item.id}`);
} catch (error: unknown) {
const errorId = handleError(error, {
action: 'Retry Failed Items',
userId: user?.id,
metadata: {
submissionId: item.id,
failedItemsCount,
},
});
logger.error('Failed to retry items', {
submissionId: item.id,
errorId,
});
const enhancedError = error instanceof Error
? Object.assign(error, { errorId })
: { message: getErrorMessage(error), errorId };
throw enhancedError;
} finally {
onActionComplete();
}
},
[toast, onActionStart, onActionComplete, user]
);
/**
* Escalate submission for admin review
* Consolidates escalation logic with comprehensive error handling
*/
const escalateSubmission = useCallback(
async (item: ModerationItem, reason: string) => {
if (!user?.id) {
toast({
title: 'Authentication Required',
description: 'You must be logged in to escalate submissions',
variant: 'destructive',
});
return;
}
onActionStart(item.id);
try {
// Call edge function for email notification with retry
const { error: edgeFunctionError, requestId, attempts } = await invokeWithTracking(
'send-escalation-notification',
{
submissionId: item.id,
escalationReason: reason,
escalatedBy: user.id,
},
user.id,
undefined,
undefined,
45000, // Longer timeout for email sending
{ maxAttempts: 3, baseDelay: 2000 } // Retry for email delivery
);
if (attempts && attempts > 1) {
logger.log(`Escalation email sent after ${attempts} attempts`);
}
if (edgeFunctionError) {
// Edge function failed - log and show fallback toast
handleError(edgeFunctionError, {
action: 'Send escalation notification',
userId: user.id,
metadata: {
submissionId: item.id,
reason: reason.substring(0, 100),
fallbackUsed: true,
},
});
toast({
title: 'Escalated (Email Failed)',
description: 'Submission escalated but notification email could not be sent',
});
} else {
toast({
title: 'Escalated Successfully',
description: `Submission escalated and admin notified${requestId ? ` (${requestId.substring(0, 8)})` : ''}`,
});
}
// Invalidate cache
queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
logger.log(`✅ Submission ${item.id} escalated`);
} catch (error: unknown) {
const errorId = handleError(error, {
action: 'Escalate Submission',
userId: user.id,
metadata: {
submissionId: item.id,
submissionType: item.submission_type,
reason: reason.substring(0, 100),
},
});
logger.error('Escalation failed', {
submissionId: item.id,
errorId,
});
// Re-throw to allow UI to show retry option
const enhancedError = error instanceof Error
? Object.assign(error, { errorId })
: { message: getErrorMessage(error), errorId };
throw enhancedError;
} finally {
onActionComplete();
}
},
[user, toast, onActionStart, onActionComplete, queryClient]
);
return {
performAction,
deleteSubmission,
resetToPending,
retryFailedItems,
escalateSubmission,
};
}

View File

@@ -0,0 +1,287 @@
/**
* Moderation Queue Filters Hook
*
* Manages filter state for the moderation queue, including:
* - Entity type filtering (all, reviews, submissions, photos)
* - Status filtering (pending, approved, rejected, etc.)
* - Tab management (main queue vs archive)
* - Filter persistence and clearing
*/
import { useState, useCallback, useEffect } from 'react';
import { useDebounce } from '@/hooks/useDebounce';
import { logger } from '@/lib/logger';
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
import type { EntityFilter, StatusFilter, QueueTab, SortConfig, SortField } from '@/types/moderation';
import * as storage from '@/lib/localStorage';
export interface ModerationFiltersConfig {
/** Initial entity filter */
initialEntityFilter?: EntityFilter;
/** Initial status filter */
initialStatusFilter?: StatusFilter;
/** Initial active tab */
initialTab?: QueueTab;
/** Debounce delay for filter changes (ms) */
debounceDelay?: number;
/** Whether to persist filters to localStorage */
persist?: boolean;
/** localStorage key prefix for persistence */
storageKey?: string;
/** Initial sort configuration */
initialSortConfig?: SortConfig;
}
export interface ModerationFilters {
/** Current entity type filter */
entityFilter: EntityFilter;
/** Current status filter */
statusFilter: StatusFilter;
/** Current active tab */
activeTab: QueueTab;
/** Debounced entity filter (for API calls) */
debouncedEntityFilter: EntityFilter;
/** Debounced status filter (for API calls) */
debouncedStatusFilter: StatusFilter;
/** Set entity filter */
setEntityFilter: (filter: EntityFilter) => void;
/** Set status filter */
setStatusFilter: (filter: StatusFilter) => void;
/** Set active tab */
setActiveTab: (tab: QueueTab) => void;
/** Reset all filters to defaults */
clearFilters: () => void;
/** Check if any non-default filters are active */
hasActiveFilters: boolean;
/** Current sort configuration (immediate) */
sortConfig: SortConfig;
/** Debounced sort configuration (use this for queries) */
debouncedSortConfig: SortConfig;
/** Update the sort configuration */
setSortConfig: (config: SortConfig) => void;
/** Sort by a specific field, toggling direction if already sorting by that field */
sortBy: (field: SortField) => void;
/** Toggle the sort direction */
toggleSortDirection: () => void;
/** Reset sort to default */
resetSort: () => void;
/** Reset pagination to page 1 (callback) */
onFilterChange?: () => void;
}
/**
* Hook for managing moderation queue filters
*
* @param config - Configuration options
* @returns Filter state and actions
*
* @example
* ```tsx
* const filters = useModerationFilters({
* persist: true,
* debounceDelay: 300
* });
*
* // Use in component
* <Select value={filters.entityFilter} onValueChange={filters.setEntityFilter}>
* ...
* </Select>
* ```
*/
export function useModerationFilters(
config: ModerationFiltersConfig & { onFilterChange?: () => void } = {}
): ModerationFilters {
const {
initialEntityFilter = 'all',
initialStatusFilter = 'pending',
initialTab = 'mainQueue',
debounceDelay = MODERATION_CONSTANTS.FILTER_DEBOUNCE_MS,
persist = true,
storageKey = 'moderationQueue_filters',
initialSortConfig = { field: 'created_at', direction: 'asc' },
onFilterChange,
} = config;
// Load persisted filters on mount
const loadPersistedFilters = useCallback(() => {
if (!persist) return null;
try {
const saved = localStorage.getItem(storageKey);
if (saved) {
return JSON.parse(saved);
}
} catch (error: unknown) {
// Silent - localStorage failures are non-critical
}
return null;
}, [persist, storageKey]);
// Load persisted sort
const loadPersistedSort = useCallback((): SortConfig => {
if (!persist) return initialSortConfig;
try {
const saved = localStorage.getItem(`${storageKey}_sort`);
if (saved) {
const parsed = JSON.parse(saved);
if (parsed.field && parsed.direction) {
return parsed;
}
}
} catch (error: unknown) {
// Silent - localStorage failures are non-critical
}
return initialSortConfig;
}, [persist, storageKey, initialSortConfig]);
const persisted = loadPersistedFilters();
// Filter state
const [entityFilter, setEntityFilterState] = useState<EntityFilter>(
persisted?.entityFilter || initialEntityFilter
);
const [statusFilter, setStatusFilterState] = useState<StatusFilter>(
persisted?.statusFilter || initialStatusFilter
);
const [activeTab, setActiveTabState] = useState<QueueTab>(
persisted?.activeTab || initialTab
);
// Sort state
const [sortConfig, setSortConfigState] = useState<SortConfig>(loadPersistedSort);
// Debounced filters for API calls
const debouncedEntityFilter = useDebounce(entityFilter, debounceDelay);
const debouncedStatusFilter = useDebounce(statusFilter, debounceDelay);
// Debounced sort (0ms for immediate feedback)
const debouncedSortConfig = useDebounce(sortConfig, 0);
// Persist filters to localStorage
useEffect(() => {
if (persist) {
storage.setJSON(storageKey, {
entityFilter,
statusFilter,
activeTab,
});
}
}, [entityFilter, statusFilter, activeTab, persist, storageKey]);
// Persist sort to localStorage
useEffect(() => {
if (persist) {
storage.setJSON(`${storageKey}_sort`, sortConfig);
}
}, [sortConfig, persist, storageKey]);
// Set entity filter with logging and pagination reset
const setEntityFilter = useCallback((filter: EntityFilter) => {
logger.log('🔍 Entity filter changed:', filter);
setEntityFilterState(filter);
onFilterChange?.();
}, [onFilterChange]);
// Set status filter with logging and pagination reset
const setStatusFilter = useCallback((filter: StatusFilter) => {
logger.log('🔍 Status filter changed:', filter);
setStatusFilterState(filter);
onFilterChange?.();
}, [onFilterChange]);
// Set active tab with logging and pagination reset
const setActiveTab = useCallback((tab: QueueTab) => {
logger.log('🔍 Tab changed:', tab);
setActiveTabState(tab);
onFilterChange?.();
}, [onFilterChange]);
// Sort callbacks
const setSortConfig = useCallback((config: SortConfig) => {
logger.log('📝 [SORT] Sort config changed:', config);
setSortConfigState(config);
}, []);
const sortBy = useCallback((field: SortField) => {
setSortConfigState(prev => ({
field,
direction: prev.field === field
? (prev.direction === 'asc' ? 'desc' : 'asc')
: 'asc'
}));
}, []);
const toggleSortDirection = useCallback(() => {
setSortConfigState(prev => ({
...prev,
direction: prev.direction === 'asc' ? 'desc' : 'asc'
}));
}, []);
const resetSort = useCallback(() => {
setSortConfigState(initialSortConfig);
}, [initialSortConfig]);
// Clear all filters
const clearFilters = useCallback(() => {
logger.log('🔍 Filters cleared');
setEntityFilterState(initialEntityFilter);
setStatusFilterState(initialStatusFilter);
setActiveTabState(initialTab);
setSortConfigState(initialSortConfig);
}, [initialEntityFilter, initialStatusFilter, initialTab, initialSortConfig]);
// Check if non-default filters are active
const hasActiveFilters =
entityFilter !== initialEntityFilter ||
statusFilter !== initialStatusFilter ||
activeTab !== initialTab ||
sortConfig.field !== initialSortConfig.field ||
sortConfig.direction !== initialSortConfig.direction;
// Return without useMemo wrapper (OPTIMIZED)
return {
entityFilter,
statusFilter,
activeTab,
debouncedEntityFilter,
debouncedStatusFilter,
setEntityFilter,
setStatusFilter,
setActiveTab,
clearFilters,
hasActiveFilters,
sortConfig,
debouncedSortConfig,
setSortConfig,
sortBy,
toggleSortDirection,
resetSort,
onFilterChange,
};
}

View File

@@ -0,0 +1,563 @@
import { useState, useCallback, useRef, useEffect, useMemo } from "react";
import { supabase } from "@/lib/supabaseClient";
import { useToast } from "@/hooks/use-toast";
import { useAuth } from "@/hooks/useAuth";
import { logger } from "@/lib/logger";
import { getErrorMessage } from "@/lib/errorHandler";
import { invokeWithTracking } from "@/lib/edgeFunctionTracking";
import { MODERATION_CONSTANTS } from "@/lib/moderation/constants";
import { useQueryClient } from '@tanstack/react-query';
import type { User } from "@supabase/supabase-js";
import {
useEntityCache,
useProfileCache,
useModerationFilters,
usePagination,
useRealtimeSubscriptions,
useQueueQuery,
} from "./index";
import { useModerationQueue } from "@/hooks/useModerationQueue";
import { useModerationActions } from "./useModerationActions";
import type { ModerationItem, EntityFilter, StatusFilter, LoadingState } from "@/types/moderation";
interface ModerationStats {
pendingSubmissions: number;
openReports: number;
flaggedContent: number;
}
/**
* Configuration for useModerationQueueManager
*/
export interface ModerationQueueManagerConfig {
user: User | null;
isAdmin: boolean;
isSuperuser: boolean;
toast: ReturnType<typeof useToast>["toast"];
optimisticallyUpdateStats?: (delta: Partial<ModerationStats>) => void;
settings: {
refreshMode: "auto" | "manual";
pollInterval: number;
refreshStrategy: "notify" | "merge" | "replace";
preserveInteraction: boolean;
useRealtimeQueue: boolean;
};
}
/**
* Return type for useModerationQueueManager
*/
export interface ModerationQueueManager {
// State
items: ModerationItem[];
loadingState: LoadingState;
actionLoading: string | null;
// Sub-hooks (exposed for granular control)
filters: ReturnType<typeof useModerationFilters>;
pagination: ReturnType<typeof usePagination>;
queue: ReturnType<typeof useModerationQueue>;
// Realtime
newItemsCount: number;
pendingNewItems: ModerationItem[];
showNewItems: () => void;
// Interaction tracking
interactingWith: Set<string>;
markInteracting: (id: string, interacting: boolean) => void;
// Actions
refresh: () => void;
performAction: (item: ModerationItem, action: "approved" | "rejected", moderatorNotes?: string) => Promise<void>;
deleteSubmission: (item: ModerationItem) => Promise<void>;
resetToPending: (item: ModerationItem) => Promise<void>;
retryFailedItems: (item: ModerationItem) => Promise<void>;
// Caches (for QueueItem enrichment)
entityCache: ReturnType<typeof useEntityCache>;
profileCache: ReturnType<typeof useProfileCache>;
}
/**
* Orchestrator hook for moderation queue management
* Consolidates all queue-related logic into a single hook
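*
* @example
* ```tsx
* // Illustrative sketch; the settings values are placeholders, not recommended defaults.
* const manager = useModerationQueueManager({
* user,
* isAdmin,
* isSuperuser,
* toast,
* settings: {
* refreshMode: 'auto',
* pollInterval: 30000,
* refreshStrategy: 'merge',
* preserveInteraction: true,
* useRealtimeQueue: true,
* },
* });
* ```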
*/
export function useModerationQueueManager(config: ModerationQueueManagerConfig): ModerationQueueManager {
logger.log('🚀 [QUEUE MANAGER] Hook mounting/rendering', {
hasUser: !!config.user,
isAdmin: config.isAdmin,
timestamp: new Date().toISOString()
});
const { user, isAdmin, isSuperuser, toast, optimisticallyUpdateStats, settings } = config;
const queryClient = useQueryClient();
const { aal } = useAuth();
// Debug AAL status
useEffect(() => {
logger.log('🔐 [QUEUE MANAGER] AAL Status:', {
aal,
isNull: aal === null,
isAal1: aal === 'aal1',
isAal2: aal === 'aal2',
timestamp: new Date().toISOString()
});
}, [aal]);
// Initialize sub-hooks
const filters = useModerationFilters({
initialEntityFilter: "all",
initialStatusFilter: "pending",
initialTab: "mainQueue",
debounceDelay: 300,
persist: true,
storageKey: "moderationQueue_filters",
});
// Memoize filters object for realtime subscriptions to prevent reconnections
const realtimeFilters = useMemo(() => ({
entityFilter: filters.debouncedEntityFilter,
statusFilter: filters.debouncedStatusFilter,
}), [filters.debouncedEntityFilter, filters.debouncedStatusFilter]);
const pagination = usePagination({
initialPage: 1,
initialPageSize: 25,
persist: false,
onPageChange: (page) => {
if (page > 1) {
setLoadingState("loading");
}
},
onPageSizeChange: () => {
setLoadingState("loading");
},
});
// Use a stable callback via ref to prevent excessive re-renders
const lockStateChangeHandlerRef = useRef<() => void>();
const queue = useModerationQueue({
onLockStateChange: useCallback(() => {
lockStateChangeHandlerRef.current?.();
}, [])
});
const entityCache = useEntityCache();
const profileCache = useProfileCache();
// Core state
const [items, setItems] = useState<ModerationItem[]>([]);
const [loadingState, setLoadingState] = useState<LoadingState>("initial");
const [actionLoading, setActionLoading] = useState<string | null>(null);
const [interactingWith, setInteractingWith] = useState<Set<string>>(new Set());
const [pendingNewItems, setPendingNewItems] = useState<ModerationItem[]>([]);
const [newItemsCount, setNewItemsCount] = useState(0);
// Refs for tracking
const recentlyRemovedRef = useRef<Set<string>>(new Set());
const initialFetchCompleteRef = useRef(false);
const isMountingRef = useRef(true);
/**
* Replace manual fetching with TanStack Query
* Use direct state values for stable query keys
*/
const queueQuery = useQueueQuery({
userId: user?.id,
isAdmin,
isSuperuser,
entityFilter: filters.debouncedEntityFilter,
statusFilter: filters.debouncedStatusFilter,
tab: filters.activeTab,
currentPage: pagination.currentPage,
pageSize: pagination.pageSize,
sortConfig: filters.debouncedSortConfig,
enabled: !!user,
});
// Update the lock state change handler ref whenever queueQuery changes
lockStateChangeHandlerRef.current = () => {
logger.log('🔄 Lock state changed, invalidating queue cache');
queueQuery.invalidate();
setLoadingState(prev => prev === "loading" ? "ready" : prev);
};
// Update items when query data changes
useEffect(() => {
if (queueQuery.items) {
setItems(queueQuery.items);
logger.log('✅ Queue items updated from TanStack Query:', queueQuery.items.length);
}
}, [queueQuery.items]);
// Update loading state based on query status
useEffect(() => {
if (queueQuery.isLoading) {
setLoadingState('loading');
} else if (queueQuery.isRefreshing) {
setLoadingState('refreshing');
} else {
setLoadingState('ready');
}
}, [queueQuery.isLoading, queueQuery.isRefreshing]);
// Show error toast when query fails
useEffect(() => {
if (queueQuery.error) {
// Error already captured by TanStack Query
toast({
variant: 'destructive',
title: 'Failed to Load Queue',
description: queueQuery.error.message || 'An error occurred while fetching the moderation queue.',
});
}
}, [queueQuery.error, toast]);
// Extract stable callback to prevent infinite loop
const { setTotalCount } = pagination;
// Update total count for pagination
useEffect(() => {
setTotalCount(queueQuery.totalCount);
}, [queueQuery.totalCount, setTotalCount]);
/**
* Manual refresh function
*/
const refresh = useCallback(async () => {
logger.log('🔄 Manual refresh triggered');
await queueQuery.refetch();
}, [queueQuery]);
/**
* Show pending new items by invalidating query
*/
const showNewItems = useCallback(async () => {
logger.log('✅ Showing new items via query invalidation');
await queueQuery.invalidate();
setPendingNewItems([]);
setNewItemsCount(0);
}, [queueQuery]);
/**
* Mark an item as being interacted with (prevents realtime updates)
*/
const markInteracting = useCallback((id: string, interacting: boolean) => {
setInteractingWith((prev) => {
const next = new Set(prev);
if (interacting) {
next.add(id);
} else {
next.delete(id);
}
return next;
});
}, []);
/**
* Use validated action handler from useModerationActions
*/
const moderationActions = useModerationActions({
user,
onActionStart: setActionLoading,
onActionComplete: () => {
setActionLoading(null);
refresh();
queue.refreshStats();
},
currentLockSubmissionId: queue.currentLock?.submissionId,
});
/**
* Perform moderation action (approve/reject) - delegates to validated handler
*/
const performAction = useCallback(
async (item: ModerationItem, action: "approved" | "rejected", moderatorNotes?: string) => {
// Release lock if held
if (queue.currentLock?.submissionId === item.id) {
await queue.releaseLock(item.id, true);
}
// Use validated action handler
await moderationActions.performAction(item, action, moderatorNotes);
},
[moderationActions, queue]
);
/**
* Delete a submission permanently
*/
const deleteSubmission = useCallback(
async (item: ModerationItem) => {
if (item.type !== "content_submission") return;
if (actionLoading === item.id) return;
setActionLoading(item.id);
setItems((prev) => prev.filter((i) => i.id !== item.id));
try {
const { error } = await supabase.from("content_submissions").delete().eq("id", item.id);
if (error) throw error;
toast({
title: "Submission deleted",
description: "The submission has been permanently deleted",
});
// Refresh stats to update counts
queue.refreshStats();
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
// Silent - operation handled optimistically
setItems((prev) => {
if (prev.some((i) => i.id === item.id)) return prev;
return [...prev, item];
});
toast({
title: "Error",
description: "Failed to delete submission",
variant: "destructive",
});
} finally {
setActionLoading(null);
}
},
[actionLoading, toast, queue],
);
/**
* Reset submission to pending status
*/
const resetToPending = useCallback(
async (item: ModerationItem) => {
setActionLoading(item.id);
try {
const { resetRejectedItemsToPending } = await import("@/lib/submissionItemsService");
await resetRejectedItemsToPending(item.id);
toast({
title: "Reset Complete",
description: "Submission and all items have been reset to pending status",
});
// Refresh stats to update counts
queue.refreshStats();
setItems((prev) => prev.filter((i) => i.id !== item.id));
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
// Silent - operation handled optimistically
toast({
title: "Reset Failed",
description: errorMsg,
variant: "destructive",
});
} finally {
setActionLoading(null);
}
},
[toast, queue],
);
/**
* Retry failed items in a submission
*/
const retryFailedItems = useCallback(
async (item: ModerationItem) => {
setActionLoading(item.id);
const shouldRemove =
filters.statusFilter === "pending" ||
filters.statusFilter === "flagged" ||
filters.statusFilter === "partially_approved";
if (shouldRemove) {
requestAnimationFrame(() => {
setItems((prev) => prev.filter((i) => i.id !== item.id));
recentlyRemovedRef.current.add(item.id);
setTimeout(() => recentlyRemovedRef.current.delete(item.id), 10000);
});
}
try {
const { data: failedItems } = await supabase
.from("submission_items")
.select("id")
.eq("submission_id", item.id)
.eq("status", "rejected");
if (!failedItems || failedItems.length === 0) {
toast({
title: "No Failed Items",
description: "All items have been processed successfully",
});
return;
}
const { data, error, requestId } = await invokeWithTracking(
"process-selective-approval",
{
itemIds: failedItems.map((i) => i.id),
submissionId: item.id,
},
user?.id
);
if (error) throw error;
toast({
title: "Retry Complete",
description: `Processed ${failedItems.length} failed item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ""}`,
});
// Refresh stats to update counts
queue.refreshStats();
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
// Silent - operation handled optimistically
toast({
title: "Retry Failed",
description: errorMsg,
variant: "destructive",
});
} finally {
setActionLoading(null);
}
},
[filters.statusFilter, toast, user, queue],
);
// Extract stable callbacks to prevent infinite loop in effects
const { invalidate: invalidateQuery } = queueQuery;
const { reset: resetPagination } = pagination;
// Mark initial fetch as complete when query loads
useEffect(() => {
if (!queueQuery.isLoading && !initialFetchCompleteRef.current) {
initialFetchCompleteRef.current = true;
isMountingRef.current = false;
logger.log('✅ Initial queue fetch complete');
}
}, [queueQuery.isLoading]);
// Invalidate query when filters or sort changes (OPTIMIZED)
useEffect(() => {
if (
!user ||
!initialFetchCompleteRef.current ||
isMountingRef.current
) return;
logger.log('🔄 Filters/sort changed, invalidating query');
resetPagination();
invalidateQuery();
}, [
filters.debouncedEntityFilter,
filters.debouncedStatusFilter,
filters.debouncedSortConfig.field,
filters.debouncedSortConfig.direction,
user,
invalidateQuery,
resetPagination
]);
// Polling effect (when realtime disabled) - MUTUALLY EXCLUSIVE
useEffect(() => {
const shouldPoll = settings.refreshMode === 'auto'
&& !settings.useRealtimeQueue
&& loadingState !== 'initial'
&& !!user;
if (!shouldPoll) {
return;
}
logger.log("⚠️ Polling ENABLED - interval:", settings.pollInterval);
const interval = setInterval(() => {
logger.log("🔄 Polling refresh triggered");
queueQuery.refetch();
}, settings.pollInterval);
return () => {
clearInterval(interval);
logger.log("🛑 Polling stopped");
};
}, [user, settings.refreshMode, settings.pollInterval, loadingState, settings.useRealtimeQueue, queueQuery.refetch]);
// Initialize realtime subscriptions
useRealtimeSubscriptions({
enabled: settings.useRealtimeQueue && !!user,
filters: realtimeFilters,
onNewItem: (item: ModerationItem) => {
if (recentlyRemovedRef.current.has(item.id)) return;
setPendingNewItems((prev) => {
if (prev.some((p) => p.id === item.id)) return prev;
return [...prev, item];
});
setNewItemsCount((prev) => prev + 1);
toast({
title: "🆕 New Submission",
description: `${item.submission_type} - ${item.entity_name}`,
});
},
onUpdateItem: (item: ModerationItem, shouldRemove: boolean) => {
if (recentlyRemovedRef.current.has(item.id)) return;
if (interactingWith.has(item.id)) return;
// Only track removals for optimistic update protection
if (shouldRemove && !recentlyRemovedRef.current.has(item.id)) {
recentlyRemovedRef.current.add(item.id);
setTimeout(() => recentlyRemovedRef.current.delete(item.id), MODERATION_CONSTANTS.REALTIME_OPTIMISTIC_REMOVAL_TIMEOUT);
}
// TanStack Query handles actual state updates via invalidation
},
onItemRemoved: (itemId: string) => {
// Track for optimistic update protection
recentlyRemovedRef.current.add(itemId);
setTimeout(() => recentlyRemovedRef.current.delete(itemId), MODERATION_CONSTANTS.REALTIME_OPTIMISTIC_REMOVAL_TIMEOUT);
// TanStack Query handles removal via invalidation
},
entityCache,
profileCache,
recentlyRemovedIds: recentlyRemovedRef.current,
interactingWithIds: interactingWith,
});
return {
items,
loadingState,
actionLoading,
filters,
pagination,
queue,
newItemsCount,
pendingNewItems,
showNewItems,
interactingWith,
markInteracting,
refresh,
performAction,
deleteSubmission,
resetToPending,
retryFailedItems,
entityCache,
profileCache,
};
}

View File

@@ -0,0 +1,250 @@
/**
* Pagination Hook
*
* Manages pagination state and actions for the moderation queue.
*/
import { useState, useCallback, useEffect, useMemo } from 'react';
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
import * as storage from '@/lib/localStorage';
import { logger } from '@/lib/logger';
export interface PaginationConfig {
/** Initial page number (1-indexed) */
initialPage?: number;
/** Initial page size */
initialPageSize?: number;
/** Whether to persist pagination state */
persist?: boolean;
/** localStorage key for persistence */
storageKey?: string;
/** Callback when page changes */
onPageChange?: (page: number) => void;
/** Callback when page size changes */
onPageSizeChange?: (pageSize: number) => void;
}
export interface PaginationState {
/** Current page (1-indexed) */
currentPage: number;
/** Items per page */
pageSize: number;
/** Total number of items */
totalCount: number;
/** Total number of pages */
totalPages: number;
/** Start index for current page (0-indexed) */
startIndex: number;
/** End index for current page (0-indexed) */
endIndex: number;
/** Whether there is a previous page */
hasPrevPage: boolean;
/** Whether there is a next page */
hasNextPage: boolean;
/** Set current page */
setCurrentPage: (page: number) => void;
/** Set page size */
setPageSize: (size: number) => void;
/** Set total count */
setTotalCount: (count: number) => void;
/** Go to next page */
nextPage: () => void;
/** Go to previous page */
prevPage: () => void;
/** Go to first page */
firstPage: () => void;
/** Go to last page */
lastPage: () => void;
/** Reset pagination */
reset: () => void;
/** Get page range for display */
getPageRange: (maxPages?: number) => number[];
}
/**
* Hook for managing pagination state
*
* @param config - Configuration options
* @returns Pagination state and actions
*
* @example
* ```tsx
* const pagination = usePagination({
* initialPageSize: 25,
* persist: true,
* onPageChange: (page) => fetchData(page)
* });
*
* // Set total count from API
* pagination.setTotalCount(response.count);
*
* // Use in query
* const { startIndex, endIndex } = pagination;
* query.range(startIndex, endIndex);
* ```
*/
export function usePagination(config: PaginationConfig = {}): PaginationState {
const {
initialPage = 1,
initialPageSize = MODERATION_CONSTANTS.DEFAULT_PAGE_SIZE,
persist = false,
storageKey = 'pagination_state',
onPageChange,
onPageSizeChange,
} = config;
// Load persisted state
const loadPersistedState = useCallback(() => {
if (!persist) return null;
try {
const saved = localStorage.getItem(storageKey);
if (saved) {
return JSON.parse(saved);
}
} catch (error: unknown) {
// Silent - localStorage failures are non-critical
}
return null;
}, [persist, storageKey]);
const persisted = loadPersistedState();
// State
const [currentPage, setCurrentPageState] = useState<number>(
persisted?.currentPage || initialPage
);
const [pageSize, setPageSizeState] = useState<number>(
persisted?.pageSize || initialPageSize
);
const [totalCount, setTotalCount] = useState<number>(0);
// Computed values
const totalPages = useMemo(() => Math.ceil(totalCount / pageSize), [totalCount, pageSize]);
const startIndex = useMemo(() => (currentPage - 1) * pageSize, [currentPage, pageSize]);
const endIndex = useMemo(() => startIndex + pageSize - 1, [startIndex, pageSize]);
const hasPrevPage = currentPage > 1;
const hasNextPage = currentPage < totalPages;
// Persist state
useEffect(() => {
if (persist) {
storage.setJSON(storageKey, {
currentPage,
pageSize,
});
}
}, [currentPage, pageSize, persist, storageKey]);
// Set current page with bounds checking
const setCurrentPage = useCallback(
(page: number) => {
const boundedPage = Math.max(1, Math.min(page, totalPages || 1));
setCurrentPageState(boundedPage);
onPageChange?.(boundedPage);
},
[totalPages, onPageChange]
);
// Set page size and reset to first page
const setPageSize = useCallback(
(size: number) => {
setPageSizeState(size);
setCurrentPageState(1);
onPageSizeChange?.(size);
},
[onPageSizeChange]
);
// Navigation actions
const nextPage = useCallback(() => {
if (hasNextPage) {
setCurrentPage(currentPage + 1);
}
}, [currentPage, hasNextPage, setCurrentPage]);
const prevPage = useCallback(() => {
if (hasPrevPage) {
setCurrentPage(currentPage - 1);
}
}, [currentPage, hasPrevPage, setCurrentPage]);
const firstPage = useCallback(() => {
setCurrentPage(1);
}, [setCurrentPage]);
const lastPage = useCallback(() => {
setCurrentPage(totalPages);
}, [totalPages, setCurrentPage]);
// Reset pagination
const reset = useCallback(() => {
setCurrentPageState(initialPage);
setPageSizeState(initialPageSize);
setTotalCount(0);
}, [initialPage, initialPageSize]);
// Get page range for pagination controls
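  // e.g. totalPages = 20, currentPage = 10, maxPages = 5 -> [8, 9, 10, 11, 12];
  // near the edges the window is clamped, e.g. currentPage = 1 -> [1, 2, 3, 4, 5]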
const getPageRange = useCallback(
(maxPages: number = 5): number[] => {
if (totalPages <= maxPages) {
return Array.from({ length: totalPages }, (_, i) => i + 1);
}
const half = Math.floor(maxPages / 2);
let start = Math.max(1, currentPage - half);
let end = Math.min(totalPages, start + maxPages - 1);
// Adjust start if we're near the end
if (end - start < maxPages - 1) {
start = Math.max(1, end - maxPages + 1);
}
return Array.from({ length: end - start + 1 }, (_, i) => start + i);
},
[currentPage, totalPages]
);
  // Plain return object; the callbacks above are already memoized with useCallback
return {
currentPage,
pageSize,
totalCount,
totalPages,
startIndex,
endIndex,
hasPrevPage,
hasNextPage,
setCurrentPage,
setPageSize,
setTotalCount,
nextPage,
prevPage,
firstPage,
lastPage,
reset,
getPageRange,
};
}

View File

@@ -0,0 +1,224 @@
import { useRef, useCallback } from 'react';
import { supabase } from '@/lib/supabaseClient';
import { logger } from '@/lib/logger';
import { getErrorMessage } from '@/lib/errorHandler';
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
import type { ModerationItem } from '@/types/moderation';
/**
* Profile data structure returned from the database
*/
export interface CachedProfile {
user_id: string;
username: string;
display_name?: string;
avatar_url?: string;
}
/**
* Hook for managing user profile caching
*
* Uses ref-based storage to avoid triggering re-renders while providing
* efficient caching for user profile lookups during moderation.
*
* @example
* ```tsx
* const profileCache = useProfileCache();
*
* // Get cached profile
* const profile = profileCache.getCached(userId);
*
* // Bulk fetch and cache profiles
* const profiles = await profileCache.bulkFetch([id1, id2, id3]);
*
* // Check if profile exists in cache
* if (profileCache.has(userId)) {
* const profile = profileCache.getCached(userId);
* }
*
* // Clear cache
* profileCache.clear();
* ```
*/
export function useProfileCache() {
// Use ref to prevent re-renders on cache updates
const cacheRef = useRef<Map<string, CachedProfile>>(new Map());
/**
* Get a cached profile by user ID
*/
const getCached = useCallback((userId: string): CachedProfile | undefined => {
return cacheRef.current.get(userId);
}, []);
/**
* Check if a profile is cached
*/
const has = useCallback((userId: string): boolean => {
return cacheRef.current.has(userId);
}, []);
  /**
   * Set a cached profile, evicting the oldest entry when the cache is full
   */
  const setCached = useCallback((userId: string, profile: CachedProfile): void => {
    const cache = cacheRef.current;
    // Evict the oldest-inserted entry when the cache is full (insertion-order /
    // FIFO; reads do not refresh recency, so this is not a true LRU)
    if (cache.size >= MODERATION_CONSTANTS.MAX_PROFILE_CACHE_SIZE) {
      const firstKey = cache.keys().next().value;
      if (firstKey) {
        cache.delete(firstKey);
        logger.log(`♻️ [ProfileCache] Evicted ${firstKey} (cache full)`);
      }
    }
    cache.set(userId, profile);
  }, []);
/**
* Get uncached user IDs from a list
*/
const getUncachedIds = useCallback((userIds: string[]): string[] => {
return userIds.filter(id => !cacheRef.current.has(id));
}, []);
/**
* Bulk fetch user profiles from the database and cache them
* Only fetches profiles that aren't already cached
*
* @param userIds - Array of user IDs to fetch
   * @returns Profiles for the requested IDs (previously cached and newly fetched)
*/
const bulkFetch = useCallback(async (userIds: string[]): Promise<CachedProfile[]> => {
if (userIds.length === 0) return [];
// Filter to only uncached IDs
const uncachedIds = getUncachedIds(userIds);
if (uncachedIds.length === 0) {
// All profiles are cached, return them
return userIds.map(id => getCached(id)).filter((p): p is CachedProfile => !!p);
}
try {
const { data, error } = await supabase
.from('profiles')
.select('user_id, username, display_name, avatar_url')
.in('user_id', uncachedIds);
if (error) {
        // Silent - a failed profile lookup is non-critical
return [];
}
// Cache the fetched profiles
if (data) {
data.forEach((profile) => {
const cachedProfile: CachedProfile = {
...profile,
display_name: profile.display_name || undefined,
avatar_url: profile.avatar_url || undefined
};
setCached(profile.user_id, cachedProfile);
});
}
      // Return every requested profile now present in the cache (previously
      // cached entries plus the ones just fetched), so callers such as
      // fetchAsMap get a complete result even on partial cache hits
      return userIds
        .map(id => getCached(id))
        .filter((p): p is CachedProfile => !!p);
} catch (error: unknown) {
// Silent - cache operations are non-critical
return [];
}
}, [getCached, setCached, getUncachedIds]);
/**
* Fetch and return profiles for a list of user IDs
* Returns a Map for easy lookup
*
* @param userIds - Array of user IDs to fetch
* @returns Map of userId -> profile
*/
const fetchAsMap = useCallback(async (userIds: string[]): Promise<Map<string, CachedProfile>> => {
const profiles = await bulkFetch(userIds);
return new Map(profiles.map(p => [p.user_id, p]));
}, [bulkFetch]);
/**
* Fetch profiles for submitters and reviewers from submissions
* Automatically extracts user IDs and reviewer IDs from submission data
*
* @param submissions - Array of submissions with user_id and reviewer_id
* @returns Map of userId -> profile for all users involved
*/
const fetchForSubmissions = useCallback(async (submissions: ModerationItem[]): Promise<Map<string, CachedProfile>> => {
const userIds = submissions.map(s => s.user_id).filter(Boolean);
const reviewerIds = submissions.map(s => s.reviewer_id).filter((id): id is string => !!id);
const allUserIds = [...new Set([...userIds, ...reviewerIds])];
return await fetchAsMap(allUserIds);
}, [fetchAsMap]);
/**
* Get a display name for a user (display_name or username)
* Returns 'Unknown User' if not found in cache
*/
const getDisplayName = useCallback((userId: string): string => {
const profile = getCached(userId);
if (!profile) return 'Unknown User';
return profile.display_name || profile.username || 'Unknown User';
}, [getCached]);
/**
* Invalidate (remove) a specific profile from cache
*/
const invalidate = useCallback((userId: string): void => {
cacheRef.current.delete(userId);
}, []);
/**
* Clear all cached profiles
*/
const clear = useCallback((): void => {
cacheRef.current.clear();
}, []);
/**
* Get cache size
*/
const getSize = useCallback((): number => {
return cacheRef.current.size;
}, []);
/**
* Get all cached profile user IDs
*/
const getAllCachedIds = useCallback((): string[] => {
return Array.from(cacheRef.current.keys());
}, []);
/**
* Get direct access to cache ref (for advanced use cases)
* Use with caution - prefer using the provided methods
*/
const getCacheRef = useCallback(() => cacheRef.current, []);
  // Plain return object; every method above is already memoized with useCallback
return {
getCached,
has,
setCached,
getUncachedIds,
bulkFetch,
fetchAsMap,
fetchForSubmissions,
getDisplayName,
invalidate,
clear,
getSize,
getAllCachedIds,
getCacheRef,
};
}

View File

@@ -0,0 +1,225 @@
/**
* TanStack Query hook for moderation queue data fetching
*
* Wraps the existing fetchSubmissions query builder with React Query
* to provide automatic caching, deduplication, and background refetching.
*/
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { fetchSubmissions, type QueryConfig } from '@/lib/moderation/queries';
import { supabase } from '@/lib/supabaseClient';
import { logger } from '@/lib/logger';
import { getErrorMessage } from '@/lib/errorHandler';
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
import { validateModerationItems } from '@/lib/moderation/validation';
import type {
ModerationItem,
EntityFilter,
StatusFilter,
QueueTab,
SortField,
SortDirection
} from '@/types/moderation';
/**
* Get specific, actionable error message based on error type
*/
function getSpecificErrorMessage(error: unknown): string {
// Offline detection
if (!navigator.onLine) {
return 'You appear to be offline. Check your internet connection and try again.';
}
// Timeout
if (error instanceof Error && error.name === 'AbortError') {
return 'Request timed out. The server is taking too long to respond. Please try again.';
}
// Check for Supabase-specific errors
if (typeof error === 'object' && error !== null) {
const err = error as any;
// 500 errors
if (err.status === 500 || err.code === '500') {
return 'Server error occurred. Our team has been notified. Please try again in a few minutes.';
}
// 429 Rate limiting
if (err.status === 429 || err.message?.includes('rate limit')) {
return 'Too many requests. Please wait a moment before trying again.';
}
// Authentication errors
if (err.status === 401 || err.message?.includes('JWT')) {
return 'Your session has expired. Please refresh the page and sign in again.';
}
// Permission errors
if (err.status === 403 || err.message?.includes('permission')) {
return 'You do not have permission to access the moderation queue.';
}
}
// Fallback
return getErrorMessage(error) || 'Failed to load moderation queue. Please try again.';
}
/**
* Configuration for queue query
*/
export interface UseQueueQueryConfig {
/** User making the query */
userId: string | undefined;
/** Whether user is admin */
isAdmin: boolean;
/** Whether user is superuser */
isSuperuser: boolean;
/** Entity filter */
entityFilter: EntityFilter;
/** Status filter */
statusFilter: StatusFilter;
/** Active tab */
tab: QueueTab;
/** Current page */
currentPage: number;
/** Page size */
pageSize: number;
/** Sort configuration */
sortConfig: {
field: SortField;
direction: SortDirection;
};
/** Whether query is enabled (defaults to true) */
enabled?: boolean;
}
/**
* Return type for useQueueQuery
*/
export interface UseQueueQueryReturn {
/** Queue items */
items: ModerationItem[];
/** Total count of items matching filters */
totalCount: number;
/** Initial loading state (no data yet) */
isLoading: boolean;
/** Background refresh in progress (has data already) */
isRefreshing: boolean;
/** Any error that occurred */
error: Error | null;
/** Manually trigger a refetch */
refetch: () => Promise<any>;
/** Invalidate this query (triggers background refetch) */
invalidate: () => Promise<void>;
}
/**
* Hook to fetch moderation queue data using TanStack Query
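 *
 * @example
 * ```tsx
 * // Minimal usage sketch — `user`, `filters`, and `pagination` are assumed to
 * // come from the surrounding component/hooks (e.g. useModerationFilters,
 * // usePagination); they are illustrative, not part of this hook.
 * const queueQuery = useQueueQuery({
 *   userId: user?.id,
 *   isAdmin,
 *   isSuperuser,
 *   entityFilter: filters.entityFilter,
 *   statusFilter: filters.statusFilter,
 *   tab: filters.tab,
 *   currentPage: pagination.currentPage,
 *   pageSize: pagination.pageSize,
 *   sortConfig: filters.sortConfig,
 * });
 *
 * // items/totalCount update automatically as filters or pages change
 * const { items, totalCount, isLoading, isRefreshing, refetch } = queueQuery;
 * ```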
*/
export function useQueueQuery(config: UseQueueQueryConfig): UseQueueQueryReturn {
const queryClient = useQueryClient();
// Build query config for fetchSubmissions
const queryConfig: QueryConfig = {
userId: config.userId || '',
isAdmin: config.isAdmin,
isSuperuser: config.isSuperuser,
entityFilter: config.entityFilter,
statusFilter: config.statusFilter,
tab: config.tab,
currentPage: config.currentPage,
pageSize: config.pageSize,
sortConfig: config.sortConfig,
};
// Create stable query key (TanStack Query uses this for caching/deduplication)
// Include user context to ensure proper cache isolation per user/role
const queryKey = [
'moderation-queue',
config.userId,
config.isAdmin,
config.isSuperuser,
config.entityFilter,
config.statusFilter,
config.tab,
config.currentPage,
config.pageSize,
config.sortConfig.field,
config.sortConfig.direction,
];
// Execute query
const query = useQuery({
queryKey,
queryFn: async () => {
logger.log('🔍 [TanStack Query] Fetching queue data:', queryKey);
      // 30s timeout guard. Note: controller.signal is not passed to
      // fetchSubmissions below, so abort() does not cancel the underlying request.
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), 30000);
try {
const result = await fetchSubmissions(supabase, queryConfig);
clearTimeout(timeoutId);
if (result.error) {
const specificMessage = getSpecificErrorMessage(result.error);
// Error already captured in context
throw new Error(specificMessage);
}
// Validate data shape before returning
const validation = validateModerationItems(result.submissions);
if (!validation.success) {
// Invalid data shape
throw new Error(validation.error || 'Invalid data format');
}
logger.log('✅ [TanStack Query] Fetched', validation.data!.length, 'items');
return { ...result, submissions: validation.data! };
} catch (error) {
clearTimeout(timeoutId);
throw error;
}
},
enabled: config.enabled !== false && !!config.userId,
staleTime: MODERATION_CONSTANTS.QUERY_STALE_TIME,
gcTime: MODERATION_CONSTANTS.QUERY_GC_TIME,
retry: MODERATION_CONSTANTS.QUERY_RETRY_COUNT,
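    // Exponential backoff between retries: 1s, 2s, 4s, ... capped at 30s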
retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 30000),
networkMode: 'offlineFirst', // Handle offline gracefully
meta: {
errorMessage: 'Failed to load moderation queue',
},
});
// Invalidate helper
const invalidate = async () => {
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
};
return {
items: query.data?.submissions || [],
totalCount: query.data?.totalCount || 0,
isLoading: query.isLoading,
isRefreshing: query.isFetching && !query.isLoading,
error: query.error as Error | null,
refetch: query.refetch,
invalidate,
};
}

View File

@@ -0,0 +1,508 @@
/**
* Realtime Subscriptions Hook for Moderation Queue
*
* Manages all Supabase realtime subscriptions for the moderation queue system.
* Handles INSERT and UPDATE events with debouncing, filtering, and optimistic update protection.
*/
import { useEffect, useRef, useState, useCallback } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { supabase } from '@/lib/supabaseClient';
import { logger } from '@/lib/logger';
import { getErrorMessage } from '@/lib/errorHandler';
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
import type { RealtimeChannel, RealtimePostgresChangesPayload } from '@supabase/supabase-js';
import type { Json } from '@/integrations/supabase/types';
import type { ModerationItem, EntityFilter, StatusFilter } from '@/types/moderation';
import type { useEntityCache } from './useEntityCache';
import type { useProfileCache } from './useProfileCache';
import {
matchesEntityFilter,
matchesStatusFilter,
hasItemChanged,
buildModerationItem,
} from '@/lib/moderation/realtime';
/**
* Type-safe interface for submission content from realtime events
*/
interface SubmissionContent {
action?: string;
name?: string;
entity_slug?: string;
entity_name?: string;
entity_id?: string;
park_id?: string;
}
type EntityCacheReturn = ReturnType<typeof useEntityCache>;
type ProfileCacheReturn = ReturnType<typeof useProfileCache>;
/**
* Configuration for realtime subscriptions
*/
export interface RealtimeSubscriptionConfig {
/** Whether realtime subscriptions are enabled */
enabled: boolean;
/** Current filter configuration */
filters: {
entityFilter: EntityFilter;
statusFilter: StatusFilter;
};
/** Callback when a new item is detected */
onNewItem: (item: ModerationItem) => void;
/** Callback when an item is updated */
onUpdateItem: (item: ModerationItem, shouldRemove: boolean) => void;
/** Callback when an item is removed from queue */
onItemRemoved: (itemId: string) => void;
/** Pause subscriptions when tab is hidden (default: true) */
pauseWhenHidden?: boolean;
/** Debounce delay for UPDATE events in milliseconds */
debounceMs?: number;
/** Entity cache for resolving entity names */
entityCache: EntityCacheReturn;
/** Profile cache for resolving user information */
profileCache: ProfileCacheReturn;
/** Set of recently removed IDs (for optimistic updates) */
recentlyRemovedIds: Set<string>;
/** Set of IDs currently being interacted with */
interactingWithIds: Set<string>;
/** Current items in queue (for comparison) - using ref to avoid reconnections (optional) */
currentItemsRef?: React.MutableRefObject<ModerationItem[]>;
}
/**
* Return type for useRealtimeSubscriptions hook
*/
export interface UseRealtimeSubscriptionsReturn {
/** Whether subscriptions are currently connected */
isConnected: boolean;
/** Current connection status */
channelStatus: 'connected' | 'disconnected' | 'error';
/** Manually reconnect subscriptions */
reconnect: () => void;
}
/**
* Hook to manage realtime subscriptions for the moderation queue
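 *
 * @example
 * ```tsx
 * // Minimal sketch — the callbacks, caches, and refs are assumed to come from
 * // the surrounding queue manager (see useModerationQueueManager); only the
 * // shape of the config matters here.
 * const { isConnected, channelStatus, reconnect } = useRealtimeSubscriptions({
 *   enabled: settings.useRealtimeQueue && !!user,
 *   filters: { entityFilter, statusFilter },
 *   onNewItem: (item) => notifyNewItem(item),
 *   onUpdateItem: () => {},
 *   onItemRemoved: (id) => trackRemoval(id),
 *   entityCache,
 *   profileCache,
 *   recentlyRemovedIds: recentlyRemovedRef.current,
 *   interactingWithIds: interactingWith,
 * });
 *
 * // e.g. offer a manual retry when the channel errors out
 * if (channelStatus === 'error') {
 *   reconnect();
 * }
 * ```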
*/
export function useRealtimeSubscriptions(
config: RealtimeSubscriptionConfig
): UseRealtimeSubscriptionsReturn {
const queryClient = useQueryClient();
const {
enabled,
filters,
onNewItem,
onUpdateItem,
onItemRemoved,
pauseWhenHidden = true,
debounceMs = MODERATION_CONSTANTS.REALTIME_DEBOUNCE_MS,
entityCache,
profileCache,
recentlyRemovedIds,
interactingWithIds,
currentItemsRef,
} = config;
// Debounce management for UPDATE events
const updateDebounceMap = useRef<Map<string, NodeJS.Timeout>>(new Map());
// Channel references
const insertChannelRef = useRef<RealtimeChannel | null>(null);
const updateChannelRef = useRef<RealtimeChannel | null>(null);
// Status tracking
const [channelStatus, setChannelStatus] = useState<'connected' | 'disconnected' | 'error'>('disconnected');
const [reconnectTrigger, setReconnectTrigger] = useState(0);
/**
* Debounced update handler - waits for rapid changes to settle
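   * (e.g. several UPDATE events for the same submission arriving within
   * `debounceMs` collapse into a single query invalidation)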
*/
const debouncedUpdate = useCallback((submissionId: string, updateFn: () => void) => {
const existingTimeout = updateDebounceMap.current.get(submissionId);
if (existingTimeout) {
clearTimeout(existingTimeout);
}
const newTimeout = setTimeout(() => {
updateFn();
updateDebounceMap.current.delete(submissionId);
}, debounceMs);
updateDebounceMap.current.set(submissionId, newTimeout);
}, [debounceMs]);
/**
* Fetch full submission details with related data
*/
const fetchSubmissionDetails = useCallback(async (submissionId: string) => {
const { data: submission, error } = await supabase
.from('content_submissions')
.select(`
id, submission_type, status, created_at, user_id,
reviewed_at, reviewer_id, reviewer_notes, escalated, assigned_to, locked_until,
submission_items (
id,
item_type,
item_data,
status
),
submission_metadata (
entity_id,
park_id,
ride_id
)
`)
.eq('id', submissionId)
.single();
if (error || !submission) {
// Silent - will retry on next attempt
return null;
}
return submission;
}, []);
/**
* Resolve entity names for a submission
*/
const resolveEntityNames = useCallback(async (submission: { submission_type: string; submission_metadata?: any[] }) => {
// Get metadata
const metadata = Array.isArray(submission.submission_metadata) && submission.submission_metadata.length > 0
? submission.submission_metadata[0]
: undefined;
let entityName = 'Unknown';
let parkName: string | undefined;
if (submission.submission_type === 'ride' && metadata?.entity_id) {
// Try cache first
const cachedRide = entityCache.getCached('rides', metadata.entity_id);
if (cachedRide) {
entityName = cachedRide.name;
if (cachedRide.park_id) {
const cachedPark = entityCache.getCached('parks', cachedRide.park_id);
if (cachedPark) parkName = cachedPark.name;
}
} else {
const { data: ride } = await supabase
.from('rides')
.select('id, name, park_id')
.eq('id', metadata.entity_id)
.maybeSingle();
if (ride) {
entityName = ride.name;
entityCache.setCached('rides', metadata.entity_id, ride);
if (ride.park_id) {
const { data: park } = await supabase
.from('parks')
.select('id, name')
.eq('id', ride.park_id)
.maybeSingle();
if (park) {
parkName = park.name;
entityCache.setCached('parks', ride.park_id, park);
}
}
}
}
} else if (submission.submission_type === 'park' && metadata?.entity_id) {
const cachedPark = entityCache.getCached('parks', metadata.entity_id);
if (cachedPark) {
entityName = cachedPark.name;
} else {
const { data: park } = await supabase
.from('parks')
.select('id, name')
.eq('id', metadata.entity_id)
.maybeSingle();
if (park) {
entityName = park.name;
entityCache.setCached('parks', metadata.entity_id, park);
}
}
} else if (['manufacturer', 'operator', 'designer', 'property_owner'].includes(submission.submission_type) && metadata?.entity_id) {
const cachedCompany = entityCache.getCached('companies', metadata.entity_id);
if (cachedCompany) {
entityName = cachedCompany.name;
} else {
const { data: company } = await supabase
.from('companies')
.select('id, name')
.eq('id', metadata.entity_id)
.maybeSingle();
if (company) {
entityName = company.name;
entityCache.setCached('companies', metadata.entity_id, company);
}
}
}
return { entityName, parkName };
}, [entityCache]);
/**
* Handle new submission INSERT event
*/
const handleInsert = useCallback(async (payload: RealtimePostgresChangesPayload<any>) => {
const newSubmission = payload.new;
logger.log('🆕 Realtime INSERT:', newSubmission.id);
// Queue updates if tab is hidden
if (pauseWhenHidden && document.hidden) {
logger.log('📴 Realtime event received while hidden - queuing for later');
return;
}
// Ignore if recently removed (optimistic update)
if (recentlyRemovedIds.has(newSubmission.id)) {
logger.log('⏭️ Ignoring INSERT for recently removed submission:', newSubmission.id);
return;
}
// Only process pending/partially_approved submissions
if (!['pending', 'partially_approved'].includes(newSubmission.status)) {
return;
}
// Apply filters
if (!matchesEntityFilter(newSubmission, filters.entityFilter)) {
return;
}
if (!matchesStatusFilter(newSubmission, filters.statusFilter)) {
return;
}
logger.log('✅ NEW submission matches filters, invalidating query:', newSubmission.id);
// Invalidate the query to trigger background refetch
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
// Call legacy callback for new item notification
// (This maintains compatibility with NewItemsAlert component)
try {
const submission = await fetchSubmissionDetails(newSubmission.id);
if (!submission) return;
const profile = await profileCache.bulkFetch([submission.user_id]);
const userProfile = profile[0];
const { entityName, parkName } = await resolveEntityNames(submission);
const fullItem = buildModerationItem(
submission,
userProfile,
entityName,
parkName
);
onNewItem(fullItem);
} catch (error: unknown) {
// Silent - notifications are non-critical
}
}, [
filters,
pauseWhenHidden,
recentlyRemovedIds,
queryClient,
fetchSubmissionDetails,
profileCache,
resolveEntityNames,
onNewItem,
]);
/**
* Handle submission UPDATE event
*/
const handleUpdate = useCallback(async (payload: RealtimePostgresChangesPayload<any>) => {
const updatedSubmission = payload.new;
const oldSubmission = payload.old;
logger.log('🔄 Realtime UPDATE:', updatedSubmission.id);
// Queue updates if tab is hidden
if (pauseWhenHidden && document.hidden) {
logger.log('📴 Realtime UPDATE received while hidden - queuing for later');
return;
}
// Ignore if recently removed (optimistic update in progress)
if (recentlyRemovedIds.has(updatedSubmission.id)) {
logger.log('⏭️ Ignoring UPDATE for recently removed submission:', updatedSubmission.id);
return;
}
// Ignore if currently being interacted with
if (interactingWithIds.has(updatedSubmission.id)) {
logger.log('⏭️ Ignoring UPDATE for interacting submission:', updatedSubmission.id);
return;
}
// Skip debounce for status changes (critical updates)
const isStatusChange = oldSubmission && 'status' in oldSubmission
&& oldSubmission.status !== updatedSubmission?.status;
if (isStatusChange) {
logger.log('⚡ Status change detected, invalidating immediately');
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
const matchesEntity = matchesEntityFilter(updatedSubmission, filters.entityFilter);
const matchesStatus = matchesStatusFilter(updatedSubmission, filters.statusFilter);
const shouldBeInQueue = matchesEntity && matchesStatus;
if (!shouldBeInQueue) {
onItemRemoved(updatedSubmission.id);
}
return; // Skip debounced update
}
// Use debounce for non-critical updates
debouncedUpdate(updatedSubmission.id, async () => {
logger.log('🔄 Invalidating query due to UPDATE:', updatedSubmission.id);
// Simply invalidate the query - TanStack Query handles the rest
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
// Legacy callback for compatibility
const matchesEntity = matchesEntityFilter(updatedSubmission, filters.entityFilter);
const matchesStatus = matchesStatusFilter(updatedSubmission, filters.statusFilter);
const shouldBeInQueue = matchesEntity && matchesStatus;
if (!shouldBeInQueue) {
onItemRemoved(updatedSubmission.id);
}
});
}, [
filters,
pauseWhenHidden,
recentlyRemovedIds,
interactingWithIds,
debouncedUpdate,
queryClient,
onItemRemoved,
]);
/**
* Setup INSERT subscription
*/
useEffect(() => {
if (!enabled) {
setChannelStatus('disconnected');
return;
}
logger.log('📡 Setting up INSERT subscription');
const channel = supabase
.channel('moderation-new-submissions')
.on(
'postgres_changes',
{
event: 'INSERT',
schema: 'public',
table: 'content_submissions',
},
handleInsert
)
.subscribe((status) => {
logger.log('INSERT subscription status:', status);
if (status === 'SUBSCRIBED') {
setChannelStatus('connected');
} else if (status === 'CHANNEL_ERROR') {
setChannelStatus('error');
}
});
insertChannelRef.current = channel;
return () => {
logger.log('🛑 Cleaning up INSERT subscription');
supabase.removeChannel(channel);
insertChannelRef.current = null;
};
}, [enabled, handleInsert, reconnectTrigger]);
/**
* Setup UPDATE subscription
*/
useEffect(() => {
if (!enabled) return;
logger.log('📡 Setting up UPDATE subscription');
const channel = supabase
.channel('moderation-updated-submissions')
.on(
'postgres_changes',
{
event: 'UPDATE',
schema: 'public',
table: 'content_submissions',
},
handleUpdate
)
.subscribe((status) => {
logger.log('UPDATE subscription status:', status);
if (status === 'SUBSCRIBED') {
setChannelStatus('connected');
} else if (status === 'CHANNEL_ERROR') {
setChannelStatus('error');
}
});
updateChannelRef.current = channel;
return () => {
logger.log('🛑 Cleaning up UPDATE subscription');
supabase.removeChannel(channel);
updateChannelRef.current = null;
};
}, [enabled, handleUpdate, reconnectTrigger]);
/**
* Cleanup debounce timers on unmount
*/
  useEffect(() => {
    // Capture the map once so the cleanup doesn't dereference the ref after unmount
    const debounceMap = updateDebounceMap.current;
    return () => {
      debounceMap.forEach(timeout => clearTimeout(timeout));
      debounceMap.clear();
    };
  }, []);
/**
* Manual reconnect function
*/
const reconnect = useCallback(() => {
logger.log('🔄 Manually reconnecting subscriptions...');
setReconnectTrigger(prev => prev + 1);
}, []);
return {
isConnected: channelStatus === 'connected',
channelStatus,
reconnect,
};
}