Mirror of https://github.com/pacnpal/thrilltrack-explorer.git (synced 2025-12-24 18:31:11 -05:00)
feat: Implement comprehensive request tracking and state management
src/lib/edgeFunctionTracking.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
/**
 * Edge Function Request Tracking Wrapper
 *
 * Wraps Supabase function invocations with request tracking for debugging and monitoring.
 * Provides correlation IDs for tracing requests across the system.
 */

import { supabase } from '@/integrations/supabase/client';
import { trackRequest } from './requestTracking';
import { getErrorMessage } from './errorHandler';

/**
 * Invoke a Supabase edge function with request tracking
 *
 * @param functionName - Name of the edge function to invoke
 * @param payload - Request payload
 * @param userId - User ID for tracking (optional)
 * @param parentRequestId - Parent request ID for chaining (optional)
 * @param traceId - Trace ID for distributed tracing (optional)
 * @returns Response data with requestId
 */
export async function invokeWithTracking<T = any>(
  functionName: string,
  payload: Record<string, unknown> = {},
  userId?: string,
  parentRequestId?: string,
  traceId?: string
): Promise<{ data: T | null; error: any; requestId: string; duration: number }> {
  try {
    const { result, requestId, duration } = await trackRequest(
      {
        endpoint: `/functions/${functionName}`,
        method: 'POST',
        userId,
        parentRequestId,
        traceId,
      },
      async (context) => {
        // Include client request ID in payload for correlation
        const { data, error } = await supabase.functions.invoke<T>(functionName, {
          body: { ...payload, clientRequestId: context.requestId },
        });

        if (error) throw error;
        return data;
      }
    );

    return { data: result, error: null, requestId, duration };
  } catch (error: unknown) {
    const errorMessage = getErrorMessage(error);
    // On error, we don't have tracking info, so create basic response
    return {
      data: null,
      error: { message: errorMessage },
      requestId: 'unknown',
      duration: 0,
    };
  }
}

/**
 * Invoke multiple edge functions in parallel with batch tracking
 *
 * Uses a shared trace ID to correlate all operations.
 *
 * @param operations - Array of function invocation configurations
 * @param userId - User ID for tracking
 * @returns Array of results with their request IDs
 */
export async function invokeBatchWithTracking<T = any>(
  operations: Array<{
    functionName: string;
    payload: Record<string, unknown>;
  }>,
  userId?: string
): Promise<
  Array<{
    functionName: string;
    data: T | null;
    error: any;
    requestId: string;
    duration: number;
  }>
> {
  const traceId = crypto.randomUUID();

  const results = await Promise.allSettled(
    operations.map(async (op) => {
      const result = await invokeWithTracking<T>(
        op.functionName,
        op.payload,
        userId,
        undefined,
        traceId
      );
      return { functionName: op.functionName, ...result };
    })
  );

  return results.map((result, index) => {
    if (result.status === 'fulfilled') {
      return result.value;
    } else {
      return {
        functionName: operations[index].functionName,
        data: null,
        error: { message: result.reason?.message || 'Unknown error' },
        requestId: 'unknown',
        duration: 0,
      };
    }
  });
}
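
Note (not part of the diff): a minimal call-site sketch of the wrapper added above. The 'sync-park-data' function name, its payload, and the surrounding helper are hypothetical; only the invokeWithTracking signature comes from the file above.

// Hypothetical call site for invokeWithTracking; 'sync-park-data' is illustrative only.
import { invokeWithTracking } from '@/lib/edgeFunctionTracking';

async function refreshParkData(parkId: string, userId: string) {
  const { data, error, requestId, duration } = await invokeWithTracking<{ updated: number }>(
    'sync-park-data',   // edge function name (hypothetical)
    { parkId },         // payload; clientRequestId is appended automatically by the wrapper
    userId              // optional user ID for tracking
  );

  if (error) {
    // requestId lets support correlate this failure with edge function logs
    console.warn(`Sync failed (request ${requestId}, ${duration}ms): ${error.message}`);
    return null;
  }
  return data;
}
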
@@ -1,5 +1,7 @@
 import { supabase } from '@/integrations/supabase/client';
+import { invokeWithTracking } from './edgeFunctionTracking';
 import type { UploadedImage } from '@/components/upload/EntityMultiImageUploader';
+import { logger } from './logger';
 
 export interface CloudflareUploadResponse {
   result: {
@@ -25,16 +27,28 @@ export async function uploadPendingImages(images: UploadedImage[]): Promise<Uplo
     if (image.isLocal && image.file) {
       const fileName = image.file.name;
 
-      // Step 1: Get upload URL from our Supabase Edge Function
-      const { data: uploadUrlData, error: urlError } = await supabase.functions.invoke('upload-image', {
-        body: { action: 'get-upload-url' }
-      });
+      // Step 1: Get upload URL from our Supabase Edge Function (with tracking)
+      const { data: uploadUrlData, error: urlError, requestId } = await invokeWithTracking(
+        'upload-image',
+        { action: 'get-upload-url' }
+      );
 
       if (urlError || !uploadUrlData?.uploadURL) {
-        console.error(`imageUploadHelper.uploadPendingImages: Failed to get upload URL for "${fileName}":`, urlError);
+        logger.error('Failed to get upload URL', {
+          action: 'upload_pending_images',
+          fileName,
+          requestId,
+          error: urlError?.message || 'Unknown error',
+        });
         throw new Error(`Failed to get upload URL for "${fileName}": ${urlError?.message || 'Unknown error'}`);
       }
 
+      logger.info('Got upload URL', {
+        action: 'upload_pending_images',
+        fileName,
+        requestId,
+      });
+
       // Step 2: Upload file directly to Cloudflare
       const formData = new FormData();
       formData.append('file', image.file);
@@ -46,17 +60,31 @@ export async function uploadPendingImages(images: UploadedImage[]): Promise<Uplo
 
       if (!uploadResponse.ok) {
         const errorText = await uploadResponse.text();
-        console.error(`imageUploadHelper.uploadPendingImages: Upload failed for "${fileName}" (status ${uploadResponse.status}):`, errorText);
+        logger.error('Cloudflare upload failed', {
+          action: 'upload_pending_images',
+          fileName,
+          status: uploadResponse.status,
+          error: errorText,
+        });
         throw new Error(`Upload failed for "${fileName}" (status ${uploadResponse.status}): ${errorText}`);
       }
 
       const result: CloudflareUploadResponse = await uploadResponse.json();
 
       if (!result.success || !result.result) {
-        console.error(`imageUploadHelper.uploadPendingImages: Cloudflare upload unsuccessful for "${fileName}"`);
+        logger.error('Cloudflare upload unsuccessful', {
+          action: 'upload_pending_images',
+          fileName,
+        });
         throw new Error(`Cloudflare upload returned unsuccessful response for "${fileName}"`);
       }
 
+      logger.info('Image uploaded successfully', {
+        action: 'upload_pending_images',
+        fileName,
+        imageId: result.result.id,
+      });
+
       // Clean up object URL
       URL.revokeObjectURL(image.url);
 
@@ -106,13 +134,18 @@ export async function uploadPendingImages(images: UploadedImage[]): Promise<Uplo
   // If any uploads failed, clean up ONLY newly uploaded images and throw error
   if (errors.length > 0) {
     if (newlyUploadedImageIds.length > 0) {
-      console.error(`imageUploadHelper.uploadPendingImages: Some uploads failed. Cleaning up ${newlyUploadedImageIds.length} newly uploaded images...`);
+      logger.error('Some uploads failed, cleaning up', {
+        action: 'upload_pending_images',
+        newlyUploadedCount: newlyUploadedImageIds.length,
+        failureCount: errors.length,
+      });
 
       // Attempt cleanup in parallel with detailed error tracking
       const cleanupResults = await Promise.allSettled(
         newlyUploadedImageIds.map(imageId =>
-          supabase.functions.invoke('upload-image', {
-            body: { action: 'delete', imageId }
+          invokeWithTracking('upload-image', {
+            action: 'delete',
+            imageId,
           })
         )
       );
@@ -120,13 +153,17 @@ export async function uploadPendingImages(images: UploadedImage[]): Promise<Uplo
       // Track cleanup failures for better debugging
       const cleanupFailures = cleanupResults.filter(r => r.status === 'rejected');
       if (cleanupFailures.length > 0) {
-        console.error(
-          `imageUploadHelper.uploadPendingImages: Failed to cleanup ${cleanupFailures.length} of ${newlyUploadedImageIds.length} images.`,
-          'These images may remain orphaned in Cloudflare:',
-          newlyUploadedImageIds.filter((_, i) => cleanupResults[i].status === 'rejected')
-        );
+        logger.error('Failed to cleanup images', {
+          action: 'upload_pending_images_cleanup',
+          cleanupFailures: cleanupFailures.length,
+          totalCleanup: newlyUploadedImageIds.length,
+          orphanedImages: newlyUploadedImageIds.filter((_, i) => cleanupResults[i].status === 'rejected'),
+        });
       } else {
-        console.log(`imageUploadHelper.uploadPendingImages: Successfully cleaned up ${newlyUploadedImageIds.length} images.`);
+        logger.info('Successfully cleaned up images', {
+          action: 'upload_pending_images_cleanup',
+          cleanedCount: newlyUploadedImageIds.length,
+        });
       }
     }
 
@@ -11,6 +11,7 @@ import { createTableQuery } from '@/lib/supabaseHelpers';
 import type { ModerationItem } from '@/types/moderation';
 import { logger } from '@/lib/logger';
 import { getErrorMessage } from '@/lib/errorHandler';
+import { invokeWithTracking, invokeBatchWithTracking } from '@/lib/edgeFunctionTracking';
 
 /**
  * Type-safe update data for review moderation
@@ -184,20 +185,32 @@ export async function approveSubmissionItems(
   itemIds: string[]
 ): Promise<ModerationActionResult> {
   try {
-    const { error: approvalError } = await supabase.functions.invoke(
+    const { data: approvalData, error: approvalError, requestId } = await invokeWithTracking(
       'process-selective-approval',
       {
-        body: {
-          itemIds,
-          submissionId,
-        },
+        itemIds,
+        submissionId,
       }
     );
 
     if (approvalError) {
+      logger.error('Submission items approval failed via edge function', {
+        action: 'approve_submission_items',
+        submissionId,
+        itemCount: itemIds.length,
+        requestId,
+        error: approvalError.message,
+      });
       throw new Error(`Failed to process submission items: ${approvalError.message}`);
     }
 
+    logger.info('Submission items approved successfully', {
+      action: 'approve_submission_items',
+      submissionId,
+      itemCount: itemIds.length,
+      requestId,
+    });
+
     return {
       success: true,
       message: `Successfully processed ${itemIds.length} item(s)`,
@@ -478,24 +491,28 @@
 
       // Delete photos from Cloudflare
       if (validImageIds.length > 0) {
-        const deletePromises = validImageIds.map(async imageId => {
-          try {
-            await supabase.functions.invoke('upload-image', {
-              method: 'DELETE',
-              body: { imageId },
-            });
-          } catch (photoDeleteError: unknown) {
-            const errorMessage = getErrorMessage(photoDeleteError);
-            logger.error('Photo deletion failed', {
-              action: 'delete_submission_photo',
-              imageId,
-              error: errorMessage
-            });
-          }
-        });
-
-        await Promise.allSettled(deletePromises);
-        deletedPhotoCount = validImageIds.length;
+        const deleteResults = await invokeBatchWithTracking(
+          validImageIds.map(imageId => ({
+            functionName: 'upload-image',
+            payload: { action: 'delete', imageId },
+          })),
+          undefined
+        );
+
+        // Count successful deletions
+        const successfulDeletions = deleteResults.filter(r => !r.error);
+        deletedPhotoCount = successfulDeletions.length;
+
+        // Log any failures
+        const failedDeletions = deleteResults.filter(r => r.error);
+        if (failedDeletions.length > 0) {
+          logger.error('Some photo deletions failed', {
+            action: 'delete_submission_photos',
+            failureCount: failedDeletions.length,
+            totalAttempted: validImageIds.length,
+            failedRequestIds: failedDeletions.map(r => r.requestId),
+          });
+        }
       }
     }
   }
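
Note (not part of the diff): since invokeWithTracking injects clientRequestId into every payload, a corresponding edge function can echo it in its logs so client and server records line up. A minimal sketch of such a handler follows; it is hypothetical (Deno handler assumed, as Supabase edge functions run on Deno) and not part of this commit.

// Hypothetical Supabase edge function handler showing how the clientRequestId
// injected by invokeWithTracking could be logged server-side. Illustrative only.
Deno.serve(async (req) => {
  const body = await req.json();
  const clientRequestId = body.clientRequestId ?? 'unknown';

  // Structured log entry that can be matched against the client's requestId
  console.log(JSON.stringify({ event: 'request_received', clientRequestId }));

  // ... perform the actual work (e.g. image deletion) here ...

  return new Response(
    JSON.stringify({ ok: true, clientRequestId }),
    { headers: { 'Content-Type': 'application/json' } },
  );
});
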
src/lib/moderation/lockMonitor.ts (new file, 108 lines)
@@ -0,0 +1,108 @@
/**
 * Moderation Lock Monitor
 *
 * Monitors lock expiry and provides automatic renewal prompts for moderators.
 * Prevents loss of work due to expired locks.
 */

import { useEffect } from 'react';
import type { ModerationState } from '../moderationStateMachine';
import type { ModerationAction } from '../moderationStateMachine';
import { hasActiveLock, needsLockRenewal } from '../moderationStateMachine';
import { toast } from '@/hooks/use-toast';
import { supabase } from '@/integrations/supabase/client';
import { logger } from '../logger';

/**
 * Hook to monitor lock status and warn about expiry
 *
 * @param state - Current moderation state
 * @param dispatch - State machine dispatch function
 * @param itemId - ID of the locked item (optional, for manual extension)
 */
export function useLockMonitor(
  state: ModerationState,
  dispatch: React.Dispatch<ModerationAction>,
  itemId?: string
) {
  useEffect(() => {
    if (!hasActiveLock(state)) {
      return;
    }

    const checkInterval = setInterval(() => {
      if (needsLockRenewal(state)) {
        logger.warn('Lock expiring soon', {
          action: 'lock_expiry_warning',
          itemId,
          lockExpires: state.status === 'locked' || state.status === 'reviewing'
            ? state.lockExpires
            : undefined,
        });

        // Dispatch lock expiry warning
        dispatch({ type: 'LOCK_EXPIRED' });

        // Show toast with extension option
        toast({
          title: 'Lock Expiring Soon',
          description: 'Your lock on this submission will expire in less than 2 minutes. Click to extend.',
          variant: 'default',
        });
      }
    }, 30000); // Check every 30 seconds

    return () => clearInterval(checkInterval);
  }, [state, dispatch, itemId]);
}

/**
 * Extend the lock on a submission
 *
 * @param submissionId - Submission ID
 * @param dispatch - State machine dispatch function
 */
async function handleExtendLock(
  submissionId: string,
  dispatch: React.Dispatch<ModerationAction>
) {
  try {
    // Call Supabase to extend lock (assumes 15 minute extension)
    const { error } = await supabase
      .from('content_submissions')
      .update({
        locked_until: new Date(Date.now() + 15 * 60 * 1000).toISOString(),
      })
      .eq('id', submissionId);

    if (error) throw error;

    // Update state machine with new lock time
    dispatch({
      type: 'LOCK_ACQUIRED',
      payload: { lockExpires: new Date(Date.now() + 15 * 60 * 1000).toISOString() },
    });

    toast({
      title: 'Lock Extended',
      description: 'You have 15 more minutes to complete your review.',
    });

    logger.info('Lock extended successfully', {
      action: 'lock_extended',
      submissionId,
    });
  } catch (error) {
    logger.error('Failed to extend lock', {
      action: 'extend_lock_error',
      submissionId,
      error: error instanceof Error ? error.message : String(error),
    });

    toast({
      title: 'Extension Failed',
      description: 'Could not extend lock. Please save your work and re-claim the item.',
      variant: 'destructive',
    });
  }
}
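
Note (not part of the diff): a minimal sketch of how useLockMonitor might be wired into a moderation screen. The component, the reducer export names, and the way the submission ID is passed are assumptions made for illustration; only the useLockMonitor signature comes from the file above.

// Hypothetical consumer of useLockMonitor; the reducer exports and props are illustrative only.
import { useReducer } from 'react';
import { useLockMonitor } from '@/lib/moderation/lockMonitor';
import { moderationReducer, initialModerationState } from '@/lib/moderationStateMachine'; // assumed exports

export function SubmissionReviewPanel({ submissionId }: { submissionId: string }) {
  const [state, dispatch] = useReducer(moderationReducer, initialModerationState);

  // Warns via toast and dispatches LOCK_EXPIRED when the active lock is about to lapse.
  useLockMonitor(state, dispatch, submissionId);

  return null; // review UI omitted
}
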