thrilltrack-explorer/src/lib/moderation/actions.ts
gpt-engineer-app[bot] 94312c8ef0 Connect to atomic rejection flow
Introduce atomic rejection for bulk submissions, remove dead moderation code, and clean exports:
- Replace direct DB updates in rejectSubmissionItems with atomic process-selective-rejection edge function call
- Add edge function invocation helper import
- Remove obsolete moderation actions (approvePhotoSubmission, rejectSubmissionItems, performModerationAction) and prune exports
- Update moderation index exports accordingly
- Ensure cascade handling uses the atomic pipeline and avoids updateSubmissionStatusAfterRejection calls
2025-11-10 14:19:49 +00:00
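A minimal sketch of the atomic rejection call described above, assuming the process-selective-rejection edge function accepts the same payload shape as the process-selective-approval call in the file below; the function name rejectSubmissionItemsAtomic is illustrative and not part of the codebase:

// Hypothetical sketch, not part of actions.ts: atomic rejection via edge function,
// mirroring the approval path below. The payload shape is assumed, not confirmed.
import { invokeWithTracking } from '@/lib/edgeFunctionTracking';

async function rejectSubmissionItemsAtomic(
  submissionId: string,
  itemIds: string[]
): Promise<void> {
  const { error, requestId } = await invokeWithTracking(
    'process-selective-rejection',
    { itemIds, submissionId }
  );
  if (error) {
    // Include the tracked requestId so failed rejections can be traced in logs
    throw new Error(`Rejection failed (request ${requestId}): ${error.message}`);
  }
}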

/**
 * Moderation Actions
 *
 * Business logic for performing moderation actions on submissions.
 * Handles approval, rejection, and deletion workflows with proper
 * error handling and database updates.
 */
import { SupabaseClient } from '@supabase/supabase-js';
import { createTableQuery } from '@/lib/supabaseHelpers';
import type { ModerationItem } from '@/types/moderation';
import { handleError, handleNonCriticalError, getErrorMessage } from '@/lib/errorHandler';
import { invokeWithTracking, invokeBatchWithTracking } from '@/lib/edgeFunctionTracking';

/**
 * Type-safe update data for review moderation
 * Note: These types document the expected structure. Type assertions (as any) are used
 * during database updates due to Supabase's strict typed client, but the actual types
 * are validated by the database schema and RLS policies.
 */
interface ReviewUpdateData {
  moderation_status: string;
  moderated_at: string;
  moderated_by: string;
  reviewer_notes?: string;
  locked_until?: null;
  locked_by?: null;
}

/**
 * Type-safe update data for submission moderation
 * Note: These types document the expected structure. Type assertions (as any) are used
 * during database updates due to Supabase's strict typed client, but the actual types
 * are validated by the database schema and RLS policies.
 */
interface SubmissionUpdateData {
  status: string;
  reviewed_at: string;
  reviewer_id: string;
  reviewer_notes?: string;
  locked_until?: null;
  locked_by?: null;
}

/**
 * Union of the moderation update shapes above (documentation purposes)
 */
type ModerationUpdateData = ReviewUpdateData | SubmissionUpdateData;

/**
 * Result of a moderation action
 */
export interface ModerationActionResult {
  success: boolean;
  message: string;
  error?: Error;
  shouldRemoveFromQueue: boolean;
}

/**
 * Approve submission items using atomic transaction RPC.
 *
 * This function uses PostgreSQL's ACID transaction guarantees to ensure
 * all-or-nothing approval with automatic rollback on any error.
 *
 * The approval process is handled entirely within a single database transaction
 * via the process_approval_transaction() RPC function, which guarantees:
 * - True atomic transactions (all-or-nothing)
 * - Automatic rollback on ANY error
 * - Network-resilient (edge function crash = auto rollback)
 * - Zero orphaned entities
 */
export async function approveSubmissionItems(
  supabase: SupabaseClient,
  submissionId: string,
  itemIds: string[]
): Promise<ModerationActionResult> {
  try {
    console.log(`[Approval] Processing ${itemIds.length} items via atomic transaction`, {
      submissionId,
      itemCount: itemIds.length
    });
    const { data: approvalData, error: approvalError, requestId } = await invokeWithTracking(
      'process-selective-approval',
      {
        itemIds,
        submissionId,
      }
    );
    if (approvalError) {
      const error = new Error(`Failed to process submission items: ${approvalError.message}`);
      handleError(error, {
        action: 'Approve Submission Items',
        metadata: { submissionId, itemCount: itemIds.length, requestId }
      });
      throw error;
    }
    return {
      success: true,
      message: `Successfully processed ${itemIds.length} item(s)`,
      shouldRemoveFromQueue: true,
    };
  } catch (error: unknown) {
    handleError(error, {
      action: 'Approve Submission Items',
      metadata: { submissionId, itemCount: itemIds.length }
    });
    return {
      success: false,
      message: 'Failed to approve submission items',
      error: error instanceof Error ? error : new Error(getErrorMessage(error)),
      shouldRemoveFromQueue: false,
    };
  }
}

/**
 * Configuration for submission deletion
 */
export interface DeleteSubmissionConfig {
  item: ModerationItem;
  deletePhotos?: boolean;
}

/**
 * Delete a submission and its associated photos
 *
 * Extracts photo IDs, deletes them from Cloudflare, then deletes the submission.
 *
 * @param supabase - Supabase client
 * @param config - Deletion configuration
 * @returns Action result
 */
export async function deleteSubmission(
  supabase: SupabaseClient,
  config: DeleteSubmissionConfig
): Promise<ModerationActionResult> {
  const { item, deletePhotos = true } = config;
  if (item.type !== 'content_submission') {
    return {
      success: false,
      message: 'Can only delete content submissions',
      shouldRemoveFromQueue: false,
    };
  }
  try {
    let deletedPhotoCount = 0;
    let skippedPhotoCount = 0;
    // Extract and delete photos if requested
    if (deletePhotos) {
      const photosArray = item.content?.content?.photos || item.content?.photos;
      if (photosArray && Array.isArray(photosArray)) {
        const validImageIds: string[] = [];
        for (const photo of photosArray) {
          let imageId = '';
          if (photo.imageId) {
            imageId = photo.imageId;
          } else if (photo.url && !photo.url.startsWith('blob:')) {
            // Try to extract from URL
            const uuidRegex =
              /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i;
            if (uuidRegex.test(photo.url)) {
              imageId = photo.url;
            } else {
              const cloudflareMatch = photo.url.match(
                /imagedelivery\.net\/[^\/]+\/([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})/i
              );
              if (cloudflareMatch) {
                imageId = cloudflareMatch[1];
              }
            }
          }
          if (imageId) {
            validImageIds.push(imageId);
          } else {
            skippedPhotoCount++;
          }
        }
        // Delete photos from Cloudflare
        if (validImageIds.length > 0) {
          const deleteResults = await invokeBatchWithTracking(
            validImageIds.map(imageId => ({
              functionName: 'upload-image',
              payload: { action: 'delete', imageId },
            })),
            undefined
          );
          // Count successful deletions
          const successfulDeletions = deleteResults.filter(r => !r.error);
          deletedPhotoCount = successfulDeletions.length;
          // Log any failures silently (background operation)
          const failedDeletions = deleteResults.filter(r => r.error);
          if (failedDeletions.length > 0) {
            handleNonCriticalError(
              new Error(`Failed to delete ${failedDeletions.length} of ${validImageIds.length} photos`),
              {
                action: 'Delete Submission Photos',
                metadata: {
                  failureCount: failedDeletions.length,
                  totalAttempted: validImageIds.length,
                  failedRequestIds: failedDeletions.map(r => r.requestId)
                }
              }
            );
          }
        }
      }
    }
    // Delete the submission from the database
    const { error } = await supabase
      .from('content_submissions')
      .delete()
      .eq('id', item.id);
    if (error) {
      throw error;
    }
    // Verify deletion
    const { data: checkData, error: checkError } = await supabase
      .from('content_submissions')
      .select('id')
      .eq('id', item.id)
      .single();
    if (checkData && !checkError) {
      throw new Error('Deletion failed - item still exists in database');
    }
    // Build result message
    let message = 'The submission has been permanently deleted';
    if (deletedPhotoCount > 0 && skippedPhotoCount > 0) {
      message = `The submission and ${deletedPhotoCount} photo(s) have been deleted. ${skippedPhotoCount} photo(s) could not be deleted from storage`;
    } else if (deletedPhotoCount > 0) {
      message = `The submission and ${deletedPhotoCount} associated photo(s) have been permanently deleted`;
    } else if (skippedPhotoCount > 0) {
      message = `The submission has been deleted. ${skippedPhotoCount} photo(s) could not be deleted from storage`;
    }
    return {
      success: true,
      message,
      shouldRemoveFromQueue: true,
    };
  } catch (error: unknown) {
    handleError(error, {
      action: 'Delete Submission',
      metadata: { submissionId: item.id, deletePhotos }
    });
    return {
      success: false,
      message: 'Failed to delete submission',
      error: error instanceof Error ? error : new Error('Unknown error'),
      shouldRemoveFromQueue: false,
    };
  }
}
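
For context, a hypothetical caller-side sketch (not part of actions.ts) showing how a moderation UI might wire up these two actions; the handler names and the '@/lib/moderation/actions' import alias are assumptions:

// Hypothetical usage sketch; handler names and module path are illustrative only.
import type { SupabaseClient } from '@supabase/supabase-js';
import type { ModerationItem } from '@/types/moderation';
import { approveSubmissionItems, deleteSubmission } from '@/lib/moderation/actions';

export async function handleApprove(
  supabase: SupabaseClient,
  submissionId: string,
  itemIds: string[]
): Promise<boolean> {
  const result = await approveSubmissionItems(supabase, submissionId, itemIds);
  console.log(result.message);
  // The caller decides whether to drop the item from local queue state
  return result.success && result.shouldRemoveFromQueue;
}

export async function handleDelete(
  supabase: SupabaseClient,
  item: ModerationItem
): Promise<boolean> {
  const result = await deleteSubmission(supabase, { item, deletePhotos: true });
  console.log(result.message);
  return result.success && result.shouldRemoveFromQueue;
}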