Files
thrilltrack-explorer/supabase/functions/cleanup-old-versions/index.ts
gpt-engineer-app[bot] 2d65f13b85 Connect to Lovable Cloud
Add centralized errorFormatter to convert various error types into readable messages, and apply it across edge functions. Replace String(error) usage with formatEdgeError, update relevant imports, fix a throw to use toError, and enhance logger to log formatted errors. Includes new errorFormatter.ts and widespread updates to 18+ edge functions plus logger integration.
2025-11-10 18:09:15 +00:00

201 lines
6.5 KiB
TypeScript

import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { edgeLogger } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
// Permissive CORS headers so this function can be invoked from any browser
// origin; the allowed request headers match what supabase-js sends by default.
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
/**
 * Statistics accumulated over a single version-cleanup run.
 * Returned in the HTTP response and (minus `errors`) written to the
 * admin audit log.
 */
interface CleanupStats {
/** Edit-history rows deleted for items that kept their newest 10 versions. */
item_edit_history_deleted: number;
/** Edit-history rows deleted because their parent submission_item no longer exists. */
orphaned_records_deleted: number;
/** Wall-clock duration of the run in milliseconds. */
processing_time_ms: number;
/** Non-fatal per-step failure messages; the job continues past these. */
errors: string[];
}
/**
 * Scheduled cleanup of item edit-history versions.
 *
 * Steps:
 *  1. Delete edit-history rows orphaned by deleted submission items.
 *  2. Per item, keep the newest 10 versions; delete older ones that also
 *     fall outside the configurable retention window.
 *  3. Record the run timestamp in admin_settings.
 *  4. Write run statistics to the admin audit log.
 *
 * Per-step failures are collected into `stats.errors` so a single bad row
 * cannot abort the whole sweep; only settings/item-list fetch failures are
 * fatal (HTTP 500).
 */
Deno.serve(async (req) => {
  // Handle CORS preflight
  if (req.method === 'OPTIONS') {
    return new Response(null, { headers: corsHeaders });
  }

  // Service-role client: the job must see and delete rows across all users
  // (bypassing RLS); no session is needed inside a short-lived edge function.
  const supabaseClient = createClient(
    Deno.env.get('SUPABASE_URL') ?? '',
    Deno.env.get('SUPABASE_SERVICE_ROLE_KEY') ?? '',
    {
      auth: {
        autoRefreshToken: false,
        persistSession: false
      }
    }
  );

  try {
    const startTime = Date.now();
    const stats: CleanupStats = {
      item_edit_history_deleted: 0,
      orphaned_records_deleted: 0,
      processing_time_ms: 0,
      errors: [],
    };

    edgeLogger.info('Starting version cleanup job');

    // Retention window is admin-configurable; a missing/non-numeric value
    // falls back to 90 days below.
    const { data: retentionSetting, error: settingsError } = await supabaseClient
      .from('admin_settings')
      .select('setting_value')
      .eq('setting_key', 'version_retention_days')
      .single();

    if (settingsError) {
      throw new Error(`Failed to fetch settings: ${settingsError.message}`);
    }

    const retentionDays = Number(retentionSetting?.setting_value) || 90;
    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - retentionDays);

    edgeLogger.info('Cleanup configuration', { retentionDays, cutoff: cutoffDate.toISOString() });

    // Step 1: Delete orphaned edit history (where submission_item no longer exists)
    const { data: orphanedRecords, error: orphanError } = await supabaseClient
      .rpc('get_orphaned_edit_history');

    if (orphanError) {
      stats.errors.push(`Failed to find orphaned records: ${orphanError.message}`);
      edgeLogger.error('Orphan detection error', { error: orphanError.message });
    } else if (orphanedRecords && orphanedRecords.length > 0) {
      const orphanedIds = orphanedRecords.map((r: { id: string }) => r.id);
      edgeLogger.info('Found orphaned edit history records', { count: orphanedIds.length });

      const { error: deleteOrphanError } = await supabaseClient
        .from('item_edit_history')
        .delete()
        .in('id', orphanedIds);

      if (deleteOrphanError) {
        stats.errors.push(`Failed to delete orphaned records: ${deleteOrphanError.message}`);
        edgeLogger.error('Orphan deletion error', { error: deleteOrphanError.message });
      } else {
        stats.orphaned_records_deleted = orphanedIds.length;
        edgeLogger.info('Deleted orphaned records', { count: orphanedIds.length });
      }
    }

    // Step 2: For each item, keep most recent 10 versions, delete older ones beyond retention
    const { data: items, error: itemsError } = await supabaseClient
      .from('submission_items')
      .select('id');

    if (itemsError) {
      throw new Error(`Failed to fetch submission items: ${itemsError.message}`);
    }

    if (items && items.length > 0) {
      edgeLogger.info('Processing submission items for version cleanup', { itemCount: items.length });

      for (const item of items) {
        try {
          // All versions for this item, newest first, so slice(10) below is
          // exactly "everything beyond the keep window".
          const { data: versions, error: versionsError } = await supabaseClient
            .from('item_edit_history')
            .select('id, edited_at')
            .eq('item_id', item.id)
            .order('edited_at', { ascending: false });

          if (versionsError) {
            stats.errors.push(`Item ${item.id}: ${versionsError.message}`);
            continue;
          }

          if (versions && versions.length > 10) {
            // Keep most recent 10, delete the rest if they're old enough
            const versionsToDelete = versions
              .slice(10)
              .filter(v => new Date(v.edited_at) < cutoffDate)
              .map(v => v.id);

            if (versionsToDelete.length > 0) {
              const { error: deleteError } = await supabaseClient
                .from('item_edit_history')
                .delete()
                .in('id', versionsToDelete);

              if (deleteError) {
                stats.errors.push(`Item ${item.id} deletion failed: ${deleteError.message}`);
              } else {
                stats.item_edit_history_deleted += versionsToDelete.length;
              }
            }
          }
        } catch (itemError) {
          // FIX: interpolating the raw caught value printed "[object Object]"
          // for non-Error throws; use the shared formatter like the rest of
          // this function (see the outer catch below).
          stats.errors.push(`Item ${item.id} processing error: ${formatEdgeError(itemError)}`);
        }
      }
    }

    // Step 3: Update last cleanup timestamp (stored as a JSON string value)
    const cleanupTimestamp = new Date().toISOString();
    const { error: updateError } = await supabaseClient
      .from('admin_settings')
      .update({ setting_value: `"${cleanupTimestamp}"` })
      .eq('setting_key', 'last_version_cleanup');

    if (updateError) {
      stats.errors.push(`Failed to update last_version_cleanup: ${updateError.message}`);
    }

    // FIX: finalize processing time BEFORE the audit insert — it was set after,
    // so every audit row recorded processing_time_ms: 0.
    stats.processing_time_ms = Date.now() - startTime;

    // Step 4: Log cleanup statistics to audit log
    // FIX: the insert result was previously discarded; surface failures instead
    // of silently swallowing them.
    const { error: auditError } = await supabaseClient
      .from('admin_audit_log')
      .insert({
        admin_user_id: null,
        target_user_id: null,
        action: 'version_cleanup',
        details: {
          stats: {
            ...stats,
            errors: undefined, // Don't log errors array in details
          },
          retention_days: retentionDays,
          executed_at: cleanupTimestamp,
          error_count: stats.errors.length,
        },
      });

    if (auditError) {
      stats.errors.push(`Failed to write audit log: ${auditError.message}`);
      edgeLogger.error('Audit log insert error', { error: auditError.message });
    }

    edgeLogger.info('Cleanup completed successfully', {
      ...stats,
      errors: stats.errors.length > 0 ? stats.errors : undefined,
    });

    return new Response(
      JSON.stringify({
        success: true,
        stats,
        message: `Cleaned up ${stats.item_edit_history_deleted + stats.orphaned_records_deleted} version records`,
      }),
      {
        headers: { ...corsHeaders, 'Content-Type': 'application/json' },
        status: 200,
      }
    );
  } catch (error) {
    edgeLogger.error('Cleanup job failed', { error: formatEdgeError(error) });
    return new Response(
      JSON.stringify({
        success: false,
        // FIX: reuse the shared formatter instead of a hand-rolled
        // instanceof check, consistent with the logger call above.
        error: formatEdgeError(error),
      }),
      {
        headers: { ...corsHeaders, 'Content-Type': 'application/json' },
        status: 500,
      }
    );
  }
});