Fix notification logs migration

gpt-engineer-app[bot]
2025-11-03 00:28:33 +00:00
parent 036df6f5b7
commit d44f806afa
4 changed files with 370 additions and 1 deletion

View File

@@ -1368,3 +1368,127 @@ export async function fetchEditHistory(itemId: string) {
    return [];
  }
}

/**
 * Conflict detection interfaces and functions
 */
export interface ConflictCheckResult {
  hasConflict: boolean;
  serverVersion?: {
    last_modified_at: string;
    last_modified_by: string;
    modified_by_profile?: {
      username: string;
      display_name: string;
      avatar_url: string;
    };
  };
  clientVersion?: {
    last_modified_at: string;
  };
}

/**
 * Check if a submission has been modified since the client last loaded it.
 * Used for optimistic locking to prevent concurrent edit conflicts.
 */
export async function checkSubmissionConflict(
  submissionId: string,
  clientLastModified: string
): Promise<ConflictCheckResult> {
  try {
    const { data, error } = await supabase
      .from('content_submissions')
      .select(`
        last_modified_at,
        last_modified_by,
        profiles:last_modified_by (
          username,
          display_name,
          avatar_url
        )
      `)
      .eq('id', submissionId)
      .single();

    if (error) throw error;

    // No server-side modification recorded yet, so there is nothing to conflict with.
    if (!data.last_modified_at) {
      return {
        hasConflict: false,
        clientVersion: { last_modified_at: clientLastModified },
      };
    }

    const serverTimestamp = new Date(data.last_modified_at).getTime();
    const clientTimestamp = new Date(clientLastModified).getTime();

    return {
      hasConflict: serverTimestamp > clientTimestamp,
      serverVersion: {
        last_modified_at: data.last_modified_at,
        last_modified_by: data.last_modified_by,
        modified_by_profile: data.profiles as any,
      },
      clientVersion: {
        last_modified_at: clientLastModified,
      },
    };
  } catch (error: unknown) {
    logger.error('Error checking submission conflict', {
      submissionId,
      error: getErrorMessage(error),
    });
    throw error;
  }
}
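
For context, a minimal sketch of how an editor's save path might use checkSubmissionConflict for optimistic locking. saveSubmission and promptUserToResolveConflict are hypothetical placeholders, not part of this diff:

// Hypothetical save flow: loadedAt is the last_modified_at value the client saw on load.
async function saveWithConflictCheck(submissionId: string, loadedAt: string, payload: unknown) {
  const check = await checkSubmissionConflict(submissionId, loadedAt);
  if (check.hasConflict) {
    // Another editor saved after this client loaded; surface the server version for resolution.
    return promptUserToResolveConflict(check.serverVersion); // hypothetical UI hook
  }
  return saveSubmission(submissionId, payload); // hypothetical persistence call
}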

/**
 * Fetch recent versions of submission items for conflict resolution
 */
export async function fetchSubmissionVersions(
  submissionId: string,
  limit: number = 10
) {
  try {
    // Get all item IDs for this submission
    const { data: items, error: itemsError } = await supabase
      .from('submission_items')
      .select('id')
      .eq('submission_id', submissionId);

    if (itemsError) throw itemsError;
    if (!items || items.length === 0) return [];

    const itemIds = items.map(i => i.id);

    // Fetch edit history for all items
    const { data, error } = await supabase
      .from('item_edit_history')
      .select(`
        id,
        item_id,
        changes,
        edited_at,
        editor:profiles!item_edit_history_editor_id_fkey (
          user_id,
          username,
          display_name,
          avatar_url
        )
      `)
      .in('item_id', itemIds)
      .order('edited_at', { ascending: false })
      .limit(limit);

    if (error) throw error;
    return data || [];
  } catch (error: unknown) {
    logger.error('Error fetching submission versions', {
      submissionId,
      error: getErrorMessage(error),
    });
    return [];
  }
}
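
A conflict-resolution dialog could combine the two helpers above: run the conflict check, and only on conflict pull recent versions to show what changed. A sketch with a hypothetical name:

// Hypothetical: gather the data a conflict-resolution dialog would need.
async function loadConflictContext(submissionId: string, loadedAt: string) {
  const conflict = await checkSubmissionConflict(submissionId, loadedAt);
  if (!conflict.hasConflict) return null;
  const versions = await fetchSubmissionVersions(submissionId, 5);
  return { conflict, versions };
}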

View File

@@ -68,3 +68,6 @@ verify_jwt = false
[functions.process-expired-bans]
verify_jwt = false

[functions.cleanup-old-versions]
verify_jwt = false

View File

@@ -0,0 +1,198 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';

const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};

interface CleanupStats {
  item_edit_history_deleted: number;
  orphaned_records_deleted: number;
  processing_time_ms: number;
  errors: string[];
}

Deno.serve(async (req) => {
  // Handle CORS preflight
  if (req.method === 'OPTIONS') {
    return new Response(null, { headers: corsHeaders });
  }

  // Service-role client: the cleanup job needs to bypass RLS.
  const supabaseClient = createClient(
    Deno.env.get('SUPABASE_URL') ?? '',
    Deno.env.get('SUPABASE_SERVICE_ROLE_KEY') ?? '',
    {
      auth: {
        autoRefreshToken: false,
        persistSession: false,
      },
    }
  );

  try {
    const startTime = Date.now();
    const stats: CleanupStats = {
      item_edit_history_deleted: 0,
      orphaned_records_deleted: 0,
      processing_time_ms: 0,
      errors: [],
    };

    console.log('Starting version cleanup job...');

    // Get retention settings from admin_settings
    const { data: retentionSetting, error: settingsError } = await supabaseClient
      .from('admin_settings')
      .select('setting_value')
      .eq('setting_key', 'version_retention_days')
      .single();

    if (settingsError) {
      throw new Error(`Failed to fetch settings: ${settingsError.message}`);
    }

    const retentionDays = Number(retentionSetting?.setting_value) || 90;
    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - retentionDays);

    console.log(`Cleanup configuration: ${retentionDays} day retention, cutoff: ${cutoffDate.toISOString()}`);

    // Step 1: Delete orphaned edit history (where the submission_item no longer exists)
    const { data: orphanedRecords, error: orphanError } = await supabaseClient
      .rpc('get_orphaned_edit_history');

    if (orphanError) {
      stats.errors.push(`Failed to find orphaned records: ${orphanError.message}`);
      console.error('Orphan detection error:', orphanError);
    } else if (orphanedRecords && orphanedRecords.length > 0) {
      const orphanedIds = orphanedRecords.map((r: any) => r.id);
      console.log(`Found ${orphanedIds.length} orphaned edit history records`);

      const { error: deleteOrphanError } = await supabaseClient
        .from('item_edit_history')
        .delete()
        .in('id', orphanedIds);

      if (deleteOrphanError) {
        stats.errors.push(`Failed to delete orphaned records: ${deleteOrphanError.message}`);
        console.error('Orphan deletion error:', deleteOrphanError);
      } else {
        stats.orphaned_records_deleted = orphanedIds.length;
        console.log(`Deleted ${orphanedIds.length} orphaned records`);
      }
    }

    // Step 2: For each item, keep the 10 most recent versions and delete older ones beyond retention
    const { data: items, error: itemsError } = await supabaseClient
      .from('submission_items')
      .select('id');

    if (itemsError) {
      throw new Error(`Failed to fetch submission items: ${itemsError.message}`);
    }

    if (items && items.length > 0) {
      console.log(`Processing ${items.length} submission items for version cleanup`);

      for (const item of items) {
        try {
          // Get all versions for this item, ordered by date (newest first)
          const { data: versions, error: versionsError } = await supabaseClient
            .from('item_edit_history')
            .select('id, edited_at')
            .eq('item_id', item.id)
            .order('edited_at', { ascending: false });

          if (versionsError) {
            stats.errors.push(`Item ${item.id}: ${versionsError.message}`);
            continue;
          }

          if (versions && versions.length > 10) {
            // Keep the 10 most recent; delete the rest only if they're older than the cutoff
            const versionsToDelete = versions
              .slice(10)
              .filter(v => new Date(v.edited_at) < cutoffDate)
              .map(v => v.id);

            if (versionsToDelete.length > 0) {
              const { error: deleteError } = await supabaseClient
                .from('item_edit_history')
                .delete()
                .in('id', versionsToDelete);

              if (deleteError) {
                stats.errors.push(`Item ${item.id} deletion failed: ${deleteError.message}`);
              } else {
                stats.item_edit_history_deleted += versionsToDelete.length;
              }
            }
          }
        } catch (itemError) {
          stats.errors.push(`Item ${item.id} processing error: ${itemError}`);
        }
      }
    }

    // Step 3: Update the last-cleanup timestamp
    const cleanupTimestamp = new Date().toISOString();
    const { error: updateError } = await supabaseClient
      .from('admin_settings')
      .update({ setting_value: `"${cleanupTimestamp}"` })
      .eq('setting_key', 'last_version_cleanup');

    if (updateError) {
      stats.errors.push(`Failed to update last_version_cleanup: ${updateError.message}`);
    }

    // Compute elapsed time before logging so the audit record captures it
    stats.processing_time_ms = Date.now() - startTime;

    // Step 4: Log cleanup statistics to the audit log
    await supabaseClient
      .from('admin_audit_log')
      .insert({
        admin_user_id: null,
        target_user_id: null,
        action: 'version_cleanup',
        details: {
          stats: {
            ...stats,
            errors: undefined, // Don't log the errors array in details
          },
          retention_days: retentionDays,
          executed_at: cleanupTimestamp,
          error_count: stats.errors.length,
        },
      });

    console.log('Cleanup completed successfully:', {
      ...stats,
      errors: stats.errors.length > 0 ? stats.errors : undefined,
    });

    return new Response(
      JSON.stringify({
        success: true,
        stats,
        message: `Cleaned up ${stats.item_edit_history_deleted + stats.orphaned_records_deleted} version records`,
      }),
      {
        headers: { ...corsHeaders, 'Content-Type': 'application/json' },
        status: 200,
      }
    );
  } catch (error) {
    console.error('Cleanup job failed:', error);
    return new Response(
      JSON.stringify({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      }),
      {
        headers: { ...corsHeaders, 'Content-Type': 'application/json' },
        status: 500,
      }
    );
  }
});
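
Since the config change above sets verify_jwt = false for this function, it should be invocable for manual testing with the standard supabase-js helper; a sketch, assuming an existing supabase client instance:

// Hypothetical manual trigger of the cleanup function.
const { data, error } = await supabase.functions.invoke('cleanup-old-versions', {
  body: { scheduled: false },
});
if (error) {
  console.error('Cleanup invocation failed:', error);
} else {
  console.log('Cleanup stats:', data.stats);
}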

View File

@@ -0,0 +1,44 @@
-- Migration: Set up the version cleanup cron job

-- Add version retention settings to admin_settings (using proper JSONB values)
INSERT INTO public.admin_settings (setting_key, setting_value, category, description)
VALUES (
  'version_retention_days',
  '90'::jsonb,
  'maintenance',
  'Number of days to retain old version history'
)
ON CONFLICT (setting_key) DO NOTHING;

INSERT INTO public.admin_settings (setting_key, setting_value, category, description)
VALUES (
  'last_version_cleanup',
  'null'::jsonb,
  'maintenance',
  'Timestamp of last successful version cleanup'
)
ON CONFLICT (setting_key) DO NOTHING;

-- Enable the pg_cron extension if not already enabled
CREATE EXTENSION IF NOT EXISTS pg_cron;

-- Unschedule the existing job if it exists (to avoid duplicates)
SELECT cron.unschedule('cleanup-old-versions-weekly')
WHERE EXISTS (
  SELECT 1 FROM cron.job WHERE jobname = 'cleanup-old-versions-weekly'
);

-- Schedule the cleanup job: every Sunday at 2 AM UTC
SELECT cron.schedule(
  'cleanup-old-versions-weekly',
  '0 2 * * 0',
  $$
  SELECT net.http_post(
    url := 'https://ydvtmnrszybqnbcqbdcy.supabase.co/functions/v1/cleanup-old-versions',
    headers := jsonb_build_object(
      'Content-Type', 'application/json',
      'Authorization', 'Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlkdnRtbnJzenlicW5iY3FiZGN5Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTgzMjYzNTYsImV4cCI6MjA3MzkwMjM1Nn0.DM3oyapd_omP5ZzIlrT0H9qBsiQBxBRgw2tYuqgXKX4'
    ),
    body := jsonb_build_object('scheduled', true)
  ) AS request_id;
  $$
);
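
Note that the edge function relies on an RPC named get_orphaned_edit_history that this commit does not define. A plausible definition, assuming item_edit_history.item_id points at submission_items.id without a foreign-key constraint, plus a query to confirm the schedule took effect:

-- Hypothetical sketch; the real function is not part of this commit.
CREATE OR REPLACE FUNCTION public.get_orphaned_edit_history()
RETURNS TABLE (id uuid)
LANGUAGE sql
STABLE
AS $$
  SELECT h.id
  FROM public.item_edit_history h
  LEFT JOIN public.submission_items i ON i.id = h.item_id
  WHERE i.id IS NULL;
$$;

-- Verify the job was registered:
SELECT jobid, jobname, schedule FROM cron.job WHERE jobname = 'cleanup-old-versions-weekly';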