Add automated data retention cleanup

Implements an edge function, Django tasks, and UI hooks/panels for automated cleanup of old metrics, anomalies, alerts, and incidents, plus updates to query keys and the monitoring dashboard to reflect the data-retention workflows.
gpt-engineer-app[bot]
2025-11-11 02:21:27 +00:00
parent 07fdfe34f3
commit 915a9fe2df
9 changed files with 589 additions and 0 deletions


@@ -0,0 +1,134 @@
import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query";
import { supabase } from "@/integrations/supabase/client";
import { toast } from "sonner";

// Per-table retention statistics as returned by data_retention_stats.
interface RetentionStats {
table_name: string;
total_records: number;
last_7_days: number;
last_30_days: number;
oldest_record: string;
newest_record: string;
table_size: string;
}

// Summary payload returned by the data-retention-cleanup edge function.
interface CleanupResult {
success: boolean;
cleanup_results: {
metrics_deleted: number;
anomalies_archived: number;
anomalies_deleted: number;
alerts_deleted: number;
incidents_deleted: number;
};
timestamp: string;
}

// Fetches per-table retention statistics, refreshed every minute.
export function useRetentionStats() {
return useQuery({
queryKey: ["dataRetentionStats"],
queryFn: async () => {
const { data, error } = await supabase
.from("data_retention_stats")
.select("*");
if (error) throw error;
return data as RetentionStats[];
},
refetchInterval: 60000, // Refetch every minute
});
}

// Invokes the data-retention-cleanup edge function and summarizes the results.
export function useRunCleanup() {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async () => {
const { data, error } = await supabase.functions.invoke(
"data-retention-cleanup"
);
if (error) throw error;
return data as CleanupResult;
},
onSuccess: (data) => {
const results = data.cleanup_results;
const total =
results.metrics_deleted +
results.anomalies_archived +
results.anomalies_deleted +
results.alerts_deleted +
results.incidents_deleted;
toast.success(
`Cleanup completed: ${total} records removed`,
{
description: `Metrics: ${results.metrics_deleted}, Anomalies: ${results.anomalies_deleted}, Alerts: ${results.alerts_deleted}`,
}
);
// Invalidate relevant queries
queryClient.invalidateQueries({ queryKey: ["dataRetentionStats"] });
queryClient.invalidateQueries({ queryKey: ["anomalyDetections"] });
queryClient.invalidateQueries({ queryKey: ["systemAlerts"] });
},
onError: (error: Error) => {
toast.error("Failed to run cleanup", {
description: error.message,
});
},
});
}

// Deletes metrics older than the retention window via the cleanup_old_metrics RPC.
export function useCleanupMetrics() {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async (retentionDays: number = 30) => {
const { data, error } = await supabase.rpc("cleanup_old_metrics", {
retention_days: retentionDays,
});
if (error) throw error;
return data;
},
onSuccess: (deletedCount) => {
toast.success(`Cleaned up ${deletedCount} old metrics`);
queryClient.invalidateQueries({ queryKey: ["dataRetentionStats"] });
},
onError: (error: Error) => {
toast.error("Failed to cleanup metrics", {
description: error.message,
});
},
});
}

// Archives and then deletes old anomalies via the cleanup_old_anomalies RPC.
export function useCleanupAnomalies() {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async (retentionDays: number = 30) => {
const { data, error } = await supabase.rpc("cleanup_old_anomalies", {
retention_days: retentionDays,
});
if (error) throw error;
return data;
},
onSuccess: (result) => {
// Result is returned as an array with one element
const cleanupResult = Array.isArray(result) ? result[0] : result;
toast.success(
`Cleaned up anomalies: ${cleanupResult.archived_count} archived, ${cleanupResult.deleted_count} deleted`
);
queryClient.invalidateQueries({ queryKey: ["dataRetentionStats"] });
queryClient.invalidateQueries({ queryKey: ["anomalyDetections"] });
},
onError: (error: Error) => {
toast.error("Failed to cleanup anomalies", {
description: error.message,
});
},
});
}
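
For reference, a minimal sketch of how a panel component might consume these hooks. The RetentionPanel component, its markup, and the "@/hooks/useDataRetention" import path are hypothetical (the diff does not show the hook file's path); only the hook names and field names come from this commit.

// Hypothetical usage sketch -- not part of this commit.
// Assumes the hooks above are exported from "@/hooks/useDataRetention".
import { useRetentionStats, useRunCleanup } from "@/hooks/useDataRetention";

export function RetentionPanel() {
  const { data: stats, isLoading } = useRetentionStats();
  const cleanup = useRunCleanup();

  if (isLoading) return <p>Loading retention stats…</p>;

  return (
    <div>
      {/* One row per monitored table, straight from data_retention_stats */}
      {stats?.map((s) => (
        <div key={s.table_name}>
          {s.table_name}: {s.total_records} rows ({s.table_size})
        </div>
      ))}
      {/* Success/error toasts fire from the hook's own callbacks.
          isPending is TanStack Query v5; on v4 use isLoading instead. */}
      <button onClick={() => cleanup.mutate()} disabled={cleanup.isPending}>
        Run cleanup now
      </button>
    </div>
  );
}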