Compare commits

...

3 Commits

Author SHA1 Message Date
gpt-engineer-app[bot]
5a8caa51b6 Fix search_path on functions
The migration succeeded, but security warnings require updating functions to set search_path. Add SET search_path to the three created functions to ensure proper schema resolution and security context.
2025-11-11 01:58:56 +00:00
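The migration applying this fix is not among the files in this compare. As a rough sketch of the change described, the hardening usually amounts to pinning search_path on each function; the function names below are taken from the correlation migration at the bottom of this compare, and the exact signatures and target schema are assumed:

-- Assumed sketch only: pin search_path on the three helper/trigger functions so
-- object resolution cannot be redirected by a caller-controlled search_path.
ALTER FUNCTION public.generate_incident_number() SET search_path = public;
ALTER FUNCTION public.set_incident_number() SET search_path = public;
ALTER FUNCTION public.update_incident_alert_count() SET search_path = public;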
gpt-engineer-app[bot]
01aba7df90 Connect to Lovable Cloud
Fix security definer view issue by enabling security_invoker on grouped_alerts_view and implement grouped alerts system with new keys, hooks, components, and monitoring overview integration. Added migration to adjust view, updated query keys, created useGroupedAlerts, useAlertGroupActions, GroupedAlertsPanel, and updated MonitoringOverview to include grouped alerts.
2025-11-11 01:51:42 +00:00
gpt-engineer-app[bot]
97f586232f Enable grouped alert view with security considerations
Update: implement the grouped_alerts_view migration and address security definer concerns by noting the default SECURITY INVOKER behavior for views and ensuring that RLS policies on the underlying tables apply. This commit covers the view creation and related security clarifications for the alert grouping feature.
2025-11-11 01:49:27 +00:00
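For context: Postgres views run with the view owner's privileges unless security_invoker is enabled (available since Postgres 15), which is why the follow-up commit 01aba7df90 recreates the view WITH (security_invoker=on). On an already-created view the same effect can be achieved in one statement — a sketch only, not part of this change set:

-- Assumed sketch: make the existing view respect the querying user's RLS policies.
ALTER VIEW grouped_alerts_view SET (security_invoker = on);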
9 changed files with 1038 additions and 8 deletions

View File

@@ -0,0 +1,237 @@
import { useState } from 'react';
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
import { Button } from '@/components/ui/button';
import { AlertCircle, AlertTriangle, Info, ChevronDown, ChevronUp, Clock, Zap, RefreshCw } from 'lucide-react';
import { formatDistanceToNow } from 'date-fns';
import type { GroupedAlert } from '@/hooks/admin/useGroupedAlerts';
import { useResolveAlertGroup, useSnoozeAlertGroup } from '@/hooks/admin/useAlertGroupActions';
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu';
interface GroupedAlertsPanelProps {
alerts?: GroupedAlert[];
isLoading: boolean;
}
const SEVERITY_CONFIG = {
critical: { color: 'text-destructive', icon: AlertCircle, label: 'Critical', badge: 'bg-destructive/10 text-destructive' },
high: { color: 'text-orange-500', icon: AlertTriangle, label: 'High', badge: 'bg-orange-500/10 text-orange-500' },
medium: { color: 'text-yellow-500', icon: AlertTriangle, label: 'Medium', badge: 'bg-yellow-500/10 text-yellow-500' },
low: { color: 'text-blue-500', icon: Info, label: 'Low', badge: 'bg-blue-500/10 text-blue-500' },
};
export function GroupedAlertsPanel({ alerts, isLoading }: GroupedAlertsPanelProps) {
const [expandedGroups, setExpandedGroups] = useState<Set<string>>(new Set());
const resolveGroup = useResolveAlertGroup();
const snoozeGroup = useSnoozeAlertGroup();
// Filter out snoozed alerts
const snoozedAlerts = JSON.parse(localStorage.getItem('snoozed_alerts') || '{}');
const visibleAlerts = alerts?.filter(alert => {
const snoozeUntil = snoozedAlerts[alert.group_key];
return !snoozeUntil || Date.now() > snoozeUntil;
});
const handleResolveGroup = (alert: GroupedAlert) => {
resolveGroup.mutate({
alertIds: alert.alert_ids,
source: alert.source,
});
};
const handleSnooze = (alert: GroupedAlert, durationMs: number) => {
snoozeGroup.mutate({
groupKey: alert.group_key,
duration: durationMs,
});
};
const toggleExpanded = (groupKey: string) => {
setExpandedGroups(prev => {
const next = new Set(prev);
if (next.has(groupKey)) {
next.delete(groupKey);
} else {
next.add(groupKey);
}
return next;
});
};
if (isLoading) {
return (
<Card>
<CardHeader>
<CardTitle>Critical Alerts</CardTitle>
<CardDescription>Loading alerts...</CardDescription>
</CardHeader>
<CardContent>
<div className="flex items-center justify-center py-8">
<div className="animate-spin rounded-full h-8 w-8 border-b-2 border-primary"></div>
</div>
</CardContent>
</Card>
);
}
if (!visibleAlerts || visibleAlerts.length === 0) {
return (
<Card>
<CardHeader>
<CardTitle>Critical Alerts</CardTitle>
<CardDescription>All systems operational</CardDescription>
</CardHeader>
<CardContent>
<div className="flex flex-col items-center justify-center py-8 text-muted-foreground">
<AlertCircle className="h-12 w-12 mb-2 opacity-50" />
<p>No active alerts</p>
</div>
</CardContent>
</Card>
);
}
const totalAlerts = visibleAlerts.reduce((sum, alert) => sum + alert.unresolved_count, 0);
const recurringCount = visibleAlerts.filter(a => a.is_recurring).length;
return (
<Card>
<CardHeader>
<CardTitle className="flex items-center justify-between">
<span>Critical Alerts</span>
<span className="text-sm font-normal text-muted-foreground">
{visibleAlerts.length} {visibleAlerts.length === 1 ? 'group' : 'groups'} · {totalAlerts} total alerts
{recurringCount > 0 && ` · ${recurringCount} recurring`}
</span>
</CardTitle>
<CardDescription>Grouped by type to reduce alert fatigue</CardDescription>
</CardHeader>
<CardContent className="space-y-3">
{visibleAlerts.map(alert => {
const config = SEVERITY_CONFIG[alert.severity];
const Icon = config.icon;
const isExpanded = expandedGroups.has(alert.group_key);
return (
<div
key={alert.group_key}
className="border rounded-lg p-4 space-y-2 bg-card hover:bg-accent/5 transition-colors"
>
<div className="flex items-start justify-between gap-4">
<div className="flex items-start gap-3 flex-1">
<Icon className={`h-5 w-5 mt-0.5 ${config.color}`} />
<div className="flex-1 min-w-0">
<div className="flex items-center gap-2 flex-wrap mb-1">
<span className={`text-xs font-medium px-2 py-0.5 rounded ${config.badge}`}>
{config.label}
</span>
<span className="text-xs px-2 py-0.5 rounded bg-muted text-muted-foreground">
{alert.source === 'system' ? 'System' : 'Rate Limit'}
</span>
{alert.is_active && (
<span className="flex items-center gap-1 text-xs px-2 py-0.5 rounded bg-green-500/10 text-green-600">
<Zap className="h-3 w-3" />
Active
</span>
)}
{alert.is_recurring && (
<span className="flex items-center gap-1 text-xs px-2 py-0.5 rounded bg-amber-500/10 text-amber-600">
<RefreshCw className="h-3 w-3" />
Recurring
</span>
)}
<span className="text-xs font-semibold px-2 py-0.5 rounded bg-primary/10 text-primary">
{alert.unresolved_count} {alert.unresolved_count === 1 ? 'alert' : 'alerts'}
</span>
</div>
<p className="text-sm font-medium">
{alert.alert_type || alert.metric_type || 'Alert'}
{alert.function_name && <span className="text-muted-foreground"> {alert.function_name}</span>}
</p>
<p className="text-sm text-muted-foreground line-clamp-2">
{alert.messages[0]}
</p>
<div className="flex items-center gap-4 mt-2 text-xs text-muted-foreground">
<span className="flex items-center gap-1">
<Clock className="h-3 w-3" />
First: {formatDistanceToNow(new Date(alert.first_seen), { addSuffix: true })}
</span>
<span className="flex items-center gap-1">
<Clock className="h-3 w-3" />
Last: {formatDistanceToNow(new Date(alert.last_seen), { addSuffix: true })}
</span>
</div>
</div>
</div>
<div className="flex items-center gap-2">
{alert.alert_count > 1 && (
<Button
variant="ghost"
size="sm"
onClick={() => toggleExpanded(alert.group_key)}
>
{isExpanded ? (
<>
<ChevronUp className="h-4 w-4 mr-1" />
Hide
</>
) : (
<>
<ChevronDown className="h-4 w-4 mr-1" />
Show all {alert.alert_count}
</>
)}
</Button>
)}
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button variant="outline" size="sm">
Snooze
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<DropdownMenuItem onClick={() => handleSnooze(alert, 3600000)}>
1 hour
</DropdownMenuItem>
<DropdownMenuItem onClick={() => handleSnooze(alert, 14400000)}>
4 hours
</DropdownMenuItem>
<DropdownMenuItem onClick={() => handleSnooze(alert, 86400000)}>
24 hours
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<Button
variant="default"
size="sm"
onClick={() => handleResolveGroup(alert)}
disabled={resolveGroup.isPending}
>
Resolve All
</Button>
</div>
</div>
{isExpanded && alert.messages.length > 1 && (
<div className="mt-3 pt-3 border-t space-y-2">
<p className="text-xs font-medium text-muted-foreground">All messages in this group:</p>
<div className="space-y-1 max-h-64 overflow-y-auto">
{alert.messages.map((message, idx) => (
<div key={idx} className="text-xs p-2 rounded bg-muted/50">
{message}
</div>
))}
</div>
</div>
)}
</div>
);
})}
</CardContent>
</Card>
);
}

View File

@@ -0,0 +1,110 @@
import { useMutation, useQueryClient } from '@tanstack/react-query';
import { supabase } from '@/lib/supabaseClient';
import { queryKeys } from '@/lib/queryKeys';
import { toast } from 'sonner';
import type { GroupedAlert } from './useGroupedAlerts';
export function useResolveAlertGroup() {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
alertIds,
source
}: {
alertIds: string[];
source: 'system' | 'rate_limit';
}) => {
const table = source === 'system' ? 'system_alerts' : 'rate_limit_alerts';
const { error } = await supabase
.from(table)
.update({ resolved_at: new Date().toISOString() })
.in('id', alertIds);
if (error) throw error;
return { count: alertIds.length };
},
onMutate: async ({ alertIds }) => {
// Cancel any outgoing refetches
await queryClient.cancelQueries({
queryKey: queryKeys.monitoring.groupedAlerts()
});
const previousData = queryClient.getQueryData(
queryKeys.monitoring.groupedAlerts()
);
// Optimistically update to the new value
queryClient.setQueryData(
queryKeys.monitoring.groupedAlerts(),
(old: GroupedAlert[] | undefined) => {
if (!old) return old;
return old.map(alert => {
const hasMatchingIds = alert.alert_ids.some(id =>
alertIds.includes(id)
);
if (hasMatchingIds) {
return {
...alert,
unresolved_count: 0,
has_resolved: true,
};
}
return alert;
});
}
);
return { previousData };
},
onSuccess: (data) => {
toast.success(`Resolved ${data.count} alert${data.count > 1 ? 's' : ''}`);
},
onError: (error, variables, context) => {
// Rollback on error
if (context?.previousData) {
queryClient.setQueryData(
queryKeys.monitoring.groupedAlerts(),
context.previousData
);
}
toast.error('Failed to resolve alerts');
console.error('Error resolving alert group:', error);
},
onSettled: () => {
queryClient.invalidateQueries({
queryKey: queryKeys.monitoring.groupedAlerts()
});
queryClient.invalidateQueries({
queryKey: queryKeys.monitoring.combinedAlerts()
});
},
});
}
export function useSnoozeAlertGroup() {
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
groupKey,
duration
}: {
groupKey: string;
duration: number;
}) => {
const snoozedAlerts = JSON.parse(
localStorage.getItem('snoozed_alerts') || '{}'
);
snoozedAlerts[groupKey] = Date.now() + duration;
localStorage.setItem('snoozed_alerts', JSON.stringify(snoozedAlerts));
return { groupKey, until: snoozedAlerts[groupKey] };
},
onSuccess: () => {
queryClient.invalidateQueries({
queryKey: queryKeys.monitoring.groupedAlerts()
});
toast.success('Alert group snoozed');
},
});
}

View File

@@ -0,0 +1,90 @@
import { useQuery } from '@tanstack/react-query';
import { supabase } from '@/lib/supabaseClient';
import { queryKeys } from '@/lib/queryKeys';
export interface GroupedAlert {
group_key: string;
alert_type?: string;
severity: 'critical' | 'high' | 'medium' | 'low';
source: 'system' | 'rate_limit';
function_name?: string;
metric_type?: string;
alert_count: number;
unresolved_count: number;
first_seen: string;
last_seen: string;
alert_ids: string[];
messages: string[];
has_resolved: boolean;
is_recurring: boolean;
is_active: boolean;
}
interface GroupedAlertsOptions {
includeResolved?: boolean;
minCount?: number;
severity?: 'critical' | 'high' | 'medium' | 'low';
}
export function useGroupedAlerts(options?: GroupedAlertsOptions) {
return useQuery({
queryKey: queryKeys.monitoring.groupedAlerts(options),
queryFn: async () => {
let query = supabase
.from('grouped_alerts_view')
.select('*')
.order('last_seen', { ascending: false });
if (!options?.includeResolved) {
query = query.gt('unresolved_count', 0);
}
if (options?.minCount) {
query = query.gte('alert_count', options.minCount);
}
if (options?.severity) {
query = query.eq('severity', options.severity);
}
const { data, error } = await query;
if (error) throw error;
return (data || []).map(alert => ({
...alert,
is_recurring: (alert.alert_count ?? 0) > 3,
is_active: new Date(alert.last_seen ?? new Date()).getTime() > Date.now() - 3600000,
})) as GroupedAlert[];
},
staleTime: 15000,
refetchInterval: 30000,
});
}
export function useAlertGroupDetails(groupKey: string, source: 'system' | 'rate_limit', alertIds: string[]) {
return useQuery({
queryKey: queryKeys.monitoring.alertGroupDetails(groupKey),
queryFn: async () => {
if (source === 'system') {
const { data, error } = await supabase
.from('system_alerts')
.select('*')
.in('id', alertIds)
.order('created_at', { ascending: false });
if (error) throw error;
return data || [];
} else {
const { data, error } = await supabase
.from('rate_limit_alerts')
.select('*')
.in('id', alertIds)
.order('created_at', { ascending: false });
if (error) throw error;
return data || [];
}
},
enabled: alertIds.length > 0,
});
}

View File

@@ -151,6 +151,57 @@ export type Database = {
}
Relationships: []
}
alert_correlation_rules: {
Row: {
alert_patterns: Json
auto_create_incident: boolean
created_at: string
created_by: string | null
description: string | null
enabled: boolean
id: string
incident_description_template: string | null
incident_severity: string
incident_title_template: string
min_alerts_required: number
rule_name: string
time_window_minutes: number
updated_at: string
}
Insert: {
alert_patterns: Json
auto_create_incident?: boolean
created_at?: string
created_by?: string | null
description?: string | null
enabled?: boolean
id?: string
incident_description_template?: string | null
incident_severity: string
incident_title_template: string
min_alerts_required?: number
rule_name: string
time_window_minutes?: number
updated_at?: string
}
Update: {
alert_patterns?: Json
auto_create_incident?: boolean
created_at?: string
created_by?: string | null
description?: string | null
enabled?: boolean
id?: string
incident_description_template?: string | null
incident_severity?: string
incident_title_template?: string
min_alerts_required?: number
rule_name?: string
time_window_minutes?: number
updated_at?: string
}
Relationships: []
}
approval_transaction_metrics: {
Row: {
created_at: string | null
@@ -1551,6 +1602,110 @@ export type Database = {
},
]
}
incident_alerts: {
Row: {
added_at: string
alert_id: string
alert_source: string
id: string
incident_id: string
}
Insert: {
added_at?: string
alert_id: string
alert_source: string
id?: string
incident_id: string
}
Update: {
added_at?: string
alert_id?: string
alert_source?: string
id?: string
incident_id?: string
}
Relationships: [
{
foreignKeyName: "incident_alerts_incident_id_fkey"
columns: ["incident_id"]
isOneToOne: false
referencedRelation: "incidents"
referencedColumns: ["id"]
},
]
}
incidents: {
Row: {
acknowledged_at: string | null
acknowledged_by: string | null
alert_count: number
correlation_rule_id: string | null
created_at: string
description: string | null
detected_at: string
id: string
incident_number: string
resolution_notes: string | null
resolved_at: string | null
resolved_by: string | null
severity: string
status: string
title: string
updated_at: string
}
Insert: {
acknowledged_at?: string | null
acknowledged_by?: string | null
alert_count?: number
correlation_rule_id?: string | null
created_at?: string
description?: string | null
detected_at?: string
id?: string
incident_number: string
resolution_notes?: string | null
resolved_at?: string | null
resolved_by?: string | null
severity: string
status?: string
title: string
updated_at?: string
}
Update: {
acknowledged_at?: string | null
acknowledged_by?: string | null
alert_count?: number
correlation_rule_id?: string | null
created_at?: string
description?: string | null
detected_at?: string
id?: string
incident_number?: string
resolution_notes?: string | null
resolved_at?: string | null
resolved_by?: string | null
severity?: string
status?: string
title?: string
updated_at?: string
}
Relationships: [
{
foreignKeyName: "incidents_correlation_rule_id_fkey"
columns: ["correlation_rule_id"]
isOneToOne: false
referencedRelation: "alert_correlation_rules"
referencedColumns: ["id"]
},
{
foreignKeyName: "incidents_correlation_rule_id_fkey"
columns: ["correlation_rule_id"]
isOneToOne: false
referencedRelation: "alert_correlations_view"
referencedColumns: ["rule_id"]
},
]
}
item_change_fields: {
Row: {
created_at: string | null
@@ -5838,6 +5993,25 @@ export type Database = {
}
}
Views: {
alert_correlations_view: {
Row: {
alert_ids: string[] | null
alert_messages: string[] | null
alert_sources: string[] | null
can_create_incident: boolean | null
first_alert_at: string | null
incident_severity: string | null
incident_title_template: string | null
last_alert_at: string | null
matching_alerts_count: number | null
min_alerts_required: number | null
rule_description: string | null
rule_id: string | null
rule_name: string | null
time_window_minutes: number | null
}
Relationships: []
}
error_summary: {
Row: {
affected_users: number | null
@@ -5935,6 +6109,24 @@ export type Database = {
}
Relationships: []
}
grouped_alerts_view: {
Row: {
alert_count: number | null
alert_ids: string[] | null
alert_type: string | null
first_seen: string | null
function_name: string | null
group_key: string | null
has_resolved: boolean | null
last_seen: string | null
messages: string[] | null
metric_type: string | null
severity: string | null
source: string | null
unresolved_count: number | null
}
Relationships: []
}
idempotency_stats: {
Row: {
avg_duration_ms: number | null
@@ -6228,6 +6420,7 @@ export type Database = {
}
extract_cf_image_id: { Args: { url: string }; Returns: string }
generate_deletion_confirmation_code: { Args: never; Returns: string }
generate_incident_number: { Args: never; Returns: string }
generate_notification_idempotency_key: {
Args: {
p_entity_id: string

View File

@@ -89,5 +89,8 @@ export const queryKeys = {
combinedAlerts: () => ['monitoring', 'combined-alerts'] as const,
databaseHealth: () => ['monitoring', 'database-health'] as const,
moderationHealth: () => ['monitoring', 'moderation-health'] as const,
groupedAlerts: (options?: { includeResolved?: boolean; minCount?: number; severity?: string }) =>
['monitoring', 'grouped-alerts', options] as const,
alertGroupDetails: (groupKey: string) => ['monitoring', 'alert-group-details', groupKey] as const,
},
} as const;

View File

@@ -3,16 +3,17 @@ import { useQueryClient } from '@tanstack/react-query';
import { AdminLayout } from '@/components/layout/AdminLayout';
import { RefreshButton } from '@/components/ui/refresh-button';
import { SystemHealthStatus } from '@/components/admin/SystemHealthStatus';
import { CriticalAlertsPanel } from '@/components/admin/CriticalAlertsPanel';
import { GroupedAlertsPanel } from '@/components/admin/GroupedAlertsPanel';
import { MonitoringQuickStats } from '@/components/admin/MonitoringQuickStats';
import { RecentActivityTimeline } from '@/components/admin/RecentActivityTimeline';
import { MonitoringNavCards } from '@/components/admin/MonitoringNavCards';
import { useSystemHealth } from '@/hooks/useSystemHealth';
import { useCombinedAlerts } from '@/hooks/admin/useCombinedAlerts';
import { useGroupedAlerts } from '@/hooks/admin/useGroupedAlerts';
import { useRecentActivity } from '@/hooks/admin/useRecentActivity';
import { useDatabaseHealth } from '@/hooks/admin/useDatabaseHealth';
import { useModerationHealth } from '@/hooks/admin/useModerationHealth';
import { useRateLimitStats } from '@/hooks/useRateLimitMetrics';
import { queryKeys } from '@/lib/queryKeys';
import { Switch } from '@/components/ui/switch';
import { Label } from '@/components/ui/label';
@@ -22,7 +23,7 @@ export default function MonitoringOverview() {
// Fetch all monitoring data
const systemHealth = useSystemHealth();
const combinedAlerts = useCombinedAlerts();
const groupedAlerts = useGroupedAlerts({ includeResolved: false });
const recentActivity = useRecentActivity(3600000); // 1 hour
const dbHealth = useDatabaseHealth();
const moderationHealth = useModerationHealth();
@@ -30,7 +31,7 @@ export default function MonitoringOverview() {
const isLoading =
systemHealth.isLoading ||
combinedAlerts.isLoading ||
groupedAlerts.isLoading ||
recentActivity.isLoading ||
dbHealth.isLoading ||
moderationHealth.isLoading ||
@@ -53,10 +54,18 @@ export default function MonitoringOverview() {
queryKey: ['rate-limit'],
refetchType: 'active'
});
await queryClient.invalidateQueries({
queryKey: queryKeys.monitoring.groupedAlerts(),
refetchType: 'active'
});
};
// Calculate error count for nav card (from recent activity)
const errorCount = recentActivity.data?.filter(e => e.type === 'error').length || 0;
// Calculate stats from grouped alerts
const totalGroupedAlerts = groupedAlerts.data?.reduce((sum, g) => sum + g.unresolved_count, 0) || 0;
const recurringIssues = groupedAlerts.data?.filter(g => g.is_recurring).length || 0;
return (
<AdminLayout>
@@ -91,10 +100,10 @@ export default function MonitoringOverview() {
isLoading={systemHealth.isLoading || dbHealth.isLoading}
/>
{/* Critical Alerts */}
<CriticalAlertsPanel
alerts={combinedAlerts.data}
isLoading={combinedAlerts.isLoading}
{/* Critical Alerts - Now Grouped */}
<GroupedAlertsPanel
alerts={groupedAlerts.data}
isLoading={groupedAlerts.isLoading}
/>
{/* Quick Stats Grid */}

View File

@@ -0,0 +1,55 @@
-- Create view for grouped alerts to reduce alert fatigue
CREATE OR REPLACE VIEW grouped_alerts_view AS
WITH system_alerts_grouped AS (
SELECT
alert_type AS group_key,
alert_type,
severity,
'system'::text AS source,
NULL::text AS function_name,
NULL::text AS metric_type,
COUNT(*) AS alert_count,
MIN(created_at) AS first_seen,
MAX(created_at) AS last_seen,
ARRAY_AGG(id::text ORDER BY created_at DESC) AS alert_ids,
ARRAY_AGG(message ORDER BY created_at DESC) AS messages,
BOOL_OR(resolved_at IS NOT NULL) AS has_resolved,
COUNT(*) FILTER (WHERE resolved_at IS NULL) AS unresolved_count
FROM system_alerts
WHERE created_at > NOW() - INTERVAL '7 days'
GROUP BY alert_type, severity
),
rate_limit_alerts_grouped AS (
SELECT
CONCAT(metric_type, ':', COALESCE(function_name, 'global')) AS group_key,
NULL::text AS alert_type,
'high'::text AS severity,
'rate_limit'::text AS source,
function_name,
metric_type,
COUNT(*) AS alert_count,
MIN(created_at) AS first_seen,
MAX(created_at) AS last_seen,
ARRAY_AGG(id::text ORDER BY created_at DESC) AS alert_ids,
ARRAY_AGG(alert_message ORDER BY created_at DESC) AS messages,
BOOL_OR(resolved_at IS NOT NULL) AS has_resolved,
COUNT(*) FILTER (WHERE resolved_at IS NULL) AS unresolved_count
FROM rate_limit_alerts
WHERE created_at > NOW() - INTERVAL '7 days'
GROUP BY metric_type, function_name
)
SELECT * FROM system_alerts_grouped
UNION ALL
SELECT * FROM rate_limit_alerts_grouped;
-- Grant access to authenticated users
GRANT SELECT ON grouped_alerts_view TO authenticated;
-- Create indexes for better performance
CREATE INDEX IF NOT EXISTS idx_system_alerts_grouping
ON system_alerts(alert_type, severity, created_at DESC)
WHERE resolved_at IS NULL;
CREATE INDEX IF NOT EXISTS idx_rate_limit_alerts_grouping
ON rate_limit_alerts(metric_type, function_name, created_at DESC)
WHERE resolved_at IS NULL;

View File

@@ -0,0 +1,51 @@
-- Fix security definer view issue by enabling security_invoker
-- This ensures the view respects RLS policies and runs with the querying user's permissions
DROP VIEW IF EXISTS grouped_alerts_view;
CREATE OR REPLACE VIEW grouped_alerts_view
WITH (security_invoker=on)
AS
WITH system_alerts_grouped AS (
SELECT
alert_type AS group_key,
alert_type,
severity,
'system'::text AS source,
NULL::text AS function_name,
NULL::text AS metric_type,
COUNT(*) AS alert_count,
MIN(created_at) AS first_seen,
MAX(created_at) AS last_seen,
ARRAY_AGG(id::text ORDER BY created_at DESC) AS alert_ids,
ARRAY_AGG(message ORDER BY created_at DESC) AS messages,
BOOL_OR(resolved_at IS NOT NULL) AS has_resolved,
COUNT(*) FILTER (WHERE resolved_at IS NULL) AS unresolved_count
FROM system_alerts
WHERE created_at > NOW() - INTERVAL '7 days'
GROUP BY alert_type, severity
),
rate_limit_alerts_grouped AS (
SELECT
CONCAT(metric_type, ':', COALESCE(function_name, 'global')) AS group_key,
NULL::text AS alert_type,
'high'::text AS severity,
'rate_limit'::text AS source,
function_name,
metric_type,
COUNT(*) AS alert_count,
MIN(created_at) AS first_seen,
MAX(created_at) AS last_seen,
ARRAY_AGG(id::text ORDER BY created_at DESC) AS alert_ids,
ARRAY_AGG(alert_message ORDER BY created_at DESC) AS messages,
BOOL_OR(resolved_at IS NOT NULL) AS has_resolved,
COUNT(*) FILTER (WHERE resolved_at IS NULL) AS unresolved_count
FROM rate_limit_alerts
WHERE created_at > NOW() - INTERVAL '7 days'
GROUP BY metric_type, function_name
)
SELECT * FROM system_alerts_grouped
UNION ALL
SELECT * FROM rate_limit_alerts_grouped;
-- Grant access to authenticated users
GRANT SELECT ON grouped_alerts_view TO authenticated;

View File

@@ -0,0 +1,282 @@
-- Alert Correlation System: Detect related issues and create incidents
-- Table: Alert correlation rules (defines which alert patterns indicate incidents)
CREATE TABLE alert_correlation_rules (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
rule_name TEXT NOT NULL,
description TEXT,
alert_patterns JSONB NOT NULL,
time_window_minutes INTEGER NOT NULL DEFAULT 15,
min_alerts_required INTEGER NOT NULL DEFAULT 2,
incident_severity TEXT NOT NULL CHECK (incident_severity IN ('critical', 'high', 'medium', 'low')),
incident_title_template TEXT NOT NULL,
incident_description_template TEXT,
auto_create_incident BOOLEAN NOT NULL DEFAULT true,
enabled BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
created_by UUID REFERENCES auth.users(id)
);
-- Table: Incidents (auto-generated or manual from correlated alerts)
CREATE TABLE incidents (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
incident_number TEXT UNIQUE NOT NULL,
title TEXT NOT NULL,
description TEXT,
severity TEXT NOT NULL CHECK (severity IN ('critical', 'high', 'medium', 'low')),
status TEXT NOT NULL DEFAULT 'open' CHECK (status IN ('open', 'investigating', 'resolved', 'closed')),
correlation_rule_id UUID REFERENCES alert_correlation_rules(id),
detected_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
acknowledged_at TIMESTAMPTZ,
acknowledged_by UUID REFERENCES auth.users(id),
resolved_at TIMESTAMPTZ,
resolved_by UUID REFERENCES auth.users(id),
resolution_notes TEXT,
alert_count INTEGER NOT NULL DEFAULT 0,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Table: Links alerts to incidents
CREATE TABLE incident_alerts (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
incident_id UUID NOT NULL REFERENCES incidents(id) ON DELETE CASCADE,
alert_source TEXT NOT NULL CHECK (alert_source IN ('system', 'rate_limit')),
alert_id UUID NOT NULL,
added_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE(incident_id, alert_source, alert_id)
);
-- Generate incident numbers sequentially
CREATE SEQUENCE incident_number_seq START 1000;
-- Function to generate incident numbers
CREATE OR REPLACE FUNCTION generate_incident_number()
RETURNS TEXT AS $$
BEGIN
RETURN 'INC-' || LPAD(nextval('incident_number_seq')::TEXT, 6, '0');
END;
$$ LANGUAGE plpgsql;
-- Trigger to auto-generate incident numbers
CREATE OR REPLACE FUNCTION set_incident_number()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.incident_number IS NULL THEN
NEW.incident_number := generate_incident_number();
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_set_incident_number
BEFORE INSERT ON incidents
FOR EACH ROW
EXECUTE FUNCTION set_incident_number();
-- Trigger to update incident alert count
CREATE OR REPLACE FUNCTION update_incident_alert_count()
RETURNS TRIGGER AS $$
BEGIN
IF TG_OP = 'INSERT' THEN
UPDATE incidents
SET alert_count = alert_count + 1,
updated_at = NOW()
WHERE id = NEW.incident_id;
ELSIF TG_OP = 'DELETE' THEN
UPDATE incidents
SET alert_count = alert_count - 1,
updated_at = NOW()
WHERE id = OLD.incident_id;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER trigger_update_incident_alert_count
AFTER INSERT OR DELETE ON incident_alerts
FOR EACH ROW
EXECUTE FUNCTION update_incident_alert_count();
-- View: Active correlations (alerts matching correlation patterns)
CREATE OR REPLACE VIEW alert_correlations_view
WITH (security_invoker=on)
AS
WITH recent_alerts AS (
SELECT
'system'::TEXT AS source,
id::TEXT AS alert_id,
alert_type,
NULL::TEXT AS metric_type,
severity,
created_at,
message AS alert_message
FROM system_alerts
WHERE created_at > NOW() - INTERVAL '1 hour'
AND resolved_at IS NULL
UNION ALL
SELECT
'rate_limit'::TEXT AS source,
id::TEXT AS alert_id,
NULL::TEXT AS alert_type,
metric_type,
'high'::TEXT AS severity,
created_at,
alert_message
FROM rate_limit_alerts
WHERE created_at > NOW() - INTERVAL '1 hour'
AND resolved_at IS NULL
),
matched_alerts AS (
SELECT
acr.id AS rule_id,
acr.rule_name,
acr.description AS rule_description,
acr.incident_severity,
acr.incident_title_template,
acr.time_window_minutes,
acr.min_alerts_required,
ra.alert_id,
ra.source,
ra.alert_message,
ra.created_at
FROM alert_correlation_rules acr
CROSS JOIN LATERAL (
SELECT ra.*
FROM recent_alerts ra,
jsonb_array_elements(acr.alert_patterns) AS pattern
WHERE ra.created_at > NOW() - (acr.time_window_minutes || ' minutes')::INTERVAL
AND (
(pattern->>'source' = ra.source)
AND (
(pattern->>'alert_type' IS NULL OR pattern->>'alert_type' = ra.alert_type)
AND (pattern->>'metric_type' IS NULL OR pattern->>'metric_type' = ra.metric_type)
)
AND (pattern->>'severity' IS NULL OR pattern->>'severity' = ra.severity)
)
) ra
WHERE acr.enabled = true
)
SELECT
rule_id,
rule_name,
rule_description,
incident_severity,
incident_title_template,
time_window_minutes,
min_alerts_required,
COUNT(DISTINCT alert_id) AS matching_alerts_count,
ARRAY_AGG(DISTINCT alert_id) AS alert_ids,
ARRAY_AGG(DISTINCT source) AS alert_sources,
(ARRAY_AGG(alert_message ORDER BY created_at DESC))[1:5] AS alert_messages,
MIN(created_at) AS first_alert_at,
MAX(created_at) AS last_alert_at,
NOT EXISTS (
SELECT 1 FROM incidents i
WHERE i.correlation_rule_id = matched_alerts.rule_id
AND i.status IN ('open', 'investigating')
AND i.detected_at > NOW() - (matched_alerts.time_window_minutes || ' minutes')::INTERVAL
) AS can_create_incident
FROM matched_alerts
GROUP BY rule_id, rule_name, rule_description, incident_severity,
incident_title_template, time_window_minutes, min_alerts_required
HAVING COUNT(DISTINCT alert_id) >= min_alerts_required;
-- Grant permissions
GRANT SELECT ON alert_correlation_rules TO authenticated;
GRANT SELECT ON incidents TO authenticated;
GRANT SELECT ON incident_alerts TO authenticated;
GRANT SELECT ON alert_correlations_view TO authenticated;
-- RLS Policies
ALTER TABLE alert_correlation_rules ENABLE ROW LEVEL SECURITY;
ALTER TABLE incidents ENABLE ROW LEVEL SECURITY;
ALTER TABLE incident_alerts ENABLE ROW LEVEL SECURITY;
CREATE POLICY moderators_manage_correlation_rules ON alert_correlation_rules
FOR ALL USING (
EXISTS (
SELECT 1 FROM user_roles
WHERE user_id = auth.uid()
AND role IN ('moderator', 'admin', 'superuser')
)
);
CREATE POLICY moderators_view_incidents ON incidents
FOR SELECT USING (
EXISTS (
SELECT 1 FROM user_roles
WHERE user_id = auth.uid()
AND role IN ('moderator', 'admin', 'superuser')
)
);
CREATE POLICY moderators_manage_incidents ON incidents
FOR ALL USING (
EXISTS (
SELECT 1 FROM user_roles
WHERE user_id = auth.uid()
AND role IN ('moderator', 'admin', 'superuser')
)
);
CREATE POLICY moderators_view_incident_alerts ON incident_alerts
FOR SELECT USING (
EXISTS (
SELECT 1 FROM user_roles
WHERE user_id = auth.uid()
AND role IN ('moderator', 'admin', 'superuser')
)
);
-- Insert default correlation rules
INSERT INTO alert_correlation_rules (rule_name, description, alert_patterns, time_window_minutes, min_alerts_required, incident_severity, incident_title_template, incident_description_template, enabled) VALUES
('Database Performance Degradation', 'Database issues causing application errors',
'[
{"source": "system", "alert_type": "database_connection_pool_exhausted"},
{"source": "system", "alert_type": "high_error_rate"},
{"source": "rate_limit", "metric_type": "rate_limit_violation"}
]'::jsonb,
15, 2, 'critical',
'Database Performance Incident',
'Multiple database-related alerts detected indicating potential database performance degradation affecting application availability.',
true),
('Authentication System Issues', 'Auth failures causing user lockouts',
'[
{"source": "system", "alert_type": "auth_failure_spike"},
{"source": "system", "alert_type": "high_error_rate"},
{"source": "rate_limit", "metric_type": "rate_limit_violation"}
]'::jsonb,
10, 2, 'high',
'Authentication System Incident',
'Authentication-related alerts detected indicating potential issues with user login and authentication services.',
true),
('Rate Limiting Cascade', 'Rate limit violations causing service degradation',
'[
{"source": "rate_limit", "metric_type": "rate_limit_violation"},
{"source": "system", "alert_type": "high_error_rate"}
]'::jsonb,
20, 2, 'high',
'Rate Limiting Cascade Incident',
'Multiple rate limit violations detected potentially causing cascading service issues.',
true),
('Storage System Issues', 'File upload/storage problems',
'[
{"source": "system", "alert_type": "upload_timeout"},
{"source": "system", "alert_type": "storage_quota_exceeded"},
{"source": "system", "alert_type": "high_error_rate"}
]'::jsonb,
15, 2, 'medium',
'Storage System Incident',
'Storage-related alerts detected indicating potential issues with file uploads or storage capacity.',
true);
-- Create indexes for performance
CREATE INDEX idx_incidents_status ON incidents(status) WHERE status IN ('open', 'investigating');
CREATE INDEX idx_incidents_severity ON incidents(severity, detected_at DESC);
CREATE INDEX idx_incident_alerts_incident_id ON incident_alerts(incident_id);
CREATE INDEX idx_alert_correlation_rules_enabled ON alert_correlation_rules(enabled) WHERE enabled = true;