Files
thrilltrack-explorer/supabase/functions/collect-metrics/index.ts
gpt-engineer-app[bot] 12d2518eb9 Migrate Phase 2 Batch 1
Migrate 3 Phase 2 monitoring functions (collect-metrics, detect-anomalies, monitor-rate-limits) to use wrapEdgeFunction with smaller batch updates, replacing manual handlers, adding shared logging/tracing, and standardizing error handling.
2025-11-11 03:30:00 +00:00

178 lines
5.6 KiB
TypeScript

import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { createEdgeFunction } from '../_shared/edgeFunctionWrapper.ts';
import { edgeLogger } from '../_shared/logger.ts';
// Shape of one row inserted into the `metric_time_series` table.
interface MetricRecord {
  // Unique metric identifier, e.g. 'api_error_count' or 'pending_submissions'.
  metric_name: string;
  // Sampled numeric value for this metric at `timestamp`.
  metric_value: number;
  // Grouping bucket used downstream; values seen here:
  // 'performance' | 'security' | 'workflow' | 'monitoring'.
  metric_category: string;
  // ISO-8601 collection time, shared by every metric from one run.
  timestamp: string;
}
/**
 * Edge function: collect-metrics.
 *
 * Gathers point-in-time operational metrics (recent error/alert counts,
 * rate-limit violations, workflow backlogs, approval rate, moderation
 * latency) and inserts them as rows into the `metric_time_series` table.
 * Intended to run on a schedule; no authentication (`requireAuth: false`).
 *
 * Response: JSON `{ success, metrics_collected, metrics: [{ name, value }] }`.
 *
 * Error handling: an individual metric query that fails is skipped (the
 * remaining metrics are still collected); only a failure of the final
 * insert throws, letting the wrapper produce the error response.
 *
 * BUG FIX: the count queries use `{ count: 'exact', head: true }`, which
 * makes supabase-js return the row count in the `count` field and always
 * set `data` to null. The previous code destructured `data`, so every
 * count metric was recorded as 0. We now destructure `count` and default
 * nullish counts with `?? 0`.
 */
export default createEdgeFunction(
  {
    name: 'collect-metrics',
    requireAuth: false,
  },
  async (req, context, supabase) => {
    edgeLogger.info('Starting metrics collection', { requestId: context.requestId });

    const metrics: MetricRecord[] = [];
    // One shared timestamp so all metrics from this run align in the series.
    const timestamp = new Date().toISOString();

    // 1. API error count: high/critical system alerts in the last 60s.
    const { count: errorCount, error: errorQueryError } = await supabase
      .from('system_alerts')
      .select('id', { count: 'exact', head: true })
      .gte('created_at', new Date(Date.now() - 60000).toISOString())
      .in('severity', ['high', 'critical']);
    if (!errorQueryError) {
      metrics.push({
        metric_name: 'api_error_count',
        metric_value: errorCount ?? 0,
        metric_category: 'performance',
        timestamp,
      });
    }

    // 2. Rate limit violations: requests blocked in the last 60s.
    const { count: violationCount, error: rateLimitError } = await supabase
      .from('rate_limit_logs')
      .select('id', { count: 'exact', head: true })
      .gte('timestamp', new Date(Date.now() - 60000).toISOString())
      .eq('action_taken', 'blocked');
    if (!rateLimitError) {
      metrics.push({
        metric_name: 'rate_limit_violations',
        metric_value: violationCount ?? 0,
        metric_category: 'security',
        timestamp,
      });
    }

    // 3. Pending submissions awaiting moderation (total backlog, no window).
    const { count: pendingCount, error: submissionsError } = await supabase
      .from('submissions')
      .select('id', { count: 'exact', head: true })
      .eq('moderation_status', 'pending');
    if (!submissionsError) {
      metrics.push({
        metric_name: 'pending_submissions',
        metric_value: pendingCount ?? 0,
        metric_category: 'workflow',
        timestamp,
      });
    }

    // 4. Active incidents (open or under investigation).
    const { count: incidentCount, error: incidentsError } = await supabase
      .from('incidents')
      .select('id', { count: 'exact', head: true })
      .in('status', ['open', 'investigating']);
    if (!incidentsError) {
      metrics.push({
        metric_name: 'active_incidents',
        metric_value: incidentCount ?? 0,
        metric_category: 'monitoring',
        timestamp,
      });
    }

    // 5. Unresolved system alerts (all severities).
    const { count: alertCount, error: alertsError } = await supabase
      .from('system_alerts')
      .select('id', { count: 'exact', head: true })
      .eq('resolved', false);
    if (!alertsError) {
      metrics.push({
        metric_name: 'unresolved_alerts',
        metric_value: alertCount ?? 0,
        metric_category: 'monitoring',
        timestamp,
      });
    }

    // 6. Submission approval rate over the last hour, as a percentage.
    // This query fetches rows (no `head: true`), so `data` is the array here.
    const { data: recentSubmissions, error: recentSubmissionsError } = await supabase
      .from('submissions')
      .select('moderation_status', { count: 'exact' })
      .gte('created_at', new Date(Date.now() - 3600000).toISOString());
    if (!recentSubmissionsError && recentSubmissions) {
      const total = recentSubmissions.length;
      const approved = recentSubmissions.filter(s => s.moderation_status === 'approved').length;
      // No submissions in the window counts as a healthy 100%.
      const approvalRate = total > 0 ? (approved / total) * 100 : 100;
      metrics.push({
        metric_name: 'submission_approval_rate',
        metric_value: approvalRate,
        metric_category: 'workflow',
        timestamp,
      });
    }

    // 7. Average moderation turnaround (minutes) for submissions moderated
    // in the last hour. Skipped entirely when there were none.
    const { data: moderatedSubmissions, error: moderatedError } = await supabase
      .from('submissions')
      .select('created_at, moderated_at')
      .not('moderated_at', 'is', null)
      .gte('moderated_at', new Date(Date.now() - 3600000).toISOString());
    if (!moderatedError && moderatedSubmissions && moderatedSubmissions.length > 0) {
      const totalTime = moderatedSubmissions.reduce((sum, sub) => {
        const created = new Date(sub.created_at).getTime();
        const moderated = new Date(sub.moderated_at).getTime();
        return sum + (moderated - created);
      }, 0);
      const avgTimeMinutes = (totalTime / moderatedSubmissions.length) / 60000;
      metrics.push({
        metric_name: 'avg_moderation_time',
        metric_value: avgTimeMinutes,
        metric_category: 'workflow',
        timestamp,
      });
    }

    // Persist everything collected in a single batch insert.
    if (metrics.length > 0) {
      const { error: insertError } = await supabase
        .from('metric_time_series')
        .insert(metrics);
      if (insertError) {
        edgeLogger.error('Error inserting metrics', {
          error: insertError,
          requestId: context.requestId,
        });
        // Let the wrapper's standardized error handling take over.
        throw insertError;
      }
      edgeLogger.info('Successfully recorded metrics', {
        count: metrics.length,
        requestId: context.requestId,
      });
    }

    return new Response(
      JSON.stringify({
        success: true,
        metrics_collected: metrics.length,
        metrics: metrics.map(m => ({ name: m.metric_name, value: m.metric_value })),
      }),
      { headers: { 'Content-Type': 'application/json' } }
    );
  }
);