thrilltrack-explorer/supabase/functions/process-selective-approval/index.ts
Last commit: 444634dc85 "Approve database migration" (gpt-engineer-app[bot], 2025-10-15 19:29:06 +00:00)


import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { validateEntityDataStrict } from "./validation.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface ApprovalRequest {
itemIds: string[];
submissionId: string;
}
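// Example request body (illustrative; the UUIDs below are placeholders):
// {
//   "itemIds": ["<submission-item-uuid>", "<submission-item-uuid>"],
//   "submissionId": "<content-submission-uuid>"
// }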
// Allowed database fields for each entity type
const RIDE_FIELDS = [
'name', 'slug', 'description', 'park_id', 'ride_model_id',
'manufacturer_id', 'designer_id', 'category', 'status',
'opening_date', 'closing_date', 'height_requirement', 'age_requirement',
'capacity_per_hour', 'duration_seconds', 'max_speed_kmh',
'max_height_meters', 'length_meters', 'inversions',
'ride_sub_type', 'coaster_type', 'seating_type', 'intensity_level',
'drop_height_meters', 'max_g_force', 'image_url',
'banner_image_url', 'banner_image_id', 'card_image_url', 'card_image_id'
];
const PARK_FIELDS = [
'name', 'slug', 'description', 'park_type', 'status',
'opening_date', 'closing_date', 'location_id', 'operator_id',
'property_owner_id', 'website_url', 'phone', 'email',
'banner_image_url', 'banner_image_id', 'card_image_url', 'card_image_id'
];
const COMPANY_FIELDS = [
'name', 'slug', 'description', 'company_type', 'person_type',
'founded_year', 'headquarters_location', 'website_url', 'logo_url',
'banner_image_url', 'banner_image_id', 'card_image_url', 'card_image_id'
];
const RIDE_MODEL_FIELDS = [
'name', 'slug', 'description', 'category', 'ride_type',
'manufacturer_id', 'banner_image_url', 'banner_image_id',
'card_image_url', 'card_image_id'
];
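// These allowlists are applied via filterDatabaseFields() before every insert/update, so that
// helper-only keys (e.g. _submitter_id, images) and unexpected client fields never reach the tables.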
serve(async (req) => {
if (req.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });
}
try {
// Verify authentication first with a client that respects RLS
const authHeader = req.headers.get('Authorization');
if (!authHeader) {
return new Response(
JSON.stringify({ error: 'Authentication required. Please log in.' }),
{ status: 401, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
// Create Supabase client with user's auth token to verify authentication
const supabaseUrl = Deno.env.get('SUPABASE_URL') ?? '';
const supabaseAnonKey = Deno.env.get('SUPABASE_ANON_KEY') ?? '';
const supabaseAuth = createClient(supabaseUrl, supabaseAnonKey, {
global: { headers: { Authorization: authHeader } }
});
// Verify JWT and get authenticated user
const { data: { user }, error: authError } = await supabaseAuth.auth.getUser();
if (authError || !user) {
console.error('Auth verification failed:', authError);
return new Response(
JSON.stringify({ error: 'Invalid authentication token.' }),
{ status: 401, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
// SECURITY NOTE: a service role client is created below and used for the rest of this function.
// Reason: approved changes must be written to entity tables (parks, rides, companies, ride_models)
// that are protected by RLS policies.
// Safeguards: the caller's JWT is verified above, and a moderator/admin/superuser role is required
// via the user_roles check below.
const authenticatedUserId = user.id;
// Create service role client for privileged operations (including role check)
const supabase = createClient(
Deno.env.get('SUPABASE_URL') ?? '',
Deno.env.get('SUPABASE_SERVICE_ROLE_KEY') ?? ''
);
// Check if user has moderator permissions using service role to bypass RLS
const { data: roles, error: rolesError } = await supabase
.from('user_roles')
.select('role')
.eq('user_id', authenticatedUserId);
if (rolesError) {
console.error('Failed to fetch user roles:', rolesError);
return new Response(
JSON.stringify({ error: 'Failed to verify user permissions.' }),
{ status: 403, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
const userRoles = roles?.map(r => r.role) || [];
const isModerator = userRoles.includes('moderator') ||
userRoles.includes('admin') ||
userRoles.includes('superuser');
if (!isModerator) {
return new Response(
JSON.stringify({ error: 'Insufficient permissions. Moderator role required.' }),
{ status: 403, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
const { itemIds, submissionId }: ApprovalRequest = await req.json();
// UUID validation regex
const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
// Validate itemIds
if (!itemIds || !Array.isArray(itemIds)) {
return new Response(
JSON.stringify({ error: 'itemIds is required and must be an array' }),
{ status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
if (itemIds.length === 0) {
return new Response(
JSON.stringify({ error: 'itemIds must be a non-empty array' }),
{ status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
// Validate submissionId
if (!submissionId || typeof submissionId !== 'string' || submissionId.trim() === '') {
return new Response(
JSON.stringify({ error: 'submissionId is required and must be a non-empty string' }),
{ status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
if (!uuidRegex.test(submissionId)) {
return new Response(
JSON.stringify({ error: 'submissionId must be a valid UUID format' }),
{ status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
console.log('Processing selective approval:', { itemIds, userId: authenticatedUserId, submissionId });
// Fetch all items for the submission
const { data: items, error: fetchError } = await supabase
.from('submission_items')
.select('*')
.in('id', itemIds);
if (fetchError) {
throw new Error(`Failed to fetch items: ${fetchError.message}`);
}
// Get the submitter's user_id from the submission
const { data: submission, error: submissionError } = await supabase
.from('content_submissions')
.select('user_id')
.eq('id', submissionId)
.single();
if (submissionError || !submission) {
throw new Error(`Failed to fetch submission: ${submissionError?.message}`);
}
const submitterId = submission.user_id;
// Topologically sort items by dependencies
let sortedItems;
try {
sortedItems = topologicalSort(items);
} catch (sortError) {
const errorMessage = sortError instanceof Error ? sortError.message : 'Failed to sort items';
console.error('Topological sort failed:', errorMessage);
return new Response(
JSON.stringify({
error: 'Invalid submission structure',
message: errorMessage,
details: 'The submission contains circular dependencies or missing required items'
}),
{ status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
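// dependencyMap: submission item ID -> ID of the entity created for that item, so that later
// items in this batch can reference entities created earlier (resolved in resolveDependencies).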
const dependencyMap = new Map<string, string>();
const approvalResults: Array<{
itemId: string;
entityId?: string | null;
itemType: string;
success: boolean;
error?: string;
isDependencyFailure?: boolean;
}> = [];
// Process items in order
for (const item of sortedItems) {
try {
console.log(`Processing item ${item.id} of type ${item.item_type}`);
// Validate entity data with strict validation
const validation = validateEntityDataStrict(item.item_type, item.item_data);
if (validation.blockingErrors.length > 0) {
console.error(`❌ Blocking errors for item ${item.id}:`, validation.blockingErrors);
// Fail the entire batch if ANY item has blocking errors
return new Response(JSON.stringify({
success: false,
message: 'Validation failed: Items have blocking errors that must be fixed',
errors: validation.blockingErrors,
failedItemId: item.id,
failedItemType: item.item_type
}), {
status: 400,
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
});
}
if (validation.warnings.length > 0) {
console.warn(`⚠️ Warnings for item ${item.id}:`, validation.warnings);
// Continue processing - warnings don't block approval
}
// Set user context for versioning trigger
// This allows create_relational_version() trigger to capture the submitter
const { error: setUserIdError } = await supabase.rpc('set_config_value', {
setting_name: 'app.current_user_id',
setting_value: submitterId,
is_local: false
});
if (setUserIdError) {
console.error('Failed to set user context:', setUserIdError);
}
// Set submission ID for version tracking
const { error: setSubmissionIdError } = await supabase.rpc('set_config_value', {
setting_name: 'app.submission_id',
setting_value: submissionId,
is_local: false
});
if (setSubmissionIdError) {
console.error('Failed to set submission context:', setSubmissionIdError);
}
// Resolve dependencies in item data
const resolvedData = resolveDependencies(item.item_data, dependencyMap);
// Add submitter ID to the data for photo tracking
resolvedData._submitter_id = submitterId;
let entityId: string | null = null;
// Create entity based on type
switch (item.item_type) {
case 'park':
entityId = await createPark(supabase, resolvedData);
break;
case 'ride':
entityId = await createRide(supabase, resolvedData);
break;
case 'manufacturer':
case 'operator':
case 'property_owner':
case 'designer':
entityId = await createCompany(supabase, resolvedData, item.item_type);
break;
case 'ride_model':
entityId = await createRideModel(supabase, resolvedData);
break;
case 'photo':
await approvePhotos(supabase, resolvedData, item.id);
entityId = item.id; // Use item ID as entity ID for photos
break;
case 'photo_edit':
await editPhoto(supabase, resolvedData);
entityId = resolvedData.photo_id;
break;
case 'photo_delete':
await deletePhoto(supabase, resolvedData);
entityId = resolvedData.photo_id;
break;
case 'timeline_event':
entityId = await createTimelineEvent(supabase, resolvedData, submitterId, authenticatedUserId, submissionId);
break;
default:
throw new Error(`Unknown item type: ${item.item_type}`);
}
if (entityId) {
dependencyMap.set(item.id, entityId);
}
// Store result for batch update later
approvalResults.push({
itemId: item.id,
entityId,
itemType: item.item_type,
success: true
});
console.log(`Successfully approved item ${item.id} -> entity ${entityId}`);
} catch (error) {
console.error(`Error processing item ${item.id}:`, error);
const isDependencyError = error instanceof Error && (
error.message.includes('Missing dependency') ||
error.message.includes('depends on') ||
error.message.includes('Circular dependency')
);
approvalResults.push({
itemId: item.id,
itemType: item.item_type,
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
isDependencyFailure: isDependencyError
});
}
}
// Mark each successfully processed item as approved (one update per item, not a single batch write)
const approvedItemIds = approvalResults.filter(r => r.success).map(r => r.itemId);
if (approvedItemIds.length > 0) {
const approvedUpdates = approvalResults
.filter(r => r.success)
.map(r => ({
id: r.itemId,
status: 'approved',
approved_entity_id: r.entityId,
updated_at: new Date().toISOString()
}));
for (const update of approvedUpdates) {
const { error: batchApproveError } = await supabase
.from('submission_items')
.update({
status: update.status,
approved_entity_id: update.approved_entity_id,
updated_at: update.updated_at
})
.eq('id', update.id);
if (batchApproveError) {
console.error(`Failed to approve item ${update.id}:`, batchApproveError);
}
}
}
// Mark each failed item as rejected (one update per item)
const rejectedItemIds = approvalResults.filter(r => !r.success).map(r => r.itemId);
if (rejectedItemIds.length > 0) {
const rejectedUpdates = approvalResults
.filter(r => !r.success)
.map(r => ({
id: r.itemId,
status: 'rejected',
rejection_reason: r.error || 'Unknown error',
updated_at: new Date().toISOString()
}));
for (const update of rejectedUpdates) {
const { error: batchRejectError } = await supabase
.from('submission_items')
.update({
status: update.status,
rejection_reason: update.rejection_reason,
updated_at: update.updated_at
})
.eq('id', update.id);
if (batchRejectError) {
console.error(`Failed to reject item ${update.id}:`, batchRejectError);
}
}
}
// Check if any failures were dependency-related
const hasDependencyFailure = approvalResults.some(r =>
!r.success && r.isDependencyFailure
);
const allApproved = approvalResults.every(r => r.success);
const someApproved = approvalResults.some(r => r.success);
const allFailed = approvalResults.every(r => !r.success);
// Determine final status:
// - If dependency validation failed: keep pending for escalation
// - If all approved: approved
// - If some approved: partially_approved
// - If all failed but no dependency issues: rejected (can retry)
const finalStatus = hasDependencyFailure && !someApproved
? 'pending' // Keep pending for escalation only
: allApproved
? 'approved'
: allFailed
? 'rejected' // Total failure, allow retry
: 'partially_approved'; // Mixed results
const reviewerNotes = hasDependencyFailure && !someApproved
? 'Submission has unresolved dependencies. Escalation required.'
: undefined;
const { error: updateError } = await supabase
.from('content_submissions')
.update({
status: finalStatus,
reviewer_id: authenticatedUserId,
reviewed_at: new Date().toISOString(),
reviewer_notes: reviewerNotes,
escalated: hasDependencyFailure && !someApproved ? true : undefined
})
.eq('id', submissionId);
if (updateError) {
console.error('Failed to update submission status:', updateError);
}
return new Response(
JSON.stringify({
success: true,
results: approvalResults,
submissionStatus: finalStatus
}),
{ headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
} catch (error) {
console.error('Error in process-selective-approval:', error);
return new Response(
JSON.stringify({ error: error instanceof Error ? error.message : 'Unknown error' }),
{ status: 500, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
});
// Helper functions
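// topologicalSort orders items so that every item comes after the item named in its depends_on.
// Illustrative example: for a park item A and a ride item B with B.depends_on === A.id,
// the result is [A, B], so the park row exists before the ride that references it.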
function topologicalSort(items: any[]): any[] {
const sorted: any[] = [];
const visited = new Set<string>();
const visiting = new Set<string>();
const visit = (item: any) => {
if (visited.has(item.id)) return;
if (visiting.has(item.id)) {
throw new Error(
`Circular dependency detected: item ${item.id} (${item.item_type}) ` +
`creates a dependency loop. This submission requires escalation.`
);
}
visiting.add(item.id);
if (item.depends_on) {
const parent = items.find(i => i.id === item.depends_on);
if (!parent) {
throw new Error(
`Missing dependency: item ${item.id} (${item.item_type}) ` +
`depends on ${item.depends_on} which is not in this submission or has not been approved. ` +
`This submission requires escalation.`
);
}
visit(parent);
}
visiting.delete(item.id);
visited.add(item.id);
sorted.push(item);
};
items.forEach(item => visit(item));
return sorted;
}
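// resolveDependencies walks the item data and swaps any string value that matches a submission
// item ID for the real entity ID recorded in dependencyMap.
// Illustrative example: { park_id: "<item-uuid>" } becomes { park_id: "<new-park-uuid>" }.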
function resolveDependencies(data: any, dependencyMap: Map<string, string>): any {
if (typeof data !== 'object' || data === null) {
return data;
}
if (Array.isArray(data)) {
return data.map(item => resolveDependencies(item, dependencyMap));
}
const resolved: any = {};
for (const [key, value] of Object.entries(data)) {
if (typeof value === 'string' && dependencyMap.has(value)) {
resolved[key] = dependencyMap.get(value);
} else {
resolved[key] = resolveDependencies(value, dependencyMap);
}
}
return resolved;
}
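// Postgres rejects empty strings for DATE columns, so blank date inputs are converted to NULL.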
function sanitizeDateFields(data: any): any {
const dateFields = ['opening_date', 'closing_date', 'date_changed', 'date_taken', 'visit_date'];
const sanitized = { ...data };
for (const field of dateFields) {
if (field in sanitized && sanitized[field] === '') {
sanitized[field] = null;
}
}
return sanitized;
}
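// Keep only allowlisted columns; all other keys are dropped before the row is written.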
function filterDatabaseFields(data: any, allowedFields: string[]): any {
const filtered: any = {};
for (const field of allowedFields) {
if (field in data && data[field] !== undefined) {
filtered[field] = data[field];
}
}
return filtered;
}
function normalizeStatusValue(data: any): any {
if (data.status) {
// Map display values to database values
const statusMap: Record<string, string> = {
'Operating': 'operating',
'Seasonal': 'operating',
'Closed Temporarily': 'maintenance',
'Closed Permanently': 'closed',
'Under Construction': 'under_construction',
'Planned': 'under_construction',
'SBNO': 'sbno',
// Also handle already-lowercase values
'operating': 'operating',
'closed': 'closed',
'under_construction': 'under_construction',
'maintenance': 'maintenance',
'sbno': 'sbno'
};
data.status = statusMap[data.status] || 'operating';
}
return data;
}
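// Entity creators below return the UUID of the row they created or updated. A pre-existing ID in
// the payload (park_id / ride_id / company_id) marks an approved edit of that row; otherwise
// (and always for ride models) a new row is inserted.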
async function createPark(supabase: any, data: any): Promise<string> {
const submitterId = data._submitter_id;
let uploadedPhotos: any[] = [];
// Transform images object if present
if (data.images) {
const { uploaded, banner_assignment, card_assignment } = data.images;
if (uploaded && Array.isArray(uploaded)) {
// Store uploaded photos for later insertion into photos table
uploadedPhotos = uploaded;
// Assign banner image
if (banner_assignment !== undefined && uploaded[banner_assignment]) {
data.banner_image_id = uploaded[banner_assignment].cloudflare_id;
data.banner_image_url = uploaded[banner_assignment].url;
}
// Assign card image
if (card_assignment !== undefined && uploaded[card_assignment]) {
data.card_image_id = uploaded[card_assignment].cloudflare_id;
data.card_image_url = uploaded[card_assignment].url;
}
}
// Remove images object
delete data.images;
}
// Remove internal fields
delete data._submitter_id;
let parkId: string;
// Check if this is an edit (has park_id) or a new creation
if (data.park_id) {
console.log(`Updating existing park ${data.park_id}`);
parkId = data.park_id;
delete data.park_id; // Remove ID from update data
const normalizedData = normalizeStatusValue(data);
const sanitizedData = sanitizeDateFields(normalizedData);
const filteredData = filterDatabaseFields(sanitizedData, PARK_FIELDS);
const { error } = await supabase
.from('parks')
.update(filteredData)
.eq('id', parkId);
if (error) throw new Error(`Failed to update park: ${error.message}`);
} else {
console.log('Creating new park');
const normalizedData = normalizeStatusValue(data);
const sanitizedData = sanitizeDateFields(normalizedData);
const filteredData = filterDatabaseFields(sanitizedData, PARK_FIELDS);
const { data: park, error } = await supabase
.from('parks')
.insert(filteredData)
.select('id')
.single();
if (error) throw new Error(`Failed to create park: ${error.message}`);
parkId = park.id;
}
// Insert photos into photos table
if (uploadedPhotos.length > 0 && submitterId) {
console.log(`Inserting ${uploadedPhotos.length} photos for park ${parkId}`);
for (let i = 0; i < uploadedPhotos.length; i++) {
const photo = uploadedPhotos[i];
if (photo.cloudflare_id && photo.url) {
const { error: photoError } = await supabase.from('photos').insert({
entity_id: parkId,
entity_type: 'park',
cloudflare_image_id: photo.cloudflare_id,
cloudflare_image_url: photo.url,
caption: photo.caption || null,
title: null,
submitted_by: submitterId,
approved_at: new Date().toISOString(),
order_index: i,
});
if (photoError) {
console.error(`Failed to insert photo ${i}:`, photoError);
}
}
}
}
return parkId;
}
async function createRide(supabase: any, data: any): Promise<string> {
const submitterId = data._submitter_id;
let uploadedPhotos: any[] = [];
// Transform images object if present
if (data.images) {
const { uploaded, banner_assignment, card_assignment } = data.images;
if (uploaded && Array.isArray(uploaded)) {
// Store uploaded photos for later insertion into photos table
uploadedPhotos = uploaded;
// Assign banner image
if (banner_assignment !== undefined && uploaded[banner_assignment]) {
data.banner_image_id = uploaded[banner_assignment].cloudflare_id;
data.banner_image_url = uploaded[banner_assignment].url;
}
// Assign card image
if (card_assignment !== undefined && uploaded[card_assignment]) {
data.card_image_id = uploaded[card_assignment].cloudflare_id;
data.card_image_url = uploaded[card_assignment].url;
}
}
// Remove images object
delete data.images;
}
// Remove internal fields and store park_id before filtering
delete data._submitter_id;
const parkId = data.park_id;
let rideId: string;
// Check if this is an edit (has ride_id) or a new creation
if (data.ride_id) {
console.log(`Updating existing ride ${data.ride_id}`);
rideId = data.ride_id;
delete data.ride_id; // Remove ID from update data
const normalizedData = normalizeStatusValue(data);
const sanitizedData = sanitizeDateFields(normalizedData);
const filteredData = filterDatabaseFields(sanitizedData, RIDE_FIELDS);
const { error } = await supabase
.from('rides')
.update(filteredData)
.eq('id', rideId);
if (error) throw new Error(`Failed to update ride: ${error.message}`);
// Update park ride counts after successful ride update
if (parkId) {
console.log(`Updating ride counts for park ${parkId}`);
const { error: countError } = await supabase.rpc('update_park_ride_counts', {
target_park_id: parkId
});
if (countError) {
console.error('Failed to update park counts:', countError);
}
}
} else {
console.log('Creating new ride');
const normalizedData = normalizeStatusValue(data);
const sanitizedData = sanitizeDateFields(normalizedData);
const filteredData = filterDatabaseFields(sanitizedData, RIDE_FIELDS);
const { data: ride, error } = await supabase
.from('rides')
.insert(filteredData)
.select('id')
.single();
if (error) throw new Error(`Failed to create ride: ${error.message}`);
rideId = ride.id;
// Update park ride counts after successful ride creation
if (parkId) {
console.log(`Updating ride counts for park ${parkId}`);
const { error: countError } = await supabase.rpc('update_park_ride_counts', {
target_park_id: parkId
});
if (countError) {
console.error('Failed to update park counts:', countError);
}
}
}
// Insert photos into photos table
if (uploadedPhotos.length > 0 && submitterId) {
console.log(`Inserting ${uploadedPhotos.length} photos for ride ${rideId}`);
for (let i = 0; i < uploadedPhotos.length; i++) {
const photo = uploadedPhotos[i];
if (photo.cloudflare_id && photo.url) {
const { error: photoError } = await supabase.from('photos').insert({
entity_id: rideId,
entity_type: 'ride',
cloudflare_image_id: photo.cloudflare_id,
cloudflare_image_url: photo.url,
caption: photo.caption || null,
title: null,
submitted_by: submitterId,
approved_at: new Date().toISOString(),
order_index: i,
});
if (photoError) {
console.error(`Failed to insert photo ${i}:`, photoError);
}
}
}
}
return rideId;
}
async function createCompany(supabase: any, data: any, companyType: string): Promise<string> {
// Transform images object if present
if (data.images) {
const { uploaded, banner_assignment, card_assignment } = data.images;
if (uploaded && Array.isArray(uploaded)) {
// Assign banner image
if (banner_assignment !== undefined && uploaded[banner_assignment]) {
data.banner_image_id = uploaded[banner_assignment].cloudflare_id;
data.banner_image_url = uploaded[banner_assignment].url;
}
// Assign card image
if (card_assignment !== undefined && uploaded[card_assignment]) {
data.card_image_id = uploaded[card_assignment].cloudflare_id;
data.card_image_url = uploaded[card_assignment].url;
}
}
// Remove images object
delete data.images;
}
// Check if this is an edit (has company_id or id) or a new creation
const companyId = data.company_id || data.id;
if (companyId) {
console.log(`Updating existing company ${companyId}`);
const updateData = sanitizeDateFields({ ...data, company_type: companyType });
delete updateData.company_id;
delete updateData.id; // Remove ID from update data
const filteredData = filterDatabaseFields(updateData, COMPANY_FIELDS);
const { error } = await supabase
.from('companies')
.update(filteredData)
.eq('id', companyId);
if (error) throw new Error(`Failed to update company: ${error.message}`);
return companyId;
} else {
console.log('Creating new company');
const companyData = sanitizeDateFields({ ...data, company_type: companyType });
const filteredData = filterDatabaseFields(companyData, COMPANY_FIELDS);
const { data: company, error } = await supabase
.from('companies')
.insert(filteredData)
.select('id')
.single();
if (error) throw new Error(`Failed to create company: ${error.message}`);
return company.id;
}
}
async function createRideModel(supabase: any, data: any): Promise<string> {
const sanitizedData = sanitizeDateFields(data);
const filteredData = filterDatabaseFields(sanitizedData, RIDE_MODEL_FIELDS);
const { data: model, error } = await supabase
.from('ride_models')
.insert(filteredData)
.select('id')
.single();
if (error) throw new Error(`Failed to create ride model: ${error.message}`);
return model.id;
}
async function approvePhotos(supabase: any, data: any, submissionItemId: string): Promise<void> {
const photos = data.photos || [];
for (const photo of photos) {
const photoData = {
entity_id: data.entity_id,
entity_type: data.context,
cloudflare_image_id: extractImageId(photo.url),
cloudflare_image_url: photo.url,
title: photo.title,
caption: photo.caption,
date_taken: photo.date,
order_index: photo.order,
submission_id: submissionItemId
};
const { error } = await supabase.from('photos').insert(photoData);
if (error) {
console.error('Failed to insert photo:', error);
throw new Error(`Failed to insert photo: ${error.message}`);
}
}
}
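// Pulls the Cloudflare image ID out of a delivery URL, e.g. ".../<image-id>/public" -> "<image-id>";
// falls back to the full URL if the pattern does not match.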
function extractImageId(url: string): string {
const matches = url.match(/\/([^\/]+)\/public$/);
return matches ? matches[1] : url;
}
async function editPhoto(supabase: any, data: any): Promise<void> {
console.log(`Editing photo ${data.photo_id}`);
const { error } = await supabase
.from('photos')
.update({
caption: data.new_caption,
})
.eq('id', data.photo_id);
if (error) throw new Error(`Failed to edit photo: ${error.message}`);
}
async function deletePhoto(supabase: any, data: any): Promise<void> {
console.log(`Deleting photo ${data.photo_id}`);
const { error } = await supabase
.from('photos')
.delete()
.eq('id', data.photo_id);
if (error) throw new Error(`Failed to delete photo: ${error.message}`);
}
async function createTimelineEvent(
supabase: any,
data: any,
submitterId: string,
approvingUserId: string,
submissionId: string
): Promise<string> {
console.log('Creating timeline event');
const eventData = {
entity_id: data.entity_id,
entity_type: data.entity_type,
event_type: data.event_type,
event_date: data.event_date,
event_date_precision: data.event_date_precision,
title: data.title,
description: data.description,
from_value: data.from_value,
to_value: data.to_value,
from_entity_id: data.from_entity_id,
to_entity_id: data.to_entity_id,
from_location_id: data.from_location_id,
to_location_id: data.to_location_id,
is_public: data.is_public ?? true,
created_by: submitterId,
approved_by: approvingUserId,
submission_id: submissionId,
};
const { data: event, error } = await supabase
.from('entity_timeline_events')
.insert(eventData)
.select('id')
.single();
if (error) throw new Error(`Failed to create timeline event: ${error.message}`);
return event.id;
}
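// Example invocation from a browser client (illustrative sketch; assumes @supabase/supabase-js v2
// and a signed-in user who holds the moderator, admin, or superuser role):
//
// const { data, error } = await supabase.functions.invoke('process-selective-approval', {
//   body: {
//     itemIds: ['<submission-item-uuid>'],
//     submissionId: '<content-submission-uuid>',
//   },
// });
// // On success, data.results lists per-item outcomes and data.submissionStatus holds the final
// // submission status (approved, partially_approved, rejected, or pending).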