Fix remaining production readiness issues

gpt-engineer-app[bot] committed 2025-10-20 12:30:09 +00:00
parent 640fdb11db
commit 4983960138
7 changed files with 158 additions and 91 deletions

View File

@@ -2,6 +2,8 @@ import { supabase } from '@/integrations/supabase/client';
import type { Json } from '@/integrations/supabase/types';
import { uploadPendingImages } from './imageUploadHelper';
import { CompanyFormData, TempCompanyData } from '@/types/company';
import { logger } from './logger';
import { getErrorMessage } from './errorHandler';
export type { CompanyFormData, TempCompanyData };
@@ -19,8 +21,12 @@ export async function submitCompanyCreation(
...data.images,
uploaded: uploadedImages
};
} catch (error) {
console.error(`Failed to upload images for ${companyType} creation:`, error);
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
logger.error('Failed to upload images for company', {
action: `${companyType}_creation`,
error: errorMsg
});
throw new Error('Failed to upload images. Please check your connection and try again.');
}
}
@@ -91,8 +97,13 @@ export async function submitCompanyUpdate(
...data.images,
uploaded: uploadedImages
};
} catch (error) {
console.error(`Failed to upload images for ${existingCompany.company_type} update:`, error);
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
logger.error('Failed to upload images for company update', {
action: `${existingCompany.company_type}_update`,
companyId,
error: errorMsg
});
throw new Error('Failed to upload images. Please check your connection and try again.');
}
}

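The refactor above swaps raw console.error calls for two shared helpers, getErrorMessage from './errorHandler' and logger from './logger', which are imported but not shown in this diff. A minimal sketch of what such helpers might look like, assuming a thin structured wrapper over the console (names and shape are illustrative, not the project's actual implementation):

// errorHandler.ts -- illustrative sketch only
export function getErrorMessage(error: unknown): string {
  // Normalize whatever was thrown into a plain string for logging
  if (error instanceof Error) return error.message;
  if (typeof error === 'string') return error;
  return String(error);
}

// logger.ts -- illustrative sketch only
type LogContext = Record<string, unknown>;

export const logger = {
  error(message: string, context: LogContext = {}): void {
    // One structured payload per line keeps logs machine-parseable
    console.error(JSON.stringify({ level: 'error', message, ...context }));
  },
  warn(message: string, context: LogContext = {}): void {
    console.warn(JSON.stringify({ level: 'warn', message, ...context }));
  },
};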
View File

@@ -1,5 +1,6 @@
import { supabase } from '@/integrations/supabase/client';
import { getErrorMessage } from '@/lib/errorHandler';
import { logger } from '@/lib/logger';
import { updateSubmissionItem, type SubmissionItemWithDeps, type DependencyConflict } from './submissionItemsService';
export interface ResolutionResult {
@@ -84,9 +85,13 @@ export async function resolveConflicts(
success: true,
updatedSelections,
};
} catch (error) {
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
console.error('Conflict resolution error:', errorMsg);
logger.error('Conflict resolution error', {
action: 'resolve_conflicts',
conflictCount: conflicts.length,
error: errorMsg
});
return {
success: false,
error: errorMsg,
@@ -230,8 +235,13 @@ export async function findMatchingEntities(
}
return [];
} catch (error) {
console.error('Error finding matching entities:', error);
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
logger.error('Error finding matching entities', {
action: 'find_matching_entities',
itemType,
error: errorMsg
});
return [];
}
}

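resolveConflicts now reports failure through its ResolutionResult instead of rethrowing, so callers branch on success. A hypothetical call site sketch; the import paths, argument list, and updatedSelections shape are assumptions based on the identifiers visible in this hunk:

// Hypothetical caller -- resolveConflicts' exact signature is not shown in this diff.
import { resolveConflicts, type ResolutionResult } from './conflictResolutionService';
import type { DependencyConflict } from './submissionItemsService';

async function handleResolve(
  conflicts: DependencyConflict[],
  currentSelections: Map<string, string>
): Promise<Map<string, string> | null> {
  const result: ResolutionResult = await resolveConflicts(conflicts, currentSelections);
  if (!result.success) {
    // The structured error has already been logged inside resolveConflicts;
    // only the user-facing message is surfaced here.
    console.warn('Conflict resolution failed:', result.error);
    return null;
  }
  return result.updatedSelections ?? null;
}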
View File

@@ -1,4 +1,6 @@
import { supabase } from '@/integrations/supabase/client';
import { logger } from '@/lib/logger';
import { getErrorMessage } from '@/lib/errorHandler';
interface EmailValidationResult {
valid: boolean;
@@ -17,7 +19,10 @@ export async function validateEmailNotDisposable(email: string): Promise<EmailVa
});
if (error) {
console.error('Email validation error:', error);
logger.error('Email validation error from backend', {
action: 'validate_email_backend',
error: error.message
});
return {
valid: false,
reason: 'Unable to validate email address. Please try again.'
@@ -25,8 +30,12 @@ export async function validateEmailNotDisposable(email: string): Promise<EmailVa
}
return data as EmailValidationResult;
} catch (error) {
console.error('Email validation exception:', error);
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
logger.error('Email validation error', {
action: 'validate_email_disposable',
error: errorMsg
});
return {
valid: false,
reason: 'Unable to validate email address. Please try again.'

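validateEmailNotDisposable still resolves to an EmailValidationResult in every branch, including the new logged failure paths, so callers only need to inspect valid and reason. A hypothetical call site; the import path and surrounding handler are assumptions:

import { validateEmailNotDisposable } from '@/lib/emailValidation'; // path assumed

// Returns a user-facing error string, or null when the email passes validation.
async function checkSignupEmail(email: string): Promise<string | null> {
  const result = await validateEmailNotDisposable(email);
  if (!result.valid) {
    // reason is already user-safe ('Unable to validate email address. Please try again.' on backend failure)
    return result.reason ?? 'This email address cannot be used.';
  }
  return null;
}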
View File

@@ -1,5 +1,6 @@
import { supabase } from '@/integrations/supabase/client';
import { getErrorMessage } from './errorHandler';
import { logger } from './logger';
export interface SubmissionItemWithDeps {
id: string;
@@ -225,9 +226,14 @@ export async function approveSubmissionItems(
// Add to dependency map for child items
dependencyMap.set(item.id, entityId);
} catch (error) {
} catch (error: unknown) {
const errorMsg = getErrorMessage(error);
console.error(`Error approving ${item.item_type} item ${item.id}:`, errorMsg);
logger.error('Error approving items', {
action: 'approve_submission_items',
error: errorMsg,
userId,
itemCount: items.length
});
// Update item with error status
await updateSubmissionItem(item.id, {
@@ -378,7 +384,11 @@ async function createPark(data: any, dependencyMap: Map<string, string>): Promis
.eq('id', data.park_id);
if (error) {
console.error('Error updating park:', error);
logger.error('Error updating park', {
action: 'update_park',
parkId: data.park_id,
error: error.message
});
throw new Error(`Database error: ${error.message}`);
}
@@ -417,7 +427,11 @@ async function createPark(data: any, dependencyMap: Map<string, string>): Promis
.single();
if (error) {
console.error('Error creating park:', error);
logger.error('Error creating park', {
action: 'create_park',
parkName: resolvedData.name,
error: error.message
});
throw new Error(`Database error: ${error.message}`);
}
@@ -462,7 +476,11 @@ async function resolveLocationId(locationData: any): Promise<string | null> {
.single();
if (error) {
console.error('Error creating location:', error);
logger.error('Error creating location', {
action: 'create_location',
locationData,
error: error.message
});
throw new Error(`Failed to create location: ${error.message}`);
}

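The approval loop above records every created entity in dependencyMap (submission item id -> new entity id) so later items can swap placeholder references for real IDs. A simplified sketch of that resolution step; the item_data fields and the placeholder convention are assumptions for illustration:

// Illustrative only: the real item_data shape and placeholder convention are not shown in this diff.
function resolveDependencyRefs(
  itemData: Record<string, unknown>,
  dependencyMap: Map<string, string>
): Record<string, unknown> {
  const resolved: Record<string, unknown> = { ...itemData };
  for (const [field, value] of Object.entries(resolved)) {
    // Assumed convention: child items reference a parent as 'temp:<submission_item_id>'
    if (typeof value === 'string' && value.startsWith('temp:')) {
      const entityId = dependencyMap.get(value.slice('temp:'.length));
      if (!entityId) {
        throw new Error(`Missing dependency for field ${field}`);
      }
      resolved[field] = entityId;
    }
  }
  return resolved;
}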
View File

@@ -46,17 +46,20 @@ function cleanupExpiredEntries() {
cleanupFailureCount = 0;
}
} catch (error) {
} catch (error: unknown) {
// CRITICAL: Increment failure counter and log detailed error information
cleanupFailureCount++;
const errorMessage = error instanceof Error ? error.message : String(error);
const errorStack = error instanceof Error ? error.stack : 'No stack trace available';
console.error(`[Cleanup Error] Cleanup failed (attempt ${cleanupFailureCount}/${MAX_CLEANUP_FAILURES})`);
console.error(`[Cleanup Error] Error message: ${errorMessage}`);
console.error(`[Cleanup Error] Stack trace: ${errorStack}`);
console.error(`[Cleanup Error] Current map size: ${rateLimitMap.size}`);
console.error('[Cleanup Error]', {
attempt: cleanupFailureCount,
maxAttempts: MAX_CLEANUP_FAILURES,
error: errorMessage,
stack: errorStack,
mapSize: rateLimitMap.size
});
// FALLBACK MECHANISM: If cleanup fails repeatedly, force clear to prevent memory leak
if (cleanupFailureCount >= MAX_CLEANUP_FAILURES) {
@@ -204,7 +207,8 @@ serve(async (req) => {
);
}
console.log('Detecting location for IP:', clientIP);
// PII Note: Do not log full IP addresses in production
console.log('[Location] Detecting location for request');
// Use configurable geolocation service with proper error handling
// Defaults to ip-api.com if not configured
@@ -245,7 +249,11 @@ serve(async (req) => {
measurementSystem
};
console.log('Location detected:', result);
console.log('[Location] Location detected:', {
country: result.country,
countryCode: result.countryCode,
measurementSystem: result.measurementSystem
});
return new Response(
JSON.stringify(result),
@@ -257,14 +265,16 @@ serve(async (req) => {
}
);
} catch (error) {
} catch (error: unknown) {
// Enhanced error logging for better visibility and debugging
const errorMessage = error instanceof Error ? error.message : String(error);
const errorStack = error instanceof Error ? error.stack : 'No stack trace available';
console.error('[Location Detection Error] Request failed');
console.error(`[Location Detection Error] Message: ${errorMessage}`);
console.error(`[Location Detection Error] Stack: ${errorStack}`);
console.error('[Location Detection Error]', {
error: errorMessage,
stack: errorStack,
hasIP: true // IP removed for PII protection
});
// Return default (metric) with 500 status to indicate error occurred
// This allows proper error monitoring while still providing fallback data

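The cleanup block above pairs a consecutive-failure counter with a forced clear so a persistently failing cleanup cannot leak memory, while the location logs now omit the raw client IP. A condensed sketch of the failure-counter fallback, with the entry shape and the MAX_CLEANUP_FAILURES value assumed:

// Condensed sketch of the failure-counter fallback; entry shape and constants are assumed.
interface RateLimitEntry { count: number; resetAt: number }

const rateLimitMap = new Map<string, RateLimitEntry>();
const MAX_CLEANUP_FAILURES = 3; // assumed value
let cleanupFailureCount = 0;

function cleanupExpiredEntries(): void {
  try {
    const now = Date.now();
    for (const [key, entry] of rateLimitMap) {
      if (entry.resetAt <= now) rateLimitMap.delete(key);
    }
    cleanupFailureCount = 0; // a healthy run resets the counter
  } catch (error: unknown) {
    cleanupFailureCount++;
    const errorMessage = error instanceof Error ? error.message : String(error);
    console.error('[Cleanup Error]', {
      attempt: cleanupFailureCount,
      maxAttempts: MAX_CLEANUP_FAILURES,
      error: errorMessage,
      mapSize: rateLimitMap.size,
    });
    if (cleanupFailureCount >= MAX_CLEANUP_FAILURES) {
      // Fallback: losing rate-limit state is preferable to unbounded memory growth
      rateLimitMap.clear();
      cleanupFailureCount = 0;
    }
  }
}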
View File

@@ -68,33 +68,18 @@ serve(async (req) => {
});
// Verify JWT and get authenticated user
console.log('🔍 [AUTH DEBUG] Attempting getUser()...', {
hasAuthHeader: !!authHeader,
authHeaderLength: authHeader?.length,
authHeaderPrefix: authHeader?.substring(0, 20) + '...',
supabaseUrl,
timestamp: new Date().toISOString()
});
const { data: { user }, error: authError } = await supabaseAuth.auth.getUser();
console.log('🔍 [AUTH DEBUG] getUser() result:', {
console.log('[AUTH] User auth result:', {
hasUser: !!user,
userId: user?.id,
userEmail: user?.email,
hasError: !!authError,
errorMessage: authError?.message,
errorName: authError?.name,
errorStatus: authError?.status,
errorCode: authError?.code
hasError: !!authError
});
if (authError || !user) {
console.error('[AUTH DEBUG] Auth verification failed:', {
error: authError,
errorDetails: JSON.stringify(authError),
authHeaderPresent: !!authHeader,
authHeaderSample: authHeader?.substring(0, 30) + '...'
console.error('[AUTH] Auth verification failed:', {
error: authError?.message,
code: authError?.code
});
return new Response(
JSON.stringify({
@@ -106,7 +91,7 @@ serve(async (req) => {
);
}
console.log('[AUTH DEBUG] Authentication successful for user:', user.id);
console.log('[AUTH] Authentication successful:', user.id);
// SECURITY NOTE: Service role key used later in this function
// Reason: Need to bypass RLS to write approved changes to entity tables
@@ -122,22 +107,18 @@ serve(async (req) => {
);
// Check if user has moderator permissions using service role to bypass RLS
console.log('🔍 [ROLE CHECK] Fetching roles for user:', authenticatedUserId);
const { data: roles, error: rolesError } = await supabase
.from('user_roles')
.select('role')
.eq('user_id', authenticatedUserId);
console.log('🔍 [ROLE CHECK] Query result:', {
roles,
error: rolesError,
console.log('[ROLE_CHECK] Query result:', {
rolesCount: roles?.length,
userId: authenticatedUserId
error: rolesError?.message
});
if (rolesError) {
console.error('[ROLE CHECK] Failed:', rolesError);
console.error('[ROLE_CHECK] Failed:', { error: rolesError.message });
return new Response(
JSON.stringify({ error: 'Failed to verify user permissions.' }),
{ status: 403, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
@@ -149,21 +130,17 @@ serve(async (req) => {
userRoles.includes('admin') ||
userRoles.includes('superuser');
console.log('🔍 [ROLE CHECK] Result:', {
userRoles,
isModerator,
userId: authenticatedUserId
});
console.log('[ROLE_CHECK] Result:', { isModerator, userId: authenticatedUserId });
if (!isModerator) {
console.error('[ROLE CHECK] Insufficient permissions');
console.error('[ROLE_CHECK] Insufficient permissions');
return new Response(
JSON.stringify({ error: 'Insufficient permissions. Moderator role required.' }),
{ status: 403, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
console.log('[ROLE CHECK] User is moderator');
console.log('[ROLE_CHECK] User is moderator');
// Phase 2: AAL2 Enforcement - Check if user has MFA enrolled and requires AAL2
// Parse JWT directly from Authorization header to get AAL level
@@ -171,17 +148,17 @@ serve(async (req) => {
const payload = JSON.parse(atob(jwt.split('.')[1]));
const aal = payload.aal || 'aal1';
console.log('🔍 [AAL CHECK] Session AAL level:', { aal, userId: authenticatedUserId });
console.log('[AAL_CHECK] Session AAL level:', { aal, userId: authenticatedUserId });
// Check if user has MFA enrolled
const { data: factorsData } = await supabaseAuth.auth.mfa.listFactors();
const hasMFA = factorsData?.totp?.some(f => f.status === 'verified') || false;
console.log('🔍 [MFA CHECK] MFA status:', { hasMFA, userId: authenticatedUserId });
console.log('[MFA_CHECK] MFA status:', { hasMFA, userId: authenticatedUserId });
// Enforce AAL2 if MFA is enrolled
if (hasMFA && aal !== 'aal2') {
console.error('[AAL CHECK] AAL2 required but session is at AAL1', { userId: authenticatedUserId });
console.error('[AAL_CHECK] AAL2 required but session is at AAL1');
return new Response(
JSON.stringify({
error: 'MFA verification required',
@@ -192,7 +169,7 @@ serve(async (req) => {
);
}
console.log('[AAL CHECK] AAL2 check passed', { userId: authenticatedUserId, hasMFA, aal });
console.log('[AAL_CHECK] AAL2 check passed:', { userId: authenticatedUserId, hasMFA, aal });
const { itemIds, submissionId }: ApprovalRequest = await req.json();
@@ -229,7 +206,7 @@ serve(async (req) => {
);
}
console.log('Processing selective approval:', { itemIds, userId: authenticatedUserId, submissionId });
console.log('[APPROVAL] Processing selective approval:', { itemIds, userId: authenticatedUserId, submissionId });
// Fetch all items for the submission
const { data: items, error: fetchError } = await supabase
@@ -258,9 +235,14 @@ serve(async (req) => {
let sortedItems;
try {
sortedItems = topologicalSort(items);
} catch (sortError) {
} catch (sortError: unknown) {
const errorMessage = sortError instanceof Error ? sortError.message : 'Failed to sort items';
console.error('Topological sort failed:', errorMessage);
console.error('[APPROVAL ERROR] Topological sort failed:', {
submissionId,
itemCount: items.length,
error: errorMessage,
userId: authenticatedUserId
});
return new Response(
JSON.stringify({
error: 'Invalid submission structure',
@@ -284,13 +266,16 @@ serve(async (req) => {
// Process items in order
for (const item of sortedItems) {
try {
console.log(`Processing item ${item.id} of type ${item.item_type}`);
console.log('[APPROVAL] Processing item:', { itemId: item.id, itemType: item.item_type });
// Validate entity data with strict validation, passing original_data for edits
const validation = validateEntityDataStrict(item.item_type, item.item_data, item.original_data);
if (validation.blockingErrors.length > 0) {
console.error(`❌ Blocking errors for item ${item.id}:`, validation.blockingErrors);
console.error('[APPROVAL] Blocking validation errors:', {
itemId: item.id,
errors: validation.blockingErrors
});
// Fail the entire batch if ANY item has blocking errors
return new Response(JSON.stringify({
@@ -306,7 +291,10 @@ serve(async (req) => {
}
if (validation.warnings.length > 0) {
console.warn(`⚠️ Warnings for item ${item.id}:`, validation.warnings);
console.warn('[APPROVAL] Validation warnings:', {
itemId: item.id,
warnings: validation.warnings
});
// Continue processing - warnings don't block approval
}
@@ -319,7 +307,7 @@ serve(async (req) => {
});
if (setUserIdError) {
console.error('Failed to set user context:', setUserIdError);
console.error('[APPROVAL] Failed to set user context:', { error: setUserIdError.message });
}
// Set submission ID for version tracking
@@ -330,7 +318,7 @@ serve(async (req) => {
});
if (setSubmissionIdError) {
console.error('Failed to set submission context:', setSubmissionIdError);
console.error('[APPROVAL] Failed to set submission context:', { error: setSubmissionIdError.message });
}
// Resolve dependencies in item data
@@ -390,9 +378,16 @@ serve(async (req) => {
success: true
});
console.log(`Successfully approved item ${item.id} -> entity ${entityId}`);
} catch (error) {
console.error(`Error processing item ${item.id}:`, error);
console.log('[APPROVAL SUCCESS]', { itemId: item.id, entityId, itemType: item.item_type });
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
console.error('[APPROVAL ERROR] Item approval failed:', {
itemId: item.id,
itemType: item.item_type,
error: errorMessage,
userId: authenticatedUserId,
submissionId
});
const isDependencyError = error instanceof Error && (
error.message.includes('Missing dependency') ||
@@ -404,7 +399,7 @@ serve(async (req) => {
itemId: item.id,
itemType: item.item_type,
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
error: errorMessage,
isDependencyFailure: isDependencyError
});
}
@@ -433,7 +428,10 @@ serve(async (req) => {
.eq('id', update.id);
if (batchApproveError) {
console.error(`Failed to approve item ${update.id}:`, batchApproveError);
console.error('[APPROVAL] Failed to approve item:', {
itemId: update.id,
error: batchApproveError.message
});
}
}
}
@@ -461,7 +459,10 @@ serve(async (req) => {
.eq('id', update.id);
if (batchRejectError) {
console.error(`Failed to reject item ${update.id}:`, batchRejectError);
console.error('[APPROVAL] Failed to reject item:', {
itemId: update.id,
error: batchRejectError.message
});
}
}
}
@@ -504,7 +505,7 @@ serve(async (req) => {
.eq('id', submissionId);
if (updateError) {
console.error('Failed to update submission status:', updateError);
console.error('[APPROVAL] Failed to update submission status:', { error: updateError.message });
}
return new Response(
@@ -515,8 +516,13 @@ serve(async (req) => {
}),
{ headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
} catch (error) {
console.error('Error in process-selective-approval:', error);
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'An unexpected error occurred';
console.error('[APPROVAL ERROR] Process failed:', {
error: errorMessage,
userId: authenticatedUserId,
timestamp: new Date().toISOString()
});
return createErrorResponse(
error,
500,

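The AAL2 gate reads the session's aal claim straight out of the JWT payload and only blocks users who actually have a verified TOTP factor enrolled. A reduced sketch of that check, assuming a standard three-segment JWT in the Authorization header; MfaClient is a stand-in for the subset of the Supabase client bound to the caller's token:

// Reduced sketch of the AAL2 check; MfaClient approximates the client methods used here.
type MfaClient = {
  auth: {
    mfa: {
      listFactors(): Promise<{ data: { totp?: Array<{ status: string }> } | null }>;
    };
  };
};

async function enforceAal2(
  authHeader: string,
  supabaseAuth: MfaClient
): Promise<{ ok: boolean; reason?: string }> {
  // The aal claim lives in the base64-encoded payload segment of the JWT
  const jwt = authHeader.replace('Bearer ', '');
  const payload = JSON.parse(atob(jwt.split('.')[1]));
  const aal: string = payload.aal || 'aal1';

  // MFA counts as enrolled only when a TOTP factor is verified
  const { data: factorsData } = await supabaseAuth.auth.mfa.listFactors();
  const hasMFA = factorsData?.totp?.some((f) => f.status === 'verified') || false;

  if (hasMFA && aal !== 'aal2') {
    // Enrolled users must present an MFA-verified (AAL2) session
    return { ok: false, reason: 'MFA verification required' };
  }
  return { ok: true };
}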
View File

@@ -175,7 +175,9 @@ serve(async (req) => {
let requestBody;
try {
requestBody = await req.json();
} catch (error) {
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error);
console.error('[Upload] Invalid JSON:', { error: errorMessage });
return new Response(
JSON.stringify({
error: 'Invalid JSON',
@@ -357,7 +359,7 @@ serve(async (req) => {
let requestBody;
try {
requestBody = await req.json();
} catch (error) {
} catch (error: unknown) {
requestBody = {};
}
@@ -603,12 +605,13 @@ serve(async (req) => {
}
)
} catch (error) {
console.error('Upload error:', error)
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'An unexpected error occurred';
console.error('[Upload] Error:', { error: errorMessage });
return new Response(
JSON.stringify({
error: 'Internal server error',
message: error instanceof Error ? error.message : 'An unexpected error occurred'
message: errorMessage
}),
{
status: 500,