Fix retry flashing and reloading

This commit is contained in:
gpt-engineer-app[bot]
2025-10-09 18:04:02 +00:00
parent 88bcb00157
commit 73eb8a60e5
2 changed files with 200 additions and 28 deletions

View File

@@ -102,6 +102,7 @@ export const ModerationQueue = forwardRef<ModerationQueueRef>((props, ref) => {
const fetchInProgressRef = useRef(false);
const itemsRef = useRef<ModerationItem[]>([]);
const loadedIdsRef = useRef<Set<string>>(new Set());
const realtimeUpdateDebounceRef = useRef<Map<string, NodeJS.Timeout>>(new Map());
// Get admin settings for polling configuration
const {
@@ -591,14 +592,23 @@ export const ModerationQueue = forwardRef<ModerationQueueRef>((props, ref) => {
// Polling for auto-refresh (only if realtime is disabled)
useEffect(() => {
if (!user || refreshMode !== 'auto' || isInitialLoad || useRealtimeQueue) return;
// STRICT CHECK: Only enable polling if explicitly disabled
if (!user || refreshMode !== 'auto' || isInitialLoad || useRealtimeQueue) {
if (useRealtimeQueue && refreshMode === 'auto') {
console.log('✅ Polling DISABLED - using realtime subscriptions');
}
return;
}
console.log('⚠️ Polling ENABLED - interval:', pollInterval);
const interval = setInterval(() => {
fetchItems(filtersRef.current.entityFilter, filtersRef.current.statusFilter, true); // Silent refresh
console.log('🔄 Polling refresh triggered');
fetchItems(filtersRef.current.entityFilter, filtersRef.current.statusFilter, true);
}, pollInterval);
return () => {
clearInterval(interval);
console.log('🛑 Polling stopped');
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [user, refreshMode, pollInterval, isInitialLoad, useRealtimeQueue]);
@@ -748,6 +758,138 @@ export const ModerationQueue = forwardRef<ModerationQueueRef>((props, ref) => {
};
}, [user, useRealtimeQueue, toast]);
// Helper function to debounce realtime updates
// Per-submission debounce: collapses a burst of realtime events for the same
// submission into a single state update, so the queue does not flash/reload
// once per event (the defect this commit addresses).
const debouncedRealtimeUpdate = useCallback((submissionId: string, updateFn: () => void) => {
  // Restart the window: cancel any timer already pending for this submission.
  const existingTimeout = realtimeUpdateDebounceRef.current.get(submissionId);
  if (existingTimeout) {
    clearTimeout(existingTimeout);
  }
  // Fire only after events stop arriving for 500ms, then drop the map entry
  // so realtimeUpdateDebounceRef does not grow unboundedly.
  const newTimeout = setTimeout(() => {
    updateFn();
    realtimeUpdateDebounceRef.current.delete(submissionId);
  }, 500); // Wait 500ms after last event
  realtimeUpdateDebounceRef.current.set(submissionId, newTimeout);
}, []);
// Cleanup debounce timeouts on unmount
useEffect(() => {
  return () => {
    // Cancel every pending per-submission timer so no debounced update
    // (which would call setItems) runs after the component is gone.
    realtimeUpdateDebounceRef.current.forEach(timeout => clearTimeout(timeout));
    realtimeUpdateDebounceRef.current.clear();
  };
}, []);
// Real-time subscription for UPDATED submissions
// Subscribes to postgres UPDATE events on content_submissions and keeps the
// visible moderation queue in sync: items are updated in place, added when
// they newly match the active filters, or removed when they stop matching.
useEffect(() => {
  // Only subscribe when logged in and realtime queue mode is enabled
  // (a separate polling effect covers the non-realtime case).
  if (!user || !useRealtimeQueue) return;
  const channel = supabase
    .channel('moderation-updated-submissions')
    .on(
      'postgres_changes',
      {
        event: 'UPDATE',
        schema: 'public',
        table: 'content_submissions',
      },
      async (payload) => {
        const updatedSubmission = payload.new as any;
        // Ignore if recently removed (optimistic update in progress)
        // — prevents a just-moderated item from flashing back into the queue.
        if (recentlyRemovedRef.current.has(updatedSubmission.id)) {
          console.log('⏭️ Ignoring UPDATE for recently removed submission:', updatedSubmission.id);
          return;
        }
        // Debounce per submission id so bursts of UPDATE events produce one
        // queue mutation instead of repeated re-renders.
        debouncedRealtimeUpdate(updatedSubmission.id, async () => {
          // Check if submission matches current filters
          // ('photos' = photo submissions only; 'submissions' = everything else).
          const matchesEntityFilter =
            filtersRef.current.entityFilter === 'all' ||
            (filtersRef.current.entityFilter === 'photos' && updatedSubmission.submission_type === 'photo') ||
            (filtersRef.current.entityFilter === 'submissions' && updatedSubmission.submission_type !== 'photo');
          // The 'pending' filter also includes partially approved items.
          const matchesStatusFilter =
            filtersRef.current.statusFilter === 'all' ||
            (filtersRef.current.statusFilter === 'pending' && ['pending', 'partially_approved'].includes(updatedSubmission.status)) ||
            filtersRef.current.statusFilter === updatedSubmission.status;
          const wasInQueue = itemsRef.current.some(i => i.id === updatedSubmission.id);
          const shouldBeInQueue = matchesEntityFilter && matchesStatusFilter;
          if (wasInQueue && !shouldBeInQueue) {
            // Submission moved out of current filter (e.g., pending → approved)
            console.log('❌ Submission moved out of queue:', updatedSubmission.id);
            setItems(prev => prev.filter(i => i.id !== updatedSubmission.id));
          } else if (shouldBeInQueue) {
            // Submission should be in queue - update it
            console.log('🔄 Submission updated in queue:', updatedSubmission.id);
            // Fetch full details
            // NOTE(review): the row is refetched instead of trusting
            // payload.new — presumably the realtime payload can be partial;
            // confirm against the table's replica identity settings.
            try {
              const { data: submission, error } = await supabase
                .from('content_submissions')
                .select(`
                  id, submission_type, status, content, created_at, user_id,
                  reviewed_at, reviewer_id, reviewer_notes, escalated, assigned_to, locked_until
                `)
                .eq('id', updatedSubmission.id)
                .single();
              // Drop the event quietly if the row vanished or the query failed.
              if (error || !submission) return;
              // Get user profile (maybeSingle: the profile row may not exist).
              const { data: profile } = await supabase
                .from('profiles')
                .select('user_id, username, display_name, avatar_url')
                .eq('user_id', submission.user_id)
                .maybeSingle();
              // Resolve entity name (simplified - reuse existing logic)
              const content = submission.content as any;
              const entityName = content?.name || 'Unknown';
              // Map the raw row + profile into the queue's ModerationItem shape;
              // nullable DB columns are normalized to undefined.
              const fullItem: ModerationItem = {
                id: submission.id,
                type: 'content_submission',
                content: submission.content,
                created_at: submission.created_at,
                user_id: submission.user_id,
                status: submission.status,
                submission_type: submission.submission_type,
                user_profile: profile || undefined,
                entity_name: entityName,
                reviewed_at: submission.reviewed_at || undefined,
                reviewer_notes: submission.reviewer_notes || undefined,
                escalated: submission.escalated,
                assigned_to: submission.assigned_to || undefined,
                locked_until: submission.locked_until || undefined,
              };
              // Update or add to queue
              setItems(prev => {
                const exists = prev.some(i => i.id === fullItem.id);
                if (exists) {
                  return prev.map(i => i.id === fullItem.id ? fullItem : i);
                } else {
                  return [fullItem, ...prev]; // Add at top
                }
              });
            } catch (error) {
              console.error('Error processing updated submission:', error);
            }
          }
        });
      }
    )
    .subscribe();
  // Tear down the channel when deps change or the component unmounts.
  return () => {
    supabase.removeChannel(channel);
  };
}, [user, useRealtimeQueue, debouncedRealtimeUpdate]);
const handleResetToPending = async (item: ModerationItem) => {
setActionLoading(item.id);
try {
@@ -788,7 +930,7 @@ export const ModerationQueue = forwardRef<ModerationQueueRef>((props, ref) => {
requestAnimationFrame(() => {
setItems(prev => prev.filter(i => i.id !== item.id));
recentlyRemovedRef.current.add(item.id);
setTimeout(() => recentlyRemovedRef.current.delete(item.id), 3000);
setTimeout(() => recentlyRemovedRef.current.delete(item.id), 10000); // Increased from 3000
});
}
@@ -861,11 +1003,11 @@ export const ModerationQueue = forwardRef<ModerationQueueRef>((props, ref) => {
if (shouldRemove) {
setItems(prev => prev.filter(i => i.id !== item.id));
// Mark as recently removed - ignore realtime updates for 3 seconds
// Mark as recently removed - ignore realtime updates for 10 seconds
recentlyRemovedRef.current.add(item.id);
setTimeout(() => {
recentlyRemovedRef.current.delete(item.id);
}, 3000);
}, 10000); // Increased from 3000
} else {
setItems(prev => prev.map(i =>
i.id === item.id ? { ...i, status: action } : i
@@ -984,6 +1126,7 @@ export const ModerationQueue = forwardRef<ModerationQueueRef>((props, ref) => {
// Optimistic update - remove from queue
setItems(prev => prev.filter(i => i.id !== item.id));
recentlyRemovedRef.current.add(item.id);
setTimeout(() => recentlyRemovedRef.current.delete(item.id), 10000); // Increased timeout
return;
}

View File

@@ -262,20 +262,7 @@ serve(async (req) => {
dependencyMap.set(item.id, entityId);
}
// Update item status
const { error: updateError } = await supabase
.from('submission_items')
.update({
status: 'approved',
approved_entity_id: entityId,
updated_at: new Date().toISOString()
})
.eq('id', item.id);
if (updateError) {
throw new Error(`Failed to update item status: ${updateError.message}`);
}
// Store result for batch update later
approvalResults.push({
itemId: item.id,
entityId,
@@ -300,19 +287,61 @@ serve(async (req) => {
error: error instanceof Error ? error.message : 'Unknown error',
isDependencyFailure: isDependencyError
});
// Mark item as rejected in submission_items
const { error: markRejectedError } = await supabase
}
}
// Batch update all approved items
const approvedItemIds = approvalResults.filter(r => r.success).map(r => r.itemId);
if (approvedItemIds.length > 0) {
const approvedUpdates = approvalResults
.filter(r => r.success)
.map(r => ({
id: r.itemId,
status: 'approved',
approved_entity_id: r.entityId,
updated_at: new Date().toISOString()
}));
for (const update of approvedUpdates) {
const { error: batchApproveError } = await supabase
.from('submission_items')
.update({
status: 'rejected',
rejection_reason: error instanceof Error ? error.message : 'Unknown error',
updated_at: new Date().toISOString()
status: update.status,
approved_entity_id: update.approved_entity_id,
updated_at: update.updated_at
})
.eq('id', item.id);
.eq('id', update.id);
if (markRejectedError) {
console.error(`Failed to mark item ${item.id} as rejected:`, markRejectedError);
if (batchApproveError) {
console.error(`Failed to approve item ${update.id}:`, batchApproveError);
}
}
}
// Batch update all rejected items
const rejectedItemIds = approvalResults.filter(r => !r.success).map(r => r.itemId);
if (rejectedItemIds.length > 0) {
const rejectedUpdates = approvalResults
.filter(r => !r.success)
.map(r => ({
id: r.itemId,
status: 'rejected',
rejection_reason: r.error || 'Unknown error',
updated_at: new Date().toISOString()
}));
for (const update of rejectedUpdates) {
const { error: batchRejectError } = await supabase
.from('submission_items')
.update({
status: update.status,
rejection_reason: update.rejection_reason,
updated_at: update.updated_at
})
.eq('id', update.id);
if (batchRejectError) {
console.error(`Failed to reject item ${update.id}:`, batchRejectError);
}
}
}