mirror of
https://github.com/pacnpal/thrilltrack-explorer.git
synced 2025-12-23 13:31:14 -05:00
Refactor code structure and remove redundant changes
This commit is contained in:
63
src-old/hooks/homepage/useFeaturedParks.ts
Normal file
63
src-old/hooks/homepage/useFeaturedParks.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch featured parks (top rated and most rides)
|
||||
*/
|
||||
export function useFeaturedParks() {
|
||||
const topRated = useQuery({
|
||||
queryKey: queryKeys.homepage.featuredParks.topRated(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`
|
||||
*,
|
||||
location:locations(*),
|
||||
operator:companies!parks_operator_id_fkey(*)
|
||||
`)
|
||||
.order('average_rating', { ascending: false })
|
||||
.limit(3);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
staleTime: 10 * 60 * 1000, // 10 minutes - featured parks change rarely
|
||||
gcTime: 30 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
|
||||
const mostRides = useQuery({
|
||||
queryKey: queryKeys.homepage.featuredParks.mostRides(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`
|
||||
*,
|
||||
location:locations(*),
|
||||
operator:companies!parks_operator_id_fkey(*)
|
||||
`)
|
||||
.order('ride_count', { ascending: false })
|
||||
.limit(3);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
staleTime: 10 * 60 * 1000, // 10 minutes
|
||||
gcTime: 30 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
|
||||
return {
|
||||
topRated: {
|
||||
data: topRated.data,
|
||||
isLoading: topRated.isLoading,
|
||||
error: topRated.error,
|
||||
},
|
||||
mostRides: {
|
||||
data: mostRides.data,
|
||||
isLoading: mostRides.isLoading,
|
||||
error: mostRides.error,
|
||||
},
|
||||
};
|
||||
}
|
||||
60
src-old/hooks/homepage/useHomepageClosed.ts
Normal file
60
src-old/hooks/homepage/useHomepageClosed.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageRecentlyClosedParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentlyClosedParks(),
|
||||
queryFn: async () => {
|
||||
const oneYearAgo = new Date();
|
||||
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
|
||||
const today = new Date();
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.gte('closing_date', oneYearAgo.toISOString())
|
||||
.lte('closing_date', today.toISOString())
|
||||
.order('closing_date', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageRecentlyClosedRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentlyClosedRides(),
|
||||
queryFn: async () => {
|
||||
const oneYearAgo = new Date();
|
||||
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
|
||||
const today = new Date();
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.gte('closing_date', oneYearAgo.toISOString())
|
||||
.lte('closing_date', today.toISOString())
|
||||
.order('closing_date', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
60
src-old/hooks/homepage/useHomepageClosing.ts
Normal file
60
src-old/hooks/homepage/useHomepageClosing.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageClosingSoonParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.closingSoonParks(),
|
||||
queryFn: async () => {
|
||||
const today = new Date();
|
||||
const sixMonthsFromNow = new Date();
|
||||
sixMonthsFromNow.setMonth(sixMonthsFromNow.getMonth() + 6);
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.gte('closing_date', today.toISOString())
|
||||
.lte('closing_date', sixMonthsFromNow.toISOString())
|
||||
.order('closing_date', { ascending: true })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageClosingSoonRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.closingSoonRides(),
|
||||
queryFn: async () => {
|
||||
const today = new Date();
|
||||
const sixMonthsFromNow = new Date();
|
||||
sixMonthsFromNow.setMonth(sixMonthsFromNow.getMonth() + 6);
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.gte('closing_date', today.toISOString())
|
||||
.lte('closing_date', sixMonthsFromNow.toISOString())
|
||||
.order('closing_date', { ascending: true })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
56
src-old/hooks/homepage/useHomepageOpened.ts
Normal file
56
src-old/hooks/homepage/useHomepageOpened.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageRecentlyOpenedParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentlyOpenedParks(),
|
||||
queryFn: async () => {
|
||||
const oneYearAgo = new Date();
|
||||
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.gte('opening_date', oneYearAgo.toISOString())
|
||||
.order('opening_date', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageRecentlyOpenedRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentlyOpenedRides(),
|
||||
queryFn: async () => {
|
||||
const oneYearAgo = new Date();
|
||||
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.gte('opening_date', oneYearAgo.toISOString())
|
||||
.order('opening_date', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
60
src-old/hooks/homepage/useHomepageOpeningSoon.ts
Normal file
60
src-old/hooks/homepage/useHomepageOpeningSoon.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageOpeningSoonParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.openingSoonParks(),
|
||||
queryFn: async () => {
|
||||
const today = new Date();
|
||||
const sixMonthsFromNow = new Date();
|
||||
sixMonthsFromNow.setMonth(sixMonthsFromNow.getMonth() + 6);
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.gte('opening_date', today.toISOString())
|
||||
.lte('opening_date', sixMonthsFromNow.toISOString())
|
||||
.order('opening_date', { ascending: true })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageOpeningSoonRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.openingSoonRides(),
|
||||
queryFn: async () => {
|
||||
const today = new Date();
|
||||
const sixMonthsFromNow = new Date();
|
||||
sixMonthsFromNow.setMonth(sixMonthsFromNow.getMonth() + 6);
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.gte('opening_date', today.toISOString())
|
||||
.lte('opening_date', sixMonthsFromNow.toISOString())
|
||||
.order('opening_date', { ascending: true })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
50
src-old/hooks/homepage/useHomepageRated.ts
Normal file
50
src-old/hooks/homepage/useHomepageRated.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageHighestRatedParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.highestRatedParks(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.not('average_rating', 'is', null)
|
||||
.order('average_rating', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageHighestRatedRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.highestRatedRides(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.not('average_rating', 'is', null)
|
||||
.order('average_rating', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
48
src-old/hooks/homepage/useHomepageRecent.ts
Normal file
48
src-old/hooks/homepage/useHomepageRecent.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageRecentParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentParks(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.order('created_at', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageRecentRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentRides(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.order('created_at', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
52
src-old/hooks/homepage/useHomepageRecentChanges.ts
Normal file
52
src-old/hooks/homepage/useHomepageRecentChanges.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
interface RecentChange {
|
||||
id: string;
|
||||
name: string;
|
||||
type: 'park' | 'ride' | 'company';
|
||||
slug: string;
|
||||
parkSlug?: string;
|
||||
imageUrl?: string;
|
||||
changeType: string;
|
||||
changedAt: string;
|
||||
changedBy?: {
|
||||
username: string;
|
||||
avatarUrl?: string;
|
||||
};
|
||||
changeReason?: string;
|
||||
}
|
||||
|
||||
export function useHomepageRecentChanges(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.recentChanges(),
|
||||
queryFn: async () => {
|
||||
// Use the new database function to get all changes in a single query
|
||||
const { data, error } = await supabase.rpc('get_recent_changes', { limit_count: 24 });
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
// Transform the database response to match our interface
|
||||
return (data || []).map((item: any) => ({
|
||||
id: item.entity_id,
|
||||
name: item.entity_name,
|
||||
type: item.entity_type as 'park' | 'ride' | 'company',
|
||||
slug: item.entity_slug,
|
||||
parkSlug: item.park_slug || undefined,
|
||||
imageUrl: item.image_url || undefined,
|
||||
changeType: item.change_type,
|
||||
changedAt: item.changed_at,
|
||||
changedBy: item.changed_by_username ? {
|
||||
username: item.changed_by_username,
|
||||
avatarUrl: item.changed_by_avatar || undefined
|
||||
} : undefined,
|
||||
changeReason: item.change_reason || undefined
|
||||
})) as RecentChange[];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
48
src-old/hooks/homepage/useHomepageTrending.ts
Normal file
48
src-old/hooks/homepage/useHomepageTrending.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
export function useHomepageTrendingParks(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.trendingParks(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*), operator:companies!parks_operator_id_fkey(*)`)
|
||||
.order('view_count_30d', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
|
||||
export function useHomepageTrendingRides(enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.homepage.trendingRides(),
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks(*, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.order('view_count_30d', { ascending: false })
|
||||
.limit(12);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
59
src-old/hooks/lists/useListItems.ts
Normal file
59
src-old/hooks/lists/useListItems.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch list items with entities (batch fetching to avoid N+1)
|
||||
*/
|
||||
export function useListItems(listId: string | undefined, enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.lists.items(listId || ''),
|
||||
queryFn: async () => {
|
||||
if (!listId) return [];
|
||||
|
||||
// Get items
|
||||
const { data: items, error: itemsError } = await supabase
|
||||
.from('user_top_list_items')
|
||||
.select('*')
|
||||
.eq('list_id', listId)
|
||||
.order('position', { ascending: true });
|
||||
|
||||
if (itemsError) throw itemsError;
|
||||
if (!items || items.length === 0) return [];
|
||||
|
||||
// Group by entity type for batch fetching
|
||||
const parkIds = items.filter(i => i.entity_type === 'park').map(i => i.entity_id);
|
||||
const rideIds = items.filter(i => i.entity_type === 'ride').map(i => i.entity_id);
|
||||
const companyIds = items.filter(i => i.entity_type === 'company').map(i => i.entity_id);
|
||||
|
||||
// Batch fetch all entities in parallel
|
||||
const [parksResult, ridesResult, companiesResult] = await Promise.all([
|
||||
parkIds.length > 0
|
||||
? supabase.from('parks').select('id, name, slug, park_type, location_id').in('id', parkIds)
|
||||
: Promise.resolve({ data: [] }),
|
||||
rideIds.length > 0
|
||||
? supabase.from('rides').select('id, name, slug, category, park_id').in('id', rideIds)
|
||||
: Promise.resolve({ data: [] }),
|
||||
companyIds.length > 0
|
||||
? supabase.from('companies').select('id, name, slug, company_type').in('id', companyIds)
|
||||
: Promise.resolve({ data: [] }),
|
||||
]);
|
||||
|
||||
// Create entities map for quick lookup
|
||||
const entitiesMap = new Map<string, any>();
|
||||
(parksResult.data || []).forEach(p => entitiesMap.set(p.id, p));
|
||||
(ridesResult.data || []).forEach(r => entitiesMap.set(r.id, r));
|
||||
(companiesResult.data || []).forEach(c => entitiesMap.set(c.id, c));
|
||||
|
||||
// Map entities to items
|
||||
return items.map(item => ({
|
||||
...item,
|
||||
entity: entitiesMap.get(item.entity_id),
|
||||
}));
|
||||
},
|
||||
enabled: enabled && !!listId,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
31
src-old/hooks/moderation/index.ts
Normal file
31
src-old/hooks/moderation/index.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
 * Moderation Hooks
 *
 * Centralized exports for all moderation-related hooks.
 * These hooks are designed to support the moderation queue system.
 */

// Lookup caches for entities and user profiles.
export { useEntityCache } from './useEntityCache';
export { useProfileCache } from './useProfileCache';
export type { CachedProfile } from './useProfileCache';

// Queue filter state.
export { useModerationFilters } from './useModerationFilters';
export type { ModerationFilters, ModerationFiltersConfig } from './useModerationFilters';

// Pagination state.
export { usePagination } from './usePagination';
export type { PaginationState, PaginationConfig } from './usePagination';

// Realtime subscriptions.
export { useRealtimeSubscriptions } from './useRealtimeSubscriptions';
export type {
  RealtimeSubscriptionConfig,
  UseRealtimeSubscriptionsReturn
} from './useRealtimeSubscriptions';

// Queue data fetching.
export { useQueueQuery } from './useQueueQuery';
export type { UseQueueQueryConfig, UseQueueQueryReturn } from './useQueueQuery';

// Top-level queue orchestration.
export { useModerationQueueManager } from './useModerationQueueManager';
export type {
  ModerationQueueManager,
  ModerationQueueManagerConfig
} from './useModerationQueueManager';
|
||||
293
src-old/hooks/moderation/useEntityCache.ts
Normal file
293
src-old/hooks/moderation/useEntityCache.ts
Normal file
@@ -0,0 +1,293 @@
|
||||
import { useRef, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { createTableQuery } from '@/lib/supabaseHelpers';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getErrorMessage } from '@/lib/errorHandler';
|
||||
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
|
||||
import type { Database } from '@/integrations/supabase/types';
|
||||
|
||||
/**
|
||||
* Entity types supported by the cache
|
||||
*/
|
||||
type EntityType = 'rides' | 'parks' | 'companies';
|
||||
|
||||
/**
|
||||
* Type definitions for cached entities (can be partial)
|
||||
*/
|
||||
type Ride = Database['public']['Tables']['rides']['Row'];
|
||||
type Park = Database['public']['Tables']['parks']['Row'];
|
||||
type Company = Database['public']['Tables']['companies']['Row'];
|
||||
|
||||
/**
|
||||
* Discriminated union for all cached entity types
|
||||
*/
|
||||
type CachedEntity = Ride | Park | Company;
|
||||
|
||||
/**
|
||||
* Map entity type strings to their corresponding types
|
||||
* Cache stores partial entities with at least id and name
|
||||
*/
|
||||
interface EntityTypeMap {
|
||||
rides: Partial<Ride> & { id: string; name: string };
|
||||
parks: Partial<Park> & { id: string; name: string };
|
||||
companies: Partial<Company> & { id: string; name: string };
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache structure for entities with flexible typing
|
||||
*/
|
||||
interface EntityCacheStructure {
|
||||
rides: Map<string, Partial<Ride> & { id: string; name: string }>;
|
||||
parks: Map<string, Partial<Park> & { id: string; name: string }>;
|
||||
companies: Map<string, Partial<Company> & { id: string; name: string }>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for managing entity name caching (rides, parks, companies)
|
||||
*
|
||||
* Uses ref-based storage to avoid triggering re-renders while providing
|
||||
* efficient caching for entity lookups during moderation.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const entityCache = useEntityCache();
|
||||
*
|
||||
* // Get cached entity
|
||||
* const ride = entityCache.getCached('rides', rideId);
|
||||
*
|
||||
* // Bulk fetch and cache entities
|
||||
* await entityCache.bulkFetch('rides', [id1, id2, id3]);
|
||||
*
|
||||
* // Clear specific cache
|
||||
* entityCache.clear('rides');
|
||||
*
|
||||
* // Clear all caches
|
||||
* entityCache.clearAll();
|
||||
* ```
|
||||
*/
|
||||
export function useEntityCache() {
|
||||
// Use ref to prevent re-renders on cache updates
|
||||
const cacheRef = useRef<EntityCacheStructure>({
|
||||
rides: new Map(),
|
||||
parks: new Map(),
|
||||
companies: new Map(),
|
||||
});
|
||||
|
||||
/**
|
||||
* Get a cached entity by ID with type safety
|
||||
*/
|
||||
const getCached = useCallback(<T extends EntityType>(
|
||||
type: T,
|
||||
id: string
|
||||
): EntityTypeMap[T] | undefined => {
|
||||
return cacheRef.current[type].get(id) as EntityTypeMap[T] | undefined;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Check if an entity is cached
|
||||
*/
|
||||
const has = useCallback((type: EntityType, id: string): boolean => {
|
||||
return cacheRef.current[type].has(id);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Set a cached entity with LRU eviction and type safety
|
||||
*/
|
||||
const setCached = useCallback(<T extends EntityType>(
|
||||
type: T,
|
||||
id: string,
|
||||
data: EntityTypeMap[T]
|
||||
): void => {
|
||||
const cache = cacheRef.current[type];
|
||||
|
||||
// LRU eviction: remove oldest entry if cache is full
|
||||
if (cache.size >= MODERATION_CONSTANTS.MAX_ENTITY_CACHE_SIZE) {
|
||||
const firstKey = cache.keys().next().value;
|
||||
if (firstKey) {
|
||||
cache.delete(firstKey);
|
||||
logger.log(`♻️ [EntityCache] Evicted ${type}/${firstKey} (LRU)`);
|
||||
}
|
||||
}
|
||||
|
||||
cache.set(id, data);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get uncached IDs from a list
|
||||
*/
|
||||
const getUncachedIds = useCallback((type: EntityType, ids: string[]): string[] => {
|
||||
return ids.filter(id => !cacheRef.current[type].has(id));
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Bulk fetch entities from the database and cache them
|
||||
* Only fetches entities that aren't already cached
|
||||
*/
|
||||
const bulkFetch = useCallback(async <T extends EntityType>(
|
||||
type: T,
|
||||
ids: string[]
|
||||
): Promise<EntityTypeMap[T][]> => {
|
||||
if (ids.length === 0) return [];
|
||||
|
||||
// Filter to only uncached IDs
|
||||
const uncachedIds = getUncachedIds(type, ids);
|
||||
if (uncachedIds.length === 0) {
|
||||
// All entities are cached, return them
|
||||
return ids.map(id => getCached(type, id)).filter((item): item is EntityTypeMap[T] => item !== undefined);
|
||||
}
|
||||
|
||||
try {
|
||||
let data: unknown[] | null = null;
|
||||
let error: unknown = null;
|
||||
|
||||
// Use type-safe table queries
|
||||
switch (type) {
|
||||
case 'rides':
|
||||
const ridesResult = await createTableQuery('rides')
|
||||
.select('id, name, slug, park_id')
|
||||
.in('id', uncachedIds);
|
||||
data = ridesResult.data;
|
||||
error = ridesResult.error;
|
||||
break;
|
||||
|
||||
case 'parks':
|
||||
const parksResult = await createTableQuery('parks')
|
||||
.select('id, name, slug')
|
||||
.in('id', uncachedIds);
|
||||
data = parksResult.data;
|
||||
error = parksResult.error;
|
||||
break;
|
||||
|
||||
case 'companies':
|
||||
const companiesResult = await createTableQuery('companies')
|
||||
.select('id, name, slug, company_type')
|
||||
.in('id', uncachedIds);
|
||||
data = companiesResult.data;
|
||||
error = companiesResult.error;
|
||||
break;
|
||||
|
||||
default:
|
||||
// Unknown entity type - skip
|
||||
return [];
|
||||
}
|
||||
|
||||
if (error) {
|
||||
// Silent - cache miss is acceptable
|
||||
return [];
|
||||
}
|
||||
|
||||
// Cache the fetched entities
|
||||
if (data) {
|
||||
(data as Array<Record<string, unknown>>).forEach((entity) => {
|
||||
if (entity && typeof entity === 'object' && 'id' in entity && 'name' in entity) {
|
||||
setCached(type, entity.id as string, entity as EntityTypeMap[T]);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return (data as EntityTypeMap[T][]) || [];
|
||||
} catch (error: unknown) {
|
||||
// Silent - cache operations are non-critical
|
||||
return [];
|
||||
}
|
||||
}, [getCached, setCached, getUncachedIds]);
|
||||
|
||||
/**
|
||||
* Fetch and cache related entities based on submission content
|
||||
* Automatically determines which entities to fetch from submission data
|
||||
*/
|
||||
const fetchRelatedEntities = useCallback(async (submissions: Array<{ content?: Record<string, string | number>; submission_type?: string }>): Promise<void> => {
|
||||
const rideIds = new Set<string>();
|
||||
const parkIds = new Set<string>();
|
||||
const companyIds = new Set<string>();
|
||||
|
||||
// Collect all entity IDs from submissions
|
||||
submissions.forEach(submission => {
|
||||
const content = submission.content;
|
||||
if (content && typeof content === 'object') {
|
||||
if (typeof content.ride_id === 'string') rideIds.add(content.ride_id);
|
||||
if (typeof content.park_id === 'string') parkIds.add(content.park_id);
|
||||
if (typeof content.company_id === 'string') companyIds.add(content.company_id);
|
||||
if (typeof content.entity_id === 'string') {
|
||||
if (submission.submission_type === 'ride') rideIds.add(content.entity_id);
|
||||
if (submission.submission_type === 'park') parkIds.add(content.entity_id);
|
||||
if (['manufacturer', 'operator', 'designer', 'property_owner'].includes(submission.submission_type || '')) {
|
||||
companyIds.add(content.entity_id);
|
||||
}
|
||||
}
|
||||
if (typeof content.manufacturer_id === 'string') companyIds.add(content.manufacturer_id);
|
||||
if (typeof content.designer_id === 'string') companyIds.add(content.designer_id);
|
||||
if (typeof content.operator_id === 'string') companyIds.add(content.operator_id);
|
||||
if (typeof content.property_owner_id === 'string') companyIds.add(content.property_owner_id);
|
||||
}
|
||||
});
|
||||
|
||||
// Fetch all entities in parallel
|
||||
const fetchPromises: Promise<any[]>[] = [];
|
||||
|
||||
if (rideIds.size > 0) {
|
||||
fetchPromises.push(bulkFetch('rides', Array.from(rideIds)));
|
||||
}
|
||||
if (parkIds.size > 0) {
|
||||
fetchPromises.push(bulkFetch('parks', Array.from(parkIds)));
|
||||
}
|
||||
if (companyIds.size > 0) {
|
||||
fetchPromises.push(bulkFetch('companies', Array.from(companyIds)));
|
||||
}
|
||||
|
||||
await Promise.all(fetchPromises);
|
||||
}, [bulkFetch]);
|
||||
|
||||
/**
|
||||
* Clear a specific entity type cache
|
||||
*/
|
||||
const clear = useCallback((type: EntityType): void => {
|
||||
cacheRef.current[type].clear();
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Clear all entity caches
|
||||
*/
|
||||
const clearAll = useCallback((): void => {
|
||||
cacheRef.current.rides.clear();
|
||||
cacheRef.current.parks.clear();
|
||||
cacheRef.current.companies.clear();
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get cache size for a specific type
|
||||
*/
|
||||
const getSize = useCallback((type: EntityType): number => {
|
||||
return cacheRef.current[type].size;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get total cache size across all entity types
|
||||
*/
|
||||
const getTotalSize = useCallback((): number => {
|
||||
return cacheRef.current.rides.size +
|
||||
cacheRef.current.parks.size +
|
||||
cacheRef.current.companies.size;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get direct access to cache ref (for advanced use cases)
|
||||
* Use with caution - prefer using the provided methods
|
||||
*/
|
||||
const getCacheRef = useCallback(() => cacheRef.current, []);
|
||||
|
||||
// Return without useMemo wrapper (OPTIMIZED)
|
||||
return {
|
||||
getCached,
|
||||
has,
|
||||
setCached,
|
||||
getUncachedIds,
|
||||
bulkFetch,
|
||||
fetchRelatedEntities,
|
||||
clear,
|
||||
clearAll,
|
||||
getSize,
|
||||
getTotalSize,
|
||||
getCacheRef,
|
||||
};
|
||||
}
|
||||
978
src-old/hooks/moderation/useModerationActions.ts
Normal file
978
src-old/hooks/moderation/useModerationActions.ts
Normal file
@@ -0,0 +1,978 @@
|
||||
import { useCallback } from 'react';
|
||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { useToast } from '@/hooks/use-toast';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getErrorMessage, handleError, isSupabaseConnectionError } from '@/lib/errorHandler';
|
||||
// Validation removed from client - edge function is single source of truth
|
||||
import { invokeWithTracking } from '@/lib/edgeFunctionTracking';
|
||||
import {
|
||||
generateIdempotencyKey,
|
||||
is409Conflict,
|
||||
getRetryAfter,
|
||||
sleep,
|
||||
generateAndRegisterKey,
|
||||
validateAndStartProcessing,
|
||||
markKeyCompleted,
|
||||
markKeyFailed,
|
||||
} from '@/lib/idempotencyHelpers';
|
||||
import {
|
||||
withTimeout,
|
||||
isTimeoutError,
|
||||
getTimeoutErrorMessage,
|
||||
type TimeoutError,
|
||||
} from '@/lib/timeoutDetection';
|
||||
import {
|
||||
autoReleaseLockOnError,
|
||||
} from '@/lib/moderation/lockAutoRelease';
|
||||
import type { User } from '@supabase/supabase-js';
|
||||
import type { ModerationItem } from '@/types/moderation';
|
||||
|
||||
/**
 * Configuration for moderation actions
 */
export interface ModerationActionsConfig {
  /** Currently authenticated moderator; null when signed out. */
  user: User | null;
  /** Invoked with the item id just before an action begins (e.g. to show per-item loading state). */
  onActionStart: (itemId: string) => void;
  /** Invoked after an action settles — success or failure — to clear loading state. */
  onActionComplete: () => void;
  /**
   * Submission id currently locked by this moderator, if any.
   * NOTE(review): not read by the hook body itself — presumably consumed by
   * callers/lock helpers; verify before removing.
   */
  currentLockSubmissionId?: string | null;
}
|
||||
|
||||
/**
 * Return type for useModerationActions
 */
export interface ModerationActions {
  /** Approve or reject a submission/review, with optional moderator notes. */
  performAction: (item: ModerationItem, action: 'approved' | 'rejected', moderatorNotes?: string) => Promise<void>;
  /** Permanently delete a content submission (no-op for other item types). */
  deleteSubmission: (item: ModerationItem) => Promise<void>;
  /** Reset a submission and its rejected items back to pending status. */
  resetToPending: (item: ModerationItem) => Promise<void>;
  /** Re-run processing for a submission's rejected items via the approval edge function. */
  retryFailedItems: (item: ModerationItem) => Promise<void>;
  /** Escalate a submission for admin review, sending a notification email. */
  escalateSubmission: (item: ModerationItem, reason: string) => Promise<void>;
}
|
||||
|
||||
/**
|
||||
* Hook for moderation action handlers
|
||||
* Extracted from useModerationQueueManager for better separation of concerns
|
||||
*
|
||||
* @param config - Configuration object with user, callbacks, and dependencies
|
||||
* @returns Object with action handler functions
|
||||
*/
|
||||
export function useModerationActions(config: ModerationActionsConfig): ModerationActions {
|
||||
const { user, onActionStart, onActionComplete } = config;
|
||||
const { toast } = useToast();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
/**
|
||||
* Invoke edge function with full transaction resilience
|
||||
*
|
||||
* Provides:
|
||||
* - Timeout detection with automatic recovery
|
||||
* - Lock auto-release on error/timeout
|
||||
* - Idempotency key lifecycle management
|
||||
* - 409 Conflict handling with exponential backoff
|
||||
*
|
||||
* @param functionName - Edge function to invoke
|
||||
* @param payload - Request payload with submissionId
|
||||
* @param action - Action type for idempotency key generation
|
||||
* @param itemIds - Item IDs being processed
|
||||
* @param userId - User ID for tracking
|
||||
* @param maxConflictRetries - Max retries for 409 responses (default: 3)
|
||||
* @param timeoutMs - Timeout in milliseconds (default: 30000)
|
||||
* @returns Result with data, error, requestId, etc.
|
||||
*/
|
||||
async function invokeWithResilience<T = any>(
|
||||
functionName: string,
|
||||
payload: any,
|
||||
action: 'approval' | 'rejection' | 'retry',
|
||||
itemIds: string[],
|
||||
userId?: string,
|
||||
maxConflictRetries: number = 3,
|
||||
timeoutMs: number = 30000
|
||||
): Promise<{
|
||||
data: T | null;
|
||||
error: any;
|
||||
requestId: string;
|
||||
duration: number;
|
||||
attempts?: number;
|
||||
cached?: boolean;
|
||||
conflictRetries?: number;
|
||||
}> {
|
||||
if (!userId) {
|
||||
return {
|
||||
data: null,
|
||||
error: { message: 'User not authenticated' },
|
||||
requestId: 'auth-error',
|
||||
duration: 0,
|
||||
};
|
||||
}
|
||||
|
||||
const submissionId = payload.submissionId;
|
||||
if (!submissionId) {
|
||||
return {
|
||||
data: null,
|
||||
error: { message: 'Missing submissionId in payload' },
|
||||
requestId: 'validation-error',
|
||||
duration: 0,
|
||||
};
|
||||
}
|
||||
|
||||
// Generate and register idempotency key
|
||||
const { key: idempotencyKey } = await generateAndRegisterKey(
|
||||
action,
|
||||
submissionId,
|
||||
itemIds,
|
||||
userId
|
||||
);
|
||||
|
||||
logger.info('[ModerationResilience] Starting transaction', {
|
||||
action,
|
||||
submissionId,
|
||||
itemIds,
|
||||
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
|
||||
});
|
||||
|
||||
let conflictRetries = 0;
|
||||
let lastError: any = null;
|
||||
|
||||
try {
|
||||
// Validate key and mark as processing
|
||||
const isValid = await validateAndStartProcessing(idempotencyKey);
|
||||
|
||||
if (!isValid) {
|
||||
const error = new Error('Idempotency key validation failed - possible duplicate request');
|
||||
await markKeyFailed(idempotencyKey, error.message);
|
||||
return {
|
||||
data: null,
|
||||
error,
|
||||
requestId: 'idempotency-validation-failed',
|
||||
duration: 0,
|
||||
};
|
||||
}
|
||||
|
||||
// Retry loop for 409 conflicts
|
||||
while (conflictRetries <= maxConflictRetries) {
|
||||
try {
|
||||
// Execute with timeout detection
|
||||
const result = await withTimeout(
|
||||
async () => {
|
||||
return await invokeWithTracking<T>(
|
||||
functionName,
|
||||
payload,
|
||||
userId,
|
||||
undefined,
|
||||
undefined,
|
||||
timeoutMs,
|
||||
{ maxAttempts: 3, baseDelay: 1500 },
|
||||
{ 'X-Idempotency-Key': idempotencyKey }
|
||||
);
|
||||
},
|
||||
timeoutMs,
|
||||
'edge-function'
|
||||
);
|
||||
|
||||
// Success or non-409 error
|
||||
if (!result.error || !is409Conflict(result.error)) {
|
||||
const isCached = result.data && typeof result.data === 'object' && 'cached' in result.data
|
||||
? (result.data as any).cached
|
||||
: false;
|
||||
|
||||
// Mark key as completed on success
|
||||
if (!result.error) {
|
||||
await markKeyCompleted(idempotencyKey);
|
||||
} else {
|
||||
await markKeyFailed(idempotencyKey, getErrorMessage(result.error));
|
||||
}
|
||||
|
||||
logger.info('[ModerationResilience] Transaction completed', {
|
||||
action,
|
||||
submissionId,
|
||||
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
|
||||
success: !result.error,
|
||||
cached: isCached,
|
||||
conflictRetries,
|
||||
});
|
||||
|
||||
return {
|
||||
...result,
|
||||
cached: isCached,
|
||||
conflictRetries,
|
||||
};
|
||||
}
|
||||
|
||||
// 409 Conflict detected
|
||||
lastError = result.error;
|
||||
conflictRetries++;
|
||||
|
||||
if (conflictRetries > maxConflictRetries) {
|
||||
logger.error('Max 409 conflict retries exceeded', {
|
||||
functionName,
|
||||
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
|
||||
conflictRetries,
|
||||
submissionId,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
// Wait before retry
|
||||
const retryAfterSeconds = getRetryAfter(result.error);
|
||||
const retryDelayMs = retryAfterSeconds * 1000;
|
||||
|
||||
logger.log(`409 Conflict detected, retrying after ${retryAfterSeconds}s (attempt ${conflictRetries}/${maxConflictRetries})`, {
|
||||
functionName,
|
||||
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
|
||||
retryAfterSeconds,
|
||||
});
|
||||
|
||||
await sleep(retryDelayMs);
|
||||
} catch (innerError) {
|
||||
// Handle timeout errors specifically
|
||||
if (isTimeoutError(innerError)) {
|
||||
const timeoutError = innerError as TimeoutError;
|
||||
const message = getTimeoutErrorMessage(timeoutError);
|
||||
|
||||
logger.error('[ModerationResilience] Transaction timed out', {
|
||||
action,
|
||||
submissionId,
|
||||
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
|
||||
duration: timeoutError.duration,
|
||||
});
|
||||
|
||||
// Auto-release lock on timeout
|
||||
await autoReleaseLockOnError(submissionId, userId, timeoutError);
|
||||
|
||||
// Mark key as failed
|
||||
await markKeyFailed(idempotencyKey, message);
|
||||
|
||||
return {
|
||||
data: null,
|
||||
error: timeoutError,
|
||||
requestId: 'timeout-error',
|
||||
duration: timeoutError.duration || 0,
|
||||
conflictRetries,
|
||||
};
|
||||
}
|
||||
|
||||
// Re-throw non-timeout errors to outer catch
|
||||
throw innerError;
|
||||
}
|
||||
}
|
||||
|
||||
// All conflict retries exhausted
|
||||
await markKeyFailed(idempotencyKey, 'Max 409 conflict retries exceeded');
|
||||
return {
|
||||
data: null,
|
||||
error: lastError || { message: 'Unknown conflict retry error' },
|
||||
requestId: 'conflict-retry-failed',
|
||||
duration: 0,
|
||||
attempts: 0,
|
||||
conflictRetries,
|
||||
};
|
||||
} catch (error) {
|
||||
// Generic error handling
|
||||
const errorMessage = getErrorMessage(error);
|
||||
|
||||
logger.error('[ModerationResilience] Transaction failed', {
|
||||
action,
|
||||
submissionId,
|
||||
idempotencyKey: idempotencyKey.substring(0, 32) + '...',
|
||||
error: errorMessage,
|
||||
});
|
||||
|
||||
// Auto-release lock on error
|
||||
await autoReleaseLockOnError(submissionId, userId, error);
|
||||
|
||||
// Mark key as failed
|
||||
await markKeyFailed(idempotencyKey, errorMessage);
|
||||
|
||||
return {
|
||||
data: null,
|
||||
error,
|
||||
requestId: 'error',
|
||||
duration: 0,
|
||||
conflictRetries,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform moderation action (approve/reject) with optimistic updates
|
||||
*/
|
||||
const performActionMutation = useMutation({
|
||||
mutationFn: async ({
|
||||
item,
|
||||
action,
|
||||
moderatorNotes
|
||||
}: {
|
||||
item: ModerationItem;
|
||||
action: 'approved' | 'rejected';
|
||||
moderatorNotes?: string;
|
||||
}) => {
|
||||
// Handle photo submissions
|
||||
if (action === 'approved' && item.submission_type === 'photo') {
|
||||
const { data: photoSubmission, error: fetchError } = await supabase
|
||||
.from('photo_submissions')
|
||||
.select(`
|
||||
*,
|
||||
items:photo_submission_items(*),
|
||||
submission:content_submissions!inner(user_id)
|
||||
`)
|
||||
.eq('submission_id', item.id)
|
||||
.single();
|
||||
|
||||
// Add explicit error handling
|
||||
if (fetchError) {
|
||||
throw new Error(`Failed to fetch photo submission: ${fetchError.message}`);
|
||||
}
|
||||
|
||||
if (!photoSubmission) {
|
||||
throw new Error('Photo submission not found');
|
||||
}
|
||||
|
||||
// Type assertion with validation
|
||||
const typedPhotoSubmission = photoSubmission as {
|
||||
id: string;
|
||||
entity_id: string;
|
||||
entity_type: string;
|
||||
items: Array<{
|
||||
id: string;
|
||||
cloudflare_image_id: string;
|
||||
cloudflare_image_url: string;
|
||||
caption?: string;
|
||||
title?: string;
|
||||
date_taken?: string;
|
||||
date_taken_precision?: string;
|
||||
order_index: number;
|
||||
}>;
|
||||
submission: { user_id: string };
|
||||
};
|
||||
|
||||
// Validate required fields
|
||||
if (!typedPhotoSubmission.items || typedPhotoSubmission.items.length === 0) {
|
||||
throw new Error('No photo items found in submission');
|
||||
}
|
||||
|
||||
const { data: existingPhotos } = await supabase
|
||||
.from('photos')
|
||||
.select('id')
|
||||
.eq('submission_id', item.id);
|
||||
|
||||
if (!existingPhotos || existingPhotos.length === 0) {
|
||||
const photoRecords = typedPhotoSubmission.items.map((photoItem) => ({
|
||||
entity_id: typedPhotoSubmission.entity_id,
|
||||
entity_type: typedPhotoSubmission.entity_type,
|
||||
cloudflare_image_id: photoItem.cloudflare_image_id,
|
||||
cloudflare_image_url: photoItem.cloudflare_image_url,
|
||||
title: photoItem.title || null,
|
||||
caption: photoItem.caption || null,
|
||||
date_taken: photoItem.date_taken || null,
|
||||
order_index: photoItem.order_index,
|
||||
submission_id: item.id,
|
||||
submitted_by: typedPhotoSubmission.submission?.user_id,
|
||||
approved_by: user?.id,
|
||||
approved_at: new Date().toISOString(),
|
||||
}));
|
||||
|
||||
await supabase.from('photos').insert(photoRecords);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for submission items
|
||||
const { data: submissionItems } = await supabase
|
||||
.from('submission_items')
|
||||
.select('id, status')
|
||||
.eq('submission_id', item.id)
|
||||
.in('status', ['pending', 'rejected']);
|
||||
|
||||
if (submissionItems && submissionItems.length > 0) {
|
||||
if (action === 'approved') {
|
||||
// ⚠️ VALIDATION CENTRALIZED IN EDGE FUNCTION
|
||||
// All business logic validation happens in process-selective-approval edge function.
|
||||
// Client-side only performs basic UX validation (non-empty, format) in forms.
|
||||
// If server-side validation fails, the edge function returns detailed 400/500 errors.
|
||||
|
||||
const {
|
||||
data,
|
||||
error,
|
||||
requestId,
|
||||
attempts,
|
||||
cached,
|
||||
conflictRetries
|
||||
} = await invokeWithResilience(
|
||||
'process-selective-approval',
|
||||
{
|
||||
itemIds: submissionItems.map((i) => i.id),
|
||||
submissionId: item.id,
|
||||
},
|
||||
'approval',
|
||||
submissionItems.map((i) => i.id),
|
||||
config.user?.id,
|
||||
3, // Max 3 conflict retries
|
||||
30000 // 30s timeout
|
||||
);
|
||||
|
||||
// Log retry attempts
|
||||
if (attempts && attempts > 1) {
|
||||
logger.log(`Approval succeeded after ${attempts} network retries`, {
|
||||
submissionId: item.id,
|
||||
requestId,
|
||||
});
|
||||
}
|
||||
|
||||
if (conflictRetries && conflictRetries > 0) {
|
||||
logger.log(`Resolved 409 conflict after ${conflictRetries} retries`, {
|
||||
submissionId: item.id,
|
||||
requestId,
|
||||
cached: !!cached,
|
||||
});
|
||||
}
|
||||
|
||||
if (error) {
|
||||
// Enhance error with context for better UI feedback
|
||||
if (is409Conflict(error)) {
|
||||
throw new Error(
|
||||
'This approval is being processed by another request. Please wait and try again if it does not complete.'
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
toast({
|
||||
title: cached ? 'Cached Result' : 'Submission Approved',
|
||||
description: cached
|
||||
? `Returned cached result for ${submissionItems.length} item(s)`
|
||||
: `Successfully processed ${submissionItems.length} item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
|
||||
});
|
||||
return;
|
||||
} else if (action === 'rejected') {
|
||||
await supabase
|
||||
.from('submission_items')
|
||||
.update({
|
||||
status: 'rejected',
|
||||
rejection_reason: moderatorNotes || 'Parent submission rejected',
|
||||
updated_at: new Date().toISOString(),
|
||||
})
|
||||
.eq('submission_id', item.id)
|
||||
.eq('status', 'pending');
|
||||
}
|
||||
}
|
||||
|
||||
// Standard update
|
||||
const table = item.type === 'review' ? 'reviews' : 'content_submissions';
|
||||
const statusField = item.type === 'review' ? 'moderation_status' : 'status';
|
||||
const timestampField = item.type === 'review' ? 'moderated_at' : 'reviewed_at';
|
||||
const reviewerField = item.type === 'review' ? 'moderated_by' : 'reviewer_id';
|
||||
|
||||
const updateData: any = {
|
||||
[statusField]: action,
|
||||
[timestampField]: new Date().toISOString(),
|
||||
};
|
||||
|
||||
if (user) {
|
||||
updateData[reviewerField] = user.id;
|
||||
}
|
||||
|
||||
if (moderatorNotes) {
|
||||
updateData.reviewer_notes = moderatorNotes;
|
||||
}
|
||||
|
||||
const { error } = await supabase.from(table).update(updateData).eq('id', item.id);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
// Log audit trail for review moderation
|
||||
if (table === 'reviews' && user) {
|
||||
try {
|
||||
// Extract entity information from item content
|
||||
const entityType = item.content?.ride_id ? 'ride' : item.content?.park_id ? 'park' : 'unknown';
|
||||
const entityId = item.content?.ride_id || item.content?.park_id || null;
|
||||
|
||||
await supabase.rpc('log_admin_action', {
|
||||
_admin_user_id: user.id,
|
||||
_target_user_id: item.user_id,
|
||||
_action: `review_${action}`,
|
||||
_details: {
|
||||
review_id: item.id,
|
||||
entity_type: entityType,
|
||||
entity_id: entityId,
|
||||
moderator_notes: moderatorNotes
|
||||
}
|
||||
});
|
||||
} catch (auditError) {
|
||||
// Silent - audit logging is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
toast({
|
||||
title: `Content ${action}`,
|
||||
description: `The ${item.type} has been ${action}`,
|
||||
});
|
||||
|
||||
logger.log(`✅ Action ${action} completed for ${item.id}`);
|
||||
return { item, action };
|
||||
},
|
||||
onMutate: async ({ item, action }) => {
|
||||
// Cancel outgoing refetches
|
||||
await queryClient.cancelQueries({ queryKey: ['moderation-queue'] });
|
||||
|
||||
// Snapshot previous value
|
||||
const previousData = queryClient.getQueryData(['moderation-queue']);
|
||||
|
||||
// Optimistically update cache
|
||||
queryClient.setQueriesData({ queryKey: ['moderation-queue'] }, (old: any) => {
|
||||
if (!old?.submissions) return old;
|
||||
|
||||
return {
|
||||
...old,
|
||||
submissions: old.submissions.map((i: ModerationItem) =>
|
||||
i.id === item.id
|
||||
? {
|
||||
...i,
|
||||
status: action,
|
||||
_optimistic: true,
|
||||
reviewed_at: new Date().toISOString(),
|
||||
reviewer_id: user?.id,
|
||||
}
|
||||
: i
|
||||
),
|
||||
};
|
||||
});
|
||||
|
||||
return { previousData };
|
||||
},
|
||||
onError: (error: any, variables, context) => {
|
||||
// Rollback optimistic update
|
||||
if (context?.previousData) {
|
||||
queryClient.setQueryData(['moderation-queue'], context.previousData);
|
||||
}
|
||||
|
||||
// Enhanced error handling with timeout, conflict, and network detection
|
||||
const isNetworkError = isSupabaseConnectionError(error);
|
||||
const isConflict = is409Conflict(error);
|
||||
const isTimeout = isTimeoutError(error);
|
||||
const errorMessage = getErrorMessage(error) || `Failed to ${variables.action} content`;
|
||||
|
||||
// Check if this is a validation error from edge function
|
||||
const isValidationError = errorMessage.includes('Validation failed') ||
|
||||
errorMessage.includes('blocking errors') ||
|
||||
errorMessage.includes('blockingErrors');
|
||||
|
||||
toast({
|
||||
title: isNetworkError ? 'Connection Error' :
|
||||
isValidationError ? 'Validation Failed' :
|
||||
isConflict ? 'Duplicate Request' :
|
||||
isTimeout ? 'Transaction Timeout' :
|
||||
'Action Failed',
|
||||
description: isTimeout
|
||||
? getTimeoutErrorMessage(error as TimeoutError)
|
||||
: isConflict
|
||||
? 'This action is already being processed. Please wait for it to complete.'
|
||||
: errorMessage,
|
||||
variant: 'destructive',
|
||||
});
|
||||
|
||||
logger.error('Moderation action failed', {
|
||||
itemId: variables.item.id,
|
||||
action: variables.action,
|
||||
error: errorMessage,
|
||||
errorId: error.errorId,
|
||||
isNetworkError,
|
||||
isValidationError,
|
||||
isConflict,
|
||||
isTimeout,
|
||||
});
|
||||
},
|
||||
onSuccess: (data) => {
|
||||
if (data) {
|
||||
toast({
|
||||
title: `Content ${data.action}`,
|
||||
description: `The ${data.item.type} has been ${data.action}`,
|
||||
});
|
||||
}
|
||||
},
|
||||
onSettled: () => {
|
||||
// Always refetch to ensure consistency
|
||||
queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
|
||||
onActionComplete();
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
* Wrapper function that handles loading states and error tracking
|
||||
*/
|
||||
const performAction = useCallback(
|
||||
async (item: ModerationItem, action: 'approved' | 'rejected', moderatorNotes?: string) => {
|
||||
onActionStart(item.id);
|
||||
try {
|
||||
await performActionMutation.mutateAsync({ item, action, moderatorNotes });
|
||||
} catch (error) {
|
||||
const errorId = handleError(error, {
|
||||
action: `Moderation ${action}`,
|
||||
userId: user?.id,
|
||||
metadata: {
|
||||
submissionId: item.id,
|
||||
submissionType: item.submission_type,
|
||||
itemType: item.type,
|
||||
hasSubmissionItems: item.submission_items?.length ?? 0,
|
||||
moderatorNotes: moderatorNotes?.substring(0, 100),
|
||||
},
|
||||
});
|
||||
|
||||
// Attach error ID for UI display
|
||||
const enhancedError = error instanceof Error
|
||||
? Object.assign(error, { errorId })
|
||||
: { message: getErrorMessage(error), errorId };
|
||||
throw enhancedError;
|
||||
}
|
||||
},
|
||||
[onActionStart, performActionMutation, user]
|
||||
);
|
||||
|
||||
/**
|
||||
* Delete a submission permanently
|
||||
*/
|
||||
const deleteSubmission = useCallback(
|
||||
async (item: ModerationItem) => {
|
||||
if (item.type !== 'content_submission') return;
|
||||
|
||||
onActionStart(item.id);
|
||||
|
||||
try {
|
||||
// Fetch submission details for audit log
|
||||
const { data: submission } = await supabase
|
||||
.from('content_submissions')
|
||||
.select('user_id, submission_type, status')
|
||||
.eq('id', item.id)
|
||||
.single();
|
||||
|
||||
const { error } = await supabase.from('content_submissions').delete().eq('id', item.id);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
// Log audit trail for deletion
|
||||
if (user && submission) {
|
||||
try {
|
||||
await supabase.rpc('log_admin_action', {
|
||||
_admin_user_id: user.id,
|
||||
_target_user_id: submission.user_id,
|
||||
_action: 'submission_deleted',
|
||||
_details: {
|
||||
submission_id: item.id,
|
||||
submission_type: submission.submission_type,
|
||||
status_when_deleted: submission.status
|
||||
}
|
||||
});
|
||||
} catch (auditError) {
|
||||
// Silent - audit logging is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
toast({
|
||||
title: 'Submission deleted',
|
||||
description: 'The submission has been permanently deleted',
|
||||
});
|
||||
|
||||
logger.log(`✅ Submission ${item.id} deleted`);
|
||||
} catch (error: unknown) {
|
||||
const errorId = handleError(error, {
|
||||
action: 'Delete Submission',
|
||||
userId: user?.id,
|
||||
metadata: {
|
||||
submissionId: item.id,
|
||||
submissionType: item.submission_type,
|
||||
},
|
||||
});
|
||||
|
||||
logger.error('Failed to delete submission', {
|
||||
submissionId: item.id,
|
||||
errorId,
|
||||
});
|
||||
const enhancedError = error instanceof Error
|
||||
? Object.assign(error, { errorId })
|
||||
: { message: getErrorMessage(error), errorId };
|
||||
throw enhancedError;
|
||||
} finally {
|
||||
onActionComplete();
|
||||
}
|
||||
},
|
||||
[toast, onActionStart, onActionComplete]
|
||||
);
|
||||
|
||||
/**
|
||||
* Reset submission to pending status
|
||||
*/
|
||||
const resetToPending = useCallback(
|
||||
async (item: ModerationItem) => {
|
||||
onActionStart(item.id);
|
||||
|
||||
try {
|
||||
const { resetRejectedItemsToPending } = await import('@/lib/submissionItemsService');
|
||||
await resetRejectedItemsToPending(item.id);
|
||||
|
||||
// Log audit trail for reset
|
||||
if (user) {
|
||||
try {
|
||||
await supabase.rpc('log_admin_action', {
|
||||
_admin_user_id: user.id,
|
||||
_target_user_id: item.user_id,
|
||||
_action: 'submission_reset',
|
||||
_details: {
|
||||
submission_id: item.id,
|
||||
submission_type: item.submission_type
|
||||
}
|
||||
});
|
||||
} catch (auditError) {
|
||||
// Silent - audit logging is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
toast({
|
||||
title: 'Reset Complete',
|
||||
description: 'Submission and all items have been reset to pending status',
|
||||
});
|
||||
|
||||
logger.log(`✅ Submission ${item.id} reset to pending`);
|
||||
} catch (error: unknown) {
|
||||
const errorId = handleError(error, {
|
||||
action: 'Reset to Pending',
|
||||
userId: user?.id,
|
||||
metadata: {
|
||||
submissionId: item.id,
|
||||
submissionType: item.submission_type,
|
||||
},
|
||||
});
|
||||
|
||||
logger.error('Failed to reset status', {
|
||||
submissionId: item.id,
|
||||
errorId,
|
||||
});
|
||||
const enhancedError = error instanceof Error
|
||||
? Object.assign(error, { errorId })
|
||||
: { message: getErrorMessage(error), errorId };
|
||||
throw enhancedError;
|
||||
} finally {
|
||||
onActionComplete();
|
||||
}
|
||||
},
|
||||
[toast, onActionStart, onActionComplete]
|
||||
);
|
||||
|
||||
/**
|
||||
* Retry failed items in a submission
|
||||
*/
|
||||
const retryFailedItems = useCallback(
|
||||
async (item: ModerationItem) => {
|
||||
onActionStart(item.id);
|
||||
let failedItemsCount = 0;
|
||||
|
||||
try {
|
||||
const { data: failedItems } = await supabase
|
||||
.from('submission_items')
|
||||
.select('id')
|
||||
.eq('submission_id', item.id)
|
||||
.eq('status', 'rejected');
|
||||
|
||||
if (!failedItems || failedItems.length === 0) {
|
||||
toast({
|
||||
title: 'No Failed Items',
|
||||
description: 'All items have been processed successfully',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
failedItemsCount = failedItems.length;
|
||||
|
||||
const {
|
||||
data,
|
||||
error,
|
||||
requestId,
|
||||
attempts,
|
||||
cached,
|
||||
conflictRetries
|
||||
} = await invokeWithResilience(
|
||||
'process-selective-approval',
|
||||
{
|
||||
itemIds: failedItems.map((i) => i.id),
|
||||
submissionId: item.id,
|
||||
},
|
||||
'retry',
|
||||
failedItems.map((i) => i.id),
|
||||
config.user?.id,
|
||||
3, // Max 3 conflict retries
|
||||
30000 // 30s timeout
|
||||
);
|
||||
|
||||
if (attempts && attempts > 1) {
|
||||
logger.log(`Retry succeeded after ${attempts} network retries`, {
|
||||
submissionId: item.id,
|
||||
requestId,
|
||||
});
|
||||
}
|
||||
|
||||
if (conflictRetries && conflictRetries > 0) {
|
||||
logger.log(`Retry resolved 409 conflict after ${conflictRetries} retries`, {
|
||||
submissionId: item.id,
|
||||
requestId,
|
||||
cached: !!cached,
|
||||
});
|
||||
}
|
||||
|
||||
if (error) {
|
||||
if (is409Conflict(error)) {
|
||||
throw new Error(
|
||||
'This retry is being processed by another request. Please wait and try again if it does not complete.'
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Log audit trail for retry
|
||||
if (user) {
|
||||
try {
|
||||
await supabase.rpc('log_admin_action', {
|
||||
_admin_user_id: user.id,
|
||||
_target_user_id: item.user_id,
|
||||
_action: 'submission_retried',
|
||||
_details: {
|
||||
submission_id: item.id,
|
||||
submission_type: item.submission_type,
|
||||
items_retried: failedItems.length,
|
||||
request_id: requestId
|
||||
}
|
||||
});
|
||||
} catch (auditError) {
|
||||
// Silent - audit logging is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
toast({
|
||||
title: cached ? 'Cached Retry Result' : 'Items Retried',
|
||||
description: cached
|
||||
? `Returned cached result for ${failedItems.length} item(s)`
|
||||
: `Successfully retried ${failedItems.length} failed item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
|
||||
});
|
||||
|
||||
logger.log(`✅ Retried ${failedItems.length} failed items for ${item.id}`);
|
||||
} catch (error: unknown) {
|
||||
const errorId = handleError(error, {
|
||||
action: 'Retry Failed Items',
|
||||
userId: user?.id,
|
||||
metadata: {
|
||||
submissionId: item.id,
|
||||
failedItemsCount,
|
||||
},
|
||||
});
|
||||
|
||||
logger.error('Failed to retry items', {
|
||||
submissionId: item.id,
|
||||
errorId,
|
||||
});
|
||||
const enhancedError = error instanceof Error
|
||||
? Object.assign(error, { errorId })
|
||||
: { message: getErrorMessage(error), errorId };
|
||||
throw enhancedError;
|
||||
} finally {
|
||||
onActionComplete();
|
||||
}
|
||||
},
|
||||
[toast, onActionStart, onActionComplete, user]
|
||||
);
|
||||
|
||||
/**
|
||||
* Escalate submission for admin review
|
||||
* Consolidates escalation logic with comprehensive error handling
|
||||
*/
|
||||
const escalateSubmission = useCallback(
|
||||
async (item: ModerationItem, reason: string) => {
|
||||
if (!user?.id) {
|
||||
toast({
|
||||
title: 'Authentication Required',
|
||||
description: 'You must be logged in to escalate submissions',
|
||||
variant: 'destructive',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
onActionStart(item.id);
|
||||
|
||||
try {
|
||||
// Call edge function for email notification with retry
|
||||
const { error: edgeFunctionError, requestId, attempts } = await invokeWithTracking(
|
||||
'send-escalation-notification',
|
||||
{
|
||||
submissionId: item.id,
|
||||
escalationReason: reason,
|
||||
escalatedBy: user.id,
|
||||
},
|
||||
user.id,
|
||||
undefined,
|
||||
undefined,
|
||||
45000, // Longer timeout for email sending
|
||||
{ maxAttempts: 3, baseDelay: 2000 } // Retry for email delivery
|
||||
);
|
||||
|
||||
if (attempts && attempts > 1) {
|
||||
logger.log(`Escalation email sent after ${attempts} attempts`);
|
||||
}
|
||||
|
||||
if (edgeFunctionError) {
|
||||
// Edge function failed - log and show fallback toast
|
||||
handleError(edgeFunctionError, {
|
||||
action: 'Send escalation notification',
|
||||
userId: user.id,
|
||||
metadata: {
|
||||
submissionId: item.id,
|
||||
reason: reason.substring(0, 100),
|
||||
fallbackUsed: true,
|
||||
},
|
||||
});
|
||||
|
||||
toast({
|
||||
title: 'Escalated (Email Failed)',
|
||||
description: 'Submission escalated but notification email could not be sent',
|
||||
});
|
||||
} else {
|
||||
toast({
|
||||
title: 'Escalated Successfully',
|
||||
description: `Submission escalated and admin notified${requestId ? ` (${requestId.substring(0, 8)})` : ''}`,
|
||||
});
|
||||
}
|
||||
|
||||
// Invalidate cache
|
||||
queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
|
||||
|
||||
logger.log(`✅ Submission ${item.id} escalated`);
|
||||
} catch (error: unknown) {
|
||||
const errorId = handleError(error, {
|
||||
action: 'Escalate Submission',
|
||||
userId: user.id,
|
||||
metadata: {
|
||||
submissionId: item.id,
|
||||
submissionType: item.submission_type,
|
||||
reason: reason.substring(0, 100),
|
||||
},
|
||||
});
|
||||
|
||||
logger.error('Escalation failed', {
|
||||
submissionId: item.id,
|
||||
errorId,
|
||||
});
|
||||
|
||||
// Re-throw to allow UI to show retry option
|
||||
const enhancedError = error instanceof Error
|
||||
? Object.assign(error, { errorId })
|
||||
: { message: getErrorMessage(error), errorId };
|
||||
throw enhancedError;
|
||||
} finally {
|
||||
onActionComplete();
|
||||
}
|
||||
},
|
||||
[user, toast, onActionStart, onActionComplete, queryClient]
|
||||
);
|
||||
|
||||
return {
|
||||
performAction,
|
||||
deleteSubmission,
|
||||
resetToPending,
|
||||
retryFailedItems,
|
||||
escalateSubmission,
|
||||
};
|
||||
}
|
||||
287
src-old/hooks/moderation/useModerationFilters.ts
Normal file
287
src-old/hooks/moderation/useModerationFilters.ts
Normal file
@@ -0,0 +1,287 @@
|
||||
/**
|
||||
* Moderation Queue Filters Hook
|
||||
*
|
||||
* Manages filter state for the moderation queue, including:
|
||||
* - Entity type filtering (all, reviews, submissions, photos)
|
||||
* - Status filtering (pending, approved, rejected, etc.)
|
||||
* - Tab management (main queue vs archive)
|
||||
* - Filter persistence and clearing
|
||||
*/
|
||||
|
||||
import { useState, useCallback, useEffect } from 'react';
|
||||
import { useDebounce } from '@/hooks/useDebounce';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
|
||||
import type { EntityFilter, StatusFilter, QueueTab, SortConfig, SortField } from '@/types/moderation';
|
||||
import * as storage from '@/lib/localStorage';
|
||||
|
||||
/**
 * Options accepted by useModerationFilters.
 * All fields are optional; unset fields fall back to the hook's defaults.
 */
export interface ModerationFiltersConfig {
  /** Initial entity filter */
  initialEntityFilter?: EntityFilter;

  /** Initial status filter */
  initialStatusFilter?: StatusFilter;

  /** Initial active tab */
  initialTab?: QueueTab;

  /** Debounce delay for filter changes (ms) */
  debounceDelay?: number;

  /** Whether to persist filters to localStorage */
  persist?: boolean;

  /** localStorage key prefix for persistence */
  storageKey?: string;

  /** Initial sort configuration */
  initialSortConfig?: SortConfig;
}
|
||||
|
||||
/**
 * State and actions returned by `useModerationFilters`.
 *
 * Immediate values (`entityFilter`, `statusFilter`, `sortConfig`) reflect the
 * UI instantly; the `debounced*` counterparts are the ones to feed into
 * queries so rapid filter changes don't trigger a fetch per keystroke/click.
 */
export interface ModerationFilters {
  /** Current entity type filter */
  entityFilter: EntityFilter;

  /** Current status filter */
  statusFilter: StatusFilter;

  /** Current active tab */
  activeTab: QueueTab;

  /** Debounced entity filter (for API calls) */
  debouncedEntityFilter: EntityFilter;

  /** Debounced status filter (for API calls) */
  debouncedStatusFilter: StatusFilter;

  /** Set entity filter */
  setEntityFilter: (filter: EntityFilter) => void;

  /** Set status filter */
  setStatusFilter: (filter: StatusFilter) => void;

  /** Set active tab */
  setActiveTab: (tab: QueueTab) => void;

  /** Reset all filters to defaults */
  clearFilters: () => void;

  /** Check if any non-default filters are active */
  hasActiveFilters: boolean;

  /** Current sort configuration (immediate) */
  sortConfig: SortConfig;

  /** Debounced sort configuration (use this for queries) */
  debouncedSortConfig: SortConfig;

  /** Update the sort configuration */
  setSortConfig: (config: SortConfig) => void;

  /** Sort by a specific field, toggling direction if already sorting by that field */
  sortBy: (field: SortField) => void;

  /** Toggle the sort direction */
  toggleSortDirection: () => void;

  /** Reset sort to default */
  resetSort: () => void;

  /** Reset pagination to page 1 (callback) */
  onFilterChange?: () => void;
}
|
||||
|
||||
/**
|
||||
* Hook for managing moderation queue filters
|
||||
*
|
||||
* @param config - Configuration options
|
||||
* @returns Filter state and actions
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const filters = useModerationFilters({
|
||||
* persist: true,
|
||||
* debounceDelay: 300
|
||||
* });
|
||||
*
|
||||
* // Use in component
|
||||
* <Select value={filters.entityFilter} onValueChange={filters.setEntityFilter}>
|
||||
* ...
|
||||
* </Select>
|
||||
* ```
|
||||
*/
|
||||
export function useModerationFilters(
|
||||
config: ModerationFiltersConfig & { onFilterChange?: () => void } = {}
|
||||
): ModerationFilters {
|
||||
const {
|
||||
initialEntityFilter = 'all',
|
||||
initialStatusFilter = 'pending',
|
||||
initialTab = 'mainQueue',
|
||||
debounceDelay = MODERATION_CONSTANTS.FILTER_DEBOUNCE_MS,
|
||||
persist = true,
|
||||
storageKey = 'moderationQueue_filters',
|
||||
initialSortConfig = { field: 'created_at', direction: 'asc' },
|
||||
onFilterChange,
|
||||
} = config;
|
||||
|
||||
// Load persisted filters on mount
|
||||
const loadPersistedFilters = useCallback(() => {
|
||||
if (!persist) return null;
|
||||
|
||||
try {
|
||||
const saved = localStorage.getItem(storageKey);
|
||||
if (saved) {
|
||||
return JSON.parse(saved);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
// Silent - localStorage failures are non-critical
|
||||
}
|
||||
|
||||
return null;
|
||||
}, [persist, storageKey]);
|
||||
|
||||
// Load persisted sort
|
||||
const loadPersistedSort = useCallback((): SortConfig => {
|
||||
if (!persist) return initialSortConfig;
|
||||
|
||||
try {
|
||||
const saved = localStorage.getItem(`${storageKey}_sort`);
|
||||
if (saved) {
|
||||
const parsed = JSON.parse(saved);
|
||||
if (parsed.field && parsed.direction) {
|
||||
return parsed;
|
||||
}
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
// Silent - localStorage failures are non-critical
|
||||
}
|
||||
|
||||
return initialSortConfig;
|
||||
}, [persist, storageKey, initialSortConfig]);
|
||||
|
||||
const persisted = loadPersistedFilters();
|
||||
|
||||
// Filter state
|
||||
const [entityFilter, setEntityFilterState] = useState<EntityFilter>(
|
||||
persisted?.entityFilter || initialEntityFilter
|
||||
);
|
||||
const [statusFilter, setStatusFilterState] = useState<StatusFilter>(
|
||||
persisted?.statusFilter || initialStatusFilter
|
||||
);
|
||||
const [activeTab, setActiveTabState] = useState<QueueTab>(
|
||||
persisted?.activeTab || initialTab
|
||||
);
|
||||
|
||||
// Sort state
|
||||
const [sortConfig, setSortConfigState] = useState<SortConfig>(loadPersistedSort);
|
||||
|
||||
// Debounced filters for API calls
|
||||
const debouncedEntityFilter = useDebounce(entityFilter, debounceDelay);
|
||||
const debouncedStatusFilter = useDebounce(statusFilter, debounceDelay);
|
||||
|
||||
// Debounced sort (0ms for immediate feedback)
|
||||
const debouncedSortConfig = useDebounce(sortConfig, 0);
|
||||
|
||||
// Persist filters to localStorage
|
||||
useEffect(() => {
|
||||
if (persist) {
|
||||
storage.setJSON(storageKey, {
|
||||
entityFilter,
|
||||
statusFilter,
|
||||
activeTab,
|
||||
});
|
||||
}
|
||||
}, [entityFilter, statusFilter, activeTab, persist, storageKey]);
|
||||
|
||||
// Persist sort to localStorage
|
||||
useEffect(() => {
|
||||
if (persist) {
|
||||
storage.setJSON(`${storageKey}_sort`, sortConfig);
|
||||
}
|
||||
}, [sortConfig, persist, storageKey]);
|
||||
|
||||
// Set entity filter with logging and pagination reset
|
||||
const setEntityFilter = useCallback((filter: EntityFilter) => {
|
||||
logger.log('🔍 Entity filter changed:', filter);
|
||||
setEntityFilterState(filter);
|
||||
onFilterChange?.();
|
||||
}, [onFilterChange]);
|
||||
|
||||
// Set status filter with logging and pagination reset
|
||||
const setStatusFilter = useCallback((filter: StatusFilter) => {
|
||||
logger.log('🔍 Status filter changed:', filter);
|
||||
setStatusFilterState(filter);
|
||||
onFilterChange?.();
|
||||
}, [onFilterChange]);
|
||||
|
||||
// Set active tab with logging and pagination reset
|
||||
const setActiveTab = useCallback((tab: QueueTab) => {
|
||||
logger.log('🔍 Tab changed:', tab);
|
||||
setActiveTabState(tab);
|
||||
onFilterChange?.();
|
||||
}, [onFilterChange]);
|
||||
|
||||
// Sort callbacks
|
||||
const setSortConfig = useCallback((config: SortConfig) => {
|
||||
logger.log('📝 [SORT] Sort config changed:', config);
|
||||
setSortConfigState(config);
|
||||
}, []);
|
||||
|
||||
const sortBy = useCallback((field: SortField) => {
|
||||
setSortConfigState(prev => ({
|
||||
field,
|
||||
direction: prev.field === field
|
||||
? (prev.direction === 'asc' ? 'desc' : 'asc')
|
||||
: 'asc'
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const toggleSortDirection = useCallback(() => {
|
||||
setSortConfigState(prev => ({
|
||||
...prev,
|
||||
direction: prev.direction === 'asc' ? 'desc' : 'asc'
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const resetSort = useCallback(() => {
|
||||
setSortConfigState(initialSortConfig);
|
||||
}, [initialSortConfig]);
|
||||
|
||||
// Clear all filters
|
||||
const clearFilters = useCallback(() => {
|
||||
logger.log('🔍 Filters cleared');
|
||||
setEntityFilterState(initialEntityFilter);
|
||||
setStatusFilterState(initialStatusFilter);
|
||||
setActiveTabState(initialTab);
|
||||
setSortConfigState(initialSortConfig);
|
||||
}, [initialEntityFilter, initialStatusFilter, initialTab, initialSortConfig]);
|
||||
|
||||
// Check if non-default filters are active
|
||||
const hasActiveFilters =
|
||||
entityFilter !== initialEntityFilter ||
|
||||
statusFilter !== initialStatusFilter ||
|
||||
activeTab !== initialTab ||
|
||||
sortConfig.field !== initialSortConfig.field ||
|
||||
sortConfig.direction !== initialSortConfig.direction;
|
||||
|
||||
// Return without useMemo wrapper (OPTIMIZED)
|
||||
return {
|
||||
entityFilter,
|
||||
statusFilter,
|
||||
activeTab,
|
||||
debouncedEntityFilter,
|
||||
debouncedStatusFilter,
|
||||
setEntityFilter,
|
||||
setStatusFilter,
|
||||
setActiveTab,
|
||||
clearFilters,
|
||||
hasActiveFilters,
|
||||
sortConfig,
|
||||
debouncedSortConfig,
|
||||
setSortConfig,
|
||||
sortBy,
|
||||
toggleSortDirection,
|
||||
resetSort,
|
||||
onFilterChange,
|
||||
};
|
||||
}
|
||||
563
src-old/hooks/moderation/useModerationQueueManager.ts
Normal file
563
src-old/hooks/moderation/useModerationQueueManager.ts
Normal file
@@ -0,0 +1,563 @@
|
||||
import { useState, useCallback, useRef, useEffect, useMemo } from "react";
|
||||
import { supabase } from "@/lib/supabaseClient";
|
||||
import { useToast } from "@/hooks/use-toast";
|
||||
import { useAuth } from "@/hooks/useAuth";
|
||||
import { logger } from "@/lib/logger";
|
||||
import { getErrorMessage } from "@/lib/errorHandler";
|
||||
import { invokeWithTracking } from "@/lib/edgeFunctionTracking";
|
||||
import { MODERATION_CONSTANTS } from "@/lib/moderation/constants";
|
||||
import { useQueryClient } from '@tanstack/react-query';
|
||||
import type { User } from "@supabase/supabase-js";
|
||||
import {
|
||||
useEntityCache,
|
||||
useProfileCache,
|
||||
useModerationFilters,
|
||||
usePagination,
|
||||
useRealtimeSubscriptions,
|
||||
useQueueQuery,
|
||||
} from "./index";
|
||||
import { useModerationQueue } from "@/hooks/useModerationQueue";
|
||||
import { useModerationActions } from "./useModerationActions";
|
||||
|
||||
import type { ModerationItem, EntityFilter, StatusFilter, LoadingState } from "@/types/moderation";
|
||||
|
||||
/**
 * Aggregate moderation counters. Used as the delta shape for the
 * `optimisticallyUpdateStats` callback in the manager config.
 */
interface ModerationStats {
  /** Number of submissions awaiting moderation */
  pendingSubmissions: number;
  /** Number of open user reports */
  openReports: number;
  /** Number of flagged content items */
  flaggedContent: number;
}
|
||||
|
||||
/**
 * Configuration for useModerationQueueManager
 */
export interface ModerationQueueManagerConfig {
  /** Currently authenticated user (queue is disabled when null) */
  user: User | null;
  /** Whether the current user has the admin role */
  isAdmin: boolean;
  /** Whether the current user has the superuser role */
  isSuperuser: boolean;
  /** Toast dispatcher from useToast, used for success/error notifications */
  toast: ReturnType<typeof useToast>["toast"];
  /** Optional optimistic stats updater (applies a partial delta to dashboard counts) */
  optimisticallyUpdateStats?: (delta: Partial<ModerationStats>) => void;
  /** Queue refresh behavior settings */
  settings: {
    /** 'auto' enables polling/realtime refresh; 'manual' requires explicit refresh() */
    refreshMode: "auto" | "manual";
    /** Polling interval in ms (used only when realtime queue is disabled) */
    pollInterval: number;
    /** How refreshed data is applied to the visible queue */
    refreshStrategy: "notify" | "merge" | "replace";
    /** Whether to avoid disrupting items the moderator is interacting with */
    preserveInteraction: boolean;
    /** When true, use realtime subscriptions instead of polling */
    useRealtimeQueue: boolean;
  };
}
|
||||
|
||||
/**
 * Return type for useModerationQueueManager
 */
export interface ModerationQueueManager {
  // State
  /** Current page of moderation items */
  items: ModerationItem[];
  /** Coarse loading state for the queue view */
  loadingState: LoadingState;
  /** Id of the item an action is currently running against, or null */
  actionLoading: string | null;

  // Sub-hooks (exposed for granular control)
  filters: ReturnType<typeof useModerationFilters>;
  pagination: ReturnType<typeof usePagination>;
  queue: ReturnType<typeof useModerationQueue>;

  // Realtime
  /** Count of new items that arrived via realtime but are not yet shown */
  newItemsCount: number;
  /** The buffered new items themselves */
  pendingNewItems: ModerationItem[];
  /** Flush buffered new items into the visible queue */
  showNewItems: () => void;

  // Interaction tracking
  /** Ids of items the moderator is actively interacting with */
  interactingWith: Set<string>;
  /** Mark/unmark an item as being interacted with (shields it from realtime updates) */
  markInteracting: (id: string, interacting: boolean) => void;

  // Actions
  /** Manually refetch the queue */
  refresh: () => void;
  /** Approve or reject an item, optionally with moderator notes */
  performAction: (item: ModerationItem, action: "approved" | "rejected", moderatorNotes?: string) => Promise<void>;
  /** Permanently delete a content submission */
  deleteSubmission: (item: ModerationItem) => Promise<void>;
  /** Reset a submission (and its items) back to pending */
  resetToPending: (item: ModerationItem) => Promise<void>;
  /** Re-process the rejected items of a submission */
  retryFailedItems: (item: ModerationItem) => Promise<void>;

  // Caches (for QueueItem enrichment)
  entityCache: ReturnType<typeof useEntityCache>;
  profileCache: ReturnType<typeof useProfileCache>;
}
|
||||
|
||||
/**
 * Orchestrator hook for moderation queue management
 * Consolidates all queue-related logic into a single hook:
 * filters, pagination, TanStack Query fetching, realtime subscriptions,
 * optimistic removals, and the approve/reject/delete/reset/retry actions.
 */
export function useModerationQueueManager(config: ModerationQueueManagerConfig): ModerationQueueManager {
  logger.log('🚀 [QUEUE MANAGER] Hook mounting/rendering', {
    hasUser: !!config.user,
    isAdmin: config.isAdmin,
    timestamp: new Date().toISOString()
  });

  // NOTE(review): `queryClient` and `optimisticallyUpdateStats` are pulled in
  // here but never referenced in this visible body — candidates for removal,
  // or callers of this hook may rely on them elsewhere; confirm before deleting.
  const { user, isAdmin, isSuperuser, toast, optimisticallyUpdateStats, settings } = config;
  const queryClient = useQueryClient();
  const { aal } = useAuth();

  // Debug AAL (authenticator assurance level) status
  useEffect(() => {
    logger.log('🔐 [QUEUE MANAGER] AAL Status:', {
      aal,
      isNull: aal === null,
      isAal1: aal === 'aal1',
      isAal2: aal === 'aal2',
      timestamp: new Date().toISOString()
    });
  }, [aal]);

  // Initialize sub-hooks
  const filters = useModerationFilters({
    initialEntityFilter: "all",
    initialStatusFilter: "pending",
    initialTab: "mainQueue",
    debounceDelay: 300,
    persist: true,
    storageKey: "moderationQueue_filters",
  });

  // Memoize filters object for realtime subscriptions to prevent reconnections
  // (a fresh object each render would re-trigger the subscription hook)
  const realtimeFilters = useMemo(() => ({
    entityFilter: filters.debouncedEntityFilter,
    statusFilter: filters.debouncedStatusFilter,
  }), [filters.debouncedEntityFilter, filters.debouncedStatusFilter]);

  // Pagination; its callbacks close over setLoadingState, which is declared
  // below — safe because they only run after mount, never during this render.
  const pagination = usePagination({
    initialPage: 1,
    initialPageSize: 25,
    persist: false,
    onPageChange: (page) => {
      if (page > 1) {
        setLoadingState("loading");
      }
    },
    onPageSizeChange: () => {
      setLoadingState("loading");
    },
  });

  // Use a stable callback via ref to prevent excessive re-renders:
  // useModerationQueue gets a never-changing callback that forwards to
  // whatever handler is currently stored in the ref.
  const lockStateChangeHandlerRef = useRef<() => void>();

  const queue = useModerationQueue({
    onLockStateChange: useCallback(() => {
      lockStateChangeHandlerRef.current?.();
    }, [])
  });
  const entityCache = useEntityCache();
  const profileCache = useProfileCache();

  // Core state
  const [items, setItems] = useState<ModerationItem[]>([]);
  const [loadingState, setLoadingState] = useState<LoadingState>("initial");
  const [actionLoading, setActionLoading] = useState<string | null>(null);
  const [interactingWith, setInteractingWith] = useState<Set<string>>(new Set());
  const [pendingNewItems, setPendingNewItems] = useState<ModerationItem[]>([]);
  const [newItemsCount, setNewItemsCount] = useState(0);

  // Refs for tracking
  // recentlyRemovedRef: ids optimistically removed from the list; realtime
  //   events for these ids are ignored for a grace period.
  // initialFetchCompleteRef / isMountingRef: guard effects that must not run
  //   before the first successful fetch.
  const recentlyRemovedRef = useRef<Set<string>>(new Set());
  const initialFetchCompleteRef = useRef(false);
  const isMountingRef = useRef(true);

  /**
   * Replace manual fetching with TanStack Query
   * Use direct state values for stable query keys
   */
  const queueQuery = useQueueQuery({
    userId: user?.id,
    isAdmin,
    isSuperuser,
    entityFilter: filters.debouncedEntityFilter,
    statusFilter: filters.debouncedStatusFilter,
    tab: filters.activeTab,
    currentPage: pagination.currentPage,
    pageSize: pagination.pageSize,
    sortConfig: filters.debouncedSortConfig,
    enabled: !!user,
  });

  // Update the lock state change handler ref whenever queueQuery changes.
  // NOTE(review): this assignment happens during render (not in an effect);
  // it works because the handler only fires asynchronously, but it is a
  // render-phase side effect — confirm this is intentional.
  lockStateChangeHandlerRef.current = () => {
    logger.log('🔄 Lock state changed, invalidating queue cache');
    queueQuery.invalidate();
    setLoadingState(prev => prev === "loading" ? "ready" : prev);
  };

  // Mirror query data into local `items` state when it changes
  useEffect(() => {
    if (queueQuery.items) {
      setItems(queueQuery.items);
      logger.log('✅ Queue items updated from TanStack Query:', queueQuery.items.length);
    }
  }, [queueQuery.items]);

  // Update loading state based on query status
  useEffect(() => {
    if (queueQuery.isLoading) {
      setLoadingState('loading');
    } else if (queueQuery.isRefreshing) {
      setLoadingState('refreshing');
    } else {
      setLoadingState('ready');
    }
  }, [queueQuery.isLoading, queueQuery.isRefreshing]);

  // Show error toast when query fails
  useEffect(() => {
    if (queueQuery.error) {
      // Error already captured by TanStack Query
      toast({
        variant: 'destructive',
        title: 'Failed to Load Queue',
        description: queueQuery.error.message || 'An error occurred while fetching the moderation queue.',
      });
    }
  }, [queueQuery.error, toast]);

  // Extract stable callback to prevent infinite loop
  const { setTotalCount } = pagination;

  // Update total count for pagination
  useEffect(() => {
    setTotalCount(queueQuery.totalCount);
  }, [queueQuery.totalCount, setTotalCount]);

  // Mark initial fetch as complete
  // NOTE(review): this effect is duplicated further down (the later copy also
  // clears isMountingRef); one of the two is redundant and could be removed.
  useEffect(() => {
    if (!queueQuery.isLoading && !initialFetchCompleteRef.current) {
      initialFetchCompleteRef.current = true;
      logger.log('✅ Initial queue fetch complete');
    }
  }, [queueQuery.isLoading]);

  /**
   * Manual refresh function — forces a refetch of the current queue page.
   */
  const refresh = useCallback(async () => {
    logger.log('🔄 Manual refresh triggered');
    await queueQuery.refetch();
  }, [queueQuery]);

  /**
   * Show pending new items by invalidating query; clears the realtime
   * "new items" buffer and badge count.
   */
  const showNewItems = useCallback(async () => {
    logger.log('✅ Showing new items via query invalidation');
    await queueQuery.invalidate();
    setPendingNewItems([]);
    setNewItemsCount(0);
  }, [queueQuery]);

  /**
   * Mark an item as being interacted with (prevents realtime updates)
   */
  const markInteracting = useCallback((id: string, interacting: boolean) => {
    setInteractingWith((prev) => {
      const next = new Set(prev);
      if (interacting) {
        next.add(id);
      } else {
        next.delete(id);
      }
      return next;
    });
  }, []);

  /**
   * Use validated action handler from useModerationActions
   */
  const moderationActions = useModerationActions({
    user,
    onActionStart: setActionLoading,
    onActionComplete: () => {
      setActionLoading(null);
      refresh();
      queue.refreshStats();
    },
    currentLockSubmissionId: queue.currentLock?.submissionId,
  });

  /**
   * Perform moderation action (approve/reject) - delegates to validated handler.
   * Releases this moderator's lock on the submission first, if held.
   */
  const performAction = useCallback(
    async (item: ModerationItem, action: "approved" | "rejected", moderatorNotes?: string) => {
      // Release lock if held
      if (queue.currentLock?.submissionId === item.id) {
        await queue.releaseLock(item.id, true);
      }

      // Use validated action handler
      await moderationActions.performAction(item, action, moderatorNotes);
    },
    [moderationActions, queue]
  );


  /**
   * Delete a submission permanently.
   * Optimistically removes the item from the list, then deletes it from the
   * database; on failure the item is restored and an error toast is shown.
   */
  const deleteSubmission = useCallback(
    async (item: ModerationItem) => {
      if (item.type !== "content_submission") return;
      if (actionLoading === item.id) return;

      setActionLoading(item.id);
      setItems((prev) => prev.filter((i) => i.id !== item.id));

      try {
        const { error } = await supabase.from("content_submissions").delete().eq("id", item.id);

        if (error) throw error;

        toast({
          title: "Submission deleted",
          description: "The submission has been permanently deleted",
        });

        // Refresh stats to update counts
        // NOTE(review): `queue` is used here but missing from the dependency
        // array below — stale-closure risk; confirm and add if appropriate.
        queue.refreshStats();
      } catch (error: unknown) {
        // NOTE(review): errorMsg is computed but never used (the toast shows
        // a generic message); either log it or drop the variable.
        const errorMsg = getErrorMessage(error);
        // Silent - operation handled optimistically

        // Roll back the optimistic removal (re-append only if absent)
        setItems((prev) => {
          if (prev.some((i) => i.id === item.id)) return prev;
          return [...prev, item];
        });

        toast({
          title: "Error",
          description: "Failed to delete submission",
          variant: "destructive",
        });
      } finally {
        setActionLoading(null);
      }
    },
    [actionLoading, toast],
  );

  /**
   * Reset submission to pending status.
   * Loads the service lazily (dynamic import) and removes the item from the
   * visible list on success.
   */
  const resetToPending = useCallback(
    async (item: ModerationItem) => {
      setActionLoading(item.id);

      try {
        const { resetRejectedItemsToPending } = await import("@/lib/submissionItemsService");
        await resetRejectedItemsToPending(item.id);

        toast({
          title: "Reset Complete",
          description: "Submission and all items have been reset to pending status",
        });

        // Refresh stats to update counts
        // NOTE(review): `queue` missing from deps array below — confirm.
        queue.refreshStats();

        setItems((prev) => prev.filter((i) => i.id !== item.id));
      } catch (error: unknown) {
        const errorMsg = getErrorMessage(error);
        // Silent - operation handled optimistically
        toast({
          title: "Reset Failed",
          description: errorMsg,
          variant: "destructive",
        });
      } finally {
        setActionLoading(null);
      }
    },
    [toast],
  );

  /**
   * Retry failed items in a submission.
   * Optionally removes the item optimistically (depending on the active
   * status filter), re-processes all rejected submission_items via the
   * "process-selective-approval" edge function, and reports the outcome.
   */
  const retryFailedItems = useCallback(
    async (item: ModerationItem) => {
      setActionLoading(item.id);

      // Only hide the row when the current filter would no longer match it
      const shouldRemove =
        filters.statusFilter === "pending" ||
        filters.statusFilter === "flagged" ||
        filters.statusFilter === "partially_approved";

      if (shouldRemove) {
        // Defer to the next frame to avoid janking the click handler;
        // shield the id from realtime echoes for 10s.
        requestAnimationFrame(() => {
          setItems((prev) => prev.filter((i) => i.id !== item.id));
          recentlyRemovedRef.current.add(item.id);
          setTimeout(() => recentlyRemovedRef.current.delete(item.id), 10000);
        });
      }

      try {
        const { data: failedItems } = await supabase
          .from("submission_items")
          .select("id")
          .eq("submission_id", item.id)
          .eq("status", "rejected");

        if (!failedItems || failedItems.length === 0) {
          toast({
            title: "No Failed Items",
            description: "All items have been processed successfully",
          });
          return;
        }

        // NOTE(review): `data` is destructured but unused; `user` and `queue`
        // are used but absent from the deps array below — confirm.
        const { data, error, requestId } = await invokeWithTracking(
          "process-selective-approval",
          {
            itemIds: failedItems.map((i) => i.id),
            submissionId: item.id,
          },
          user?.id
        );

        if (error) throw error;

        toast({
          title: "Retry Complete",
          description: `Processed ${failedItems.length} failed item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ""}`,
        });

        // Refresh stats to update counts
        queue.refreshStats();
      } catch (error: unknown) {
        const errorMsg = getErrorMessage(error);
        // Silent - operation handled optimistically
        toast({
          title: "Retry Failed",
          description: errorMsg,
          variant: "destructive",
        });
      } finally {
        setActionLoading(null);
      }
    },
    [filters.statusFilter, toast],
  );

  // Extract stable callbacks to prevent infinite loop in effects
  const { invalidate: invalidateQuery } = queueQuery;
  const { reset: resetPagination } = pagination;

  // Mark initial fetch as complete when query loads
  // NOTE(review): duplicates the earlier "initial fetch complete" effect;
  // this copy additionally clears isMountingRef — consider merging the two.
  useEffect(() => {
    if (!queueQuery.isLoading && !initialFetchCompleteRef.current) {
      initialFetchCompleteRef.current = true;
      isMountingRef.current = false;
      logger.log('✅ Initial queue fetch complete');
    }
  }, [queueQuery.isLoading]);

  // Invalidate query when filters or sort changes (OPTIMIZED)
  // Skipped until the first fetch has completed, so mount doesn't trigger
  // a redundant reset/invalidate cycle.
  useEffect(() => {
    if (
      !user ||
      !initialFetchCompleteRef.current ||
      isMountingRef.current
    ) return;

    logger.log('🔄 Filters/sort changed, invalidating query');
    resetPagination();
    invalidateQuery();
  }, [
    filters.debouncedEntityFilter,
    filters.debouncedStatusFilter,
    filters.debouncedSortConfig.field,
    filters.debouncedSortConfig.direction,
    user,
    invalidateQuery,
    resetPagination
  ]);

  // Polling effect (when realtime disabled) - MUTUALLY EXCLUSIVE with the
  // realtime subscription below: polls only in 'auto' mode with realtime off.
  useEffect(() => {
    const shouldPoll = settings.refreshMode === 'auto'
      && !settings.useRealtimeQueue
      && loadingState !== 'initial'
      && !!user;

    if (!shouldPoll) {
      return;
    }

    logger.log("⚠️ Polling ENABLED - interval:", settings.pollInterval);
    const interval = setInterval(() => {
      logger.log("🔄 Polling refresh triggered");
      queueQuery.refetch();
    }, settings.pollInterval);

    return () => {
      clearInterval(interval);
      logger.log("🛑 Polling stopped");
    };
  }, [user, settings.refreshMode, settings.pollInterval, loadingState, settings.useRealtimeQueue, queueQuery.refetch]);

  // Initialize realtime subscriptions (new-item buffer + optimistic-removal
  // protection; actual list updates flow through TanStack Query invalidation)
  useRealtimeSubscriptions({
    enabled: settings.useRealtimeQueue && !!user,
    filters: realtimeFilters,
    onNewItem: (item: ModerationItem) => {
      // Ignore echoes of items we just optimistically removed
      if (recentlyRemovedRef.current.has(item.id)) return;

      // Buffer (dedup by id) instead of inserting directly into the list
      setPendingNewItems((prev) => {
        if (prev.some((p) => p.id === item.id)) return prev;
        return [...prev, item];
      });
      setNewItemsCount((prev) => prev + 1);

      toast({
        title: "🆕 New Submission",
        description: `${item.submission_type} - ${item.entity_name}`,
      });
    },
    onUpdateItem: (item: ModerationItem, shouldRemove: boolean) => {
      if (recentlyRemovedRef.current.has(item.id)) return;
      if (interactingWith.has(item.id)) return;

      // Only track removals for optimistic update protection
      if (shouldRemove && !recentlyRemovedRef.current.has(item.id)) {
        recentlyRemovedRef.current.add(item.id);
        setTimeout(() => recentlyRemovedRef.current.delete(item.id), MODERATION_CONSTANTS.REALTIME_OPTIMISTIC_REMOVAL_TIMEOUT);
      }
      // TanStack Query handles actual state updates via invalidation
    },
    onItemRemoved: (itemId: string) => {
      // Track for optimistic update protection
      recentlyRemovedRef.current.add(itemId);
      setTimeout(() => recentlyRemovedRef.current.delete(itemId), MODERATION_CONSTANTS.REALTIME_OPTIMISTIC_REMOVAL_TIMEOUT);
      // TanStack Query handles removal via invalidation
    },
    entityCache,
    profileCache,
    recentlyRemovedIds: recentlyRemovedRef.current,
    interactingWithIds: interactingWith,
  });

  return {
    items,
    loadingState,
    actionLoading,
    filters,
    pagination,
    queue,
    newItemsCount,
    pendingNewItems,
    showNewItems,
    interactingWith,
    markInteracting,
    refresh,
    performAction,
    deleteSubmission,
    resetToPending,
    retryFailedItems,
    entityCache,
    profileCache,
  };
}
|
||||
250
src-old/hooks/moderation/usePagination.ts
Normal file
250
src-old/hooks/moderation/usePagination.ts
Normal file
@@ -0,0 +1,250 @@
|
||||
/**
|
||||
* Pagination Hook
|
||||
*
|
||||
* Manages pagination state and actions for the moderation queue.
|
||||
*/
|
||||
|
||||
import { useState, useCallback, useEffect, useMemo } from 'react';
|
||||
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
|
||||
import * as storage from '@/lib/localStorage';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
/**
 * Configuration options for `usePagination`. All fields optional;
 * defaults applied inside the hook.
 */
export interface PaginationConfig {
  /** Initial page number (1-indexed) */
  initialPage?: number;

  /** Initial page size */
  initialPageSize?: number;

  /** Whether to persist pagination state */
  persist?: boolean;

  /** localStorage key for persistence */
  storageKey?: string;

  /** Callback when page changes */
  onPageChange?: (page: number) => void;

  /** Callback when page size changes */
  onPageSizeChange?: (pageSize: number) => void;
}
|
||||
|
||||
/**
 * State and actions returned by `usePagination`. Pages are 1-indexed;
 * `startIndex`/`endIndex` are 0-indexed and inclusive, shaped for
 * range-style queries (e.g. `query.range(startIndex, endIndex)`).
 */
export interface PaginationState {
  /** Current page (1-indexed) */
  currentPage: number;

  /** Items per page */
  pageSize: number;

  /** Total number of items */
  totalCount: number;

  /** Total number of pages */
  totalPages: number;

  /** Start index for current page (0-indexed) */
  startIndex: number;

  /** End index for current page (0-indexed, inclusive) */
  endIndex: number;

  /** Whether there is a previous page */
  hasPrevPage: boolean;

  /** Whether there is a next page */
  hasNextPage: boolean;

  /** Set current page */
  setCurrentPage: (page: number) => void;

  /** Set page size */
  setPageSize: (size: number) => void;

  /** Set total count */
  setTotalCount: (count: number) => void;

  /** Go to next page */
  nextPage: () => void;

  /** Go to previous page */
  prevPage: () => void;

  /** Go to first page */
  firstPage: () => void;

  /** Go to last page */
  lastPage: () => void;

  /** Reset pagination */
  reset: () => void;

  /** Get page range for display */
  getPageRange: (maxPages?: number) => number[];
}
|
||||
|
||||
/**
|
||||
* Hook for managing pagination state
|
||||
*
|
||||
* @param config - Configuration options
|
||||
* @returns Pagination state and actions
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const pagination = usePagination({
|
||||
* initialPageSize: 25,
|
||||
* persist: true,
|
||||
* onPageChange: (page) => fetchData(page)
|
||||
* });
|
||||
*
|
||||
* // Set total count from API
|
||||
* pagination.setTotalCount(response.count);
|
||||
*
|
||||
* // Use in query
|
||||
* const { startIndex, endIndex } = pagination;
|
||||
* query.range(startIndex, endIndex);
|
||||
* ```
|
||||
*/
|
||||
export function usePagination(config: PaginationConfig = {}): PaginationState {
|
||||
const {
|
||||
initialPage = 1,
|
||||
initialPageSize = MODERATION_CONSTANTS.DEFAULT_PAGE_SIZE,
|
||||
persist = false,
|
||||
storageKey = 'pagination_state',
|
||||
onPageChange,
|
||||
onPageSizeChange,
|
||||
} = config;
|
||||
|
||||
// Load persisted state
|
||||
const loadPersistedState = useCallback(() => {
|
||||
if (!persist) return null;
|
||||
|
||||
try {
|
||||
const saved = localStorage.getItem(storageKey);
|
||||
if (saved) {
|
||||
return JSON.parse(saved);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
// Silent - localStorage failures are non-critical
|
||||
}
|
||||
|
||||
return null;
|
||||
}, [persist, storageKey]);
|
||||
|
||||
const persisted = loadPersistedState();
|
||||
|
||||
// State
|
||||
const [currentPage, setCurrentPageState] = useState<number>(
|
||||
persisted?.currentPage || initialPage
|
||||
);
|
||||
const [pageSize, setPageSizeState] = useState<number>(
|
||||
persisted?.pageSize || initialPageSize
|
||||
);
|
||||
const [totalCount, setTotalCount] = useState<number>(0);
|
||||
|
||||
// Computed values
|
||||
const totalPages = useMemo(() => Math.ceil(totalCount / pageSize), [totalCount, pageSize]);
|
||||
const startIndex = useMemo(() => (currentPage - 1) * pageSize, [currentPage, pageSize]);
|
||||
const endIndex = useMemo(() => startIndex + pageSize - 1, [startIndex, pageSize]);
|
||||
const hasPrevPage = currentPage > 1;
|
||||
const hasNextPage = currentPage < totalPages;
|
||||
|
||||
// Persist state
|
||||
useEffect(() => {
|
||||
if (persist) {
|
||||
storage.setJSON(storageKey, {
|
||||
currentPage,
|
||||
pageSize,
|
||||
});
|
||||
}
|
||||
}, [currentPage, pageSize, persist, storageKey]);
|
||||
|
||||
// Set current page with bounds checking
|
||||
const setCurrentPage = useCallback(
|
||||
(page: number) => {
|
||||
const boundedPage = Math.max(1, Math.min(page, totalPages || 1));
|
||||
setCurrentPageState(boundedPage);
|
||||
onPageChange?.(boundedPage);
|
||||
},
|
||||
[totalPages, onPageChange]
|
||||
);
|
||||
|
||||
// Set page size and reset to first page
|
||||
const setPageSize = useCallback(
|
||||
(size: number) => {
|
||||
setPageSizeState(size);
|
||||
setCurrentPageState(1);
|
||||
onPageSizeChange?.(size);
|
||||
},
|
||||
[onPageSizeChange]
|
||||
);
|
||||
|
||||
// Navigation actions
|
||||
const nextPage = useCallback(() => {
|
||||
if (hasNextPage) {
|
||||
setCurrentPage(currentPage + 1);
|
||||
}
|
||||
}, [currentPage, hasNextPage, setCurrentPage]);
|
||||
|
||||
const prevPage = useCallback(() => {
|
||||
if (hasPrevPage) {
|
||||
setCurrentPage(currentPage - 1);
|
||||
}
|
||||
}, [currentPage, hasPrevPage, setCurrentPage]);
|
||||
|
||||
const firstPage = useCallback(() => {
|
||||
setCurrentPage(1);
|
||||
}, [setCurrentPage]);
|
||||
|
||||
const lastPage = useCallback(() => {
|
||||
setCurrentPage(totalPages);
|
||||
}, [totalPages, setCurrentPage]);
|
||||
|
||||
// Reset pagination
|
||||
const reset = useCallback(() => {
|
||||
setCurrentPageState(initialPage);
|
||||
setPageSizeState(initialPageSize);
|
||||
setTotalCount(0);
|
||||
}, [initialPage, initialPageSize]);
|
||||
|
||||
// Get page range for pagination controls
|
||||
const getPageRange = useCallback(
|
||||
(maxPages: number = 5): number[] => {
|
||||
if (totalPages <= maxPages) {
|
||||
return Array.from({ length: totalPages }, (_, i) => i + 1);
|
||||
}
|
||||
|
||||
const half = Math.floor(maxPages / 2);
|
||||
let start = Math.max(1, currentPage - half);
|
||||
let end = Math.min(totalPages, start + maxPages - 1);
|
||||
|
||||
// Adjust start if we're near the end
|
||||
if (end - start < maxPages - 1) {
|
||||
start = Math.max(1, end - maxPages + 1);
|
||||
}
|
||||
|
||||
return Array.from({ length: end - start + 1 }, (_, i) => start + i);
|
||||
},
|
||||
[currentPage, totalPages]
|
||||
);
|
||||
|
||||
// Return without useMemo wrapper (OPTIMIZED)
|
||||
return {
|
||||
currentPage,
|
||||
pageSize,
|
||||
totalCount,
|
||||
totalPages,
|
||||
startIndex,
|
||||
endIndex,
|
||||
hasPrevPage,
|
||||
hasNextPage,
|
||||
setCurrentPage,
|
||||
setPageSize,
|
||||
setTotalCount,
|
||||
nextPage,
|
||||
prevPage,
|
||||
firstPage,
|
||||
lastPage,
|
||||
reset,
|
||||
getPageRange,
|
||||
};
|
||||
}
|
||||
224
src-old/hooks/moderation/useProfileCache.ts
Normal file
224
src-old/hooks/moderation/useProfileCache.ts
Normal file
@@ -0,0 +1,224 @@
|
||||
import { useRef, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getErrorMessage } from '@/lib/errorHandler';
|
||||
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
|
||||
import type { ModerationItem } from '@/types/moderation';
|
||||
|
||||
/**
 * Profile data structure returned from the database `profiles` table
 * (columns: user_id, username, display_name, avatar_url) and stored in
 * the moderation profile cache.
 */
export interface CachedProfile {
  // Primary key; used as the cache lookup key.
  user_id: string;
  // Login handle; always present.
  username: string;
  // Optional friendly name; preferred over username for display
  // (see getDisplayName in useProfileCache).
  display_name?: string;
  // Optional avatar image URL.
  avatar_url?: string;
}
|
||||
|
||||
/**
|
||||
* Hook for managing user profile caching
|
||||
*
|
||||
* Uses ref-based storage to avoid triggering re-renders while providing
|
||||
* efficient caching for user profile lookups during moderation.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const profileCache = useProfileCache();
|
||||
*
|
||||
* // Get cached profile
|
||||
* const profile = profileCache.getCached(userId);
|
||||
*
|
||||
* // Bulk fetch and cache profiles
|
||||
* const profiles = await profileCache.bulkFetch([id1, id2, id3]);
|
||||
*
|
||||
* // Check if profile exists in cache
|
||||
* if (profileCache.has(userId)) {
|
||||
* const profile = profileCache.getCached(userId);
|
||||
* }
|
||||
*
|
||||
* // Clear cache
|
||||
* profileCache.clear();
|
||||
* ```
|
||||
*/
|
||||
export function useProfileCache() {
|
||||
// Use ref to prevent re-renders on cache updates
|
||||
const cacheRef = useRef<Map<string, CachedProfile>>(new Map());
|
||||
|
||||
/**
|
||||
* Get a cached profile by user ID
|
||||
*/
|
||||
const getCached = useCallback((userId: string): CachedProfile | undefined => {
|
||||
return cacheRef.current.get(userId);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Check if a profile is cached
|
||||
*/
|
||||
const has = useCallback((userId: string): boolean => {
|
||||
return cacheRef.current.has(userId);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Set a cached profile with LRU eviction
|
||||
*/
|
||||
const setCached = useCallback((userId: string, profile: CachedProfile): void => {
|
||||
const cache = cacheRef.current;
|
||||
|
||||
// LRU eviction
|
||||
if (cache.size >= MODERATION_CONSTANTS.MAX_PROFILE_CACHE_SIZE) {
|
||||
const firstKey = cache.keys().next().value;
|
||||
if (firstKey) {
|
||||
cache.delete(firstKey);
|
||||
logger.log(`♻️ [ProfileCache] Evicted ${firstKey} (LRU)`);
|
||||
}
|
||||
}
|
||||
|
||||
cache.set(userId, profile);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get uncached user IDs from a list
|
||||
*/
|
||||
const getUncachedIds = useCallback((userIds: string[]): string[] => {
|
||||
return userIds.filter(id => !cacheRef.current.has(id));
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Bulk fetch user profiles from the database and cache them
|
||||
* Only fetches profiles that aren't already cached
|
||||
*
|
||||
* @param userIds - Array of user IDs to fetch
|
||||
* @returns Array of fetched profiles
|
||||
*/
|
||||
const bulkFetch = useCallback(async (userIds: string[]): Promise<CachedProfile[]> => {
|
||||
if (userIds.length === 0) return [];
|
||||
|
||||
// Filter to only uncached IDs
|
||||
const uncachedIds = getUncachedIds(userIds);
|
||||
if (uncachedIds.length === 0) {
|
||||
// All profiles are cached, return them
|
||||
return userIds.map(id => getCached(id)).filter((p): p is CachedProfile => !!p);
|
||||
}
|
||||
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('profiles')
|
||||
.select('user_id, username, display_name, avatar_url')
|
||||
.in('user_id', uncachedIds);
|
||||
|
||||
if (error) {
|
||||
// Silent - cache miss is acceptable
|
||||
return [];
|
||||
}
|
||||
|
||||
// Cache the fetched profiles
|
||||
if (data) {
|
||||
data.forEach((profile) => {
|
||||
const cachedProfile: CachedProfile = {
|
||||
...profile,
|
||||
display_name: profile.display_name || undefined,
|
||||
avatar_url: profile.avatar_url || undefined
|
||||
};
|
||||
setCached(profile.user_id, cachedProfile);
|
||||
});
|
||||
}
|
||||
|
||||
return (data || []).map(profile => ({
|
||||
...profile,
|
||||
display_name: profile.display_name || undefined,
|
||||
avatar_url: profile.avatar_url || undefined
|
||||
}));
|
||||
} catch (error: unknown) {
|
||||
// Silent - cache operations are non-critical
|
||||
return [];
|
||||
}
|
||||
}, [getCached, setCached, getUncachedIds]);
|
||||
|
||||
/**
|
||||
* Fetch and return profiles for a list of user IDs
|
||||
* Returns a Map for easy lookup
|
||||
*
|
||||
* @param userIds - Array of user IDs to fetch
|
||||
* @returns Map of userId -> profile
|
||||
*/
|
||||
const fetchAsMap = useCallback(async (userIds: string[]): Promise<Map<string, CachedProfile>> => {
|
||||
const profiles = await bulkFetch(userIds);
|
||||
return new Map(profiles.map(p => [p.user_id, p]));
|
||||
}, [bulkFetch]);
|
||||
|
||||
/**
|
||||
* Fetch profiles for submitters and reviewers from submissions
|
||||
* Automatically extracts user IDs and reviewer IDs from submission data
|
||||
*
|
||||
* @param submissions - Array of submissions with user_id and reviewer_id
|
||||
* @returns Map of userId -> profile for all users involved
|
||||
*/
|
||||
const fetchForSubmissions = useCallback(async (submissions: ModerationItem[]): Promise<Map<string, CachedProfile>> => {
|
||||
const userIds = submissions.map(s => s.user_id).filter(Boolean);
|
||||
const reviewerIds = submissions.map(s => s.reviewer_id).filter((id): id is string => !!id);
|
||||
const allUserIds = [...new Set([...userIds, ...reviewerIds])];
|
||||
|
||||
return await fetchAsMap(allUserIds);
|
||||
}, [fetchAsMap]);
|
||||
|
||||
/**
|
||||
* Get a display name for a user (display_name or username)
|
||||
* Returns 'Unknown User' if not found in cache
|
||||
*/
|
||||
const getDisplayName = useCallback((userId: string): string => {
|
||||
const profile = getCached(userId);
|
||||
if (!profile) return 'Unknown User';
|
||||
return profile.display_name || profile.username || 'Unknown User';
|
||||
}, [getCached]);
|
||||
|
||||
/**
|
||||
* Invalidate (remove) a specific profile from cache
|
||||
*/
|
||||
const invalidate = useCallback((userId: string): void => {
|
||||
cacheRef.current.delete(userId);
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Clear all cached profiles
|
||||
*/
|
||||
const clear = useCallback((): void => {
|
||||
cacheRef.current.clear();
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get cache size
|
||||
*/
|
||||
const getSize = useCallback((): number => {
|
||||
return cacheRef.current.size;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get all cached profile user IDs
|
||||
*/
|
||||
const getAllCachedIds = useCallback((): string[] => {
|
||||
return Array.from(cacheRef.current.keys());
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Get direct access to cache ref (for advanced use cases)
|
||||
* Use with caution - prefer using the provided methods
|
||||
*/
|
||||
const getCacheRef = useCallback(() => cacheRef.current, []);
|
||||
|
||||
// Return without useMemo wrapper (OPTIMIZED)
|
||||
return {
|
||||
getCached,
|
||||
has,
|
||||
setCached,
|
||||
getUncachedIds,
|
||||
bulkFetch,
|
||||
fetchAsMap,
|
||||
fetchForSubmissions,
|
||||
getDisplayName,
|
||||
invalidate,
|
||||
clear,
|
||||
getSize,
|
||||
getAllCachedIds,
|
||||
getCacheRef,
|
||||
};
|
||||
}
|
||||
225
src-old/hooks/moderation/useQueueQuery.ts
Normal file
225
src-old/hooks/moderation/useQueueQuery.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
/**
|
||||
* TanStack Query hook for moderation queue data fetching
|
||||
*
|
||||
* Wraps the existing fetchSubmissions query builder with React Query
|
||||
* to provide automatic caching, deduplication, and background refetching.
|
||||
*/
|
||||
|
||||
import { useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { fetchSubmissions, type QueryConfig } from '@/lib/moderation/queries';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getErrorMessage } from '@/lib/errorHandler';
|
||||
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
|
||||
import { validateModerationItems } from '@/lib/moderation/validation';
|
||||
import type {
|
||||
ModerationItem,
|
||||
EntityFilter,
|
||||
StatusFilter,
|
||||
QueueTab,
|
||||
SortField,
|
||||
SortDirection
|
||||
} from '@/types/moderation';
|
||||
|
||||
/**
|
||||
* Get specific, actionable error message based on error type
|
||||
*/
|
||||
function getSpecificErrorMessage(error: unknown): string {
|
||||
// Offline detection
|
||||
if (!navigator.onLine) {
|
||||
return 'You appear to be offline. Check your internet connection and try again.';
|
||||
}
|
||||
|
||||
// Timeout
|
||||
if (error instanceof Error && error.name === 'AbortError') {
|
||||
return 'Request timed out. The server is taking too long to respond. Please try again.';
|
||||
}
|
||||
|
||||
// Check for Supabase-specific errors
|
||||
if (typeof error === 'object' && error !== null) {
|
||||
const err = error as any;
|
||||
|
||||
// 500 errors
|
||||
if (err.status === 500 || err.code === '500') {
|
||||
return 'Server error occurred. Our team has been notified. Please try again in a few minutes.';
|
||||
}
|
||||
|
||||
// 429 Rate limiting
|
||||
if (err.status === 429 || err.message?.includes('rate limit')) {
|
||||
return 'Too many requests. Please wait a moment before trying again.';
|
||||
}
|
||||
|
||||
// Authentication errors
|
||||
if (err.status === 401 || err.message?.includes('JWT')) {
|
||||
return 'Your session has expired. Please refresh the page and sign in again.';
|
||||
}
|
||||
|
||||
// Permission errors
|
||||
if (err.status === 403 || err.message?.includes('permission')) {
|
||||
return 'You do not have permission to access the moderation queue.';
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback
|
||||
return getErrorMessage(error) || 'Failed to load moderation queue. Please try again.';
|
||||
}
|
||||
|
||||
/**
 * Configuration for the moderation queue query.
 *
 * Every field below participates in the query key, so changing any of
 * them produces a distinct cache entry and triggers a (re)fetch.
 */
export interface UseQueueQueryConfig {
  /** User making the query; when undefined the query is disabled */
  userId: string | undefined;

  /** Whether user is admin */
  isAdmin: boolean;

  /** Whether user is superuser */
  isSuperuser: boolean;

  /** Entity filter */
  entityFilter: EntityFilter;

  /** Status filter */
  statusFilter: StatusFilter;

  /** Active tab */
  tab: QueueTab;

  /** Current page */
  currentPage: number;

  /** Page size */
  pageSize: number;

  /** Sort configuration */
  sortConfig: {
    field: SortField;
    direction: SortDirection;
  };

  /** Whether query is enabled (defaults to true) */
  enabled?: boolean;
}
|
||||
|
||||
/**
 * Return type for useQueueQuery
 */
export interface UseQueueQueryReturn {
  /** Queue items (empty array while loading or on error) */
  items: ModerationItem[];

  /** Total count of items matching the current filters */
  totalCount: number;

  /** Initial loading state (no data yet) */
  isLoading: boolean;

  /** Background refresh in progress (has data already) */
  isRefreshing: boolean;

  /** Any error that occurred */
  error: Error | null;

  /**
   * Manually trigger a refetch.
   * NOTE(review): Promise<any> leaks `any` to callers — consider typing as
   * TanStack Query's refetch result type.
   */
  refetch: () => Promise<any>;

  /** Invalidate this query (triggers background refetch) */
  invalidate: () => Promise<void>;
}
|
||||
|
||||
/**
|
||||
* Hook to fetch moderation queue data using TanStack Query
|
||||
*/
|
||||
export function useQueueQuery(config: UseQueueQueryConfig): UseQueueQueryReturn {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
// Build query config for fetchSubmissions
|
||||
const queryConfig: QueryConfig = {
|
||||
userId: config.userId || '',
|
||||
isAdmin: config.isAdmin,
|
||||
isSuperuser: config.isSuperuser,
|
||||
entityFilter: config.entityFilter,
|
||||
statusFilter: config.statusFilter,
|
||||
tab: config.tab,
|
||||
currentPage: config.currentPage,
|
||||
pageSize: config.pageSize,
|
||||
sortConfig: config.sortConfig,
|
||||
};
|
||||
|
||||
// Create stable query key (TanStack Query uses this for caching/deduplication)
|
||||
// Include user context to ensure proper cache isolation per user/role
|
||||
const queryKey = [
|
||||
'moderation-queue',
|
||||
config.userId,
|
||||
config.isAdmin,
|
||||
config.isSuperuser,
|
||||
config.entityFilter,
|
||||
config.statusFilter,
|
||||
config.tab,
|
||||
config.currentPage,
|
||||
config.pageSize,
|
||||
config.sortConfig.field,
|
||||
config.sortConfig.direction,
|
||||
];
|
||||
|
||||
// Execute query
|
||||
const query = useQuery({
|
||||
queryKey,
|
||||
queryFn: async () => {
|
||||
logger.log('🔍 [TanStack Query] Fetching queue data:', queryKey);
|
||||
|
||||
// Create timeout controller (30s timeout)
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), 30000);
|
||||
|
||||
try {
|
||||
const result = await fetchSubmissions(supabase, queryConfig);
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
if (result.error) {
|
||||
const specificMessage = getSpecificErrorMessage(result.error);
|
||||
// Error already captured in context
|
||||
throw new Error(specificMessage);
|
||||
}
|
||||
|
||||
// Validate data shape before returning
|
||||
const validation = validateModerationItems(result.submissions);
|
||||
if (!validation.success) {
|
||||
// Invalid data shape
|
||||
throw new Error(validation.error || 'Invalid data format');
|
||||
}
|
||||
|
||||
logger.log('✅ [TanStack Query] Fetched', validation.data!.length, 'items');
|
||||
return { ...result, submissions: validation.data! };
|
||||
} catch (error) {
|
||||
clearTimeout(timeoutId);
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
enabled: config.enabled !== false && !!config.userId,
|
||||
staleTime: MODERATION_CONSTANTS.QUERY_STALE_TIME,
|
||||
gcTime: MODERATION_CONSTANTS.QUERY_GC_TIME,
|
||||
retry: MODERATION_CONSTANTS.QUERY_RETRY_COUNT,
|
||||
retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 30000),
|
||||
networkMode: 'offlineFirst', // Handle offline gracefully
|
||||
meta: {
|
||||
errorMessage: 'Failed to load moderation queue',
|
||||
},
|
||||
});
|
||||
|
||||
// Invalidate helper
|
||||
const invalidate = async () => {
|
||||
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
|
||||
};
|
||||
|
||||
return {
|
||||
items: query.data?.submissions || [],
|
||||
totalCount: query.data?.totalCount || 0,
|
||||
isLoading: query.isLoading,
|
||||
isRefreshing: query.isFetching && !query.isLoading,
|
||||
error: query.error as Error | null,
|
||||
refetch: query.refetch,
|
||||
invalidate,
|
||||
};
|
||||
}
|
||||
508
src-old/hooks/moderation/useRealtimeSubscriptions.ts
Normal file
508
src-old/hooks/moderation/useRealtimeSubscriptions.ts
Normal file
@@ -0,0 +1,508 @@
|
||||
/**
|
||||
* Realtime Subscriptions Hook for Moderation Queue
|
||||
*
|
||||
* Manages all Supabase realtime subscriptions for the moderation queue system.
|
||||
* Handles INSERT and UPDATE events with debouncing, filtering, and optimistic update protection.
|
||||
*/
|
||||
|
||||
import { useEffect, useRef, useState, useCallback } from 'react';
|
||||
import { useQueryClient } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { getErrorMessage } from '@/lib/errorHandler';
|
||||
import { MODERATION_CONSTANTS } from '@/lib/moderation/constants';
|
||||
import type { RealtimeChannel, RealtimePostgresChangesPayload } from '@supabase/supabase-js';
|
||||
import type { Json } from '@/integrations/supabase/types';
|
||||
import type { ModerationItem, EntityFilter, StatusFilter } from '@/types/moderation';
|
||||
import type { useEntityCache } from './useEntityCache';
|
||||
import type { useProfileCache } from './useProfileCache';
|
||||
import {
|
||||
matchesEntityFilter,
|
||||
matchesStatusFilter,
|
||||
hasItemChanged,
|
||||
buildModerationItem,
|
||||
} from '@/lib/moderation/realtime';
|
||||
|
||||
/**
 * Type-safe interface for submission content from realtime events.
 * All fields are optional — the payload shape varies by submission type.
 */
interface SubmissionContent {
  action?: string;
  name?: string;
  entity_slug?: string;
  entity_name?: string;
  entity_id?: string;
  park_id?: string;
}

// Cache hook return types, derived from the hooks themselves so this file
// stays in sync with their implementations automatically.
type EntityCacheReturn = ReturnType<typeof useEntityCache>;
type ProfileCacheReturn = ReturnType<typeof useProfileCache>;
|
||||
|
||||
/**
 * Configuration for realtime subscriptions.
 *
 * Supplied by the queue container; callbacks are invoked from Supabase
 * postgres_changes events after filtering and debouncing.
 */
export interface RealtimeSubscriptionConfig {
  /** Whether realtime subscriptions are enabled */
  enabled: boolean;

  /** Current filter configuration */
  filters: {
    entityFilter: EntityFilter;
    statusFilter: StatusFilter;
  };

  /** Callback when a new item is detected */
  onNewItem: (item: ModerationItem) => void;

  /** Callback when an item is updated */
  onUpdateItem: (item: ModerationItem, shouldRemove: boolean) => void;

  /** Callback when an item is removed from queue */
  onItemRemoved: (itemId: string) => void;

  /** Pause subscriptions when tab is hidden (default: true) */
  pauseWhenHidden?: boolean;

  /** Debounce delay for UPDATE events in milliseconds */
  debounceMs?: number;

  /** Entity cache for resolving entity names */
  entityCache: EntityCacheReturn;

  /** Profile cache for resolving user information */
  profileCache: ProfileCacheReturn;

  /** Set of recently removed IDs (for optimistic updates) */
  recentlyRemovedIds: Set<string>;

  /** Set of IDs currently being interacted with */
  interactingWithIds: Set<string>;

  /** Current items in queue (for comparison) - using ref to avoid reconnections (optional) */
  currentItemsRef?: React.MutableRefObject<ModerationItem[]>;
}
|
||||
|
||||
/**
 * Return type for useRealtimeSubscriptions hook
 */
export interface UseRealtimeSubscriptionsReturn {
  /** Whether subscriptions are currently connected */
  isConnected: boolean;

  /** Current connection status */
  channelStatus: 'connected' | 'disconnected' | 'error';

  /** Manually reconnect subscriptions (tears down and re-subscribes) */
  reconnect: () => void;
}
|
||||
|
||||
/**
|
||||
* Hook to manage realtime subscriptions for the moderation queue
|
||||
*/
|
||||
export function useRealtimeSubscriptions(
|
||||
config: RealtimeSubscriptionConfig
|
||||
): UseRealtimeSubscriptionsReturn {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
enabled,
|
||||
filters,
|
||||
onNewItem,
|
||||
onUpdateItem,
|
||||
onItemRemoved,
|
||||
pauseWhenHidden = true,
|
||||
debounceMs = MODERATION_CONSTANTS.REALTIME_DEBOUNCE_MS,
|
||||
entityCache,
|
||||
profileCache,
|
||||
recentlyRemovedIds,
|
||||
interactingWithIds,
|
||||
currentItemsRef,
|
||||
} = config;
|
||||
|
||||
// Debounce management for UPDATE events
|
||||
const updateDebounceMap = useRef<Map<string, NodeJS.Timeout>>(new Map());
|
||||
|
||||
// Channel references
|
||||
const insertChannelRef = useRef<RealtimeChannel | null>(null);
|
||||
const updateChannelRef = useRef<RealtimeChannel | null>(null);
|
||||
|
||||
// Status tracking
|
||||
const [channelStatus, setChannelStatus] = useState<'connected' | 'disconnected' | 'error'>('disconnected');
|
||||
const [reconnectTrigger, setReconnectTrigger] = useState(0);
|
||||
|
||||
/**
|
||||
* Debounced update handler - waits for rapid changes to settle
|
||||
*/
|
||||
const debouncedUpdate = useCallback((submissionId: string, updateFn: () => void) => {
|
||||
const existingTimeout = updateDebounceMap.current.get(submissionId);
|
||||
if (existingTimeout) {
|
||||
clearTimeout(existingTimeout);
|
||||
}
|
||||
|
||||
const newTimeout = setTimeout(() => {
|
||||
updateFn();
|
||||
updateDebounceMap.current.delete(submissionId);
|
||||
}, debounceMs);
|
||||
|
||||
updateDebounceMap.current.set(submissionId, newTimeout);
|
||||
}, [debounceMs]);
|
||||
|
||||
/**
|
||||
* Fetch full submission details with related data
|
||||
*/
|
||||
const fetchSubmissionDetails = useCallback(async (submissionId: string) => {
|
||||
const { data: submission, error } = await supabase
|
||||
.from('content_submissions')
|
||||
.select(`
|
||||
id, submission_type, status, created_at, user_id,
|
||||
reviewed_at, reviewer_id, reviewer_notes, escalated, assigned_to, locked_until,
|
||||
submission_items (
|
||||
id,
|
||||
item_type,
|
||||
item_data,
|
||||
status
|
||||
),
|
||||
submission_metadata (
|
||||
entity_id,
|
||||
park_id,
|
||||
ride_id
|
||||
)
|
||||
`)
|
||||
.eq('id', submissionId)
|
||||
.single();
|
||||
|
||||
if (error || !submission) {
|
||||
// Silent - will retry on next attempt
|
||||
return null;
|
||||
}
|
||||
|
||||
return submission;
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Resolve entity names for a submission
|
||||
*/
|
||||
const resolveEntityNames = useCallback(async (submission: { submission_type: string; submission_metadata?: any[] }) => {
|
||||
// Get metadata
|
||||
const metadata = Array.isArray(submission.submission_metadata) && submission.submission_metadata.length > 0
|
||||
? submission.submission_metadata[0]
|
||||
: undefined;
|
||||
|
||||
let entityName = 'Unknown';
|
||||
let parkName: string | undefined;
|
||||
|
||||
if (submission.submission_type === 'ride' && metadata?.entity_id) {
|
||||
// Try cache first
|
||||
const cachedRide = entityCache.getCached('rides', metadata.entity_id);
|
||||
if (cachedRide) {
|
||||
entityName = cachedRide.name;
|
||||
if (cachedRide.park_id) {
|
||||
const cachedPark = entityCache.getCached('parks', cachedRide.park_id);
|
||||
if (cachedPark) parkName = cachedPark.name;
|
||||
}
|
||||
} else {
|
||||
const { data: ride } = await supabase
|
||||
.from('rides')
|
||||
.select('id, name, park_id')
|
||||
.eq('id', metadata.entity_id)
|
||||
.maybeSingle();
|
||||
|
||||
if (ride) {
|
||||
entityName = ride.name;
|
||||
entityCache.setCached('rides', metadata.entity_id, ride);
|
||||
|
||||
if (ride.park_id) {
|
||||
const { data: park } = await supabase
|
||||
.from('parks')
|
||||
.select('id, name')
|
||||
.eq('id', ride.park_id)
|
||||
.maybeSingle();
|
||||
|
||||
if (park) {
|
||||
parkName = park.name;
|
||||
entityCache.setCached('parks', ride.park_id, park);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (submission.submission_type === 'park' && metadata?.entity_id) {
|
||||
const cachedPark = entityCache.getCached('parks', metadata.entity_id);
|
||||
if (cachedPark) {
|
||||
entityName = cachedPark.name;
|
||||
} else {
|
||||
const { data: park } = await supabase
|
||||
.from('parks')
|
||||
.select('id, name')
|
||||
.eq('id', metadata.entity_id)
|
||||
.maybeSingle();
|
||||
|
||||
if (park) {
|
||||
entityName = park.name;
|
||||
entityCache.setCached('parks', metadata.entity_id, park);
|
||||
}
|
||||
}
|
||||
} else if (['manufacturer', 'operator', 'designer', 'property_owner'].includes(submission.submission_type) && metadata?.entity_id) {
|
||||
const cachedCompany = entityCache.getCached('companies', metadata.entity_id);
|
||||
if (cachedCompany) {
|
||||
entityName = cachedCompany.name;
|
||||
} else {
|
||||
const { data: company } = await supabase
|
||||
.from('companies')
|
||||
.select('id, name')
|
||||
.eq('id', metadata.entity_id)
|
||||
.maybeSingle();
|
||||
|
||||
if (company) {
|
||||
entityName = company.name;
|
||||
entityCache.setCached('companies', metadata.entity_id, company);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { entityName, parkName };
|
||||
}, [entityCache]);
|
||||
|
||||
/**
|
||||
* Handle new submission INSERT event
|
||||
*/
|
||||
const handleInsert = useCallback(async (payload: RealtimePostgresChangesPayload<any>) => {
|
||||
const newSubmission = payload.new;
|
||||
|
||||
logger.log('🆕 Realtime INSERT:', newSubmission.id);
|
||||
|
||||
// Queue updates if tab is hidden
|
||||
if (pauseWhenHidden && document.hidden) {
|
||||
logger.log('📴 Realtime event received while hidden - queuing for later');
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore if recently removed (optimistic update)
|
||||
if (recentlyRemovedIds.has(newSubmission.id)) {
|
||||
logger.log('⏭️ Ignoring INSERT for recently removed submission:', newSubmission.id);
|
||||
return;
|
||||
}
|
||||
|
||||
// Only process pending/partially_approved submissions
|
||||
if (!['pending', 'partially_approved'].includes(newSubmission.status)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Apply filters
|
||||
if (!matchesEntityFilter(newSubmission, filters.entityFilter)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!matchesStatusFilter(newSubmission, filters.statusFilter)) {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log('✅ NEW submission matches filters, invalidating query:', newSubmission.id);
|
||||
|
||||
// Invalidate the query to trigger background refetch
|
||||
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
|
||||
|
||||
// Call legacy callback for new item notification
|
||||
// (This maintains compatibility with NewItemsAlert component)
|
||||
try {
|
||||
const submission = await fetchSubmissionDetails(newSubmission.id);
|
||||
if (!submission) return;
|
||||
|
||||
const profile = await profileCache.bulkFetch([submission.user_id]);
|
||||
const userProfile = profile[0];
|
||||
|
||||
const { entityName, parkName } = await resolveEntityNames(submission);
|
||||
|
||||
const fullItem = buildModerationItem(
|
||||
submission,
|
||||
userProfile,
|
||||
entityName,
|
||||
parkName
|
||||
);
|
||||
|
||||
onNewItem(fullItem);
|
||||
} catch (error: unknown) {
|
||||
// Silent - notifications are non-critical
|
||||
}
|
||||
}, [
|
||||
filters,
|
||||
pauseWhenHidden,
|
||||
recentlyRemovedIds,
|
||||
queryClient,
|
||||
fetchSubmissionDetails,
|
||||
profileCache,
|
||||
resolveEntityNames,
|
||||
onNewItem,
|
||||
]);
|
||||
|
||||
/**
|
||||
* Handle submission UPDATE event
|
||||
*/
|
||||
const handleUpdate = useCallback(async (payload: RealtimePostgresChangesPayload<any>) => {
|
||||
const updatedSubmission = payload.new;
|
||||
const oldSubmission = payload.old;
|
||||
|
||||
logger.log('🔄 Realtime UPDATE:', updatedSubmission.id);
|
||||
|
||||
// Queue updates if tab is hidden
|
||||
if (pauseWhenHidden && document.hidden) {
|
||||
logger.log('📴 Realtime UPDATE received while hidden - queuing for later');
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore if recently removed (optimistic update in progress)
|
||||
if (recentlyRemovedIds.has(updatedSubmission.id)) {
|
||||
logger.log('⏭️ Ignoring UPDATE for recently removed submission:', updatedSubmission.id);
|
||||
return;
|
||||
}
|
||||
|
||||
// Ignore if currently being interacted with
|
||||
if (interactingWithIds.has(updatedSubmission.id)) {
|
||||
logger.log('⏭️ Ignoring UPDATE for interacting submission:', updatedSubmission.id);
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip debounce for status changes (critical updates)
|
||||
const isStatusChange = oldSubmission && 'status' in oldSubmission
|
||||
&& oldSubmission.status !== updatedSubmission?.status;
|
||||
|
||||
if (isStatusChange) {
|
||||
logger.log('⚡ Status change detected, invalidating immediately');
|
||||
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
|
||||
|
||||
const matchesEntity = matchesEntityFilter(updatedSubmission, filters.entityFilter);
|
||||
const matchesStatus = matchesStatusFilter(updatedSubmission, filters.statusFilter);
|
||||
const shouldBeInQueue = matchesEntity && matchesStatus;
|
||||
|
||||
if (!shouldBeInQueue) {
|
||||
onItemRemoved(updatedSubmission.id);
|
||||
}
|
||||
return; // Skip debounced update
|
||||
}
|
||||
|
||||
// Use debounce for non-critical updates
|
||||
debouncedUpdate(updatedSubmission.id, async () => {
|
||||
logger.log('🔄 Invalidating query due to UPDATE:', updatedSubmission.id);
|
||||
|
||||
// Simply invalidate the query - TanStack Query handles the rest
|
||||
await queryClient.invalidateQueries({ queryKey: ['moderation-queue'] });
|
||||
|
||||
// Legacy callback for compatibility
|
||||
const matchesEntity = matchesEntityFilter(updatedSubmission, filters.entityFilter);
|
||||
const matchesStatus = matchesStatusFilter(updatedSubmission, filters.statusFilter);
|
||||
const shouldBeInQueue = matchesEntity && matchesStatus;
|
||||
|
||||
if (!shouldBeInQueue) {
|
||||
onItemRemoved(updatedSubmission.id);
|
||||
}
|
||||
});
|
||||
}, [
|
||||
filters,
|
||||
pauseWhenHidden,
|
||||
recentlyRemovedIds,
|
||||
interactingWithIds,
|
||||
debouncedUpdate,
|
||||
queryClient,
|
||||
onItemRemoved,
|
||||
]);
|
||||
|
||||
/**
|
||||
* Setup INSERT subscription
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (!enabled) {
|
||||
setChannelStatus('disconnected');
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log('📡 Setting up INSERT subscription');
|
||||
|
||||
const channel = supabase
|
||||
.channel('moderation-new-submissions')
|
||||
.on(
|
||||
'postgres_changes',
|
||||
{
|
||||
event: 'INSERT',
|
||||
schema: 'public',
|
||||
table: 'content_submissions',
|
||||
},
|
||||
handleInsert
|
||||
)
|
||||
.subscribe((status) => {
|
||||
logger.log('INSERT subscription status:', status);
|
||||
if (status === 'SUBSCRIBED') {
|
||||
setChannelStatus('connected');
|
||||
} else if (status === 'CHANNEL_ERROR') {
|
||||
setChannelStatus('error');
|
||||
}
|
||||
});
|
||||
|
||||
insertChannelRef.current = channel;
|
||||
|
||||
return () => {
|
||||
logger.log('🛑 Cleaning up INSERT subscription');
|
||||
supabase.removeChannel(channel);
|
||||
insertChannelRef.current = null;
|
||||
};
|
||||
}, [enabled, handleInsert, reconnectTrigger]);
|
||||
|
||||
/**
|
||||
* Setup UPDATE subscription
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (!enabled) return;
|
||||
|
||||
logger.log('📡 Setting up UPDATE subscription');
|
||||
|
||||
const channel = supabase
|
||||
.channel('moderation-updated-submissions')
|
||||
.on(
|
||||
'postgres_changes',
|
||||
{
|
||||
event: 'UPDATE',
|
||||
schema: 'public',
|
||||
table: 'content_submissions',
|
||||
},
|
||||
handleUpdate
|
||||
)
|
||||
.subscribe((status) => {
|
||||
logger.log('UPDATE subscription status:', status);
|
||||
if (status === 'SUBSCRIBED') {
|
||||
setChannelStatus('connected');
|
||||
} else if (status === 'CHANNEL_ERROR') {
|
||||
setChannelStatus('error');
|
||||
}
|
||||
});
|
||||
|
||||
updateChannelRef.current = channel;
|
||||
|
||||
return () => {
|
||||
logger.log('🛑 Cleaning up UPDATE subscription');
|
||||
supabase.removeChannel(channel);
|
||||
updateChannelRef.current = null;
|
||||
};
|
||||
}, [enabled, handleUpdate, reconnectTrigger]);
|
||||
|
||||
/**
|
||||
* Cleanup debounce timers on unmount
|
||||
*/
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
updateDebounceMap.current.forEach(timeout => clearTimeout(timeout));
|
||||
updateDebounceMap.current.clear();
|
||||
};
|
||||
}, []);
|
||||
|
||||
/**
|
||||
* Manual reconnect function
|
||||
*/
|
||||
const reconnect = useCallback(() => {
|
||||
logger.log('🔄 Manually reconnecting subscriptions...');
|
||||
setReconnectTrigger(prev => prev + 1);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
isConnected: channelStatus === 'connected',
|
||||
channelStatus,
|
||||
reconnect,
|
||||
};
|
||||
}
|
||||
34
src-old/hooks/parks/useParkDetail.ts
Normal file
34
src-old/hooks/parks/useParkDetail.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch park detail with all relations
|
||||
* Includes location, operator, property owner, and rides
|
||||
*/
|
||||
export function useParkDetail(slug: string | undefined, enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.parks.detail(slug || ''),
|
||||
queryFn: async () => {
|
||||
if (!slug) throw new Error('Slug is required');
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`
|
||||
*,
|
||||
location:locations(*),
|
||||
operator:companies!parks_operator_id_fkey(*),
|
||||
property_owner:companies!parks_property_owner_id_fkey(*)
|
||||
`)
|
||||
.eq('slug', slug)
|
||||
.maybeSingle();
|
||||
|
||||
if (error) throw error;
|
||||
return data;
|
||||
},
|
||||
enabled: enabled && !!slug,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000, // 15 minutes
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
28
src-old/hooks/parks/useParkRides.ts
Normal file
28
src-old/hooks/parks/useParkRides.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch all rides for a specific park
|
||||
*/
|
||||
export function useParkRides(parkId: string | undefined, enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.parks.rides(parkId || ''),
|
||||
queryFn: async () => {
|
||||
if (!parkId) throw new Error('Park ID is required');
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select('*')
|
||||
.eq('park_id', parkId)
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled: enabled && !!parkId,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
34
src-old/hooks/parks/useParks.ts
Normal file
34
src-old/hooks/parks/useParks.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
|
||||
interface UseParksOptions {
|
||||
enabled?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch all parks with caching
|
||||
* Loads all parks for client-side filtering
|
||||
*/
|
||||
export function useParks({ enabled = true }: UseParksOptions = {}) {
|
||||
return useQuery({
|
||||
queryKey: ['parks', 'all'],
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select(`
|
||||
*,
|
||||
location:locations(*),
|
||||
operator:companies!parks_operator_id_fkey(*),
|
||||
property_owner:companies!parks_property_owner_id_fkey(*)
|
||||
`)
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000, // 15 minutes
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
28
src-old/hooks/photos/usePhotoCount.ts
Normal file
28
src-old/hooks/photos/usePhotoCount.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch photo count for an entity
|
||||
*/
|
||||
export function usePhotoCount(entityType: string, entityId: string | undefined, enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: ['photos', 'count', entityType, entityId || ''] as const,
|
||||
queryFn: async () => {
|
||||
if (!entityId) return 0;
|
||||
|
||||
const { count, error } = await supabase
|
||||
.from('photos')
|
||||
.select('id', { count: 'exact', head: true })
|
||||
.eq('entity_type', entityType)
|
||||
.eq('entity_id', entityId);
|
||||
|
||||
if (error) throw error;
|
||||
return count || 0;
|
||||
},
|
||||
enabled: enabled && !!entityId,
|
||||
staleTime: 10 * 60 * 1000, // 10 minutes - photo counts change rarely
|
||||
gcTime: 20 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
38
src-old/hooks/reviews/useEntityReviews.ts
Normal file
38
src-old/hooks/reviews/useEntityReviews.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch reviews for a specific entity (park or ride)
|
||||
*/
|
||||
export function useEntityReviews(entityType: 'park' | 'ride', entityId: string | undefined, enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.reviews.entity(entityType, entityId || ''),
|
||||
queryFn: async () => {
|
||||
if (!entityId) return [];
|
||||
|
||||
const query = supabase
|
||||
.from('reviews')
|
||||
.select(`
|
||||
*,
|
||||
profiles!reviews_user_id_fkey(username, avatar_url, display_name)
|
||||
`)
|
||||
.eq('moderation_status', 'approved')
|
||||
.order('created_at', { ascending: false });
|
||||
|
||||
if (entityType === 'park') {
|
||||
query.eq('park_id', entityId);
|
||||
} else {
|
||||
query.eq('ride_id', entityId);
|
||||
}
|
||||
|
||||
const { data, error } = await query;
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled: enabled && !!entityId,
|
||||
staleTime: 3 * 60 * 1000, // 3 minutes
|
||||
gcTime: 10 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
61
src-old/hooks/reviews/useUserReviews.ts
Normal file
61
src-old/hooks/reviews/useUserReviews.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch all reviews by a specific user
|
||||
*/
|
||||
export function useUserReviews(
|
||||
userId: string | undefined,
|
||||
filter: 'all' | 'parks' | 'rides',
|
||||
sortBy: 'date' | 'rating',
|
||||
enabled = true
|
||||
) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.reviews.user(userId || '', filter, sortBy),
|
||||
queryFn: async () => {
|
||||
if (!userId) return [];
|
||||
|
||||
let query = supabase
|
||||
.from('reviews')
|
||||
.select(`
|
||||
id,
|
||||
rating,
|
||||
title,
|
||||
content,
|
||||
visit_date,
|
||||
wait_time_minutes,
|
||||
helpful_votes,
|
||||
moderation_status,
|
||||
created_at,
|
||||
parks:park_id (id, name, slug),
|
||||
rides:ride_id (
|
||||
id,
|
||||
name,
|
||||
slug,
|
||||
parks:park_id (name, slug)
|
||||
)
|
||||
`)
|
||||
.eq('user_id', userId);
|
||||
|
||||
if (filter === 'parks') {
|
||||
query = query.not('park_id', 'is', null);
|
||||
} else if (filter === 'rides') {
|
||||
query = query.not('ride_id', 'is', null);
|
||||
}
|
||||
|
||||
query = query.order(
|
||||
sortBy === 'date' ? 'created_at' : 'rating',
|
||||
{ ascending: false }
|
||||
);
|
||||
|
||||
const { data, error } = await query;
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled: enabled && !!userId,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
51
src-old/hooks/rides/useRideDetail.ts
Normal file
51
src-old/hooks/rides/useRideDetail.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch ride detail with park, manufacturer, and designer
|
||||
*/
|
||||
export function useRideDetail(parkSlug: string | undefined, rideSlug: string | undefined, enabled = true) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.rides.detail(parkSlug || '', rideSlug || ''),
|
||||
queryFn: async () => {
|
||||
if (!parkSlug || !rideSlug) throw new Error('Both park and ride slugs are required');
|
||||
|
||||
// First get park to find park_id
|
||||
const { data: parkData, error: parkError } = await supabase
|
||||
.from('parks')
|
||||
.select('id')
|
||||
.eq('slug', parkSlug)
|
||||
.maybeSingle();
|
||||
|
||||
if (parkError) throw parkError;
|
||||
if (!parkData) return null;
|
||||
|
||||
// Then get ride details
|
||||
const { data: rideData, error: rideError } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks!inner(id, name, slug, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.eq('park_id', parkData.id)
|
||||
.eq('slug', rideSlug)
|
||||
.maybeSingle();
|
||||
|
||||
if (rideError) throw rideError;
|
||||
|
||||
// Add currentParkId for easier access
|
||||
if (rideData) {
|
||||
return { ...rideData, currentParkId: parkData.id };
|
||||
}
|
||||
|
||||
return rideData;
|
||||
},
|
||||
enabled: enabled && !!parkSlug && !!rideSlug,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000, // 15 minutes
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
34
src-old/hooks/rides/useRides.ts
Normal file
34
src-old/hooks/rides/useRides.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
|
||||
interface UseRidesOptions {
|
||||
enabled?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch all rides with caching
|
||||
* Loads all rides for client-side filtering
|
||||
*/
|
||||
export function useRides({ enabled = true }: UseRidesOptions = {}) {
|
||||
return useQuery({
|
||||
queryKey: ['rides', 'all'],
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
*,
|
||||
park:parks!inner(name, slug, location:locations(*)),
|
||||
manufacturer:companies!rides_manufacturer_id_fkey(*),
|
||||
designer:companies!rides_designer_id_fkey(*)
|
||||
`)
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
gcTime: 15 * 60 * 1000, // 15 minutes
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
50
src-old/hooks/rides/useSimilarRides.ts
Normal file
50
src-old/hooks/rides/useSimilarRides.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook to fetch similar rides (same park and category)
|
||||
*/
|
||||
export function useSimilarRides(
|
||||
currentRideId: string | undefined,
|
||||
parkId: string | undefined,
|
||||
category: string | undefined,
|
||||
enabled = true
|
||||
) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.rides.similar(parkId || '', category || '', currentRideId || ''),
|
||||
queryFn: async () => {
|
||||
if (!currentRideId || !parkId || !category) return [];
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('rides')
|
||||
.select(`
|
||||
id,
|
||||
name,
|
||||
slug,
|
||||
image_url,
|
||||
average_rating,
|
||||
status,
|
||||
category,
|
||||
description,
|
||||
max_speed_kmh,
|
||||
max_height_meters,
|
||||
duration_seconds,
|
||||
review_count,
|
||||
park:parks!inner(name, slug)
|
||||
`)
|
||||
.eq('park_id', parkId)
|
||||
.eq('category', category)
|
||||
.neq('id', currentRideId)
|
||||
.order('average_rating', { ascending: false })
|
||||
.limit(4);
|
||||
|
||||
if (error) throw error;
|
||||
return data || [];
|
||||
},
|
||||
enabled: enabled && !!currentRideId && !!parkId && !!category,
|
||||
staleTime: 10 * 60 * 1000, // 10 minutes - similar rides rarely change
|
||||
gcTime: 20 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
53
src-old/hooks/search/useGlobalSearch.ts
Normal file
53
src-old/hooks/search/useGlobalSearch.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
|
||||
/**
|
||||
* Hook for global search across parks, rides, and companies
|
||||
* Searches in parallel and caches results
|
||||
*/
|
||||
export function useGlobalSearch(query: string) {
|
||||
return useQuery({
|
||||
queryKey: queryKeys.search.global(query.toLowerCase()),
|
||||
queryFn: async () => {
|
||||
if (query.length < 2) {
|
||||
return { parks: [], rides: [], companies: [] };
|
||||
}
|
||||
|
||||
const searchTerm = `%${query.toLowerCase()}%`;
|
||||
|
||||
// Run all 3 queries in parallel
|
||||
const [parksResult, ridesResult, companiesResult] = await Promise.all([
|
||||
supabase
|
||||
.from('parks')
|
||||
.select(`*, location:locations(*)`)
|
||||
.or(`name.ilike.${searchTerm},description.ilike.${searchTerm}`)
|
||||
.limit(5),
|
||||
supabase
|
||||
.from('rides')
|
||||
.select(`*, park:parks!inner(name, slug)`)
|
||||
.or(`name.ilike.${searchTerm},description.ilike.${searchTerm}`)
|
||||
.limit(5),
|
||||
supabase
|
||||
.from('companies')
|
||||
.select('id, name, slug, description, company_type, logo_url, average_rating, review_count')
|
||||
.or(`name.ilike.${searchTerm},description.ilike.${searchTerm}`)
|
||||
.limit(3),
|
||||
]);
|
||||
|
||||
if (parksResult.error) throw parksResult.error;
|
||||
if (ridesResult.error) throw ridesResult.error;
|
||||
if (companiesResult.error) throw companiesResult.error;
|
||||
|
||||
return {
|
||||
parks: parksResult.data || [],
|
||||
rides: ridesResult.data || [],
|
||||
companies: companiesResult.data || [],
|
||||
};
|
||||
},
|
||||
enabled: query.length >= 2,
|
||||
staleTime: 2 * 60 * 1000, // 2 minutes - search results fairly stable
|
||||
gcTime: 5 * 60 * 1000,
|
||||
refetchOnWindowFocus: false,
|
||||
});
|
||||
}
|
||||
30
src-old/hooks/use-mobile.tsx
Normal file
30
src-old/hooks/use-mobile.tsx
Normal file
@@ -0,0 +1,30 @@
|
||||
import * as React from "react";
|
||||
|
||||
const MOBILE_BREAKPOINT = 768;
|
||||
|
||||
export function useIsMobile(): boolean | undefined {
|
||||
const [isMobile, setIsMobile] = React.useState<boolean | undefined>(undefined);
|
||||
|
||||
React.useEffect(() => {
|
||||
// Guard against server-side rendering
|
||||
if (typeof window === 'undefined') return;
|
||||
|
||||
const mql = window.matchMedia(`(max-width: ${MOBILE_BREAKPOINT - 1}px)`);
|
||||
|
||||
// Use MediaQueryList.matches instead of window.innerWidth for consistency
|
||||
const onChange = () => {
|
||||
setIsMobile(mql.matches);
|
||||
};
|
||||
|
||||
// Set initial value
|
||||
setIsMobile(mql.matches);
|
||||
|
||||
// Listen for changes
|
||||
mql.addEventListener("change", onChange);
|
||||
|
||||
// Cleanup listener on unmount
|
||||
return () => mql.removeEventListener("change", onChange);
|
||||
}, []);
|
||||
|
||||
return isMobile;
|
||||
}
|
||||
186
src-old/hooks/use-toast.ts
Normal file
186
src-old/hooks/use-toast.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
import * as React from "react";
|
||||
|
||||
import type { ToastActionElement, ToastProps } from "@/components/ui/toast";
|
||||
|
||||
const TOAST_LIMIT = 1;
|
||||
const TOAST_REMOVE_DELAY = 1000000;
|
||||
|
||||
type ToasterToast = ToastProps & {
|
||||
id: string;
|
||||
title?: React.ReactNode;
|
||||
description?: React.ReactNode;
|
||||
action?: ToastActionElement;
|
||||
};
|
||||
|
||||
const actionTypes = {
|
||||
ADD_TOAST: "ADD_TOAST",
|
||||
UPDATE_TOAST: "UPDATE_TOAST",
|
||||
DISMISS_TOAST: "DISMISS_TOAST",
|
||||
REMOVE_TOAST: "REMOVE_TOAST",
|
||||
} as const;
|
||||
|
||||
let count = 0;
|
||||
|
||||
function genId() {
|
||||
count = (count + 1) % Number.MAX_SAFE_INTEGER;
|
||||
return count.toString();
|
||||
}
|
||||
|
||||
type ActionType = typeof actionTypes;
|
||||
|
||||
type Action =
|
||||
| {
|
||||
type: ActionType["ADD_TOAST"];
|
||||
toast: ToasterToast;
|
||||
}
|
||||
| {
|
||||
type: ActionType["UPDATE_TOAST"];
|
||||
toast: Partial<ToasterToast>;
|
||||
}
|
||||
| {
|
||||
type: ActionType["DISMISS_TOAST"];
|
||||
toastId?: ToasterToast["id"];
|
||||
}
|
||||
| {
|
||||
type: ActionType["REMOVE_TOAST"];
|
||||
toastId?: ToasterToast["id"];
|
||||
};
|
||||
|
||||
interface State {
|
||||
toasts: ToasterToast[];
|
||||
}
|
||||
|
||||
const toastTimeouts = new Map<string, ReturnType<typeof setTimeout>>();
|
||||
|
||||
const addToRemoveQueue = (toastId: string) => {
|
||||
if (toastTimeouts.has(toastId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const timeout = setTimeout(() => {
|
||||
toastTimeouts.delete(toastId);
|
||||
dispatch({
|
||||
type: "REMOVE_TOAST",
|
||||
toastId: toastId,
|
||||
});
|
||||
}, TOAST_REMOVE_DELAY);
|
||||
|
||||
toastTimeouts.set(toastId, timeout);
|
||||
};
|
||||
|
||||
export const reducer = (state: State, action: Action): State => {
|
||||
switch (action.type) {
|
||||
case "ADD_TOAST":
|
||||
return {
|
||||
...state,
|
||||
toasts: [action.toast, ...state.toasts].slice(0, TOAST_LIMIT),
|
||||
};
|
||||
|
||||
case "UPDATE_TOAST":
|
||||
return {
|
||||
...state,
|
||||
toasts: state.toasts.map((t) => (t.id === action.toast.id ? { ...t, ...action.toast } : t)),
|
||||
};
|
||||
|
||||
case "DISMISS_TOAST": {
|
||||
const { toastId } = action;
|
||||
|
||||
// ! Side effects ! - This could be extracted into a dismissToast() action,
|
||||
// but I'll keep it here for simplicity
|
||||
if (toastId) {
|
||||
addToRemoveQueue(toastId);
|
||||
} else {
|
||||
state.toasts.forEach((toast) => {
|
||||
addToRemoveQueue(toast.id);
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
toasts: state.toasts.map((t) =>
|
||||
t.id === toastId || toastId === undefined
|
||||
? {
|
||||
...t,
|
||||
open: false,
|
||||
}
|
||||
: t,
|
||||
),
|
||||
};
|
||||
}
|
||||
case "REMOVE_TOAST":
|
||||
if (action.toastId === undefined) {
|
||||
return {
|
||||
...state,
|
||||
toasts: [],
|
||||
};
|
||||
}
|
||||
return {
|
||||
...state,
|
||||
toasts: state.toasts.filter((t) => t.id !== action.toastId),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
const listeners: Array<(state: State) => void> = [];
|
||||
|
||||
let memoryState: State = { toasts: [] };
|
||||
|
||||
function dispatch(action: Action) {
|
||||
memoryState = reducer(memoryState, action);
|
||||
listeners.forEach((listener) => {
|
||||
listener(memoryState);
|
||||
});
|
||||
}
|
||||
|
||||
type Toast = Omit<ToasterToast, "id">;
|
||||
|
||||
function toast({ ...props }: Toast) {
|
||||
const id = genId();
|
||||
|
||||
const update = (props: ToasterToast) =>
|
||||
dispatch({
|
||||
type: "UPDATE_TOAST",
|
||||
toast: { ...props, id },
|
||||
});
|
||||
const dismiss = () => dispatch({ type: "DISMISS_TOAST", toastId: id });
|
||||
|
||||
dispatch({
|
||||
type: "ADD_TOAST",
|
||||
toast: {
|
||||
...props,
|
||||
id,
|
||||
open: true,
|
||||
onOpenChange: (open) => {
|
||||
if (!open) dismiss();
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
id: id,
|
||||
dismiss,
|
||||
update,
|
||||
};
|
||||
}
|
||||
|
||||
function useToast() {
|
||||
const [state, setState] = React.useState<State>(memoryState);
|
||||
|
||||
React.useEffect(() => {
|
||||
listeners.push(setState);
|
||||
return () => {
|
||||
const index = listeners.indexOf(setState);
|
||||
if (index > -1) {
|
||||
listeners.splice(index, 1);
|
||||
}
|
||||
};
|
||||
}, [state]);
|
||||
|
||||
return {
|
||||
...state,
|
||||
toast,
|
||||
dismiss: (toastId?: string) => dispatch({ type: "DISMISS_TOAST", toastId }),
|
||||
};
|
||||
}
|
||||
|
||||
export { useToast, toast };
|
||||
73
src-old/hooks/useAdminGuard.ts
Normal file
73
src-old/hooks/useAdminGuard.ts
Normal file
@@ -0,0 +1,73 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import type { User } from '@supabase/supabase-js';
|
||||
import { useAuth } from './useAuth';
|
||||
import { useUserRole } from './useUserRole';
|
||||
import { useRequireMFA } from './useRequireMFA';
|
||||
|
||||
export interface AdminGuardState {
|
||||
/** Whether auth/role/MFA checks are still loading */
|
||||
isLoading: boolean;
|
||||
|
||||
/** Whether user is authenticated and authorized */
|
||||
isAuthorized: boolean;
|
||||
|
||||
/** Whether user needs to enroll in MFA */
|
||||
needsMFA: boolean;
|
||||
|
||||
/** Current authenticated user */
|
||||
user: User | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Consolidated admin guard hook for all admin pages
|
||||
*
|
||||
* Handles:
|
||||
* - Authentication check (redirects to /auth)
|
||||
* - Role authorization check (redirects to /)
|
||||
* - MFA enrollment check
|
||||
* - Loading states
|
||||
*
|
||||
* @param requireMFA - Whether to enforce MFA requirement (default: true)
|
||||
* @returns AdminGuardState with loading, authorization, and MFA status
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* const { isLoading, isAuthorized, needsMFA } = useAdminGuard();
|
||||
*
|
||||
* if (isLoading) return <LoadingSkeleton />;
|
||||
* if (!isAuthorized) return null;
|
||||
* if (needsMFA) return <MFARequiredAlert />;
|
||||
*
|
||||
* return <AdminContent />;
|
||||
* ```
|
||||
*/
|
||||
export function useAdminGuard(requireMFA: boolean = true): AdminGuardState {
|
||||
const { user, loading: authLoading } = useAuth();
|
||||
const { isModerator, loading: roleLoading } = useUserRole();
|
||||
const { needsEnrollment, needsVerification, loading: mfaLoading } = useRequireMFA();
|
||||
const navigate = useNavigate();
|
||||
|
||||
// Auto-redirect based on auth state
|
||||
useEffect(() => {
|
||||
if (!authLoading && !roleLoading) {
|
||||
if (!user) {
|
||||
navigate('/auth');
|
||||
} else if (!isModerator()) {
|
||||
navigate('/');
|
||||
}
|
||||
}
|
||||
}, [user, authLoading, roleLoading, navigate, isModerator]);
|
||||
|
||||
const isLoading = authLoading || roleLoading || mfaLoading;
|
||||
const isAuthorized = !!user && isModerator();
|
||||
// Block access if EITHER not enrolled OR session is at AAL1 (needs verification)
|
||||
const needsMFA = requireMFA && (needsEnrollment || needsVerification);
|
||||
|
||||
return {
|
||||
isLoading,
|
||||
isAuthorized,
|
||||
needsMFA,
|
||||
user,
|
||||
};
|
||||
}
|
||||
39
src-old/hooks/useAdminRoutePreload.ts
Normal file
39
src-old/hooks/useAdminRoutePreload.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useAuth } from './useAuth';
|
||||
import { useUserRole } from './useUserRole';
|
||||
|
||||
/**
|
||||
* Preloads admin route chunks for authenticated moderators/admins
|
||||
* This reduces chunk load failures by warming up the browser cache
|
||||
*/
|
||||
export function useAdminRoutePreload() {
|
||||
const { user } = useAuth();
|
||||
const { isModerator, isAdmin } = useUserRole();
|
||||
|
||||
useEffect(() => {
|
||||
// Only preload if user has admin access
|
||||
if (!user || (!isModerator && !isAdmin)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Preload admin chunks after a short delay to avoid blocking initial page load
|
||||
const preloadTimer = setTimeout(() => {
|
||||
// Preload critical admin routes
|
||||
const adminRoutes = [
|
||||
() => import('../pages/AdminDashboard'),
|
||||
() => import('../pages/AdminModeration'),
|
||||
() => import('../pages/AdminReports'),
|
||||
];
|
||||
|
||||
// Start preloading (but don't await - let it happen in background)
|
||||
adminRoutes.forEach(route => {
|
||||
route().catch(err => {
|
||||
// Silently fail - preloading is a performance optimization
|
||||
console.debug('Admin route preload failed:', err);
|
||||
});
|
||||
});
|
||||
}, 2000); // Wait 2 seconds after auth to avoid blocking initial render
|
||||
|
||||
return () => clearTimeout(preloadTimer);
|
||||
}, [user, isModerator, isAdmin]);
|
||||
}
|
||||
228
src-old/hooks/useAdminSettings.ts
Normal file
228
src-old/hooks/useAdminSettings.ts
Normal file
@@ -0,0 +1,228 @@
|
||||
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { useAuth } from './useAuth';
|
||||
import { useUserRole } from './useUserRole';
|
||||
import { useToast } from './use-toast';
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import type { Json } from '@/integrations/supabase/types';
|
||||
import { handleError } from '@/lib/errorHandler';
|
||||
|
||||
interface AdminSetting {
|
||||
id: string;
|
||||
setting_key: string;
|
||||
setting_value: unknown;
|
||||
category: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
export function useAdminSettings() {
|
||||
const { user } = useAuth();
|
||||
const { isSuperuser } = useUserRole();
|
||||
const { toast } = useToast();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const {
|
||||
data: settings,
|
||||
isLoading,
|
||||
error
|
||||
} = useQuery({
|
||||
queryKey: ['admin-settings'],
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('admin_settings')
|
||||
.select('*')
|
||||
.order('category', { ascending: true });
|
||||
|
||||
if (error) throw error;
|
||||
return data as AdminSetting[];
|
||||
},
|
||||
enabled: !!user && isSuperuser()
|
||||
});
|
||||
|
||||
// Memoize settings into a stable map to prevent cascading re-renders
|
||||
const settingsMap = useMemo(() => {
|
||||
if (!settings) return {};
|
||||
return Object.fromEntries(
|
||||
settings.map(s => [s.setting_key, s.setting_value])
|
||||
);
|
||||
}, [settings]);
|
||||
|
||||
const updateSettingMutation = useMutation({
|
||||
mutationFn: async ({ key, value }: { key: string; value: unknown }) => {
|
||||
const { error } = await supabase
|
||||
.from('admin_settings')
|
||||
.update({
|
||||
setting_value: value as Json,
|
||||
updated_by: user?.id,
|
||||
updated_at: new Date().toISOString()
|
||||
})
|
||||
.eq('setting_key', key);
|
||||
|
||||
if (error) throw error;
|
||||
return { key, value };
|
||||
},
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({ queryKey: ['admin-settings'] });
|
||||
toast({
|
||||
title: "Setting Updated",
|
||||
description: "The setting has been saved successfully.",
|
||||
});
|
||||
},
|
||||
onError: (error: Error, variables) => {
|
||||
handleError(error, {
|
||||
action: 'Update Admin Setting',
|
||||
userId: user?.id,
|
||||
metadata: {
|
||||
settingKey: variables.key,
|
||||
attemptedValue: variables.value
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const getSettingValue = useCallback((key: string, defaultValue: unknown = null) => {
|
||||
return settingsMap[key] ?? defaultValue;
|
||||
}, [settingsMap]);
|
||||
|
||||
const updateSetting = async (key: string, value: unknown) => {
|
||||
return updateSettingMutation.mutateAsync({ key, value });
|
||||
};
|
||||
|
||||
// Helper functions for common settings (memoized with useCallback for stable references)
|
||||
const getAutoFlagThreshold = useCallback(() => {
|
||||
return parseInt(String(getSettingValue('moderation.auto_flag_threshold', '3')));
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getRequireApproval = useCallback(() => {
|
||||
const value = getSettingValue('moderation.require_approval', 'true');
|
||||
return value === true || value === 'true';
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getBanDurations = useCallback(() => {
|
||||
const value = getSettingValue('moderation.ban_durations', ['1d', '7d', '30d', 'permanent']);
|
||||
return Array.isArray(value) ? value : JSON.parse(String(value || '[]'));
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getEmailAlertsEnabled = useCallback(() => {
|
||||
const value = getSettingValue('notifications.email_alerts', 'true');
|
||||
return value === true || value === 'true';
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getReportThreshold = useCallback(() => {
|
||||
return parseInt(String(getSettingValue('notifications.report_threshold', '5')));
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getAuditRetentionDays = useCallback(() => {
|
||||
return parseInt(String(getSettingValue('system.audit_retention_days', '365')));
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getAutoCleanupEnabled = useCallback(() => {
|
||||
const value = getSettingValue('system.auto_cleanup', 'false');
|
||||
return value === true || value === 'true';
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getAdminPanelRefreshMode = useCallback((): 'auto' | 'manual' => {
|
||||
const value = getSettingValue('system.admin_panel_refresh_mode', 'auto');
|
||||
// Remove quotes if they exist (JSON string stored in DB)
|
||||
const cleanValue = typeof value === 'string' ? value.replace(/"/g, '') : String(value);
|
||||
return (cleanValue === 'manual' ? 'manual' : 'auto') as 'auto' | 'manual';
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getAdminPanelPollInterval = useCallback(() => {
|
||||
const value = getSettingValue('system.admin_panel_poll_interval', 30);
|
||||
return parseInt(value?.toString() || '30') * 1000; // Convert to milliseconds
|
||||
}, [getSettingValue]);
|
||||
|
||||
/**
|
||||
* Get auto-refresh strategy setting
|
||||
* Returns: 'merge' | 'replace' | 'notify'
|
||||
*/
|
||||
const getAutoRefreshStrategy = useCallback((): 'merge' | 'replace' | 'notify' => {
|
||||
const value = getSettingValue('auto_refresh_strategy', 'merge');
|
||||
const cleanValue = typeof value === 'string' ? value.replace(/"/g, '') : value;
|
||||
return cleanValue as 'merge' | 'replace' | 'notify';
|
||||
}, [getSettingValue]);
|
||||
|
||||
/**
|
||||
* Get preserve interaction state setting
|
||||
* Returns: boolean
|
||||
*/
|
||||
const getPreserveInteractionState = useCallback((): boolean => {
|
||||
const value = getSettingValue('preserve_interaction_state', 'true');
|
||||
const cleanValue = typeof value === 'string' ? value.replace(/"/g, '') : value;
|
||||
return cleanValue === 'true' || cleanValue === true;
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getNotificationRecipients = useCallback(() => {
|
||||
return getSettingValue('notifications.recipients', []);
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getUseRealtimeQueue = useCallback((): boolean => {
|
||||
const value = getSettingValue('system.use_realtime_queue', 'true');
|
||||
const cleanValue = typeof value === 'string' ? value.replace(/"/g, '') : value;
|
||||
return cleanValue === 'true' || cleanValue === true;
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getRefreshOnTabVisible = useCallback((): boolean => {
|
||||
const value = getSettingValue('system.refresh_on_tab_visible', 'false');
|
||||
const cleanValue = typeof value === 'string' ? value.replace(/"/g, '') : value;
|
||||
return cleanValue === 'true' || cleanValue === true;
|
||||
}, [getSettingValue]);
|
||||
|
||||
const getSetting = useCallback((key: string) => {
|
||||
return settings?.find(s => s.setting_key === key);
|
||||
}, [settings]);
|
||||
|
||||
const getSettingsByCategory = useCallback((category: string) => {
|
||||
return settings?.filter(s => s.category === category) || [];
|
||||
}, [settings]);
|
||||
|
||||
return useMemo(() => ({
|
||||
settings,
|
||||
isLoading,
|
||||
error,
|
||||
updateSetting,
|
||||
isUpdating: updateSettingMutation.isPending,
|
||||
getSetting,
|
||||
getSettingValue,
|
||||
getSettingsByCategory,
|
||||
// Helper functions
|
||||
getAutoFlagThreshold,
|
||||
getRequireApproval,
|
||||
getBanDurations,
|
||||
getEmailAlertsEnabled,
|
||||
getNotificationRecipients,
|
||||
getReportThreshold,
|
||||
getAuditRetentionDays,
|
||||
getAutoCleanupEnabled,
|
||||
getAdminPanelRefreshMode,
|
||||
getAdminPanelPollInterval,
|
||||
getAutoRefreshStrategy,
|
||||
getPreserveInteractionState,
|
||||
getUseRealtimeQueue,
|
||||
getRefreshOnTabVisible,
|
||||
}), [
|
||||
settings,
|
||||
isLoading,
|
||||
error,
|
||||
updateSetting,
|
||||
updateSettingMutation.isPending,
|
||||
getSetting,
|
||||
getSettingValue,
|
||||
getSettingsByCategory,
|
||||
getAutoFlagThreshold,
|
||||
getRequireApproval,
|
||||
getBanDurations,
|
||||
getEmailAlertsEnabled,
|
||||
getNotificationRecipients,
|
||||
getReportThreshold,
|
||||
getAuditRetentionDays,
|
||||
getAutoCleanupEnabled,
|
||||
getAdminPanelRefreshMode,
|
||||
getAdminPanelPollInterval,
|
||||
getAutoRefreshStrategy,
|
||||
getPreserveInteractionState,
|
||||
getUseRealtimeQueue,
|
||||
getRefreshOnTabVisible,
|
||||
]);
|
||||
}
|
||||
185
src-old/hooks/useAdvancedRideSearch.ts
Normal file
185
src-old/hooks/useAdvancedRideSearch.ts
Normal file
@@ -0,0 +1,185 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { TechnicalSpecFilter, CoasterStatFilter } from '@/components/search/AdvancedRideFilters';
|
||||
import { useDebounce } from './useDebounce';
|
||||
import { handleError } from '@/lib/errorHandler';
|
||||
|
||||
/**
 * Options accepted by useAdvancedRideSearch. All provided filters are
 * combined with AND semantics; every field is optional.
 */
interface AdvancedSearchOptions {
  query?: string; // free-text match against ride name/description (debounced)
  category?: string; // exact ride category value
  manufacturer?: string; // manufacturer company id
  technicalSpecFilters?: TechnicalSpecFilter[]; // applied client-side after the base query
  coasterStatFilters?: CoasterStatFilter[]; // numeric stat range filters, applied client-side
  speedMin?: number; // inclusive lower bound on max_speed_kmh
  speedMax?: number; // inclusive upper bound on max_speed_kmh
  heightMin?: number; // inclusive lower bound on max_height_meters
  heightMax?: number; // inclusive upper bound on max_height_meters
  limit?: number; // max rows returned by the base query (default 50)
}
|
||||
|
||||
/**
 * Search rides with combined filters: debounced free-text, category,
 * manufacturer, and speed/height ranges are applied server-side in one query;
 * technical-spec and coaster-stat filters are then applied client-side by
 * fetching matching ride ids from secondary tables and intersecting.
 *
 * NOTE(review): `debouncedQuery` is interpolated directly into the PostgREST
 * `.or()` filter string — input containing ',' or '(' will break or alter the
 * filter; consider sanitizing. Left unchanged here.
 * NOTE(review): when ONLY speed/height bounds are set, the early-return guard
 * below skips the search entirely — confirm that is intended.
 * NOTE(review): in-flight responses are not cancelled when options change, so
 * a slow earlier search could overwrite a newer result — verify acceptable.
 */
export function useAdvancedRideSearch(options: AdvancedSearchOptions) {
  const [results, setResults] = useState<any[]>([]);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  // Debounce typing so we don't fire a query per keystroke.
  const debouncedQuery = useDebounce(options.query || '', 300);

  useEffect(() => {
    const performSearch = async () => {
      // With no text and no discrete filters there is nothing to search for.
      if (!debouncedQuery && !options.category && !options.manufacturer &&
          (!options.technicalSpecFilters || options.technicalSpecFilters.length === 0) &&
          (!options.coasterStatFilters || options.coasterStatFilters.length === 0)) {
        setResults([]);
        return;
      }

      setLoading(true);
      setError(null);

      try {
        // Base query: rides joined with their park and manufacturer company.
        let query = supabase
          .from('rides')
          .select(`
            *,
            parks!inner(id, name, slug),
            companies!rides_manufacturer_id_fkey(id, name, slug)
          `);

        // Basic text search
        if (debouncedQuery) {
          query = query.or(`name.ilike.%${debouncedQuery}%,description.ilike.%${debouncedQuery}%`);
        }

        // Category filter
        if (options.category) {
          query = query.eq('category', options.category);
        }

        // Manufacturer filter
        if (options.manufacturer) {
          query = query.eq('manufacturer_id', options.manufacturer);
        }

        // Speed range filter
        if (options.speedMin !== undefined) {
          query = query.gte('max_speed_kmh', options.speedMin);
        }
        if (options.speedMax !== undefined) {
          query = query.lte('max_speed_kmh', options.speedMax);
        }

        // Height range filter
        if (options.heightMin !== undefined) {
          query = query.gte('max_height_meters', options.heightMin);
        }
        if (options.heightMax !== undefined) {
          query = query.lte('max_height_meters', options.heightMax);
        }

        query = query.limit(options.limit || 50);

        const { data: initialResults, error: queryError } = await query;

        if (queryError) throw queryError;

        let filteredResults = initialResults || [];

        // Apply technical specification filters
        // Each filter narrows filteredResults to rides whose spec row matches.
        if (options.technicalSpecFilters && options.technicalSpecFilters.length > 0) {
          for (const filter of options.technicalSpecFilters) {
            if (!filter.spec_name) continue;

            const { data: specsData, error: specsError } = await supabase
              .from('ride_technical_specifications')
              .select('ride_id, spec_name, spec_value')
              .eq('spec_name', filter.spec_name);

            if (specsError) throw specsError;

            const matchingRideIds = new Set<string>();

            specsData?.forEach((spec) => {
              let matches = false;

              // 'has_spec' matches any row; 'equals'/'contains' compare values
              // (contains is case-insensitive). Unknown operators match nothing.
              switch (filter.operator) {
                case 'has_spec':
                  matches = true;
                  break;
                case 'equals':
                  matches = spec.spec_value === filter.spec_value;
                  break;
                case 'contains':
                  matches = filter.spec_value ?
                    spec.spec_value.toLowerCase().includes(filter.spec_value.toLowerCase()) :
                    false;
                  break;
              }

              if (matches) {
                matchingRideIds.add(spec.ride_id);
              }
            });

            filteredResults = filteredResults.filter((ride) => matchingRideIds.has(ride.id));
          }
        }

        // Apply coaster statistics filters
        // Range bounds are pushed to the stats query; results intersect ride ids.
        if (options.coasterStatFilters && options.coasterStatFilters.length > 0) {
          for (const filter of options.coasterStatFilters) {
            if (!filter.stat_name) continue;

            let statsQuery = supabase
              .from('ride_coaster_statistics')
              .select('ride_id, stat_name, stat_value')
              .eq('stat_name', filter.stat_name);

            if (filter.min_value !== undefined) {
              statsQuery = statsQuery.gte('stat_value', filter.min_value);
            }
            if (filter.max_value !== undefined) {
              statsQuery = statsQuery.lte('stat_value', filter.max_value);
            }

            const { data: statsData, error: statsError } = await statsQuery;

            if (statsError) throw statsError;

            const matchingRideIds = new Set(statsData?.map((stat) => stat.ride_id) || []);
            filteredResults = filteredResults.filter((ride) => matchingRideIds.has(ride.id));
          }
        }

        setResults(filteredResults);
      } catch (err) {
        handleError(err, {
          action: 'Advanced Ride Search',
          metadata: {
            query: options.query,
            category: options.category,
            manufacturer: options.manufacturer
          }
        });
        setError(err instanceof Error ? err.message : 'Search failed');
        setResults([]);
      } finally {
        setLoading(false);
      }
    };

    performSearch();
  }, [
    debouncedQuery,
    options.category,
    options.manufacturer,
    options.speedMin,
    options.speedMax,
    options.heightMin,
    options.heightMax,
    options.limit,
    // Arrays are compared by serialized content so new array identities with
    // equal contents do not retrigger the effect.
    JSON.stringify(options.technicalSpecFilters),
    JSON.stringify(options.coasterStatFilters),
  ]);

  return { results, loading, error };
}
|
||||
264
src-old/hooks/useAuth.tsx
Normal file
264
src-old/hooks/useAuth.tsx
Normal file
@@ -0,0 +1,264 @@
|
||||
import React, { createContext, useContext, useEffect, useState, useRef } from 'react';
|
||||
import type { User, Session } from '@supabase/supabase-js';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import type { Profile } from '@/types/database';
|
||||
import { toast } from '@/hooks/use-toast';
|
||||
import { authLog, authWarn, authError } from '@/lib/authLogger';
|
||||
import type { AALLevel, CheckAalResult } from '@/types/auth';
|
||||
import { getSessionAal, checkAalStepUp as checkAalStepUpService, signOutUser } from '@/lib/authService';
|
||||
import { clearAllAuthFlags } from '@/lib/sessionFlags';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
/** Shape of the authentication state and actions exposed by AuthProvider. */
interface AuthContextType {
  user: User | null; // current Supabase user, null when signed out
  session: Session | null; // current Supabase session
  aal: AALLevel | null; // authenticator assurance level fetched via getSessionAal
  loading: boolean; // true until the first auth event resolves
  pendingEmail: string | null; // new email awaiting confirmation, if a change is in flight
  sessionError: string | null; // last session-fetch error message, if any
  signOut: () => Promise<void>;
  verifySession: () => Promise<boolean>;
  clearPendingEmail: () => void;
  checkAalStepUp: () => Promise<CheckAalResult>;
}

// Undefined until a provider mounts; useAuth throws when consumed outside one.
const AuthContext = createContext<AuthContextType | undefined>(undefined);
|
||||
|
||||
/**
 * Provides authentication state (user, session, AAL) to the component tree.
 *
 * Responsibilities visible in this file:
 * - Subscribes to Supabase auth state changes and mirrors them into React state.
 * - Fetches the AAL level and signs out users whose profile row is banned
 *   (both deferred via setTimeout so the auth callback is never blocked).
 * - Detects a confirmed email change, then updates the Novu subscriber,
 *   writes an audit-log row, and triggers a security notification.
 */
function AuthProviderComponent({ children }: { children: React.ReactNode }) {
  const [user, setUser] = useState<User | null>(null);
  const [session, setSession] = useState<Session | null>(null);
  const [aal, setAal] = useState<AALLevel | null>(null);
  // True until the first SIGNED_IN / INITIAL_SESSION / SIGNED_OUT event lands.
  const [loading, setLoading] = useState(true);
  const [pendingEmail, setPendingEmail] = useState<string | null>(null);
  const [sessionError, setSessionError] = useState<string | null>(null);

  // Refs for lifecycle and cleanup management
  const novuUpdateTimeoutRef = useRef<NodeJS.Timeout | null>(null);
  // Last known email, used to detect when an email change has been confirmed.
  const previousEmailRef = useRef<string | null>(null);
  // NOTE(review): reset on SIGNED_OUT but never read in this view — presumably
  // consumed elsewhere in this file/project; confirm before removing.
  const orphanedPasswordToastShownRef = useRef(false);

  // Verify session is still valid - simplified
  // Returns true when Supabase reports an active session; also back-fills
  // user/session state if an auth event was missed.
  const verifySession = async () => {
    try {
      const { data: { session }, error } = await supabase.auth.getSession();

      if (error) {
        authError('[Auth] Session verification failed:', error);
        setSessionError(error.message);
        return false;
      }

      if (!session) {
        authLog('[Auth] No active session found');
        return false;
      }

      authLog('[Auth] Session verified:', session.user.email);

      // Update state if session was found but not set
      if (!user) {
        setSession(session);
        setUser(session.user);
      }

      return true;
    } catch (error: unknown) {
      authError('[Auth] Session verification error:', error);
      return false;
    }
  };

  useEffect(() => {
    authLog('[Auth] Initializing auth provider');

    // CRITICAL: Set up listener FIRST to catch all events
    const {
      data: { subscription },
    } = supabase.auth.onAuthStateChange((event, session) => {
      authLog('[Auth] State change:', event, 'User:', session?.user?.email || 'none', 'Has session:', !!session);

      // Extract email info early for cleanup
      const currentEmail = session?.user?.email;
      const newEmailPending = session?.user?.new_email;

      // CRITICAL: Always clear/update pending email state BEFORE any early returns
      setPendingEmail(newEmailPending ?? null);

      // Clear any error
      setSessionError(null);

      // Synchronous state updates only
      setSession(session);
      setUser(session?.user ?? null);

      // Handle loading state
      if (event === 'SIGNED_IN' || event === 'INITIAL_SESSION') {
        setLoading(false);
      } else if (event === 'SIGNED_OUT') {
        authLog('[Auth] SIGNED_OUT - clearing state');
        setSession(null);
        setUser(null);
        setAal(null);
        setLoading(false);
        orphanedPasswordToastShownRef.current = false;
        return;
      }

      // Defer async operations to avoid blocking the auth state change callback
      setTimeout(async () => {
        // Get AAL level from Supabase API (ground truth, not cached session data)
        if (session) {
          const currentAal = await getSessionAal(session);
          setAal(currentAal);
          authLog('[Auth] Current AAL:', currentAal);

          // Check if user is banned
          const { data: profile } = await supabase
            .from('profiles')
            .select('banned')
            .eq('user_id', session.user.id)
            .maybeSingle();

          if (profile?.banned) {
            authWarn('[Auth] Banned user detected, signing out');
            await supabase.auth.signOut();
            return;
          }
        } else {
          setAal(null);
        }
      }, 0);

      // Detect confirmed email change: email changed AND no longer pending
      if (
        session?.user &&
        previousEmailRef.current &&
        currentEmail &&
        currentEmail !== previousEmailRef.current &&
        !newEmailPending
      ) {
        // Clear any existing Novu update timeout
        if (novuUpdateTimeoutRef.current) {
          clearTimeout(novuUpdateTimeoutRef.current);
        }

        // Defer Novu update and notifications to avoid blocking auth
        // Capture the old email now — previousEmailRef is overwritten below.
        const oldEmail = previousEmailRef.current;
        novuUpdateTimeoutRef.current = setTimeout(async () => {
          try {
            // Update Novu subscriber with confirmed email
            const { notificationService } = await import('@/lib/notificationService');
            if (notificationService.isEnabled()) {
              await notificationService.updateSubscriber({
                subscriberId: session.user.id,
                email: currentEmail,
              });
            }

            // Log the confirmed email change
            await supabase.from('admin_audit_log').insert({
              admin_user_id: session.user.id,
              target_user_id: session.user.id,
              action: 'email_change_completed',
              details: {
                old_email: oldEmail,
                new_email: currentEmail,
                timestamp: new Date().toISOString(),
              },
            });

            // Send final security notification
            if (notificationService.isEnabled()) {
              await notificationService.trigger({
                workflowId: 'email-changed',
                subscriberId: session.user.id,
                payload: {
                  oldEmail: oldEmail,
                  newEmail: currentEmail,
                  timestamp: new Date().toISOString(),
                },
              });
            }
          } catch (error: unknown) {
            authError('Error updating Novu after email confirmation:', error);
          } finally {
            novuUpdateTimeoutRef.current = null;
          }
        }, 0);
      }

      // Update tracked email
      if (currentEmail) {
        previousEmailRef.current = currentEmail;
      }
    });

    // THEN get initial session (this may trigger INITIAL_SESSION event)
    supabase.auth.getSession().then(({ data: { session }, error }) => {
      if (error) {
        authError('[Auth] Initial session fetch error:', error);
        setSessionError(error.message);
        setLoading(false);
        return;
      }

      // Note: onAuthStateChange will handle the INITIAL_SESSION event
      // This is just a backup in case the event doesn't fire
      authLog('[Auth] getSession completed, session exists:', !!session);
    });

    return () => {
      authLog('[Auth] Cleaning up auth provider');
      subscription.unsubscribe();

      // Clear any pending timeouts
      if (novuUpdateTimeoutRef.current) {
        clearTimeout(novuUpdateTimeoutRef.current);
        novuUpdateTimeoutRef.current = null;
      }
    };
  }, []);

  // Sign out via the shared auth service; surfaces failures to the caller.
  const signOut = async () => {
    authLog('[Auth] Signing out...');
    const result = await signOutUser();
    if (!result.success) {
      authError('Error signing out:', result.error);
      throw new Error(result.error);
    }
  };

  // Dismiss the pending-email indicator without touching the session.
  const clearPendingEmail = () => {
    setPendingEmail(null);
  };

  // Delegate the AAL step-up check to the auth service with the live session.
  const checkAalStepUp = async (): Promise<CheckAalResult> => {
    return checkAalStepUpService(session);
  };

  const value = {
    user,
    session,
    aal,
    loading,
    pendingEmail,
    sessionError,
    signOut,
    verifySession,
    clearPendingEmail,
    checkAalStepUp,
  };

  return <AuthContext.Provider value={value}>{children}</AuthContext.Provider>;
}
|
||||
|
||||
export const AuthProvider = AuthProviderComponent;
|
||||
|
||||
export function useAuth() {
|
||||
const context = useContext(AuthContext);
|
||||
if (context === undefined) {
|
||||
logger.error('AuthContext is undefined - component may be rendering outside AuthProvider', { component: 'useAuth' });
|
||||
throw new Error('useAuth must be used within an AuthProvider');
|
||||
}
|
||||
return context;
|
||||
}
|
||||
10
src-old/hooks/useAuthModal.tsx
Normal file
10
src-old/hooks/useAuthModal.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { useContext } from 'react';
|
||||
import { AuthModalContext } from '@/contexts/AuthModalContext';
|
||||
|
||||
export const useAuthModal = () => {
|
||||
const context = useContext(AuthModalContext);
|
||||
if (!context) {
|
||||
throw new Error('useAuthModal must be used within AuthModalProvider');
|
||||
}
|
||||
return context;
|
||||
};
|
||||
58
src-old/hooks/useAutoSave.ts
Normal file
58
src-old/hooks/useAutoSave.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { useEffect, useRef, useState } from 'react';
|
||||
import { useDebounce } from './useDebounce';
|
||||
|
||||
/** Configuration for useAutoSave. */
export type AutoSaveOptions<T> = {
  data: T; // value to persist; a (debounced) change triggers onSave
  onSave: (data: T) => Promise<void>; // persistence callback; rejections surface via `error`
  debounceMs?: number; // quiet period before saving (default 3000ms)
  enabled?: boolean; // when false, auto-save is suspended (default true)
  isValid?: boolean; // when false (e.g. failing validation), saving is skipped (default true)
};
|
||||
|
||||
export const useAutoSave = <T,>({
|
||||
data,
|
||||
onSave,
|
||||
debounceMs = 3000,
|
||||
enabled = true,
|
||||
isValid = true
|
||||
}: AutoSaveOptions<T>) => {
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
const [lastSaved, setLastSaved] = useState<Date | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const debouncedData = useDebounce(data, debounceMs);
|
||||
const initialRender = useRef(true);
|
||||
|
||||
useEffect(() => {
|
||||
// Skip initial render
|
||||
if (initialRender.current) {
|
||||
initialRender.current = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// Skip if disabled or invalid
|
||||
if (!enabled || !isValid) return;
|
||||
|
||||
const save = async () => {
|
||||
setIsSaving(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
await onSave(debouncedData);
|
||||
setLastSaved(new Date());
|
||||
} catch (err) {
|
||||
setError(err instanceof Error ? err.message : 'Failed to auto-save');
|
||||
} finally {
|
||||
setIsSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
save();
|
||||
}, [debouncedData, enabled, isValid, onSave]);
|
||||
|
||||
return {
|
||||
isSaving,
|
||||
lastSaved,
|
||||
error,
|
||||
resetError: () => setError(null)
|
||||
};
|
||||
};
|
||||
346
src-old/hooks/useAutocompleteData.ts
Normal file
346
src-old/hooks/useAutocompleteData.ts
Normal file
@@ -0,0 +1,346 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { ComboboxOption } from '@/components/ui/combobox';
|
||||
import { toast } from 'sonner';
|
||||
import { handleNonCriticalError } from '@/lib/errorHandler';
|
||||
|
||||
export function useCountries() {
|
||||
const [countries, setCountries] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchCountries() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('locations')
|
||||
.select('country')
|
||||
.not('country', 'is', null);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
const uniqueCountries = Array.from(
|
||||
new Set(data?.map(item => item.country) || [])
|
||||
).sort();
|
||||
|
||||
setCountries(
|
||||
uniqueCountries.map(country => ({
|
||||
label: country,
|
||||
value: country.toLowerCase().replace(/\s+/g, '_')
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, { action: 'Fetch countries' });
|
||||
toast.error('Failed to load countries', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setCountries([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchCountries();
|
||||
}, []);
|
||||
|
||||
return { countries, loading };
|
||||
}
|
||||
|
||||
export function useStatesProvinces(country?: string) {
|
||||
const [statesProvinces, setStatesProvinces] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (!country) {
|
||||
setStatesProvinces([]);
|
||||
return;
|
||||
}
|
||||
|
||||
async function fetchStatesProvinces() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('locations')
|
||||
.select('state_province')
|
||||
.eq('country', country || '')
|
||||
.not('state_province', 'is', null);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
const uniqueStates = Array.from(
|
||||
new Set(data?.map(item => item.state_province).filter((s): s is string => s != null) || [])
|
||||
).sort();
|
||||
|
||||
setStatesProvinces(
|
||||
uniqueStates.map(state => ({
|
||||
label: state,
|
||||
value: state.toLowerCase().replace(/\s+/g, '_')
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Fetch states/provinces',
|
||||
metadata: { country },
|
||||
});
|
||||
toast.error('Failed to load states/provinces', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setStatesProvinces([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchStatesProvinces();
|
||||
}, [country]);
|
||||
|
||||
return { statesProvinces, loading };
|
||||
}
|
||||
|
||||
export function useManufacturers() {
|
||||
const [manufacturers, setManufacturers] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchManufacturers() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('companies')
|
||||
.select('id, name')
|
||||
.eq('company_type', 'manufacturer')
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
setManufacturers(
|
||||
(data || []).map(company => ({
|
||||
label: company.name,
|
||||
value: company.id
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, { action: 'Fetch manufacturers' });
|
||||
toast.error('Failed to load manufacturers', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setManufacturers([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchManufacturers();
|
||||
}, []);
|
||||
|
||||
return { manufacturers, loading };
|
||||
}
|
||||
|
||||
export function useRideModels(manufacturerId?: string) {
|
||||
const [rideModels, setRideModels] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (!manufacturerId) {
|
||||
setRideModels([]);
|
||||
return;
|
||||
}
|
||||
|
||||
async function fetchRideModels() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('ride_models')
|
||||
.select('id, name')
|
||||
.eq('manufacturer_id', manufacturerId || '')
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
setRideModels(
|
||||
(data || []).map(model => ({
|
||||
label: model.name,
|
||||
value: model.id
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Fetch ride models',
|
||||
metadata: { manufacturerId },
|
||||
});
|
||||
toast.error('Failed to load ride models', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setRideModels([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchRideModels();
|
||||
}, [manufacturerId]);
|
||||
|
||||
return { rideModels, loading };
|
||||
}
|
||||
|
||||
export function useCompanyHeadquarters() {
|
||||
const [headquarters, setHeadquarters] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchHeadquarters() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('companies')
|
||||
.select('headquarters_location')
|
||||
.not('headquarters_location', 'is', null);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
const uniqueHeadquarters = Array.from(
|
||||
new Set(data?.map(item => item.headquarters_location).filter((hq): hq is string => hq != null) || [])
|
||||
).sort();
|
||||
|
||||
setHeadquarters(
|
||||
uniqueHeadquarters.map(hq => ({
|
||||
label: hq,
|
||||
value: hq.toLowerCase().replace(/\s+/g, '_')
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, { action: 'Fetch headquarters' });
|
||||
toast.error('Failed to load headquarters', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setHeadquarters([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchHeadquarters();
|
||||
}, []);
|
||||
|
||||
return { headquarters, loading };
|
||||
}
|
||||
|
||||
export function useOperators() {
|
||||
const [operators, setOperators] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchOperators() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('companies')
|
||||
.select('id, name')
|
||||
.eq('company_type', 'operator')
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
setOperators(
|
||||
(data || []).map(company => ({
|
||||
label: company.name,
|
||||
value: company.id
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, { action: 'Fetch operators' });
|
||||
toast.error('Failed to load operators', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setOperators([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchOperators();
|
||||
}, []);
|
||||
|
||||
return { operators, loading };
|
||||
}
|
||||
|
||||
export function usePropertyOwners() {
|
||||
const [propertyOwners, setPropertyOwners] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchPropertyOwners() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('companies')
|
||||
.select('id, name')
|
||||
.eq('company_type', 'property_owner')
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
setPropertyOwners(
|
||||
(data || []).map(company => ({
|
||||
label: company.name,
|
||||
value: company.id
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, { action: 'Fetch property owners' });
|
||||
toast.error('Failed to load property owners', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setPropertyOwners([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchPropertyOwners();
|
||||
}, []);
|
||||
|
||||
return { propertyOwners, loading };
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch all parks for autocomplete
|
||||
* Returns parks as combobox options
|
||||
*/
|
||||
export function useParks() {
|
||||
const [parks, setParks] = useState<ComboboxOption[]>([]);
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchParks() {
|
||||
setLoading(true);
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('parks')
|
||||
.select('id, name, slug')
|
||||
.order('name');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
setParks(
|
||||
(data || []).map(park => ({
|
||||
label: park.name,
|
||||
value: park.id
|
||||
}))
|
||||
);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, { action: 'Fetch parks' });
|
||||
toast.error('Failed to load parks', {
|
||||
description: 'Please refresh the page and try again.',
|
||||
});
|
||||
setParks([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchParks();
|
||||
}, []);
|
||||
|
||||
return { parks, loading };
|
||||
}
|
||||
84
src-old/hooks/useAvatarUpload.ts
Normal file
84
src-old/hooks/useAvatarUpload.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import { useState, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { handleError, handleSuccess } from '@/lib/errorHandler';
|
||||
|
||||
/** Internal state tracked by useAvatarUpload. */
export type AvatarUploadState = {
  url: string; // avatar URL currently shown (may be an optimistic, unconfirmed value)
  imageId: string; // image identifier paired with the URL
  isUploading: boolean; // true while the update_profile RPC is in flight
};
|
||||
|
||||
export const useAvatarUpload = (
|
||||
initialUrl: string = '',
|
||||
initialImageId: string = '',
|
||||
username: string
|
||||
) => {
|
||||
const [state, setState] = useState<AvatarUploadState>({
|
||||
url: initialUrl,
|
||||
imageId: initialImageId,
|
||||
isUploading: false
|
||||
});
|
||||
|
||||
const uploadAvatar = useCallback(async (
|
||||
urls: string[],
|
||||
imageId?: string
|
||||
) => {
|
||||
if (!urls[0]) return { success: false };
|
||||
|
||||
const newUrl = urls[0];
|
||||
const newImageId = imageId || '';
|
||||
|
||||
// Optimistic update
|
||||
setState(prev => ({
|
||||
...prev,
|
||||
url: newUrl,
|
||||
imageId: newImageId,
|
||||
isUploading: true
|
||||
}));
|
||||
|
||||
try {
|
||||
const { error } = await supabase.rpc('update_profile', {
|
||||
p_username: username,
|
||||
p_avatar_url: newUrl,
|
||||
p_avatar_image_id: newImageId
|
||||
});
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
setState(prev => ({ ...prev, isUploading: false }));
|
||||
handleSuccess('Avatar updated', 'Your avatar has been successfully updated.');
|
||||
|
||||
return { success: true };
|
||||
} catch (error: unknown) {
|
||||
// Rollback on error
|
||||
setState({
|
||||
url: initialUrl,
|
||||
imageId: initialImageId,
|
||||
isUploading: false
|
||||
});
|
||||
|
||||
handleError(error, {
|
||||
action: 'Avatar upload failed',
|
||||
metadata: { username }
|
||||
});
|
||||
|
||||
return { success: false, error };
|
||||
}
|
||||
}, [username, initialUrl, initialImageId]);
|
||||
|
||||
const resetAvatar = useCallback(() => {
|
||||
setState({
|
||||
url: initialUrl,
|
||||
imageId: initialImageId,
|
||||
isUploading: false
|
||||
});
|
||||
}, [initialUrl, initialImageId]);
|
||||
|
||||
return {
|
||||
avatarUrl: state.url,
|
||||
avatarImageId: state.imageId,
|
||||
isUploading: state.isUploading,
|
||||
uploadAvatar,
|
||||
resetAvatar
|
||||
};
|
||||
};
|
||||
133
src-old/hooks/useBanCheck.ts
Normal file
133
src-old/hooks/useBanCheck.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { useNavigate } from 'react-router-dom';
|
||||
import { toast } from '@/hooks/use-toast';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
export function useBanCheck() {
|
||||
const { user } = useAuth();
|
||||
const navigate = useNavigate();
|
||||
const [isBanned, setIsBanned] = useState(false);
|
||||
const [banReason, setBanReason] = useState<string | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
useEffect(() => {
|
||||
if (!user) {
|
||||
setIsBanned(false);
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const checkBan = async () => {
|
||||
try {
|
||||
const { data: profile } = await supabase
|
||||
.from('profiles')
|
||||
.select('banned, ban_reason, ban_expires_at')
|
||||
.eq('user_id', user.id)
|
||||
.single();
|
||||
|
||||
if (profile?.banned) {
|
||||
setIsBanned(true);
|
||||
setBanReason(profile.ban_reason || null);
|
||||
|
||||
const reason = profile.ban_reason
|
||||
? `Reason: ${profile.ban_reason}`
|
||||
: 'Contact support for assistance.';
|
||||
|
||||
// Add expiration info
|
||||
let expirationText = '';
|
||||
if (profile.ban_expires_at) {
|
||||
const expiresAt = new Date(profile.ban_expires_at);
|
||||
const now = new Date();
|
||||
const daysLeft = Math.ceil((expiresAt.getTime() - now.getTime()) / (1000 * 60 * 60 * 24));
|
||||
expirationText = ` This ban will expire in ${daysLeft} day${daysLeft !== 1 ? 's' : ''}.`;
|
||||
} else {
|
||||
expirationText = ' This is a permanent ban.';
|
||||
}
|
||||
|
||||
toast({
|
||||
title: 'Account Suspended',
|
||||
description: `Your account has been suspended. ${reason}${expirationText}`,
|
||||
variant: 'destructive',
|
||||
duration: Infinity // Don't auto-dismiss
|
||||
});
|
||||
// Sign out banned user
|
||||
await supabase.auth.signOut();
|
||||
navigate('/');
|
||||
}
|
||||
} catch (error) {
|
||||
// Silent - ban check failure is non-critical, user proceeds normally
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
checkBan();
|
||||
|
||||
// Subscribe to profile changes (real-time ban/unban detection)
|
||||
const channel = supabase
|
||||
.channel('ban-check')
|
||||
.on(
|
||||
'postgres_changes',
|
||||
{
|
||||
event: 'UPDATE',
|
||||
schema: 'public',
|
||||
table: 'profiles',
|
||||
filter: `user_id=eq.${user.id}`
|
||||
},
|
||||
(payload) => {
|
||||
const newProfile = payload.new as { banned: boolean; ban_reason: string | null; ban_expires_at: string | null };
|
||||
|
||||
// Handle BAN event
|
||||
if (newProfile.banned && !isBanned) {
|
||||
setIsBanned(true);
|
||||
setBanReason(newProfile.ban_reason || null);
|
||||
|
||||
const reason = newProfile.ban_reason
|
||||
? `Reason: ${newProfile.ban_reason}`
|
||||
: 'Contact support for assistance.';
|
||||
|
||||
// Add expiration info
|
||||
let expirationText = '';
|
||||
if (newProfile.ban_expires_at) {
|
||||
const expiresAt = new Date(newProfile.ban_expires_at);
|
||||
const now = new Date();
|
||||
const daysLeft = Math.ceil((expiresAt.getTime() - now.getTime()) / (1000 * 60 * 60 * 24));
|
||||
expirationText = ` This ban will expire in ${daysLeft} day${daysLeft !== 1 ? 's' : ''}.`;
|
||||
} else {
|
||||
expirationText = ' This is a permanent ban.';
|
||||
}
|
||||
|
||||
toast({
|
||||
title: 'Account Suspended',
|
||||
description: `Your account has been suspended. ${reason}${expirationText}`,
|
||||
variant: 'destructive',
|
||||
duration: Infinity
|
||||
});
|
||||
supabase.auth.signOut();
|
||||
navigate('/');
|
||||
}
|
||||
|
||||
// Handle UNBAN event
|
||||
if (!newProfile.banned && isBanned) {
|
||||
setIsBanned(false);
|
||||
setBanReason(null);
|
||||
toast({
|
||||
title: 'Account Restored',
|
||||
description: 'Your account has been unbanned. You can now use the application normally.',
|
||||
variant: 'default',
|
||||
duration: 8000
|
||||
});
|
||||
}
|
||||
}
|
||||
)
|
||||
.subscribe();
|
||||
|
||||
return () => {
|
||||
supabase.removeChannel(channel);
|
||||
};
|
||||
}, [user, navigate]);
|
||||
|
||||
return { isBanned, loading, banReason };
|
||||
}
|
||||
21
src-old/hooks/useCaptchaBypass.ts
Normal file
21
src-old/hooks/useCaptchaBypass.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { useEffect } from 'react';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
export function useCaptchaBypass() {
|
||||
// Single layer: Check if environment allows bypass
|
||||
const bypassEnabled = import.meta.env.VITE_ALLOW_CAPTCHA_BYPASS === 'true';
|
||||
|
||||
useEffect(() => {
|
||||
if (bypassEnabled && typeof window !== 'undefined') {
|
||||
logger.warn(
|
||||
'⚠️ CAPTCHA BYPASS IS ACTIVE - ' +
|
||||
'This should ONLY be enabled in development/preview environments.'
|
||||
);
|
||||
}
|
||||
}, [bypassEnabled]);
|
||||
|
||||
return {
|
||||
bypassEnabled,
|
||||
requireCaptcha: !bypassEnabled,
|
||||
};
|
||||
}
|
||||
44
src-old/hooks/useCoasterStats.ts
Normal file
44
src-old/hooks/useCoasterStats.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
|
||||
/**
 * One named statistic row for a ride, normalized from the
 * `ride_coaster_stats` table by useCoasterStats.
 */
export interface CoasterStat {
  // Row primary key.
  id: string;
  // The ride this statistic belongs to.
  ride_id: string;
  // Statistic label.
  stat_name: string;
  // Numeric magnitude of the statistic.
  stat_value: number;
  // Optional unit string; null when absent.
  unit?: string | null;
  // Optional grouping category; null when absent.
  category?: string | null;
  // Optional free-text description; null when absent.
  description?: string | null;
  // Sort key — rows are fetched ordered by this column.
  display_order: number;
  // Row creation timestamp (ISO string).
  created_at: string;
}
||||
|
||||
export function useCoasterStats(rideId: string | undefined) {
|
||||
return useQuery({
|
||||
queryKey: ['coaster-stats', rideId],
|
||||
queryFn: async () => {
|
||||
if (!rideId) return [];
|
||||
|
||||
const { data, error} = await supabase
|
||||
.from('ride_coaster_stats')
|
||||
.select('*')
|
||||
.eq('ride_id', rideId)
|
||||
.order('display_order');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
return (data || []).map((stat) => ({
|
||||
id: stat.id,
|
||||
ride_id: stat.ride_id,
|
||||
stat_name: stat.stat_name,
|
||||
stat_value: stat.stat_value,
|
||||
unit: stat.unit || null,
|
||||
category: stat.category || null,
|
||||
description: stat.description || null,
|
||||
display_order: stat.display_order,
|
||||
created_at: stat.created_at,
|
||||
})) as CoasterStat[];
|
||||
},
|
||||
enabled: !!rideId
|
||||
});
|
||||
}
|
||||
17
src-old/hooks/useDebounce.ts
Normal file
17
src-old/hooks/useDebounce.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
|
||||
export function useDebounce<T>(value: T, delay: number): T {
|
||||
const [debouncedValue, setDebouncedValue] = useState<T>(value);
|
||||
|
||||
useEffect(() => {
|
||||
const handler = setTimeout(() => {
|
||||
setDebouncedValue(value);
|
||||
}, delay);
|
||||
|
||||
return () => {
|
||||
clearTimeout(handler);
|
||||
};
|
||||
}, [value, delay]);
|
||||
|
||||
return debouncedValue;
|
||||
}
|
||||
22
src-old/hooks/useDebouncedValue.ts
Normal file
22
src-old/hooks/useDebouncedValue.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
|
||||
/**
|
||||
* Hook to debounce a value
|
||||
* @param value - The value to debounce
|
||||
* @param delay - Delay in milliseconds
|
||||
*/
|
||||
export function useDebouncedValue<T>(value: T, delay: number): T {
|
||||
const [debouncedValue, setDebouncedValue] = useState<T>(value);
|
||||
|
||||
useEffect(() => {
|
||||
const handler = setTimeout(() => {
|
||||
setDebouncedValue(value);
|
||||
}, delay);
|
||||
|
||||
return () => {
|
||||
clearTimeout(handler);
|
||||
};
|
||||
}, [value, delay]);
|
||||
|
||||
return debouncedValue;
|
||||
}
|
||||
12
src-old/hooks/useDocumentTitle.ts
Normal file
12
src-old/hooks/useDocumentTitle.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { useEffect } from 'react';
|
||||
|
||||
export function useDocumentTitle(title: string, suffix: string = 'ThrillWiki') {
|
||||
useEffect(() => {
|
||||
const fullTitle = title ? `${title} | ${suffix}` : suffix;
|
||||
document.title = fullTitle;
|
||||
|
||||
return () => {
|
||||
document.title = suffix;
|
||||
};
|
||||
}, [title, suffix]);
|
||||
}
|
||||
17
src-old/hooks/useEditHistory.ts
Normal file
17
src-old/hooks/useEditHistory.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { fetchEditHistory } from '@/lib/submissionItemsService';
|
||||
|
||||
/**
|
||||
* Phase 4: Hook to fetch edit history for a submission item
|
||||
*/
|
||||
export function useEditHistory(itemId: string | null) {
|
||||
return useQuery({
|
||||
queryKey: ['item-edit-history', itemId],
|
||||
queryFn: () => {
|
||||
if (!itemId) return [];
|
||||
return fetchEditHistory(itemId);
|
||||
},
|
||||
enabled: !!itemId,
|
||||
staleTime: 30000, // 30 seconds
|
||||
});
|
||||
}
|
||||
291
src-old/hooks/useEntityVersions.ts
Normal file
291
src-old/hooks/useEntityVersions.ts
Normal file
@@ -0,0 +1,291 @@
|
||||
import { useState, useEffect, useRef, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { toast } from 'sonner';
|
||||
import { getErrorMessage, handleNonCriticalError, handleError } from '@/lib/errorHandler';
|
||||
import type { EntityType, EntityVersion } from '@/types/versioning';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
/**
 * One field-level change between two entity versions.
 * NOTE(review): old_value/new_value are `any` because field types vary
 * per entity and field — narrow before use.
 */
interface FieldChange {
  id: string;
  // Name of the entity field that changed.
  field_name: string;
  old_value: any;
  new_value: any;
  // How the field changed between the two versions.
  change_type: 'added' | 'modified' | 'removed';
  created_at: string;
}
|
||||
|
||||
/**
|
||||
* Hook to manage entity versions using relational version tables
|
||||
* NO JSONB - Pure relational structure for type safety and queryability
|
||||
*/
|
||||
export function useEntityVersions(entityType: EntityType, entityId: string) {
|
||||
const [versions, setVersions] = useState<EntityVersion[]>([]);
|
||||
const [currentVersion, setCurrentVersion] = useState<EntityVersion | null>(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [fieldHistory, setFieldHistory] = useState<FieldChange[]>([]);
|
||||
|
||||
const isMountedRef = useRef(true);
|
||||
const channelRef = useRef<ReturnType<typeof supabase.channel> | null>(null);
|
||||
const requestCounterRef = useRef(0);
|
||||
const fieldHistoryRequestCounterRef = useRef(0);
|
||||
|
||||
const fetchVersions = useCallback(async () => {
|
||||
if (!isMountedRef.current) return;
|
||||
|
||||
const currentRequestId = ++requestCounterRef.current;
|
||||
|
||||
try {
|
||||
if (isMountedRef.current && currentRequestId === requestCounterRef.current) {
|
||||
setLoading(true);
|
||||
}
|
||||
|
||||
// Build table and column names
|
||||
const versionTable = `${entityType}_versions`;
|
||||
const entityIdCol = `${entityType}_id`;
|
||||
|
||||
let data, error;
|
||||
|
||||
// Use explicit conditional branches for type safety
|
||||
if (entityType === 'park') {
|
||||
const result = await supabase
|
||||
.from('park_versions')
|
||||
.select(`*, profiles:created_by(username, display_name, avatar_url)`)
|
||||
.eq('park_id', entityId)
|
||||
.order('version_number', { ascending: false });
|
||||
data = result.data;
|
||||
error = result.error;
|
||||
} else if (entityType === 'ride') {
|
||||
const result = await supabase
|
||||
.from('ride_versions')
|
||||
.select(`*, profiles:created_by(username, display_name, avatar_url)`)
|
||||
.eq('ride_id', entityId)
|
||||
.order('version_number', { ascending: false });
|
||||
data = result.data;
|
||||
error = result.error;
|
||||
} else if (entityType === 'company') {
|
||||
const result = await supabase
|
||||
.from('company_versions')
|
||||
.select(`*, profiles:created_by(username, display_name, avatar_url)`)
|
||||
.eq('company_id', entityId)
|
||||
.order('version_number', { ascending: false });
|
||||
data = result.data;
|
||||
error = result.error;
|
||||
} else {
|
||||
const result = await supabase
|
||||
.from('ride_model_versions')
|
||||
.select(`*, profiles:created_by(username, display_name, avatar_url)`)
|
||||
.eq('ride_model_id', entityId)
|
||||
.order('version_number', { ascending: false });
|
||||
data = result.data;
|
||||
error = result.error;
|
||||
}
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
if (!isMountedRef.current || currentRequestId !== requestCounterRef.current) return;
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
if (isMountedRef.current && currentRequestId === requestCounterRef.current) {
|
||||
setVersions([]);
|
||||
setCurrentVersion(null);
|
||||
setLoading(false);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
interface VersionWithProfile {
|
||||
profiles?: {
|
||||
username: string;
|
||||
display_name: string;
|
||||
avatar_url: string | null;
|
||||
};
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
const versionsWithProfiles = (data || []).map((v: VersionWithProfile) => ({
|
||||
...v,
|
||||
profiles: v.profiles || {
|
||||
username: 'Unknown',
|
||||
display_name: 'Unknown',
|
||||
avatar_url: null
|
||||
}
|
||||
})) as EntityVersion[];
|
||||
|
||||
if (isMountedRef.current && currentRequestId === requestCounterRef.current) {
|
||||
setVersions(versionsWithProfiles);
|
||||
setCurrentVersion(versionsWithProfiles.find(v => v.is_current) || null);
|
||||
setLoading(false);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Fetch entity versions',
|
||||
metadata: { entityType, entityId },
|
||||
});
|
||||
|
||||
if (isMountedRef.current && currentRequestId === requestCounterRef.current) {
|
||||
toast.error(getErrorMessage(error));
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
}, [entityType, entityId]);
|
||||
|
||||
/**
|
||||
* Field history has been removed - use version comparison instead
|
||||
* This function is kept for backward compatibility but does nothing
|
||||
* @deprecated Use compareVersions() to see field-level changes
|
||||
*/
|
||||
const fetchFieldHistory = async (versionId: string) => {
|
||||
logger.warn('fetchFieldHistory is deprecated. Use compareVersions() instead for field-level changes.');
|
||||
setFieldHistory([]);
|
||||
};
|
||||
|
||||
const compareVersions = async (fromVersionId: string, toVersionId: string) => {
|
||||
try {
|
||||
const { data, error } = await supabase.rpc('get_version_diff', {
|
||||
p_entity_type: entityType,
|
||||
p_from_version_id: fromVersionId,
|
||||
p_to_version_id: toVersionId
|
||||
});
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
return data;
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Compare entity versions',
|
||||
metadata: { entityType, fromVersionId, toVersionId },
|
||||
});
|
||||
if (isMountedRef.current) {
|
||||
toast.error(getErrorMessage(error));
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
const rollbackToVersion = async (targetVersionId: string, reason: string) => {
|
||||
try {
|
||||
if (!isMountedRef.current) return null;
|
||||
|
||||
const { data: userData } = await supabase.auth.getUser();
|
||||
if (!userData.user) throw new Error('Not authenticated');
|
||||
|
||||
const { data, error } = await supabase.rpc('rollback_to_version', {
|
||||
p_entity_type: entityType,
|
||||
p_entity_id: entityId,
|
||||
p_target_version_id: targetVersionId,
|
||||
p_changed_by: userData.user.id,
|
||||
p_reason: reason
|
||||
});
|
||||
|
||||
if (error) {
|
||||
// Check for authorization error (insufficient_privilege)
|
||||
if (error.code === '42501') {
|
||||
throw new Error('Only moderators can restore previous versions');
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (isMountedRef.current) {
|
||||
toast.success('Successfully restored to previous version');
|
||||
await fetchVersions();
|
||||
}
|
||||
return data;
|
||||
} catch (error: unknown) {
|
||||
handleError(error, {
|
||||
action: 'Rollback entity version',
|
||||
metadata: { entityType, entityId, targetVersionId },
|
||||
});
|
||||
if (isMountedRef.current) {
|
||||
toast.error('Failed to restore version', {
|
||||
description: getErrorMessage(error)
|
||||
});
|
||||
}
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (entityType && entityId) {
|
||||
fetchVersions();
|
||||
}
|
||||
}, [entityType, entityId, fetchVersions]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!entityType || !entityId) return;
|
||||
|
||||
if (channelRef.current) {
|
||||
try {
|
||||
supabase.removeChannel(channelRef.current);
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Cleanup realtime subscription',
|
||||
metadata: {
|
||||
entityType,
|
||||
entityId,
|
||||
context: 'unmount_cleanup'
|
||||
}
|
||||
});
|
||||
} finally {
|
||||
channelRef.current = null;
|
||||
}
|
||||
}
|
||||
|
||||
const versionTable = `${entityType}_versions`;
|
||||
const entityIdCol = `${entityType}_id`;
|
||||
|
||||
const channel = supabase
|
||||
.channel(`${versionTable}_changes`)
|
||||
.on(
|
||||
'postgres_changes',
|
||||
{
|
||||
event: '*',
|
||||
schema: 'public',
|
||||
table: versionTable,
|
||||
filter: `${entityIdCol}=eq.${entityId}`
|
||||
},
|
||||
() => {
|
||||
if (isMountedRef.current) {
|
||||
fetchVersions();
|
||||
}
|
||||
}
|
||||
)
|
||||
.subscribe();
|
||||
|
||||
channelRef.current = channel;
|
||||
|
||||
return () => {
|
||||
if (channelRef.current) {
|
||||
supabase.removeChannel(channelRef.current).catch((error) => {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Cleanup realtime subscription',
|
||||
metadata: {
|
||||
entityType,
|
||||
entityId,
|
||||
context: 'unmount_cleanup'
|
||||
}
|
||||
});
|
||||
});
|
||||
channelRef.current = null;
|
||||
}
|
||||
};
|
||||
}, [entityType, entityId, fetchVersions]);
|
||||
|
||||
useEffect(() => {
|
||||
isMountedRef.current = true;
|
||||
|
||||
return () => {
|
||||
isMountedRef.current = false;
|
||||
};
|
||||
}, []);
|
||||
|
||||
return {
|
||||
versions,
|
||||
currentVersion,
|
||||
loading,
|
||||
fieldHistory,
|
||||
fetchVersions,
|
||||
fetchFieldHistory,
|
||||
compareVersions,
|
||||
rollbackToVersion
|
||||
};
|
||||
}
|
||||
48
src-old/hooks/useFilterPanelState.ts
Normal file
48
src-old/hooks/useFilterPanelState.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
const STORAGE_KEY = 'queue-filter-panel-collapsed';
|
||||
|
||||
interface UseFilterPanelStateReturn {
|
||||
isCollapsed: boolean;
|
||||
toggle: () => void;
|
||||
setCollapsed: (value: boolean) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to manage filter panel collapsed/expanded state
|
||||
* Syncs with localStorage for persistence across sessions
|
||||
*/
|
||||
export function useFilterPanelState(): UseFilterPanelStateReturn {
|
||||
const [isCollapsed, setIsCollapsed] = useState<boolean>(() => {
|
||||
// Initialize from localStorage on mount
|
||||
try {
|
||||
const stored = localStorage.getItem(STORAGE_KEY);
|
||||
// Default to collapsed on mobile (width < 768px)
|
||||
const isMobile = window.innerWidth < 768;
|
||||
return stored ? JSON.parse(stored) : isMobile;
|
||||
} catch (error) {
|
||||
logger.warn('Error reading filter panel state from localStorage', { error });
|
||||
return window.innerWidth < 768;
|
||||
}
|
||||
});
|
||||
|
||||
// Sync to localStorage when state changes
|
||||
useEffect(() => {
|
||||
try {
|
||||
localStorage.setItem(STORAGE_KEY, JSON.stringify(isCollapsed));
|
||||
} catch (error) {
|
||||
logger.warn('Error saving filter panel state to localStorage', { error });
|
||||
}
|
||||
}, [isCollapsed]);
|
||||
|
||||
const toggle = () => setIsCollapsed(prev => !prev);
|
||||
|
||||
const setCollapsed = (value: boolean) => setIsCollapsed(value);
|
||||
|
||||
return {
|
||||
isCollapsed,
|
||||
toggle,
|
||||
setCollapsed,
|
||||
};
|
||||
}
|
||||
65
src-old/hooks/useKeyboardShortcuts.ts
Normal file
65
src-old/hooks/useKeyboardShortcuts.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { useEffect, useCallback } from 'react';
|
||||
|
||||
/** Declarative description of a single keyboard shortcut. */
interface KeyboardShortcut {
  // Compared case-insensitively against KeyboardEvent.key.
  key: string;
  // Require Cmd (Mac) or Ctrl (Windows/Linux) to be held.
  ctrlOrCmd?: boolean;
  // Require Shift to be held.
  shift?: boolean;
  // Require Alt to be held.
  alt?: boolean;
  // Invoked when the shortcut matches; default browser action is prevented.
  handler: () => void;
  // Human-readable description, surfaced alongside displayKey in the hook's return.
  description: string;
}

/** Options accepted by useKeyboardShortcuts. */
interface UseKeyboardShortcutsOptions {
  shortcuts: KeyboardShortcut[];
  // When false, no listener is registered. Defaults to true.
  enabled?: boolean;
}
|
||||
|
||||
/**
|
||||
* Hook for registering keyboard shortcuts
|
||||
* Automatically handles Cmd (Mac) vs Ctrl (Windows/Linux)
|
||||
*/
|
||||
export function useKeyboardShortcuts({ shortcuts, enabled = true }: UseKeyboardShortcutsOptions) {
|
||||
const handleKeyDown = useCallback(
|
||||
(event: KeyboardEvent) => {
|
||||
if (!enabled) return;
|
||||
|
||||
// Ignore shortcuts when typing in input fields
|
||||
const target = event.target as HTMLElement;
|
||||
if (
|
||||
target.tagName === 'INPUT' ||
|
||||
target.tagName === 'TEXTAREA' ||
|
||||
target.isContentEditable
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const shortcut of shortcuts) {
|
||||
const matchesKey = event.key.toLowerCase() === shortcut.key.toLowerCase();
|
||||
const matchesCtrl = !shortcut.ctrlOrCmd || (event.ctrlKey || event.metaKey);
|
||||
const matchesShift = !shortcut.shift || event.shiftKey;
|
||||
const matchesAlt = !shortcut.alt || event.altKey;
|
||||
|
||||
if (matchesKey && matchesCtrl && matchesShift && matchesAlt) {
|
||||
event.preventDefault();
|
||||
shortcut.handler();
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
[shortcuts, enabled]
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (!enabled) return;
|
||||
|
||||
window.addEventListener('keydown', handleKeyDown);
|
||||
return () => window.removeEventListener('keydown', handleKeyDown);
|
||||
}, [handleKeyDown, enabled]);
|
||||
|
||||
return {
|
||||
shortcuts: shortcuts.map(s => ({
|
||||
...s,
|
||||
displayKey: `${s.ctrlOrCmd ? '⌘/Ctrl + ' : ''}${s.shift ? 'Shift + ' : ''}${s.alt ? 'Alt + ' : ''}${s.key.toUpperCase()}`,
|
||||
})),
|
||||
};
|
||||
}
|
||||
42
src-old/hooks/useLocationAutoDetect.ts
Normal file
42
src-old/hooks/useLocationAutoDetect.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import { useUnitPreferences } from '@/hooks/useUnitPreferences';
|
||||
import { handleNonCriticalError } from '@/lib/errorHandler';
|
||||
import { logger } from '@/lib/logger';
|
||||
import * as storage from '@/lib/localStorage';
|
||||
|
||||
export function useLocationAutoDetect() {
|
||||
const { user } = useAuth();
|
||||
const { preferences, autoDetectPreferences, loading } = useUnitPreferences();
|
||||
|
||||
useEffect(() => {
|
||||
// Only run auto-detection after preferences have loaded
|
||||
if (loading) return;
|
||||
|
||||
// Check if localStorage is available
|
||||
if (!storage.isLocalStorageAvailable()) {
|
||||
logger.warn('localStorage is not available, skipping location auto-detection');
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if we've already attempted detection
|
||||
const hasAttemptedDetection = storage.getItem('location_detection_attempted');
|
||||
|
||||
// Auto-detect if we haven't attempted it yet and auto_detect is enabled
|
||||
if (preferences.auto_detect && !hasAttemptedDetection) {
|
||||
autoDetectPreferences().then(() => {
|
||||
storage.setItem('location_detection_attempted', 'true');
|
||||
}).catch((error) => {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Auto-detect user location',
|
||||
userId: user?.id,
|
||||
metadata: {
|
||||
autoDetectEnabled: preferences.auto_detect,
|
||||
context: 'initial_load'
|
||||
}
|
||||
});
|
||||
storage.setItem('location_detection_attempted', 'true');
|
||||
});
|
||||
}
|
||||
}, [user, loading, preferences.auto_detect]);
|
||||
}
|
||||
733
src-old/hooks/useModerationQueue.ts
Normal file
733
src-old/hooks/useModerationQueue.ts
Normal file
@@ -0,0 +1,733 @@
|
||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { useAuth } from './useAuth';
|
||||
import { useToast } from './use-toast';
|
||||
import { getErrorMessage, handleNonCriticalError, handleError } from '@/lib/errorHandler';
|
||||
import { getSubmissionTypeLabel } from '@/lib/moderation/entities';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
/** Row shape for a submission waiting in the moderation queue. */
interface QueuedSubmission {
  submission_id: string;
  submission_type: string;
  waiting_time: string; // PostgreSQL interval format
}

/** Client-side record of the moderator's currently held review lock. */
interface LockState {
  submissionId: string;
  // When the lock lapses (mirrors the server-side locked_until).
  expiresAt: Date;
  // Optional client-side timer used to auto-release the lock.
  autoReleaseTimer?: NodeJS.Timeout;
}

/** Aggregate queue metrics shown to moderators. */
interface QueueStats {
  // Total submissions awaiting review across all types.
  pendingCount: number;
  // Submissions currently locked by this moderator.
  assignedToMe: number;
  // Average queue wait in hours across SLA metric rows.
  avgWaitHours: number;
}

/** Optional callbacks accepted by useModerationQueue. */
interface UseModerationQueueConfig {
  // Invoked when the current lock changes (e.g. on expiry).
  onLockStateChange?: () => void;
}
|
||||
|
||||
export const useModerationQueue = (config?: UseModerationQueueConfig) => {
|
||||
const { onLockStateChange } = config || {};
|
||||
const [currentLock, setCurrentLock] = useState<LockState | null>(null);
|
||||
const [queueStats, setQueueStats] = useState<QueueStats | null>(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const lockTimerRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const { user } = useAuth();
|
||||
const { toast } = useToast();
|
||||
|
||||
// Auto-release expired locks periodically
|
||||
useEffect(() => {
|
||||
if (!user) return;
|
||||
|
||||
// Call release_expired_locks every 2 minutes
|
||||
const releaseInterval = setInterval(async () => {
|
||||
try {
|
||||
await supabase.rpc('release_expired_locks');
|
||||
} catch (error: unknown) {
|
||||
// Log expected periodic failure for debugging without user toast
|
||||
logger.debug('Periodic lock release failed', {
|
||||
operation: 'release_expired_locks',
|
||||
error: getErrorMessage(error)
|
||||
});
|
||||
}
|
||||
}, 120000); // 2 minutes
|
||||
|
||||
return () => clearInterval(releaseInterval);
|
||||
}, [user]);
|
||||
|
||||
// Fetch queue statistics
|
||||
const fetchStats = useCallback(async () => {
|
||||
if (!user) return;
|
||||
|
||||
try {
|
||||
const { data: slaData } = await supabase
|
||||
.from('moderation_sla_metrics')
|
||||
.select('pending_count, avg_wait_hours');
|
||||
|
||||
const { count: assignedCount } = await supabase
|
||||
.from('content_submissions')
|
||||
.select('id', { count: 'exact', head: true })
|
||||
.eq('assigned_to', user.id)
|
||||
.gt('locked_until', new Date().toISOString());
|
||||
|
||||
if (slaData) {
|
||||
const totals = slaData.reduce(
|
||||
(acc, row) => ({
|
||||
pendingCount: acc.pendingCount + (row.pending_count || 0),
|
||||
avgWaitHours: acc.avgWaitHours + (row.avg_wait_hours || 0),
|
||||
}),
|
||||
{ pendingCount: 0, avgWaitHours: 0 }
|
||||
);
|
||||
|
||||
setQueueStats({
|
||||
pendingCount: totals.pendingCount,
|
||||
assignedToMe: assignedCount || 0,
|
||||
avgWaitHours: slaData.length > 0 ? totals.avgWaitHours / slaData.length : 0,
|
||||
});
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
// Log stats fetch failure for debugging without user toast
|
||||
logger.debug('Queue stats fetch failed', {
|
||||
operation: 'fetchStats',
|
||||
error: getErrorMessage(error)
|
||||
});
|
||||
}
|
||||
}, [user]);
|
||||
|
||||
// Start countdown timer for lock expiry with improved memory leak prevention
|
||||
const startLockTimer = useCallback((expiresAt: Date) => {
|
||||
// Track if component is still mounted
|
||||
let isMounted = true;
|
||||
|
||||
// Clear any existing timer first to prevent leaks
|
||||
if (lockTimerRef.current) {
|
||||
clearInterval(lockTimerRef.current);
|
||||
lockTimerRef.current = null;
|
||||
}
|
||||
|
||||
lockTimerRef.current = setInterval(() => {
|
||||
// Prevent timer execution if component unmounted
|
||||
if (!isMounted) {
|
||||
if (lockTimerRef.current) {
|
||||
clearInterval(lockTimerRef.current);
|
||||
lockTimerRef.current = null;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
const timeLeft = expiresAt.getTime() - now.getTime();
|
||||
|
||||
if (timeLeft <= 0) {
|
||||
// Clear timer before showing toast to prevent double-firing
|
||||
if (lockTimerRef.current) {
|
||||
clearInterval(lockTimerRef.current);
|
||||
lockTimerRef.current = null;
|
||||
}
|
||||
|
||||
setCurrentLock(null);
|
||||
|
||||
toast({
|
||||
title: 'Lock Expired',
|
||||
description: 'Your review lock has expired. Claim another submission to continue.',
|
||||
variant: 'destructive',
|
||||
});
|
||||
|
||||
if (onLockStateChange) {
|
||||
onLockStateChange();
|
||||
}
|
||||
}
|
||||
}, 1000);
|
||||
|
||||
// Return cleanup function
|
||||
return () => {
|
||||
isMounted = false;
|
||||
if (lockTimerRef.current) {
|
||||
clearInterval(lockTimerRef.current);
|
||||
lockTimerRef.current = null;
|
||||
}
|
||||
};
|
||||
}, [toast, onLockStateChange]);
|
||||
|
||||
// Clean up timer on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
// Comprehensive cleanup on unmount
|
||||
if (lockTimerRef.current) {
|
||||
clearInterval(lockTimerRef.current);
|
||||
lockTimerRef.current = null;
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
  // Restore active lock from database on mount.
  // Rehydrates local lock state (and the countdown timer) from the newest
  // unexpired lock assigned to the current moderator, so a page reload does
  // not lose an in-progress claim. Locks with under 60s remaining are
  // auto-released instead of restored.
  const restoreActiveLock = useCallback(async () => {
    if (!user?.id) return;

    try {
      // Query for any active lock assigned to current user
      const { data, error } = await supabase
        .from('content_submissions')
        .select('id, locked_until')
        .eq('assigned_to', user.id)
        .gt('locked_until', new Date().toISOString())
        .in('status', ['pending', 'partially_approved'])
        .order('locked_until', { ascending: false })
        .limit(1)
        .maybeSingle();

      if (error) {
        throw error;
      }

      if (data) {
        const expiresAt = new Date(data.locked_until || '');

        // Only restore if lock hasn't expired (race condition check:
        // it may have lapsed between the query above and now)
        if (data.locked_until && expiresAt > new Date()) {
          const timeRemaining = expiresAt.getTime() - new Date().getTime();
          const minTimeMs = 60 * 1000; // 60 seconds minimum

          if (timeRemaining < minTimeMs) {
            // Lock expires too soon - auto-release it rather than hand the
            // moderator a lock that will evaporate mid-review
            logger.info('Lock expired or expiring soon, auto-releasing', {
              submissionId: data.id,
              timeRemainingSeconds: Math.floor(timeRemaining / 1000),
            });

            // Release the stale lock
            await supabase.rpc('release_submission_lock', {
              submission_id: data.id,
              moderator_id: user.id,
            });

            return; // Don't restore
          }

          // Lock has sufficient time - restore it
          setCurrentLock({
            submissionId: data.id,
            expiresAt,
          });

          // Start countdown timer for restored lock
          startLockTimer(expiresAt);

          logger.info('Lock state restored from database', {
            submissionId: data.id,
            expiresAt: expiresAt.toISOString(),
            timeRemainingSeconds: Math.floor(timeRemaining / 1000),
          });
        }
      }
    } catch (error: unknown) {
      // Log but don't show user toast (they haven't taken any action yet)
      logger.debug('Failed to restore lock state', {
        error: getErrorMessage(error),
        userId: user.id,
      });
    }
  }, [user, startLockTimer]);
|
||||
|
||||
  // Initialize lock state from database on mount (and whenever the signed-in
  // user changes).
  useEffect(() => {
    if (!user) return;

    restoreActiveLock();
    // restoreActiveLock is deliberately excluded from deps: this effect should
    // re-run only when the user changes, not on every callback identity change.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [user]);
|
||||
|
||||
  // Sync lock state across tabs when user returns to the page.
  // If this tab has no local lock when it regains visibility, re-query the
  // database in case a lock was claimed (or restored) elsewhere.
  useEffect(() => {
    if (!user) return;

    const handleVisibilityChange = () => {
      if (document.visibilityState === 'visible') {
        // User returned to tab - check if lock state is still valid
        if (!currentLock) {
          restoreActiveLock();
        }
      }
    };

    document.addEventListener('visibilitychange', handleVisibilityChange);

    return () => {
      document.removeEventListener('visibilitychange', handleVisibilityChange);
    };
  }, [user, currentLock, restoreActiveLock]);
|
||||
|
||||
  // Extend the current lock on a submission by another 15 minutes.
  // (The previous comment here said "Claim a specific submission" — that was a
  // copy/paste error; this callback only extends an already-held lock.)
  // Returns true when the RPC reports a new expiry, false otherwise.
  const extendLock = useCallback(async (submissionId: string): Promise<boolean> => {
    if (!user?.id) return false;

    setIsLoading(true);
    try {
      // RPC returns the new expiry timestamp on success
      const { data, error } = await supabase.rpc('extend_submission_lock', {
        submission_id: submissionId,
        moderator_id: user.id,
        extension_duration: 'PT15M', // ISO 8601 format: 15 minutes
      });

      if (error) throw error;

      if (data) {
        const newExpiresAt = new Date(data);
        // Only touch local state when the extension is for the lock we hold
        setCurrentLock((prev) =>
          prev?.submissionId === submissionId
            ? { ...prev, expiresAt: newExpiresAt }
            : prev
        );
        startLockTimer(newExpiresAt);

        toast({
          title: 'Lock Extended',
          description: 'Lock extended for 15 more minutes',
        });

        return true;
      }

      return false;
    } catch (error: unknown) {
      toast({
        title: 'Error',
        description: getErrorMessage(error),
        variant: 'destructive',
      });
      return false;
    } finally {
      setIsLoading(false);
    }
  }, [user, toast, startLockTimer]);
|
||||
|
||||
  // Release lock (manual or on completion).
  // silent=true suppresses the success/info toasts (used for auto-release
  // after a moderation action); error toasts are always shown.
  // Returns the RPC result: true if a lock was released, false/null if it was
  // already unlocked, false on error.
  const releaseLock = useCallback(async (
    submissionId: string,
    silent: boolean = false
  ): Promise<boolean> => {
    if (!user?.id) return false;

    setIsLoading(true);

    try {
      const { data, error } = await supabase.rpc('release_submission_lock', {
        submission_id: submissionId,
        moderator_id: user.id,
      });

      if (error) throw error;

      // Always clear local state and refresh stats if no error
      setCurrentLock((prev) =>
        prev?.submissionId === submissionId ? null : prev
      );

      if (lockTimerRef.current) {
        clearInterval(lockTimerRef.current);
        lockTimerRef.current = null; // Explicitly null it out
      }

      fetchStats();

      // Show appropriate toast based on result (unless silent)
      if (!silent) {
        if (data === true) {
          toast({
            title: 'Lock Released',
            description: 'You can now claim another submission',
          });
        } else {
          toast({
            title: 'Lock Already Released',
            description: 'This submission was already unlocked',
          });
        }
      }

      // Trigger refresh callback so parent UI can re-render queue state
      if (onLockStateChange) {
        onLockStateChange();
      }

      return data;
    } catch (error: unknown) {
      // Always show error toasts even in silent mode
      toast({
        title: 'Failed to Release Lock',
        description: getErrorMessage(error),
        variant: 'destructive',
      });
      return false;
    } finally {
      setIsLoading(false);
    }
  }, [user, fetchStats, toast, onLockStateChange]);
|
||||
|
||||
// Get time remaining on current lock
|
||||
const getTimeRemaining = useCallback((): number | null => {
|
||||
if (!currentLock) return null;
|
||||
return Math.max(0, currentLock.expiresAt.getTime() - Date.now());
|
||||
}, [currentLock]);
|
||||
|
||||
  /**
   * @deprecated Use escalateSubmission from useModerationActions instead.
   * This method only updates the database and doesn't send email notifications.
   *
   * Marks a submission as escalated (high priority), recording who escalated
   * it, when, and why. Returns true on success, false on error.
   */
  const escalateSubmission = useCallback(async (submissionId: string, reason: string): Promise<boolean> => {
    if (!user?.id) return false;

    setIsLoading(true);
    try {
      const { error } = await supabase
        .from('content_submissions')
        .update({
          escalated: true,
          escalated_at: new Date().toISOString(),
          escalated_by: user.id,
          escalation_reason: reason,
        })
        .eq('id', submissionId);

      if (error) throw error;

      toast({
        title: 'Submission Escalated',
        description: 'This submission has been marked as high priority',
      });

      // Refresh queue counters so the escalation is reflected immediately
      fetchStats();
      return true;
    } catch (error: unknown) {
      toast({
        title: 'Error',
        description: getErrorMessage(error),
        variant: 'destructive',
      });
      return false;
    } finally {
      setIsLoading(false);
    }
  }, [user, toast, fetchStats]);
|
||||
|
||||
  // Claim a specific submission (CRM-style claim any).
  // Acquires a 15-minute exclusive lock via the claim_specific_submission RPC,
  // after rejecting claims when the user already holds a lock. Returns true on
  // success (or when the same submission is already claimed by this user).
  const claimSubmission = useCallback(async (submissionId: string): Promise<boolean> => {
    if (!user?.id) {
      toast({
        title: 'Authentication Required',
        description: 'You must be logged in to claim submissions',
        variant: 'destructive',
      });
      return false;
    }

    // Check if trying to claim same submission user already has locked
    if (currentLock && currentLock.submissionId === submissionId) {
      toast({
        title: 'Already Claimed',
        description: 'You already have this submission claimed. Review it below.',
      });
      return true; // Return success, don't re-claim
    }

    // Check if user already has an active lock on a different submission
    // (one-lock-at-a-time policy)
    if (currentLock && currentLock.submissionId !== submissionId) {
      toast({
        title: 'Already Have Active Lock',
        description: 'Release your current lock before claiming another submission',
        variant: 'destructive',
      });
      return false;
    }

    setIsLoading(true);
    try {
      // Get submission details FIRST for better toast message
      const { data: submission } = await supabase
        .from('content_submissions')
        .select('id, submission_type')
        .eq('id', submissionId)
        .single();

      // Optimistic local expiry; the server applies the same PT15M duration
      const expiresAt = new Date(Date.now() + 15 * 60 * 1000);

      // Use direct fetch to force read-write transaction.
      // NOTE(review): URL and anon key are hardcoded here instead of coming
      // from env/config like the shared client — presumably intentional since
      // anon keys are public-shippable, but confirm and consider centralizing.
      const supabaseUrl = 'https://api.thrillwiki.com';
      const supabaseKey = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InlkdnRtbnJzenlicW5iY3FiZGN5Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3NTgzMjYzNTYsImV4cCI6MjA3MzkwMjM1Nn0.DM3oyapd_omP5ZzIlrT0H9qBsiQBxBRgw2tYuqgXKX4';

      const { data: sessionData } = await supabase.auth.getSession();
      const token = sessionData.session?.access_token;

      const response = await fetch(`${supabaseUrl}/rest/v1/rpc/claim_specific_submission`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'apikey': supabaseKey,
          'Authorization': `Bearer ${token}`,
          'Prefer': 'tx=commit', // Force read-write transaction
        },
        body: JSON.stringify({
          p_submission_id: submissionId,
          p_moderator_id: user.id,
          p_lock_duration: 'PT15M', // ISO 8601 format: 15 minutes
        }),
      });

      if (!response.ok) {
        // Fall back to a generic message when the error body isn't valid JSON
        const errorData = await response.json().catch((parseError) => {
          handleNonCriticalError(parseError, {
            action: 'Parse claim error response',
            userId: user.id,
            metadata: {
              submissionId,
              httpStatus: response.status,
              context: 'claim_submission_error_parsing'
            }
          });
          return { message: 'Failed to claim submission' };
        });
        throw new Error(errorData.message || 'Failed to claim submission');
      }

      const data = await response.json();

      // Falsy RPC result means someone else won the race for this lock
      if (!data) {
        throw new Error('Submission is already claimed or no longer available');
      }

      setCurrentLock({
        submissionId,
        expiresAt,
      });

      startLockTimer(expiresAt);

      // Enhanced toast with submission type
      const submissionType = submission?.submission_type || 'submission';
      const formattedType = getSubmissionTypeLabel(submissionType);
      toast({
        title: '✅ Submission Claimed',
        description: `${formattedType} locked for 15 minutes. Start reviewing now.`,
        duration: 4000,
      });

      // Force UI refresh to update queue
      fetchStats();
      if (onLockStateChange) {
        onLockStateChange();
      }

      return true;
    } catch (error: unknown) {
      toast({
        title: 'Failed to Claim Submission',
        description: getErrorMessage(error),
        variant: 'destructive',
      });
      return false;
    } finally {
      setIsLoading(false); // Always clear loading state
    }
  }, [user, toast, startLockTimer, fetchStats, onLockStateChange]);
|
||||
|
||||
// Reassign submission
|
||||
const reassignSubmission = useCallback(async (submissionId: string, newModeratorId: string): Promise<boolean> => {
|
||||
if (!user?.id) return false;
|
||||
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const { error } = await supabase
|
||||
.from('content_submissions')
|
||||
.update({
|
||||
assigned_to: newModeratorId,
|
||||
assigned_at: new Date().toISOString(),
|
||||
locked_until: new Date(Date.now() + 15 * 60 * 1000).toISOString(),
|
||||
})
|
||||
.eq('id', submissionId);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
// If this was our lock, clear it
|
||||
if (currentLock?.submissionId === submissionId) {
|
||||
setCurrentLock(null);
|
||||
if (lockTimerRef.current) {
|
||||
clearInterval(lockTimerRef.current);
|
||||
}
|
||||
}
|
||||
|
||||
toast({
|
||||
title: 'Submission Reassigned',
|
||||
description: 'The submission has been assigned to another moderator',
|
||||
});
|
||||
|
||||
fetchStats();
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
toast({
|
||||
title: 'Error',
|
||||
description: getErrorMessage(error),
|
||||
variant: 'destructive',
|
||||
});
|
||||
return false;
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [user, currentLock, toast, fetchStats]);
|
||||
|
||||
// Check if submission is locked by current user
|
||||
const isLockedByMe = useCallback((submissionId: string, assignedTo?: string | null, lockedUntil?: string | null): boolean => {
|
||||
// Check local state first (optimistic UI - immediate feedback)
|
||||
if (currentLock?.submissionId === submissionId) return true;
|
||||
|
||||
// Also check database state (source of truth)
|
||||
if (assignedTo && lockedUntil && user?.id) {
|
||||
const isAssignedToMe = assignedTo === user.id;
|
||||
const isLockActive = new Date(lockedUntil) > new Date();
|
||||
return isAssignedToMe && isLockActive;
|
||||
}
|
||||
|
||||
return false;
|
||||
}, [currentLock, user]);
|
||||
|
||||
// Check if submission is locked by another moderator
|
||||
const isLockedByOther = useCallback((submissionId: string, assignedTo: string | null, lockedUntil: string | null): boolean => {
|
||||
if (!assignedTo || !lockedUntil) return false;
|
||||
if (user?.id === assignedTo) return false; // It's our lock
|
||||
return new Date(lockedUntil) > new Date(); // Lock is still active
|
||||
}, [user]);
|
||||
|
||||
// Get lock progress percentage (0-100)
|
||||
const getLockProgress = useCallback((): number => {
|
||||
const timeLeft = getTimeRemaining();
|
||||
if (timeLeft === null) return 0;
|
||||
const totalTime = 15 * 60 * 1000; // 15 minutes in ms
|
||||
return Math.max(0, Math.min(100, (timeLeft / totalTime) * 100));
|
||||
}, [getTimeRemaining]);
|
||||
|
||||
// Auto-release lock after moderation action
|
||||
const releaseAfterAction = useCallback(async (submissionId: string, action: 'approved' | 'rejected'): Promise<void> => {
|
||||
if (currentLock?.submissionId === submissionId) {
|
||||
await releaseLock(submissionId, true); // Silent release
|
||||
}
|
||||
}, [currentLock, releaseLock]);
|
||||
|
||||
  // Superuser: Force release a specific lock, regardless of who holds it.
  // Returns the RPC result (truthy when a lock was released).
  const superuserReleaseLock = useCallback(async (
    submissionId: string
  ): Promise<boolean> => {
    if (!user?.id) return false;

    setIsLoading(true);

    try {
      // Permission enforcement is server-side in the RPC
      const { data, error } = await supabase.rpc('superuser_release_lock', {
        p_submission_id: submissionId,
        p_superuser_id: user.id,
      });

      if (error) throw error;

      toast({
        title: 'Lock Forcibly Released',
        description: 'The submission has been unlocked and is now available',
      });

      fetchStats();

      if (onLockStateChange) {
        onLockStateChange();
      }

      return data;
    } catch (error: unknown) {
      handleError(error, {
        action: 'Superuser Release Lock',
        userId: user.id,
        metadata: { submissionId }
      });
      return false;
    } finally {
      setIsLoading(false);
    }
  }, [user, fetchStats, toast, onLockStateChange]);
|
||||
|
||||
  // Superuser: Clear all locks across the moderation queue.
  // Returns the number of submissions that were unlocked (0 on error).
  const superuserReleaseAllLocks = useCallback(async (): Promise<number> => {
    if (!user?.id) return 0;

    setIsLoading(true);

    try {
      // Permission enforcement is server-side in the RPC
      const { data, error } = await supabase.rpc('superuser_release_all_locks', {
        p_superuser_id: user.id,
      });

      if (error) throw error;

      const count = data || 0;

      toast({
        title: 'All Locks Cleared',
        description: `${count} submission${count !== 1 ? 's' : ''} unlocked`,
      });

      fetchStats();

      if (onLockStateChange) {
        onLockStateChange();
      }

      return count;
    } catch (error: unknown) {
      handleError(error, {
        action: 'Superuser Clear All Locks',
        userId: user.id,
        metadata: { attemptedAction: 'bulk_release' }
      });
      return 0;
    } finally {
      setIsLoading(false);
    }
  }, [user, fetchStats, toast, onLockStateChange]);
|
||||
|
||||
  // Public API of the hook
  return {
    currentLock, // Exposed for reactive UI updates
    queueStats,
    isLoading,
    // Lock lifecycle
    claimSubmission,
    extendLock,
    releaseLock,
    getTimeRemaining,
    escalateSubmission,
    reassignSubmission,
    refreshStats: fetchStats,
    // New helpers
    isLockedByMe,
    isLockedByOther,
    getLockProgress,
    releaseAfterAction,
    // Superuser lock management
    superuserReleaseLock,
    superuserReleaseAllLocks,
  };
};
|
||||
|
||||
// Helper to format PostgreSQL interval
|
||||
function formatInterval(interval: string): string {
|
||||
const match = interval.match(/(\d+):(\d+):(\d+)/);
|
||||
if (!match) return interval;
|
||||
|
||||
const hours = parseInt(match[1]);
|
||||
const minutes = parseInt(match[2]);
|
||||
|
||||
if (hours > 24) {
|
||||
const days = Math.floor(hours / 24);
|
||||
return `${days}d ${hours % 24}h`;
|
||||
} else if (hours > 0) {
|
||||
return `${hours}h ${minutes}m`;
|
||||
} else {
|
||||
return `${minutes}m`;
|
||||
}
|
||||
}
|
||||
217
src-old/hooks/useModerationStats.ts
Normal file
217
src-old/hooks/useModerationStats.ts
Normal file
@@ -0,0 +1,217 @@
|
||||
import { useEffect, useState, useRef, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { reportsService } from '@/services/reports';
|
||||
|
||||
// Type for submission realtime payload.
// Partial shape of `content_submissions` rows as delivered by Supabase
// realtime events; all fields are optional because payloads may be partial.
interface SubmissionPayload {
  // Moderation status, e.g. 'pending' (values observed in this file)
  status?: string;
  // Moderator id the submission is assigned to, if any
  assigned_to?: string | null;
  // ISO timestamp the current lock expires at, if any
  locked_until?: string | null;
  escalated?: boolean;
}
|
||||
|
||||
// Aggregated counters shown in the moderation dashboard.
interface ModerationStats {
  // content_submissions rows with status 'pending'
  pendingSubmissions: number;
  // Pending reports, sourced from the Django reports API
  openReports: number;
  // reviews rows with moderation_status 'flagged'
  flaggedContent: number;
}
|
||||
|
||||
// Options for useModerationStats.
interface UseModerationStatsOptions {
  // Invoked with fresh stats whenever a fetch succeeds
  onStatsChange?: (stats: ModerationStats) => void;
  // Master switch; when false, no fetching at all
  enabled?: boolean;
  // Interval polling (needed for reports, which have no realtime channel)
  pollingEnabled?: boolean;
  // Polling period in milliseconds
  pollingInterval?: number;
  // Supabase realtime subscription for submissions/reviews
  realtimeEnabled?: boolean;
}
|
||||
|
||||
export const useModerationStats = (options: UseModerationStatsOptions = {}) => {
|
||||
const {
|
||||
onStatsChange,
|
||||
enabled = true,
|
||||
pollingEnabled = true,
|
||||
pollingInterval = 60000, // Reduced to 60 seconds
|
||||
realtimeEnabled = true
|
||||
} = options;
|
||||
|
||||
const [stats, setStats] = useState<ModerationStats>({
|
||||
pendingSubmissions: 0,
|
||||
openReports: 0,
|
||||
flaggedContent: 0,
|
||||
});
|
||||
|
||||
// Optimistic deltas for immediate UI updates
|
||||
const [optimisticDeltas, setOptimisticDeltas] = useState<ModerationStats>({
|
||||
pendingSubmissions: 0,
|
||||
openReports: 0,
|
||||
flaggedContent: 0,
|
||||
});
|
||||
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [isInitialLoad, setIsInitialLoad] = useState(true);
|
||||
const [lastUpdated, setLastUpdated] = useState<Date | null>(null);
|
||||
const onStatsChangeRef = useRef(onStatsChange);
|
||||
const statsDebounceRef = useRef<NodeJS.Timeout | null>(null);
|
||||
|
||||
// Update ref when callback changes
|
||||
useEffect(() => {
|
||||
onStatsChangeRef.current = onStatsChange;
|
||||
}, [onStatsChange]);
|
||||
|
||||
// Optimistic update function
|
||||
const optimisticallyUpdateStats = useCallback((delta: Partial<ModerationStats>) => {
|
||||
setOptimisticDeltas(prev => ({
|
||||
pendingSubmissions: (prev.pendingSubmissions || 0) + (delta.pendingSubmissions || 0),
|
||||
openReports: (prev.openReports || 0) + (delta.openReports || 0),
|
||||
flaggedContent: (prev.flaggedContent || 0) + (delta.flaggedContent || 0),
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const fetchStats = useCallback(async (silent = false) => {
|
||||
if (!enabled) return;
|
||||
|
||||
try {
|
||||
// Only show loading on initial load
|
||||
if (!silent) {
|
||||
setIsLoading(true);
|
||||
}
|
||||
|
||||
// Fetch stats - use Django API for reports, Supabase for submissions and reviews
|
||||
const [submissionsResult, reportsStatsResult, reviewsResult] = await Promise.all([
|
||||
supabase
|
||||
.from('content_submissions')
|
||||
.select('id', { count: 'exact', head: true })
|
||||
.eq('status', 'pending'),
|
||||
reportsService.getStatistics(),
|
||||
supabase
|
||||
.from('reviews')
|
||||
.select('id', { count: 'exact', head: true })
|
||||
.eq('moderation_status', 'flagged'),
|
||||
]);
|
||||
|
||||
const newStats = {
|
||||
pendingSubmissions: submissionsResult.count || 0,
|
||||
openReports: reportsStatsResult.success && reportsStatsResult.data
|
||||
? reportsStatsResult.data.pending_reports
|
||||
: 0,
|
||||
flaggedContent: reviewsResult.count || 0,
|
||||
};
|
||||
|
||||
setStats(newStats);
|
||||
setLastUpdated(new Date());
|
||||
onStatsChangeRef.current?.(newStats);
|
||||
|
||||
// Clear optimistic deltas when real data arrives
|
||||
setOptimisticDeltas({
|
||||
pendingSubmissions: 0,
|
||||
openReports: 0,
|
||||
flaggedContent: 0,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
// Silent failure - stats refresh periodically in background
|
||||
// Error already captured for potential monitoring
|
||||
} finally {
|
||||
// Only clear loading if it was set
|
||||
if (!silent) {
|
||||
setIsLoading(false);
|
||||
}
|
||||
if (isInitialLoad) {
|
||||
setIsInitialLoad(false);
|
||||
}
|
||||
}
|
||||
}, [enabled, isInitialLoad]);
|
||||
|
||||
// Initial fetch
|
||||
useEffect(() => {
|
||||
if (enabled) {
|
||||
fetchStats(false); // Show loading
|
||||
}
|
||||
}, [enabled, fetchStats]);
|
||||
|
||||
// Debounced stats fetch to prevent rapid-fire updates
|
||||
const debouncedFetchStats = useCallback(() => {
|
||||
if (statsDebounceRef.current) {
|
||||
clearTimeout(statsDebounceRef.current);
|
||||
}
|
||||
|
||||
statsDebounceRef.current = setTimeout(() => {
|
||||
fetchStats(true); // Silent refresh
|
||||
}, 2000); // 2 second debounce to reduce flashing
|
||||
}, [fetchStats]);
|
||||
|
||||
// Realtime subscription - only for content_submissions and reviews
|
||||
// Reports use polling since Django API doesn't support realtime
|
||||
useEffect(() => {
|
||||
if (!enabled || !realtimeEnabled) return;
|
||||
|
||||
const channel = supabase
|
||||
.channel('moderation-stats-realtime')
|
||||
// Listen to ALL events on content_submissions without filter
|
||||
// Manual filtering catches submissions leaving pending state
|
||||
.on('postgres_changes', {
|
||||
event: '*',
|
||||
schema: 'public',
|
||||
table: 'content_submissions'
|
||||
}, (payload) => {
|
||||
const oldData = payload.old as SubmissionPayload;
|
||||
const newData = payload.new as SubmissionPayload;
|
||||
const oldStatus = oldData?.status;
|
||||
const newStatus = newData?.status;
|
||||
const oldAssignedTo = oldData?.assigned_to;
|
||||
const newAssignedTo = newData?.assigned_to;
|
||||
const oldLockedUntil = oldData?.locked_until;
|
||||
const newLockedUntil = newData?.locked_until;
|
||||
|
||||
// Only refresh if change affects pending count or assignments
|
||||
if (
|
||||
payload.eventType === 'INSERT' && newStatus === 'pending' ||
|
||||
payload.eventType === 'UPDATE' && (oldStatus === 'pending' || newStatus === 'pending') ||
|
||||
payload.eventType === 'DELETE' && oldStatus === 'pending' ||
|
||||
payload.eventType === 'UPDATE' && (oldAssignedTo !== newAssignedTo || oldLockedUntil !== newLockedUntil)
|
||||
) {
|
||||
debouncedFetchStats();
|
||||
}
|
||||
})
|
||||
.on('postgres_changes', {
|
||||
event: '*',
|
||||
schema: 'public',
|
||||
table: 'reviews',
|
||||
filter: 'moderation_status=eq.flagged'
|
||||
}, debouncedFetchStats)
|
||||
.subscribe();
|
||||
|
||||
return () => {
|
||||
supabase.removeChannel(channel);
|
||||
if (statsDebounceRef.current) {
|
||||
clearTimeout(statsDebounceRef.current);
|
||||
}
|
||||
};
|
||||
}, [enabled, realtimeEnabled, debouncedFetchStats]);
|
||||
|
||||
// Polling (fallback when realtime is disabled OR always for reports since Django has no realtime)
|
||||
useEffect(() => {
|
||||
if (!enabled || !pollingEnabled || isInitialLoad) return;
|
||||
|
||||
const interval = setInterval(() => {
|
||||
fetchStats(true); // Silent refresh
|
||||
}, pollingInterval);
|
||||
|
||||
return () => {
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, [enabled, pollingEnabled, pollingInterval, fetchStats, isInitialLoad]);
|
||||
|
||||
// Combine real stats with optimistic deltas for display
|
||||
const displayStats = {
|
||||
pendingSubmissions: Math.max(0, stats.pendingSubmissions + optimisticDeltas.pendingSubmissions),
|
||||
openReports: Math.max(0, stats.openReports + optimisticDeltas.openReports),
|
||||
flaggedContent: Math.max(0, stats.flaggedContent + optimisticDeltas.flaggedContent),
|
||||
};
|
||||
|
||||
return {
|
||||
stats: displayStats,
|
||||
refresh: fetchStats,
|
||||
optimisticallyUpdateStats,
|
||||
isLoading,
|
||||
lastUpdated
|
||||
};
|
||||
};
|
||||
28
src-old/hooks/useNetworkStatus.ts
Normal file
28
src-old/hooks/useNetworkStatus.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
|
||||
export function useNetworkStatus() {
|
||||
const [isOnline, setIsOnline] = useState(navigator.onLine);
|
||||
const [wasOffline, setWasOffline] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const handleOnline = () => {
|
||||
setIsOnline(true);
|
||||
setWasOffline(false);
|
||||
};
|
||||
|
||||
const handleOffline = () => {
|
||||
setIsOnline(false);
|
||||
setWasOffline(true);
|
||||
};
|
||||
|
||||
window.addEventListener('online', handleOnline);
|
||||
window.addEventListener('offline', handleOffline);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('online', handleOnline);
|
||||
window.removeEventListener('offline', handleOffline);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return { isOnline, wasOffline };
|
||||
}
|
||||
17
src-old/hooks/useNovuNotifications.ts
Normal file
17
src-old/hooks/useNovuNotifications.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import { usePublicNovuSettings } from '@/hooks/usePublicNovuSettings';
|
||||
|
||||
export function useNovuNotifications() {
|
||||
const { user } = useAuth();
|
||||
const { applicationIdentifier, isLoading } = usePublicNovuSettings();
|
||||
|
||||
const subscriberId = user?.id;
|
||||
const isEnabled = !!applicationIdentifier && !!subscriberId;
|
||||
|
||||
return {
|
||||
applicationIdentifier,
|
||||
subscriberId,
|
||||
isEnabled,
|
||||
isLoading,
|
||||
};
|
||||
}
|
||||
174
src-old/hooks/useNovuTheme.ts
Normal file
174
src-old/hooks/useNovuTheme.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
import { useMemo } from 'react';
|
||||
|
||||
/**
 * Build the (static, memoized) Novu Inbox appearance config, mapping the
 * app's CSS custom properties (semantic design tokens) onto Novu's theming
 * variables and per-element style overrides.
 */
export function useNovuTheme() {
  // Memoized once (empty deps) - the object only references CSS variables,
  // which resolve at render time in the browser, so it never needs rebuilding.
  const appearance = useMemo(() => {
    return {
      variables: {
        // Colors - using semantic tokens
        colorBackground: `hsl(var(--background))`,
        colorForeground: `hsl(var(--foreground))`,
        colorPrimary: `hsl(var(--primary))`,
        colorPrimaryForeground: `hsl(var(--primary-foreground))`,
        colorSecondary: `hsl(var(--secondary))`,
        colorSecondaryForeground: `hsl(var(--secondary-foreground))`,
        colorCounter: `hsl(var(--primary))`,
        colorCounterForeground: `hsl(var(--primary-foreground))`,

        // Notification item colors
        colorNeutral: `hsl(var(--muted))`,
        colorNeutralForeground: `hsl(var(--muted-foreground))`,

        // Border and divider
        colorBorder: `hsl(var(--border))`,

        // Border radius
        borderRadius: `calc(var(--radius) + 2px)`,

        // Typography
        fontFamily: 'Inter, system-ui, -apple-system, sans-serif',
        fontSize: '14px',
        fontWeightRegular: '400',
        fontWeightMedium: '500',
        fontWeightBold: '600',
        lineHeight: '1.5',
      },
      // Per-element style overrides (keys are Novu Inbox element names)
      elements: {
        bellContainer: {
          width: '36px',
          height: '36px',
        },
        bell: {
          width: '20px',
          height: '20px',
          color: `hsl(var(--foreground))`,
        },
        bellDot: {
          backgroundColor: `hsl(var(--primary))`,
          width: '8px',
          height: '8px',
          boxShadow: `0 0 8px hsl(var(--primary) / 0.5)`,
        },
        popover: {
          width: '400px',
          maxWidth: '90vw',
          maxHeight: '600px',
          boxShadow: `var(--shadow-card)`,
          border: `1px solid hsl(var(--border))`,
          borderRadius: `calc(var(--radius) + 4px)`,
          backgroundColor: `hsl(var(--background))`,
          overflow: 'hidden',
        },
        notificationList: {
          maxHeight: '480px',
          overflowY: 'auto',
        },
        notificationItem: {
          padding: '16px',
          borderBottom: `1px solid hsl(var(--border) / 0.5)`,
          transition: 'var(--transition-smooth)',
          cursor: 'pointer',
        },
        // Read items are dimmed; unread items get a primary accent bar
        notificationItemRead: {
          opacity: '0.65',
          backgroundColor: `hsl(var(--muted) / 0.2)`,
        },
        notificationItemUnread: {
          backgroundColor: `hsl(var(--muted) / 0.4)`,
          borderLeft: `4px solid hsl(var(--primary))`,
          fontWeight: '500',
        },
        notificationDot: {
          backgroundColor: `hsl(var(--primary))`,
          width: '10px',
          height: '10px',
          borderRadius: '50%',
          boxShadow: `0 0 6px hsl(var(--primary) / 0.6)`,
        },
        notificationTitle: {
          fontSize: '15px',
          fontWeight: '600',
          lineHeight: '1.4',
          color: `hsl(var(--foreground))`,
          marginBottom: '4px',
        },
        notificationDescription: {
          fontSize: '14px',
          lineHeight: '1.5',
          color: `hsl(var(--muted-foreground))`,
          marginBottom: '8px',
        },
        notificationTimestamp: {
          fontSize: '12px',
          color: `hsl(var(--muted-foreground))`,
          fontWeight: '400',
        },
        notificationPrimaryAction: {
          backgroundColor: `hsl(var(--primary))`,
          color: `hsl(var(--primary-foreground))`,
          borderRadius: `var(--radius)`,
          padding: '10px 20px',
          fontSize: '14px',
          fontWeight: '500',
          border: 'none',
          transition: 'var(--transition-smooth)',
          cursor: 'pointer',
        },
        notificationSecondaryAction: {
          backgroundColor: `hsl(var(--secondary))`,
          color: `hsl(var(--secondary-foreground))`,
          borderRadius: `var(--radius)`,
          padding: '10px 20px',
          fontSize: '14px',
          fontWeight: '500',
          border: 'none',
          transition: 'var(--transition-smooth)',
          cursor: 'pointer',
        },
        loader: {
          color: `hsl(var(--primary))`,
          width: '32px',
          height: '32px',
        },
        emptyNotifications: {
          color: `hsl(var(--muted-foreground))`,
          textAlign: 'center',
          padding: '48px 24px',
          fontSize: '15px',
          lineHeight: '1.6',
        },
        header: {
          borderBottom: `1px solid hsl(var(--border))`,
          padding: '16px 20px',
          backgroundColor: `hsl(var(--muted) / 0.3)`,
        },
        headerTitle: {
          fontSize: '17px',
          fontWeight: '600',
          color: `hsl(var(--foreground))`,
          letterSpacing: '-0.01em',
        },
        headerMarkAllAsReadButton: {
          fontSize: '13px',
          color: `hsl(var(--primary))`,
          fontWeight: '500',
          cursor: 'pointer',
          transition: 'var(--transition-smooth)',
        },
        footer: {
          borderTop: `1px solid hsl(var(--border))`,
          padding: '12px 20px',
          backgroundColor: `hsl(var(--muted) / 0.3)`,
        },
        footerViewAllButton: {
          fontSize: '14px',
          color: `hsl(var(--primary))`,
          fontWeight: '500',
          cursor: 'pointer',
          transition: 'var(--transition-smooth)',
        },
      },
    } as const;
  }, []);

  return appearance;
}
|
||||
85
src-old/hooks/useOpenGraph.ts
Normal file
85
src-old/hooks/useOpenGraph.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { useEffect } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { getBannerUrls } from '@/lib/cloudflareImageUtils';
|
||||
|
||||
// Inputs for the useOpenGraph hook (per-page social sharing metadata).
interface OpenGraphOptions {
  // Page title; also used for document.title
  title: string;
  description?: string;
  // Direct image URL, used when no imageId is provided
  imageUrl?: string;
  // Cloudflare image id - preferred; resolved via getBannerUrls
  imageId?: string;
  type?: 'website' | 'article' | 'profile';
  // When false the hook leaves existing meta tags untouched
  enabled?: boolean;
}
|
||||
|
||||
export function useOpenGraph({
|
||||
title,
|
||||
description,
|
||||
imageUrl,
|
||||
imageId,
|
||||
type = 'website',
|
||||
enabled = true
|
||||
}: OpenGraphOptions) {
|
||||
const location = useLocation();
|
||||
const currentUrl = window.location.origin + location.pathname;
|
||||
|
||||
useEffect(() => {
|
||||
if (!enabled || !title) return;
|
||||
|
||||
// Determine the image to use
|
||||
let finalImageUrl = 'https://cdn.thrillwiki.com/images/4af6a0c6-4450-497d-772f-08da62274100/original';
|
||||
|
||||
if (imageId) {
|
||||
const bannerUrls = getBannerUrls(imageId);
|
||||
finalImageUrl = bannerUrls.desktop || imageUrl || 'https://cdn.thrillwiki.com/images/4af6a0c6-4450-497d-772f-08da62274100/original';
|
||||
} else if (imageUrl) {
|
||||
finalImageUrl = imageUrl;
|
||||
}
|
||||
|
||||
// Convert relative URL to absolute for social media
|
||||
if (finalImageUrl.startsWith('/')) {
|
||||
finalImageUrl = window.location.origin + finalImageUrl;
|
||||
}
|
||||
|
||||
// Update document title
|
||||
document.title = title.includes('ThrillWiki') ? title : `${title} | ThrillWiki`;
|
||||
|
||||
// Update or create meta tags
|
||||
updateMetaTag('og:title', title);
|
||||
updateMetaTag('og:description', description || 'Explore theme parks and roller coasters worldwide with ThrillWiki');
|
||||
updateMetaTag('og:image', finalImageUrl);
|
||||
updateMetaTag('og:type', type);
|
||||
updateMetaTag('og:url', currentUrl);
|
||||
|
||||
// Twitter tags
|
||||
updateMetaTag('twitter:title', title, 'name');
|
||||
updateMetaTag('twitter:description', description || 'Explore theme parks and roller coasters worldwide with ThrillWiki', 'name');
|
||||
updateMetaTag('twitter:image', finalImageUrl, 'name');
|
||||
updateMetaTag('twitter:url', currentUrl, 'name');
|
||||
|
||||
return () => {
|
||||
document.title = 'ThrillWiki - Theme Park & Roller Coaster Database';
|
||||
updateMetaTag('og:title', 'ThrillWiki - Theme Park & Roller Coaster Database');
|
||||
updateMetaTag('og:description', 'Explore theme parks and roller coasters worldwide with ThrillWiki - the comprehensive database for enthusiasts');
|
||||
updateMetaTag('og:image', 'https://cdn.thrillwiki.com/images/4af6a0c6-4450-497d-772f-08da62274100/original');
|
||||
updateMetaTag('og:type', 'website');
|
||||
updateMetaTag('og:url', 'https://www.thrillwiki.com/');
|
||||
|
||||
updateMetaTag('twitter:title', 'ThrillWiki - Theme Park & Roller Coaster Database', 'name');
|
||||
updateMetaTag('twitter:description', 'Explore theme parks and roller coasters worldwide with ThrillWiki - the comprehensive database for enthusiasts', 'name');
|
||||
updateMetaTag('twitter:image', 'https://cdn.thrillwiki.com/images/4af6a0c6-4450-497d-772f-08da62274100/original', 'name');
|
||||
updateMetaTag('twitter:url', 'https://www.thrillwiki.com/', 'name');
|
||||
};
|
||||
}, [title, description, imageUrl, imageId, type, currentUrl, enabled]);
|
||||
}
|
||||
|
||||
function updateMetaTag(property: string, content: string, attributeName: 'property' | 'name' = 'property') {
|
||||
let meta = document.querySelector(`meta[${attributeName}="${property}"]`);
|
||||
|
||||
if (!meta) {
|
||||
meta = document.createElement('meta');
|
||||
meta.setAttribute(attributeName, property);
|
||||
document.head.appendChild(meta);
|
||||
}
|
||||
|
||||
meta.setAttribute('content', content);
|
||||
}
|
||||
79
src-old/hooks/usePhotoSubmissionItems.ts
Normal file
79
src-old/hooks/usePhotoSubmissionItems.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
/**
|
||||
* Hook: usePhotoSubmissionItems
|
||||
* Fetches photo items from relational tables for a given submission
|
||||
*/
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { handleNonCriticalError, getErrorMessage } from '@/lib/errorHandler';
|
||||
import type { PhotoSubmissionItem } from '@/types/photo-submissions';
|
||||
|
||||
/** Result shape returned by usePhotoSubmissionItems. */
interface UsePhotoSubmissionItemsResult {
  // Photo items ordered by order_index; empty while loading or on error.
  photos: PhotoSubmissionItem[];
  // True until the current fetch settles.
  loading: boolean;
  // Human-readable fetch error message, or null on success.
  error: string | null;
}
|
||||
|
||||
export function usePhotoSubmissionItems(
|
||||
submissionId: string | undefined
|
||||
): UsePhotoSubmissionItemsResult {
|
||||
const [photos, setPhotos] = useState<PhotoSubmissionItem[]>([]);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!submissionId) {
|
||||
setPhotos([]);
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
fetchPhotoItems();
|
||||
}, [submissionId]);
|
||||
|
||||
const fetchPhotoItems = async () => {
|
||||
if (!submissionId) return;
|
||||
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
// Step 1: Get photo_submission_id from submission_id
|
||||
const { data: photoSubmission, error: photoSubmissionError } = await supabase
|
||||
.from('photo_submissions')
|
||||
.select('id')
|
||||
.eq('submission_id', submissionId)
|
||||
.maybeSingle();
|
||||
|
||||
if (photoSubmissionError) throw photoSubmissionError;
|
||||
if (!photoSubmission) {
|
||||
setPhotos([]);
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 2: Get photo items using photo_submission_id
|
||||
const { data, error: itemsError } = await supabase
|
||||
.from('photo_submission_items')
|
||||
.select('*')
|
||||
.eq('photo_submission_id', photoSubmission.id)
|
||||
.order('order_index');
|
||||
|
||||
if (itemsError) throw itemsError;
|
||||
|
||||
setPhotos(data || []);
|
||||
} catch (error: unknown) {
|
||||
const errorMsg = getErrorMessage(error);
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Fetch photo submission items',
|
||||
metadata: { submissionId }
|
||||
});
|
||||
setError(errorMsg);
|
||||
setPhotos([]);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return { photos, loading, error };
|
||||
}
|
||||
64
src-old/hooks/useProfile.tsx
Normal file
64
src-old/hooks/useProfile.tsx
Normal file
@@ -0,0 +1,64 @@
|
||||
import { useQuery, useQueryClient } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { Profile } from '@/types/database';
|
||||
import { getErrorMessage } from '@/lib/errorHandler';
|
||||
|
||||
export function useProfile(userId: string | undefined) {
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const query = useQuery({
|
||||
queryKey: ['profile', userId],
|
||||
queryFn: async () => {
|
||||
if (!userId) return null;
|
||||
|
||||
// Get current viewer ID
|
||||
const { data: { user } } = await supabase.auth.getUser();
|
||||
const viewerId = user?.id || null;
|
||||
|
||||
// Use get_filtered_profile RPC for privacy-aware field filtering
|
||||
const { data, error } = await supabase.rpc('get_filtered_profile', {
|
||||
_profile_user_id: userId,
|
||||
_viewer_id: viewerId || ''
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (!data) return null;
|
||||
|
||||
// Type the JSONB response properly
|
||||
const profileData = data as unknown as Profile;
|
||||
|
||||
// Fetch location separately if location_id is present and visible
|
||||
if (profileData.location_id) {
|
||||
const { data: location } = await supabase
|
||||
.from('locations')
|
||||
.select('id, name, city, state_province, country, timezone')
|
||||
.eq('id', profileData.location_id)
|
||||
.single();
|
||||
|
||||
if (location) {
|
||||
profileData.location = location;
|
||||
}
|
||||
}
|
||||
|
||||
return profileData;
|
||||
},
|
||||
enabled: !!userId,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
refetchOnWindowFocus: false,
|
||||
retry: 2,
|
||||
});
|
||||
|
||||
const refreshProfile = () => {
|
||||
if (userId) {
|
||||
queryClient.invalidateQueries({ queryKey: ['profile', userId] });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
...query,
|
||||
refreshProfile,
|
||||
};
|
||||
}
|
||||
30
src-old/hooks/usePublicNovuSettings.ts
Normal file
30
src-old/hooks/usePublicNovuSettings.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
|
||||
/**
|
||||
* Hook to fetch public Novu settings accessible to all authenticated users
|
||||
*/
|
||||
export function usePublicNovuSettings() {
|
||||
const { data: settings, isLoading, error } = useQuery({
|
||||
queryKey: ['public-novu-settings'],
|
||||
queryFn: async () => {
|
||||
const { data, error } = await supabase
|
||||
.from('admin_settings')
|
||||
.select('setting_key, setting_value')
|
||||
.eq('setting_key', 'novu.application_identifier')
|
||||
.maybeSingle();
|
||||
|
||||
if (error) throw error;
|
||||
return data;
|
||||
},
|
||||
});
|
||||
|
||||
const applicationIdentifier = settings?.setting_value as string || '';
|
||||
|
||||
return {
|
||||
applicationIdentifier,
|
||||
isLoading,
|
||||
error,
|
||||
isEnabled: !!applicationIdentifier,
|
||||
};
|
||||
}
|
||||
49
src-old/hooks/useRequireMFA.ts
Normal file
49
src-old/hooks/useRequireMFA.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import { useAuth } from './useAuth';
|
||||
import { useUserRole } from './useUserRole';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { getEnrolledFactors } from '@/lib/authService';
|
||||
|
||||
export function useRequireMFA() {
|
||||
const { aal, session } = useAuth();
|
||||
const { isModerator, isAdmin, loading: roleLoading } = useUserRole();
|
||||
const [isEnrolled, setIsEnrolled] = useState(false);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
// Check actual enrollment status
|
||||
useEffect(() => {
|
||||
const checkEnrollment = async () => {
|
||||
if (!session) {
|
||||
setIsEnrolled(false);
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
const factors = await getEnrolledFactors();
|
||||
setIsEnrolled(factors.length > 0);
|
||||
setLoading(false);
|
||||
};
|
||||
|
||||
if (!roleLoading) {
|
||||
checkEnrollment();
|
||||
}
|
||||
}, [session, roleLoading]);
|
||||
|
||||
// MFA is required for moderators and admins
|
||||
const requiresMFA = isModerator() || isAdmin();
|
||||
|
||||
// User has MFA if they have AAL2 AND have enrolled factors
|
||||
const hasMFA = aal === 'aal2' && isEnrolled;
|
||||
|
||||
// User needs to verify MFA if they're enrolled but session is still at AAL1
|
||||
const needsVerification = requiresMFA && isEnrolled && aal === 'aal1';
|
||||
|
||||
return {
|
||||
requiresMFA,
|
||||
hasMFA,
|
||||
isEnrolled,
|
||||
needsEnrollment: requiresMFA && !isEnrolled,
|
||||
needsVerification,
|
||||
aal,
|
||||
loading: loading || roleLoading,
|
||||
};
|
||||
}
|
||||
125
src-old/hooks/useRetryProgress.ts
Normal file
125
src-old/hooks/useRetryProgress.ts
Normal file
@@ -0,0 +1,125 @@
|
||||
import { useState, useCallback } from 'react';
|
||||
import { toast } from '@/hooks/use-toast';
|
||||
|
||||
/** Tuning knobs for retryWithProgress. */
interface RetryOptions {
  /** Total number of attempts, including the first (default 3). */
  maxAttempts?: number;
  /** Base delay between attempts in milliseconds (default 1000). */
  delayMs?: number;
  /** When true (default), double the wait after each failed attempt. */
  exponentialBackoff?: boolean;
  /** Invoked before each attempt with (attempt, maxAttempts). */
  onProgress?: (attempt: number, maxAttempts: number) => void;
}
|
||||
|
||||
export function useRetryProgress() {
|
||||
const [isRetrying, setIsRetrying] = useState(false);
|
||||
const [currentAttempt, setCurrentAttempt] = useState(0);
|
||||
const [abortController, setAbortController] = useState<AbortController | null>(null);
|
||||
|
||||
const retryWithProgress = useCallback(
|
||||
async <T,>(
|
||||
operation: () => Promise<T>,
|
||||
options: RetryOptions = {}
|
||||
): Promise<T> => {
|
||||
const {
|
||||
maxAttempts = 3,
|
||||
delayMs = 1000,
|
||||
exponentialBackoff = true,
|
||||
onProgress,
|
||||
} = options;
|
||||
|
||||
setIsRetrying(true);
|
||||
const controller = new AbortController();
|
||||
setAbortController(controller);
|
||||
|
||||
let lastError: Error | null = null;
|
||||
let toastId: string | undefined;
|
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
if (controller.signal.aborted) {
|
||||
throw new Error('Operation cancelled');
|
||||
}
|
||||
|
||||
setCurrentAttempt(attempt);
|
||||
onProgress?.(attempt, maxAttempts);
|
||||
|
||||
// Show progress toast
|
||||
if (attempt > 1) {
|
||||
const delay = exponentialBackoff ? delayMs * Math.pow(2, attempt - 2) : delayMs;
|
||||
const countdown = Math.ceil(delay / 1000);
|
||||
|
||||
toast({
|
||||
title: `Retrying (${attempt}/${maxAttempts})`,
|
||||
description: `Waiting ${countdown}s before retry...`,
|
||||
duration: delay,
|
||||
});
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await operation();
|
||||
|
||||
setIsRetrying(false);
|
||||
setCurrentAttempt(0);
|
||||
setAbortController(null);
|
||||
|
||||
// Show success toast
|
||||
toast({
|
||||
title: "Success",
|
||||
description: attempt > 1
|
||||
? `Operation succeeded on attempt ${attempt}`
|
||||
: 'Operation completed successfully',
|
||||
duration: 3000,
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
|
||||
if (attempt < maxAttempts) {
|
||||
toast({
|
||||
title: `Attempt ${attempt} Failed`,
|
||||
description: `${lastError.message}. Retrying...`,
|
||||
duration: 2000,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// All attempts failed
|
||||
setIsRetrying(false);
|
||||
setCurrentAttempt(0);
|
||||
setAbortController(null);
|
||||
|
||||
toast({
|
||||
variant: 'destructive',
|
||||
title: "All Retries Failed",
|
||||
description: `Failed after ${maxAttempts} attempts: ${lastError?.message}`,
|
||||
duration: 5000,
|
||||
});
|
||||
|
||||
throw lastError;
|
||||
},
|
||||
[]
|
||||
);
|
||||
|
||||
const cancel = useCallback(() => {
|
||||
if (abortController) {
|
||||
abortController.abort();
|
||||
setAbortController(null);
|
||||
setIsRetrying(false);
|
||||
setCurrentAttempt(0);
|
||||
|
||||
toast({
|
||||
title: 'Cancelled',
|
||||
description: 'Retry operation cancelled',
|
||||
duration: 2000,
|
||||
});
|
||||
}
|
||||
}, [abortController]);
|
||||
|
||||
return {
|
||||
retryWithProgress,
|
||||
isRetrying,
|
||||
currentAttempt,
|
||||
cancel,
|
||||
};
|
||||
}
|
||||
284
src-old/hooks/useRideCreditFilters.ts
Normal file
284
src-old/hooks/useRideCreditFilters.ts
Normal file
@@ -0,0 +1,284 @@
|
||||
import { useState, useMemo, useCallback } from 'react';
|
||||
import { useDebounce } from '@/hooks/useDebounce';
|
||||
import { RideCreditFilters, FilterPreset } from '@/types/ride-credits';
|
||||
import { UserRideCredit } from '@/types/database';
|
||||
|
||||
export function useRideCreditFilters(credits: UserRideCredit[]) {
|
||||
const [filters, setFilters] = useState<RideCreditFilters>({});
|
||||
const debouncedSearchQuery = useDebounce(filters.searchQuery || '', 300);
|
||||
|
||||
const updateFilter = useCallback((key: keyof RideCreditFilters, value: any) => {
|
||||
setFilters(prev => ({ ...prev, [key]: value }));
|
||||
}, []);
|
||||
|
||||
const clearFilters = useCallback(() => {
|
||||
setFilters({});
|
||||
}, []);
|
||||
|
||||
const applyPreset = useCallback((preset: FilterPreset) => {
|
||||
switch (preset) {
|
||||
case 'mostRidden':
|
||||
setFilters({ minRideCount: 10 });
|
||||
break;
|
||||
case 'recentlyAdded':
|
||||
// Will be sorted by date
|
||||
setFilters({});
|
||||
break;
|
||||
case 'singleRides':
|
||||
setFilters({ minRideCount: 1, maxRideCount: 1 });
|
||||
break;
|
||||
case 'needRating':
|
||||
setFilters({ hasRating: false });
|
||||
break;
|
||||
case 'highlyRated':
|
||||
setFilters({ hasRating: true, minUserRating: 4 });
|
||||
break;
|
||||
case 'all':
|
||||
default:
|
||||
clearFilters();
|
||||
break;
|
||||
}
|
||||
}, [clearFilters]);
|
||||
|
||||
const filteredCredits = useMemo(() => {
|
||||
let result = [...credits];
|
||||
|
||||
// Search filter (text search)
|
||||
if (debouncedSearchQuery) {
|
||||
const search = debouncedSearchQuery.toLowerCase();
|
||||
result = result.filter(credit =>
|
||||
credit.rides?.name?.toLowerCase().includes(search) ||
|
||||
credit.rides?.parks?.name?.toLowerCase().includes(search)
|
||||
);
|
||||
}
|
||||
|
||||
// Selected search items filter (multi-select combobox)
|
||||
if (filters.selectedSearchItems && filters.selectedSearchItems.length > 0) {
|
||||
result = result.filter(credit => {
|
||||
return filters.selectedSearchItems!.some(item => {
|
||||
const [type, id] = item.split(':');
|
||||
|
||||
if (type === 'ride' && credit.rides?.id === id) return true;
|
||||
if (type === 'park' && credit.rides?.parks?.id === id) return true;
|
||||
if (type === 'manufacturer' && credit.rides?.manufacturer?.id === id) return true;
|
||||
|
||||
return false;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Categories
|
||||
if (filters.categories && filters.categories.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.categories!.includes(credit.rides?.category || '')
|
||||
);
|
||||
}
|
||||
|
||||
// Geographic filters
|
||||
if (filters.countries && filters.countries.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.countries!.includes(credit.rides?.parks?.locations?.country || '')
|
||||
);
|
||||
}
|
||||
|
||||
if (filters.statesProvinces && filters.statesProvinces.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.statesProvinces!.includes(credit.rides?.parks?.locations?.state_province || '')
|
||||
);
|
||||
}
|
||||
|
||||
if (filters.cities && filters.cities.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.cities!.includes(credit.rides?.parks?.locations?.city || '')
|
||||
);
|
||||
}
|
||||
|
||||
// Park filters
|
||||
if (filters.parks && filters.parks.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.parks!.includes(credit.rides?.parks?.id || '')
|
||||
);
|
||||
}
|
||||
|
||||
if (filters.parkTypes && filters.parkTypes.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.parkTypes!.includes(credit.rides?.parks?.park_type || '')
|
||||
);
|
||||
}
|
||||
|
||||
// Manufacturers
|
||||
if (filters.manufacturers && filters.manufacturers.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
credit.rides?.manufacturer?.id &&
|
||||
filters.manufacturers!.includes(credit.rides.manufacturer.id)
|
||||
);
|
||||
}
|
||||
|
||||
// Ride count range
|
||||
if (filters.minRideCount !== undefined) {
|
||||
result = result.filter(credit => credit.ride_count >= filters.minRideCount!);
|
||||
}
|
||||
|
||||
if (filters.maxRideCount !== undefined) {
|
||||
result = result.filter(credit => credit.ride_count <= filters.maxRideCount!);
|
||||
}
|
||||
|
||||
// Speed range
|
||||
if (filters.minSpeed !== undefined || filters.maxSpeed !== undefined) {
|
||||
result = result.filter(credit => {
|
||||
const speed = credit.rides?.max_speed_kmh;
|
||||
if (!speed) return false;
|
||||
if (filters.minSpeed && speed < filters.minSpeed) return false;
|
||||
if (filters.maxSpeed && speed > filters.maxSpeed) return false;
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
// Height range
|
||||
if (filters.minHeight !== undefined || filters.maxHeight !== undefined) {
|
||||
result = result.filter(credit => {
|
||||
const height = credit.rides?.max_height_meters;
|
||||
if (!height) return false;
|
||||
if (filters.minHeight && height < filters.minHeight) return false;
|
||||
if (filters.maxHeight && height > filters.maxHeight) return false;
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
// Inversions
|
||||
if (filters.hasInversions !== undefined) {
|
||||
result = result.filter(credit => {
|
||||
const inversions = credit.rides?.inversions || 0;
|
||||
return filters.hasInversions ? inversions > 0 : inversions === 0;
|
||||
});
|
||||
}
|
||||
|
||||
if (filters.minInversions !== undefined) {
|
||||
result = result.filter(credit => {
|
||||
const inversions = credit.rides?.inversions || 0;
|
||||
return inversions >= filters.minInversions!;
|
||||
});
|
||||
}
|
||||
|
||||
// Coaster types filter
|
||||
if (filters.coasterTypes && filters.coasterTypes.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
credit.rides?.coaster_type &&
|
||||
filters.coasterTypes!.includes(credit.rides.coaster_type)
|
||||
);
|
||||
}
|
||||
|
||||
// Seating types filter
|
||||
if (filters.seatingTypes && filters.seatingTypes.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
credit.rides?.seating_type &&
|
||||
filters.seatingTypes!.includes(credit.rides.seating_type)
|
||||
);
|
||||
}
|
||||
|
||||
// Intensity levels filter
|
||||
if (filters.intensityLevels && filters.intensityLevels.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
credit.rides?.intensity_level &&
|
||||
filters.intensityLevels!.includes(credit.rides.intensity_level)
|
||||
);
|
||||
}
|
||||
|
||||
// Track material filter (handles array)
|
||||
if (filters.trackMaterial && filters.trackMaterial.length > 0) {
|
||||
result = result.filter(credit => {
|
||||
if (!credit.rides?.track_material) return false;
|
||||
const rideMaterials = Array.isArray(credit.rides.track_material)
|
||||
? credit.rides.track_material
|
||||
: [credit.rides.track_material];
|
||||
return rideMaterials.some(material => filters.trackMaterial!.includes(material));
|
||||
});
|
||||
}
|
||||
|
||||
// User rating
|
||||
if (filters.hasRating === true) {
|
||||
result = result.filter(credit => credit.personal_rating !== null);
|
||||
} else if (filters.hasRating === false) {
|
||||
result = result.filter(credit => credit.personal_rating === null);
|
||||
}
|
||||
|
||||
if (filters.minUserRating !== undefined) {
|
||||
result = result.filter(credit =>
|
||||
(credit.personal_rating || 0) >= filters.minUserRating!
|
||||
);
|
||||
}
|
||||
|
||||
// Notes
|
||||
if (filters.hasNotes === true) {
|
||||
result = result.filter(credit =>
|
||||
credit.personal_notes && credit.personal_notes.trim().length > 0
|
||||
);
|
||||
} else if (filters.hasNotes === false) {
|
||||
result = result.filter(credit =>
|
||||
!credit.personal_notes || credit.personal_notes.trim().length === 0
|
||||
);
|
||||
}
|
||||
|
||||
// Photos
|
||||
if (filters.hasPhotos === true) {
|
||||
result = result.filter(credit => credit.personal_photo_id !== null);
|
||||
} else if (filters.hasPhotos === false) {
|
||||
result = result.filter(credit => credit.personal_photo_id === null);
|
||||
}
|
||||
|
||||
// Date ranges
|
||||
if (filters.firstRideDateFrom) {
|
||||
result = result.filter(credit =>
|
||||
credit.first_ride_date &&
|
||||
new Date(credit.first_ride_date) >= filters.firstRideDateFrom!
|
||||
);
|
||||
}
|
||||
|
||||
if (filters.firstRideDateTo) {
|
||||
result = result.filter(credit =>
|
||||
credit.first_ride_date &&
|
||||
new Date(credit.first_ride_date) <= filters.firstRideDateTo!
|
||||
);
|
||||
}
|
||||
|
||||
if (filters.lastRideDateFrom) {
|
||||
result = result.filter(credit =>
|
||||
credit.last_ride_date &&
|
||||
new Date(credit.last_ride_date) >= filters.lastRideDateFrom!
|
||||
);
|
||||
}
|
||||
|
||||
if (filters.lastRideDateTo) {
|
||||
result = result.filter(credit =>
|
||||
credit.last_ride_date &&
|
||||
new Date(credit.last_ride_date) <= filters.lastRideDateTo!
|
||||
);
|
||||
}
|
||||
|
||||
// Ride status
|
||||
if (filters.rideStatuses && filters.rideStatuses.length > 0) {
|
||||
result = result.filter(credit =>
|
||||
filters.rideStatuses!.includes(credit.rides?.status || '')
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
}, [credits, filters, debouncedSearchQuery]);
|
||||
|
||||
const activeFilterCount = useMemo(() => {
|
||||
return Object.entries(filters).filter(([_, value]) => {
|
||||
if (Array.isArray(value)) return value.length > 0;
|
||||
if (typeof value === 'boolean') return true;
|
||||
return value !== undefined && value !== null && value !== '';
|
||||
}).length;
|
||||
}, [filters]);
|
||||
|
||||
return {
|
||||
filters,
|
||||
updateFilter,
|
||||
clearFilters,
|
||||
applyPreset,
|
||||
filteredCredits,
|
||||
activeFilterCount,
|
||||
};
|
||||
}
|
||||
233
src-old/hooks/useSearch.tsx
Normal file
233
src-old/hooks/useSearch.tsx
Normal file
@@ -0,0 +1,233 @@
|
||||
import { useState, useEffect, useMemo, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { Park, Ride, Company } from '@/types/database';
|
||||
import * as storage from '@/lib/localStorage';
|
||||
import { toast } from 'sonner';
|
||||
import { handleNonCriticalError } from '@/lib/errorHandler';
|
||||
|
||||
/** Normalized search hit rendered by the search UI. */
export interface SearchResult {
  /** Entity row id. */
  id: string;
  /** Which entity table the hit came from. */
  type: 'park' | 'ride' | 'company';
  /** Primary display line (entity name). */
  title: string;
  /** Secondary line: location string, "at <park>", or company type. */
  subtitle: string;
  /** Thumbnail URL when the entity has one. */
  image?: string;
  /** Average rating when the entity has one (companies omit it). */
  rating?: number;
  /** URL slug for navigation. */
  slug?: string;
  /** The raw entity row backing this result. */
  data: Park | Ride | Company;
}
|
||||
|
||||
/** Tuning options for useSearch. */
interface UseSearchOptions {
  /** Entity types to include (default: parks, rides, and companies). */
  types?: ('park' | 'ride' | 'company')[];
  /** Maximum combined results returned (default 10). */
  limit?: number;
  /** Minimum query length before a search fires (default 2). */
  minQuery?: number;
  /** Debounce interval for the query in milliseconds (default 300). */
  debounceMs?: number;
}

// Hoist default values to prevent recreating on every render
const DEFAULT_TYPES: ('park' | 'ride' | 'company')[] = ['park', 'ride', 'company'];
const DEFAULT_LIMIT = 10;
const DEFAULT_MIN_QUERY = 2;
const DEFAULT_DEBOUNCE_MS = 300;
||||
|
||||
/**
 * Debounced multi-entity search across parks, rides, and companies.
 *
 * Behavior: the raw query is debounced; once it reaches `minQuery` length,
 * each enabled entity type is queried via ilike on name/description, the
 * results are merged, sorted (prefix matches first, then alphabetical),
 * and truncated to `limit`. Recent searches persist in localStorage.
 */
export function useSearch(options: UseSearchOptions = {}) {
  // All hooks declarations in stable order
  const [query, setQuery] = useState('');
  const [results, setResults] = useState<SearchResult[]>([]);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [recentSearches, setRecentSearches] = useState<string[]>([]);
  const [debouncedQuery, setDebouncedQuery] = useState('');

  // Use useMemo to stabilize options, but use safe defaults to prevent undefined errors during HMR
  // NOTE(review): this memo depends on the `options` object identity, so a
  // caller passing an inline literal re-creates stableOptions (and thus the
  // `search` callback) every render — confirm this is intended.
  const stableOptions = useMemo(() => {
    const safeOptions = options || {};
    return {
      types: safeOptions.types || DEFAULT_TYPES,
      limit: safeOptions.limit ?? DEFAULT_LIMIT,
      minQuery: safeOptions.minQuery ?? DEFAULT_MIN_QUERY,
      debounceMs: safeOptions.debounceMs ?? DEFAULT_DEBOUNCE_MS,
    };
  }, [options]);

  const { types, limit, minQuery, debounceMs } = stableOptions;

  // Debounce the raw query before triggering a search.
  useEffect(() => {
    const timer = setTimeout(() => {
      setDebouncedQuery(query);
    }, debounceMs);

    return () => clearTimeout(timer);
  }, [query, debounceMs]);

  // Load recent searches from localStorage
  useEffect(() => {
    const searches = storage.getJSON<string[]>('thrillwiki_recent_searches', []);
    if (Array.isArray(searches)) {
      setRecentSearches(searches);
    }
  }, []);

  // Search function: queries each enabled entity type and merges results.
  const search = useCallback(async (searchQuery: string) => {
    if (searchQuery.length < minQuery) {
      setResults([]);
      setError(null);
      return;
    }

    setLoading(true);
    setError(null);
    try {
      const searchResults: SearchResult[] = [];

      // Search parks (each type gets an equal share of the result budget)
      if (types.includes('park')) {
        const { data: parks } = await supabase
          .from('parks')
          .select(`
            *,
            location:locations(*),
            operator:companies!parks_operator_id_fkey(*)
          `)
          .or(`name.ilike.%${searchQuery}%,description.ilike.%${searchQuery}%`)
          .limit(Math.ceil(limit / types.length));

        parks?.forEach((park) => {
          searchResults.push({
            id: park.id,
            type: 'park',
            title: park.name,
            subtitle: [park.location?.city, park.location?.state_province, park.location?.country].filter(Boolean).join(', '),
            image: park.banner_image_url || park.card_image_url || undefined,
            rating: park.average_rating ?? undefined,
            slug: park.slug,
            data: park
          });
        });
      }

      // Search rides
      if (types.includes('ride')) {
        const { data: rides } = await supabase
          .from('rides')
          .select(`
            *,
            park:parks!inner(name, slug),
            manufacturer:companies!rides_manufacturer_id_fkey(*)
          `)
          .or(`name.ilike.%${searchQuery}%,description.ilike.%${searchQuery}%`)
          .limit(Math.ceil(limit / types.length));

        rides?.forEach((ride) => {
          searchResults.push({
            id: ride.id,
            type: 'ride',
            title: ride.name,
            subtitle: `at ${ride.park?.name || 'Unknown Park'}`,
            image: ride.image_url || undefined,
            rating: ride.average_rating ?? undefined,
            slug: ride.slug,
            data: ride
          });
        });
      }

      // Search companies
      if (types.includes('company')) {
        const { data: companies } = await supabase
          .from('companies')
          .select('id, name, slug, description, company_type, logo_url, average_rating, review_count')
          .or(`name.ilike.%${searchQuery}%,description.ilike.%${searchQuery}%`)
          .limit(Math.ceil(limit / types.length));

        companies?.forEach((company) => {
          searchResults.push({
            id: company.id,
            type: 'company',
            title: company.name,
            // Title-case the snake_case company_type for display.
            subtitle: company.company_type?.replace('_', ' ').replace(/\b\w/g, l => l.toUpperCase()) || 'Company',
            image: company.logo_url || undefined,
            slug: company.slug,
            data: company
          });
        });
      }

      // Sort by relevance (exact matches first, then partial matches)
      searchResults.sort((a, b) => {
        const aExact = a.title.toLowerCase().startsWith(searchQuery.toLowerCase());
        const bExact = b.title.toLowerCase().startsWith(searchQuery.toLowerCase());

        if (aExact && !bExact) return -1;
        if (!aExact && bExact) return 1;

        return a.title.localeCompare(b.title);
      });

      setResults(searchResults.slice(0, limit));
    } catch (error: unknown) {
      handleNonCriticalError(error, {
        action: 'Search',
        metadata: { query: searchQuery, types },
      });

      toast.error('Search failed', {
        description: 'Unable to search. Please try again.',
      });

      setError('Failed to search. Please try again.');
      setResults([]);
    } finally {
      setLoading(false);
    }
  }, [types, limit, minQuery]);

  // Effect for debounced search
  useEffect(() => {
    if (debouncedQuery) {
      search(debouncedQuery);
    } else {
      setResults([]);
    }
  }, [debouncedQuery, search]);

  // Save search to recent searches (most-recent-first, deduped, max 5)
  const saveSearch = (searchQuery: string) => {
    if (!searchQuery.trim()) return;

    const updated = [searchQuery, ...recentSearches.filter(s => s !== searchQuery)].slice(0, 5);
    setRecentSearches(updated);
    storage.setJSON('thrillwiki_recent_searches', updated);
  };

  // Clear recent searches
  const clearRecentSearches = () => {
    setRecentSearches([]);
    storage.removeItem('thrillwiki_recent_searches');
  };

  // Get suggestions (recent searches when no query)
  const suggestions = useMemo(() => {
    if (query.length > 0) return [];
    return recentSearches.map(search => ({
      id: search,
      type: 'suggestion' as const,
      title: search,
      subtitle: 'Recent search',
      data: null
    }));
  }, [query, recentSearches]);

  return {
    query,
    setQuery,
    results,
    suggestions,
    loading,
    error,
    recentSearches,
    saveSearch,
    clearRecentSearches,
    search
  };
}
|
||||
74
src-old/hooks/useSessionMonitor.ts
Normal file
74
src-old/hooks/useSessionMonitor.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useAuth } from './useAuth';
|
||||
import { useRequireMFA } from './useRequireMFA';
|
||||
import { getSessionAal } from '@/lib/authService';
|
||||
import { logger } from '@/lib/logger';
|
||||
import { handleNonCriticalError } from '@/lib/errorHandler';
|
||||
|
||||
/**
|
||||
* Phase 3: Session Monitoring Hook
|
||||
* Monitors AAL degradation and forces re-verification when needed
|
||||
*
|
||||
* This hook continuously checks the session's AAL level and detects
|
||||
* if it degrades from AAL2 to AAL1, which can happen after token refresh
|
||||
* or session expiry.
|
||||
*/
|
||||
export function useSessionMonitor() {
|
||||
const { aal, session, user } = useAuth();
|
||||
const { requiresMFA, isEnrolled } = useRequireMFA();
|
||||
const [aalWarning, setAalWarning] = useState(false);
|
||||
const [aalDegraded, setAalDegraded] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (!session || !user || !requiresMFA || !isEnrolled) {
|
||||
setAalWarning(false);
|
||||
setAalDegraded(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Check AAL every 60 seconds
|
||||
const interval = setInterval(async () => {
|
||||
try {
|
||||
const currentAal = await getSessionAal(session);
|
||||
|
||||
// If AAL degraded from AAL2 to AAL1
|
||||
if (currentAal === 'aal1' && aal === 'aal2') {
|
||||
logger.warn('AAL degradation detected', {
|
||||
userId: user.id,
|
||||
previousAal: aal,
|
||||
currentAal,
|
||||
action: 'session_monitor'
|
||||
});
|
||||
|
||||
// Show warning for 30 seconds
|
||||
setAalWarning(true);
|
||||
setAalDegraded(true);
|
||||
|
||||
// After 30 seconds, redirect to MFA step-up
|
||||
setTimeout(() => {
|
||||
logger.info('Forcing MFA step-up due to AAL degradation', {
|
||||
userId: user.id,
|
||||
action: 'session_monitor_redirect'
|
||||
});
|
||||
|
||||
sessionStorage.setItem('mfa_step_up_required', 'true');
|
||||
sessionStorage.setItem('mfa_intended_path', window.location.pathname);
|
||||
window.location.href = '/auth';
|
||||
}, 30000);
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Session monitor check',
|
||||
userId: user.id,
|
||||
});
|
||||
}
|
||||
}, 60000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [session, aal, requiresMFA, isEnrolled, user]);
|
||||
|
||||
return {
|
||||
aalWarning,
|
||||
aalDegraded
|
||||
};
|
||||
}
|
||||
11
src-old/hooks/useSidebar.ts
Normal file
11
src-old/hooks/useSidebar.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { useContext } from "react";
|
||||
import { SidebarContext } from "@/components/ui/sidebar-context";
|
||||
|
||||
export function useSidebar() {
|
||||
const context = useContext(SidebarContext);
|
||||
if (!context) {
|
||||
throw new Error("useSidebar must be used within a SidebarProvider.");
|
||||
}
|
||||
|
||||
return context;
|
||||
}
|
||||
146
src-old/hooks/useSubmissionQueue.ts
Normal file
146
src-old/hooks/useSubmissionQueue.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
import { QueuedSubmission } from '@/components/submission/SubmissionQueueIndicator';
|
||||
import { useNetworkStatus } from './useNetworkStatus';
|
||||
import {
|
||||
getPendingSubmissions,
|
||||
processQueue,
|
||||
removeFromQueue,
|
||||
clearQueue as clearQueueStorage,
|
||||
getPendingCount,
|
||||
} from '@/lib/submissionQueue';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
/** Options for useSubmissionQueue. */
interface UseSubmissionQueueOptions {
  /** Automatically retry queued items once back online (default: true). */
  autoRetry?: boolean;
  /** Delay before the auto-retry fires after reconnecting, in ms (default: 5000). */
  retryDelayMs?: number;
  /** Items at or above this retry count are skipped by retryAll (default: 3). */
  maxRetries?: number;
}
|
||||
|
||||
/**
 * Manages the offline submission queue persisted in storage (IndexedDB per
 * the comment below — TODO confirm against @/lib/submissionQueue).
 *
 * Loads pending items on mount, schedules an auto-retry when connectivity
 * returns, and exposes retry / remove / clear operations plus a manual
 * refresh. The actual re-submission call is still a placeholder (see
 * retryItem), so "retry" currently removes items optimistically.
 */
export function useSubmissionQueue(options: UseSubmissionQueueOptions = {}) {
  const {
    autoRetry = true,
    retryDelayMs = 5000,
    maxRetries = 3,
  } = options;

  const [queuedItems, setQueuedItems] = useState<QueuedSubmission[]>([]);
  const [lastSyncTime, setLastSyncTime] = useState<Date | null>(null);
  const [nextRetryTime, setNextRetryTime] = useState<Date | null>(null);
  const { isOnline } = useNetworkStatus();

  // Load queued items from IndexedDB on mount
  // NOTE(review): loadQueueFromStorage is declared below this effect; that is
  // fine at runtime (effects run after render) but trips exhaustive-deps lint.
  useEffect(() => {
    loadQueueFromStorage();
  }, []);

  // Auto-retry when back online
  // NOTE(review): `retryAll` is omitted from the dependency array, so a
  // scheduled retry can close over a stale snapshot of the queue when item
  // statuses change without the length changing — confirm intended.
  useEffect(() => {
    if (isOnline && autoRetry && queuedItems.length > 0) {
      const timer = setTimeout(() => {
        retryAll();
      }, retryDelayMs);

      setNextRetryTime(new Date(Date.now() + retryDelayMs));

      return () => clearTimeout(timer);
    }
  }, [isOnline, autoRetry, queuedItems.length, retryDelayMs]);

  // Reads all pending submissions from storage and mirrors them into state.
  const loadQueueFromStorage = useCallback(async () => {
    try {
      const pending = await getPendingSubmissions();

      // Transform to QueuedSubmission format
      // NOTE(review): the `>= 3` threshold here is hard-coded and does not
      // track the `maxRetries` option — confirm whether it should.
      const items: QueuedSubmission[] = pending.map(item => ({
        id: item.id,
        type: item.type,
        entityName: item.data?.name || item.data?.title || 'Unknown',
        timestamp: new Date(item.timestamp),
        status: item.retries >= 3 ? 'failed' : (item.lastAttempt ? 'retrying' : 'pending'),
        retryCount: item.retries,
        error: item.error || undefined,
      }));

      setQueuedItems(items);
      logger.info('[SubmissionQueue] Loaded queue', { count: items.length });
    } catch (error) {
      logger.error('[SubmissionQueue] Failed to load queue', { error });
    }
  }, []);

  // Retries a single item: marks it 'retrying', then (placeholder) treats the
  // retry as successful and drops it from local state.
  const retryItem = useCallback(async (id: string) => {
    setQueuedItems(prev =>
      prev.map(item =>
        item.id === id
          ? { ...item, status: 'retrying' as const }
          : item
      )
    );

    try {
      // Placeholder: Retry the submission
      // await retrySubmission(id);

      // Remove from queue on success
      setQueuedItems(prev => prev.filter(item => item.id !== id));
      setLastSyncTime(new Date());
    } catch (error) {
      // Mark as failed
      setQueuedItems(prev =>
        prev.map(item =>
          item.id === id
            ? {
                ...item,
                status: 'failed' as const,
                retryCount: (item.retryCount || 0) + 1,
                error: error instanceof Error ? error.message : 'Unknown error',
              }
            : item
        )
      );
    }
  }, []);

  // Sequentially retries every pending/failed item still under maxRetries.
  const retryAll = useCallback(async () => {
    const pendingItems = queuedItems.filter(
      item => item.status === 'pending' || item.status === 'failed'
    );

    for (const item of pendingItems) {
      if ((item.retryCount || 0) < maxRetries) {
        await retryItem(item.id);
      }
    }
  }, [queuedItems, maxRetries, retryItem]);

  // Removes one item from both persistent storage and local state.
  const removeItem = useCallback(async (id: string) => {
    try {
      await removeFromQueue(id);
      setQueuedItems(prev => prev.filter(item => item.id !== id));
      logger.info('[SubmissionQueue] Removed item', { id });
    } catch (error) {
      logger.error('[SubmissionQueue] Failed to remove item', { id, error });
    }
  }, []);

  // Empties the persistent queue and local state.
  const clearQueue = useCallback(async () => {
    try {
      const count = await clearQueueStorage();
      setQueuedItems([]);
      logger.info('[SubmissionQueue] Cleared queue', { count });
    } catch (error) {
      logger.error('[SubmissionQueue] Failed to clear queue', { error });
    }
  }, []);

  return {
    queuedItems,
    lastSyncTime,
    nextRetryTime,
    retryItem,
    retryAll,
    removeItem,
    clearQueue,
    refresh: loadQueueFromStorage,
  };
}
|
||||
72
src-old/hooks/useSuperuserGuard.ts
Normal file
72
src-old/hooks/useSuperuserGuard.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { useAuth } from './useAuth';
|
||||
import { useUserRole } from './useUserRole';
|
||||
import { useRequireMFA } from './useRequireMFA';
|
||||
|
||||
/**
|
||||
* Centralized AAL2 enforcement for all superuser actions
|
||||
*
|
||||
* This hook ensures that ALL superuser actions require AAL2 authentication
|
||||
* if the user has MFA enrolled. It fails closed - blocking actions if
|
||||
* AAL verification is uncertain.
|
||||
*
|
||||
* SECURITY PRINCIPLE: Superusers MUST verify MFA before performing
|
||||
* privileged operations that could affect other users or system settings.
|
||||
*/
|
||||
|
||||
export interface SuperuserGuardState {
|
||||
// Core permissions
|
||||
isSuperuser: boolean;
|
||||
canPerformAction: boolean; // Only true if superuser AND has AAL2 (if MFA enrolled)
|
||||
|
||||
// AAL2 state
|
||||
hasAAL2: boolean;
|
||||
needsAAL2Verification: boolean; // True if has MFA but not at AAL2
|
||||
isEnrolled: boolean;
|
||||
|
||||
// Loading states
|
||||
loading: boolean;
|
||||
|
||||
// Current AAL level
|
||||
aal: 'aal1' | 'aal2' | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to check if current user can perform superuser actions
|
||||
* Enforces AAL2 requirement when MFA is enrolled
|
||||
*/
|
||||
export function useSuperuserGuard(): SuperuserGuardState {
|
||||
const { aal, loading: authLoading } = useAuth();
|
||||
const { isSuperuser, loading: roleLoading } = useUserRole();
|
||||
const {
|
||||
hasMFA,
|
||||
isEnrolled,
|
||||
needsVerification,
|
||||
loading: mfaLoading
|
||||
} = useRequireMFA();
|
||||
|
||||
const loading = authLoading || roleLoading || mfaLoading;
|
||||
const isSuperuserRole = isSuperuser();
|
||||
const hasAAL2 = hasMFA; // hasMFA means AAL2 + enrolled
|
||||
|
||||
// CRITICAL: Superuser can only perform actions if:
|
||||
// 1. They have superuser role, AND
|
||||
// 2. Either (no MFA enrolled) OR (has AAL2)
|
||||
// This fails closed - if uncertain about MFA state, block action
|
||||
const canPerformAction = isSuperuserRole && (!isEnrolled || hasAAL2);
|
||||
|
||||
// User needs AAL2 verification if:
|
||||
// - Is superuser
|
||||
// - Has MFA enrolled
|
||||
// - Currently at AAL1 (not AAL2)
|
||||
const needsAAL2Verification = isSuperuserRole && isEnrolled && !hasAAL2;
|
||||
|
||||
return {
|
||||
isSuperuser: isSuperuserRole,
|
||||
canPerformAction,
|
||||
hasAAL2,
|
||||
needsAAL2Verification,
|
||||
isEnrolled,
|
||||
loading,
|
||||
aal,
|
||||
};
|
||||
}
|
||||
129
src-old/hooks/useSystemHealth.ts
Normal file
129
src-old/hooks/useSystemHealth.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { handleError } from '@/lib/errorHandler';
|
||||
|
||||
/** Row shape returned by the `get_system_health` RPC. */
interface SystemHealthData {
  orphaned_images_count: number;
  critical_alerts_count: number;
  alerts_last_24h: number;
  /** Time the health snapshot was computed. */
  checked_at: string;
}

/** Row from the `system_alerts` table. */
interface SystemAlert {
  id: string;
  alert_type: 'orphaned_images' | 'stale_submissions' | 'circular_dependency' | 'validation_error' | 'ban_attempt' | 'upload_timeout' | 'high_error_rate';
  severity: 'low' | 'medium' | 'high' | 'critical';
  message: string;
  /** Arbitrary context attached by the alert producer, if any. */
  metadata: Record<string, any> | null;
  /** Null while the alert is still unresolved. */
  resolved_at: string | null;
  created_at: string;
}
|
||||
|
||||
/**
|
||||
* Hook to fetch system health metrics
|
||||
* Only accessible to moderators and admins
|
||||
*/
|
||||
export function useSystemHealth() {
|
||||
return useQuery({
|
||||
queryKey: ['system-health'],
|
||||
queryFn: async () => {
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.rpc('get_system_health');
|
||||
|
||||
if (error) {
|
||||
handleError(error, {
|
||||
action: 'Fetch System Health',
|
||||
metadata: { error: error.message }
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
return data?.[0] as SystemHealthData | null;
|
||||
} catch (error) {
|
||||
handleError(error, {
|
||||
action: 'Fetch System Health',
|
||||
metadata: { error: String(error) }
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
refetchInterval: 60000, // Refetch every minute
|
||||
staleTime: 30000, // Consider data stale after 30 seconds
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to fetch unresolved system alerts
|
||||
* Only accessible to moderators and admins
|
||||
*/
|
||||
export function useSystemAlerts(severity?: 'low' | 'medium' | 'high' | 'critical') {
|
||||
return useQuery({
|
||||
queryKey: ['system-alerts', severity],
|
||||
queryFn: async () => {
|
||||
try {
|
||||
let query = supabase
|
||||
.from('system_alerts')
|
||||
.select('*')
|
||||
.is('resolved_at', null)
|
||||
.order('created_at', { ascending: false });
|
||||
|
||||
if (severity) {
|
||||
query = query.eq('severity', severity);
|
||||
}
|
||||
|
||||
const { data, error } = await query;
|
||||
|
||||
if (error) {
|
||||
handleError(error, {
|
||||
action: 'Fetch System Alerts',
|
||||
metadata: { severity, error: error.message }
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
return (data || []) as SystemAlert[];
|
||||
} catch (error) {
|
||||
handleError(error, {
|
||||
action: 'Fetch System Alerts',
|
||||
metadata: { severity, error: String(error) }
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
refetchInterval: 30000, // Refetch every 30 seconds
|
||||
staleTime: 15000, // Consider data stale after 15 seconds
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook to run system maintenance manually
|
||||
* Only accessible to admins
|
||||
*/
|
||||
export function useRunSystemMaintenance() {
|
||||
return async () => {
|
||||
try {
|
||||
const { data, error } = await supabase.rpc('run_system_maintenance');
|
||||
|
||||
if (error) {
|
||||
handleError(error, {
|
||||
action: 'Run System Maintenance',
|
||||
metadata: { error: error.message }
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
return data as Array<{
|
||||
task: string;
|
||||
status: 'success' | 'error';
|
||||
details: Record<string, any>;
|
||||
}>;
|
||||
} catch (error) {
|
||||
handleError(error, {
|
||||
action: 'Run System Maintenance',
|
||||
metadata: { error: String(error) }
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
}
|
||||
74
src-old/hooks/useTechnicalSpecifications.ts
Normal file
74
src-old/hooks/useTechnicalSpecifications.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
|
||||
/** Unified technical-specification row for a ride or a ride model. */
export interface TechnicalSpecification {
  id: string;
  /** Which table the row came from. */
  entity_type: 'ride' | 'ride_model';
  /** ID of the owning ride or ride model. */
  entity_id: string;
  spec_name: string;
  spec_value: string;
  /** Mapped from the source table's `unit` column; null when absent. */
  spec_unit?: string | null;
  category?: string | null;
  /** Rows are fetched ordered by this column. */
  display_order: number;
  created_at: string;
}
|
||||
|
||||
export function useTechnicalSpecifications(
|
||||
entityType: 'ride' | 'ride_model',
|
||||
entityId: string | undefined
|
||||
) {
|
||||
return useQuery({
|
||||
queryKey: ['technical-specifications', entityType, entityId],
|
||||
queryFn: async () => {
|
||||
if (!entityId) return [];
|
||||
|
||||
const tableName = entityType === 'ride'
|
||||
? 'ride_technical_specifications'
|
||||
: 'ride_model_technical_specifications';
|
||||
const idColumn = entityType === 'ride' ? 'ride_id' : 'ride_model_id';
|
||||
|
||||
if (entityType === 'ride') {
|
||||
const { data, error } = await supabase
|
||||
.from('ride_technical_specifications')
|
||||
.select('*')
|
||||
.eq('ride_id', entityId)
|
||||
.order('display_order');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
return (data || []).map((spec) => ({
|
||||
id: spec.id,
|
||||
entity_type: 'ride' as const,
|
||||
entity_id: entityId,
|
||||
spec_name: spec.spec_name,
|
||||
spec_value: spec.spec_value,
|
||||
spec_unit: spec.unit || null,
|
||||
category: spec.category || null,
|
||||
display_order: spec.display_order,
|
||||
created_at: spec.created_at,
|
||||
})) as TechnicalSpecification[];
|
||||
} else {
|
||||
const { data, error } = await supabase
|
||||
.from('ride_model_technical_specifications')
|
||||
.select('*')
|
||||
.eq('ride_model_id', entityId)
|
||||
.order('display_order');
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
return (data || []).map((spec) => ({
|
||||
id: spec.id,
|
||||
entity_type: 'ride_model' as const,
|
||||
entity_id: entityId,
|
||||
spec_name: spec.spec_name,
|
||||
spec_value: spec.spec_value,
|
||||
spec_unit: spec.unit || null,
|
||||
category: spec.category || null,
|
||||
display_order: spec.display_order,
|
||||
created_at: spec.created_at,
|
||||
})) as TechnicalSpecification[];
|
||||
}
|
||||
},
|
||||
enabled: !!entityId
|
||||
});
|
||||
}
|
||||
205
src-old/hooks/useTransactionResilience.ts
Normal file
205
src-old/hooks/useTransactionResilience.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
/**
|
||||
* Transaction Resilience Hook
|
||||
*
|
||||
* Combines timeout detection, lock auto-release, and idempotency lifecycle
|
||||
* into a unified hook for moderation transactions.
|
||||
*
|
||||
* Part of Sacred Pipeline Phase 4: Transaction Resilience
|
||||
*/
|
||||
|
||||
import { useEffect, useCallback, useRef } from 'react';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import {
|
||||
withTimeout,
|
||||
isTimeoutError,
|
||||
getTimeoutErrorMessage,
|
||||
type TimeoutError,
|
||||
} from '@/lib/timeoutDetection';
|
||||
import {
|
||||
autoReleaseLockOnError,
|
||||
setupAutoReleaseOnUnload,
|
||||
setupInactivityAutoRelease,
|
||||
} from '@/lib/moderation/lockAutoRelease';
|
||||
import {
|
||||
generateAndRegisterKey,
|
||||
validateAndStartProcessing,
|
||||
markKeyCompleted,
|
||||
markKeyFailed,
|
||||
is409Conflict,
|
||||
getRetryAfter,
|
||||
sleep,
|
||||
} from '@/lib/idempotencyHelpers';
|
||||
import { toast } from '@/hooks/use-toast';
|
||||
import { logger } from '@/lib/logger';
|
||||
|
||||
/** Configuration for useTransactionResilience. */
interface TransactionResilientOptions {
  /** Submission whose lock / idempotency lifecycle is being managed. */
  submissionId: string;
  /** Timeout in milliseconds (default: 30000) */
  timeoutMs?: number;
  /** Enable auto-release on unload (default: true) */
  autoReleaseOnUnload?: boolean;
  /** Enable inactivity auto-release (default: true) */
  autoReleaseOnInactivity?: boolean;
  /** Inactivity timeout in minutes (default: 10) */
  inactivityMinutes?: number;
}
|
||||
|
||||
/**
 * Wraps moderation transactions with timeout detection, lock auto-release,
 * and an idempotency-key lifecycle (register → processing → completed/failed).
 *
 * The ordering of the steps in executeTransaction is deliberate: key
 * registration precedes validation, which precedes the timed call; failure
 * paths release the lock and mark the key failed before rethrowing.
 */
export function useTransactionResilience(options: TransactionResilientOptions) {
  const { submissionId, timeoutMs = 30000, autoReleaseOnUnload = true, autoReleaseOnInactivity = true, inactivityMinutes = 10 } = options;
  const { user } = useAuth();
  // NOTE(review): this ref is written below but never read within this hook —
  // cleanup uses the local array; presumably kept for debugging. Confirm.
  const cleanupFnsRef = useRef<Array<() => void>>([]);

  // Setup auto-release mechanisms
  useEffect(() => {
    if (!user?.id) return;

    const cleanupFns: Array<() => void> = [];

    // Setup unload auto-release
    if (autoReleaseOnUnload) {
      const cleanup = setupAutoReleaseOnUnload(submissionId, user.id);
      cleanupFns.push(cleanup);
    }

    // Setup inactivity auto-release
    if (autoReleaseOnInactivity) {
      const cleanup = setupInactivityAutoRelease(submissionId, user.id, inactivityMinutes);
      cleanupFns.push(cleanup);
    }

    cleanupFnsRef.current = cleanupFns;

    // Cleanup on unmount
    return () => {
      cleanupFns.forEach(fn => fn());
    };
  }, [submissionId, user?.id, autoReleaseOnUnload, autoReleaseOnInactivity, inactivityMinutes]);

  /**
   * Execute a transaction with full resilience (timeout, idempotency, auto-release).
   *
   * @param action        the moderation action being performed
   * @param itemIds       items covered by the idempotency key
   * @param transactionFn the actual work; receives the registered key
   * @throws the underlying error (or TimeoutError) after lock release and
   *         key failure bookkeeping
   */
  const executeTransaction = useCallback(
    async <T,>(
      action: 'approval' | 'rejection' | 'retry',
      itemIds: string[],
      transactionFn: (idempotencyKey: string) => Promise<T>
    ): Promise<T> => {
      if (!user?.id) {
        throw new Error('User not authenticated');
      }

      // Generate and register idempotency key
      const { key: idempotencyKey } = await generateAndRegisterKey(
        action,
        submissionId,
        itemIds,
        user.id
      );

      logger.info('[TransactionResilience] Starting transaction', {
        action,
        submissionId,
        itemIds,
        idempotencyKey,
      });

      try {
        // Validate key and mark as processing
        const isValid = await validateAndStartProcessing(idempotencyKey);

        if (!isValid) {
          throw new Error('Idempotency key validation failed - possible duplicate request');
        }

        // Execute transaction with timeout
        const result = await withTimeout(
          () => transactionFn(idempotencyKey),
          timeoutMs,
          'edge-function'
        );

        // Mark key as completed
        await markKeyCompleted(idempotencyKey);

        logger.info('[TransactionResilience] Transaction completed', {
          action,
          submissionId,
          idempotencyKey,
        });

        return result;
      } catch (error) {
        // Check for timeout
        if (isTimeoutError(error)) {
          const timeoutError = error as TimeoutError;
          const message = getTimeoutErrorMessage(timeoutError);

          logger.error('[TransactionResilience] Transaction timed out', {
            action,
            submissionId,
            idempotencyKey,
            duration: timeoutError.duration,
          });

          // Auto-release lock on timeout
          await autoReleaseLockOnError(submissionId, user.id, error);

          // Mark key as failed
          await markKeyFailed(idempotencyKey, message);

          toast({
            title: 'Transaction Timeout',
            description: message,
            variant: 'destructive',
          });

          throw timeoutError;
        }

        // Check for 409 Conflict (duplicate request)
        if (is409Conflict(error)) {
          const retryAfter = getRetryAfter(error);

          logger.warn('[TransactionResilience] Duplicate request detected', {
            action,
            submissionId,
            idempotencyKey,
            retryAfter,
          });

          toast({
            title: 'Duplicate Request',
            description: `This action is already being processed. Please wait ${retryAfter}s.`,
          });

          // Wait and return (don't auto-release, the other request is handling it)
          await sleep(retryAfter * 1000);
          throw error;
        }

        // Generic error handling
        const errorMessage = error instanceof Error ? error.message : 'Unknown error';

        logger.error('[TransactionResilience] Transaction failed', {
          action,
          submissionId,
          idempotencyKey,
          error: errorMessage,
        });

        // Auto-release lock on error
        await autoReleaseLockOnError(submissionId, user.id, error);

        // Mark key as failed
        await markKeyFailed(idempotencyKey, errorMessage);

        throw error;
      }
    },
    [submissionId, user?.id, timeoutMs]
  );

  return {
    executeTransaction,
  };
}
|
||||
161
src-old/hooks/useUnitPreferences.ts
Normal file
161
src-old/hooks/useUnitPreferences.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
import { invokeWithTracking } from '@/lib/edgeFunctionTracking';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { handleNonCriticalError } from '@/lib/errorHandler';
|
||||
import { UnitPreferences, getMeasurementSystemFromCountry } from '@/lib/units';
|
||||
import type { Json } from '@/integrations/supabase/types';
|
||||
import * as storage from '@/lib/localStorage';
|
||||
|
||||
// Type guard for unit preferences
|
||||
function isValidUnitPreferences(obj: unknown): obj is UnitPreferences {
|
||||
return (
|
||||
typeof obj === 'object' &&
|
||||
obj !== null &&
|
||||
'measurement_system' in obj &&
|
||||
(obj.measurement_system === 'metric' || obj.measurement_system === 'imperial')
|
||||
);
|
||||
}
|
||||
|
||||
// Fallback preferences used until a stored or auto-detected value is loaded.
const DEFAULT_PREFERENCES: UnitPreferences = {
  measurement_system: 'metric',
  temperature: 'celsius',
  auto_detect: true
};
|
||||
|
||||
/**
 * Loads, auto-detects, and persists the user's unit preferences.
 *
 * Signed-in users read/write the `user_preferences` table; guests use
 * browser storage. Auto-detection calls the `detect-location` edge function.
 *
 * NOTE(review): persistence for guests mixes APIs — reads use raw
 * localStorage.getItem, auto-detect writes via localStorage.setItem, while
 * updatePreferences writes via storage.setJSON. Confirm these agree on key
 * and serialization format.
 */
export function useUnitPreferences() {
  const { user } = useAuth();
  const [preferences, setPreferences] = useState<UnitPreferences>(DEFAULT_PREFERENCES);
  const [loading, setLoading] = useState(true);

  // Reload whenever the signed-in user changes.
  useEffect(() => {
    loadPreferences();
  }, [user]);

  // Loads preferences from the DB (signed-in) or local storage (guest),
  // falling back to auto-detection when nothing valid is stored.
  const loadPreferences = async () => {
    try {
      if (user) {
        const { data, error } = await supabase
          .from('user_preferences')
          .select('unit_preferences')
          .eq('user_id', user.id)
          .maybeSingle();

        // PGRST116 is tolerated here — presumably the PostgREST "no rows"
        // code; TODO confirm.
        if (error && error.code !== 'PGRST116') {
          handleNonCriticalError(error, {
            action: 'Fetch unit preferences',
            userId: user.id,
          });
          throw error;
        }

        if (data?.unit_preferences && isValidUnitPreferences(data.unit_preferences)) {
          const validPrefs = data.unit_preferences as UnitPreferences;
          setPreferences({ ...DEFAULT_PREFERENCES, ...validPrefs });
        } else {
          await autoDetectPreferences();
        }
      } else {
        const stored = localStorage.getItem('unit_preferences');
        if (stored) {
          try {
            const parsed = JSON.parse(stored);
            setPreferences({ ...DEFAULT_PREFERENCES, ...parsed });
          } catch (e) {
            // Corrupt stored value: fall back to detection.
            await autoDetectPreferences();
          }
        } else {
          await autoDetectPreferences();
        }
      }
    } catch (error: unknown) {
      handleNonCriticalError(error, {
        action: 'Load unit preferences',
        userId: user?.id,
      });
      await autoDetectPreferences();
    } finally {
      setLoading(false);
    }
  };

  // Detects the measurement system via the `detect-location` edge function,
  // applies it, and persists it (DB for signed-in users, localStorage for
  // guests). Falls back to DEFAULT_PREFERENCES on any failure.
  const autoDetectPreferences = useCallback(async () => {
    try {
      const response = await invokeWithTracking('detect-location', {}, user?.id);

      if (response.data && response.data.measurementSystem) {
        const newPreferences: UnitPreferences = {
          ...DEFAULT_PREFERENCES,
          measurement_system: response.data.measurementSystem,
        };

        setPreferences(newPreferences);

        if (user) {
          const { error } = await supabase
            .from('user_preferences')
            .upsert({
              user_id: user.id,
              unit_preferences: newPreferences as unknown as Json,
              updated_at: new Date().toISOString()
            }, {
              onConflict: 'user_id'
            });

          if (error) {
            handleNonCriticalError(error, {
              action: 'Save auto-detected preferences',
              userId: user.id,
            });
          }
        } else {
          localStorage.setItem('unit_preferences', JSON.stringify(newPreferences));
        }

        return newPreferences;
      }
    } catch (error: unknown) {
      handleNonCriticalError(error, {
        action: 'Auto-detect location',
        userId: user?.id,
      });
    }

    // Fallback to default
    setPreferences(DEFAULT_PREFERENCES);
    return DEFAULT_PREFERENCES;
  }, [user]);

  // Optimistically applies a partial update, persists it, and rolls back
  // local state (then rethrows) if persistence fails.
  const updatePreferences = async (newPreferences: Partial<UnitPreferences>) => {
    const updated = { ...preferences, ...newPreferences };
    setPreferences(updated);

    try {
      if (user) {
        await supabase
          .from('user_preferences')
          .update({
            unit_preferences: updated as unknown as Json,
            updated_at: new Date().toISOString()
          })
          .eq('user_id', user.id);
      } else {
        storage.setJSON('unit_preferences', updated);
      }
    } catch (error: unknown) {
      handleNonCriticalError(error, {
        action: 'Save unit preferences',
        userId: user?.id,
      });
      setPreferences(preferences);
      throw error;
    }
  };

  return {
    preferences,
    loading,
    updatePreferences,
    autoDetectPreferences
  };
}
|
||||
90
src-old/hooks/useUserRole.ts
Normal file
90
src-old/hooks/useUserRole.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { useQuery } from '@tanstack/react-query';
|
||||
import { useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { useAuth } from '@/hooks/useAuth';
|
||||
import { queryKeys } from '@/lib/queryKeys';
|
||||
import { handleNonCriticalError } from '@/lib/errorHandler';
|
||||
|
||||
/** Role values stored in the `user_roles` table. */
export type UserRole = 'admin' | 'moderator' | 'user' | 'superuser';

/** Flags returned by the `get_user_management_permissions` RPC. */
export interface UserPermissions {
  can_ban_any_user: boolean;
  can_manage_admin_roles: boolean;
  can_manage_moderator_roles: boolean;
  can_view_all_profiles: boolean;
  can_assign_superuser: boolean;
  role_level: string;
}
|
||||
|
||||
export function useUserRole() {
|
||||
const { user } = useAuth();
|
||||
|
||||
// Fetch user roles with TanStack Query for automatic caching
|
||||
const rolesQuery = useQuery({
|
||||
queryKey: queryKeys.userRoles(user?.id),
|
||||
queryFn: async () => {
|
||||
if (!user) return [];
|
||||
|
||||
const { data, error } = await supabase
|
||||
.from('user_roles')
|
||||
.select('role')
|
||||
.eq('user_id', user.id);
|
||||
|
||||
if (error) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Fetch user roles',
|
||||
userId: user.id,
|
||||
});
|
||||
return [];
|
||||
}
|
||||
|
||||
return data?.map(r => r.role as UserRole) || [];
|
||||
},
|
||||
enabled: !!user,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes - roles don't change often
|
||||
gcTime: 10 * 60 * 1000, // 10 minutes garbage collection
|
||||
});
|
||||
|
||||
// Fetch user permissions with TanStack Query for automatic caching
|
||||
const permissionsQuery = useQuery({
|
||||
queryKey: queryKeys.userPermissions(user?.id),
|
||||
queryFn: async () => {
|
||||
if (!user) return null;
|
||||
|
||||
const { data, error } = await supabase
|
||||
.rpc('get_user_management_permissions', { _user_id: user.id });
|
||||
|
||||
if (error) {
|
||||
handleNonCriticalError(error, {
|
||||
action: 'Fetch user permissions',
|
||||
userId: user.id,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
|
||||
return data as unknown as UserPermissions;
|
||||
},
|
||||
enabled: !!user,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes - permissions don't change often
|
||||
gcTime: 10 * 60 * 1000, // 10 minutes garbage collection
|
||||
});
|
||||
|
||||
const roles = rolesQuery.data || [];
|
||||
const permissions = permissionsQuery.data || null;
|
||||
const loading = rolesQuery.isLoading || permissionsQuery.isLoading;
|
||||
|
||||
const hasRole = useCallback((role: UserRole) => roles.includes(role), [roles]);
|
||||
const isModerator = useCallback(() => hasRole('admin') || hasRole('moderator') || hasRole('superuser'), [hasRole]);
|
||||
const isAdmin = useCallback(() => hasRole('admin') || hasRole('superuser'), [hasRole]);
|
||||
const isSuperuser = useCallback(() => hasRole('superuser'), [hasRole]);
|
||||
|
||||
return {
|
||||
roles,
|
||||
permissions,
|
||||
loading,
|
||||
hasRole,
|
||||
isModerator,
|
||||
isAdmin,
|
||||
isSuperuser
|
||||
};
|
||||
}
|
||||
80
src-old/hooks/useUsernameValidation.ts
Normal file
80
src-old/hooks/useUsernameValidation.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { useState, useEffect, useCallback } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import { usernameSchema } from '@/lib/validation';
|
||||
import { useDebounce } from './useDebounce';
|
||||
|
||||
/** Result of username validation (format check plus availability lookup). */
export type UsernameValidationState = {
  /** Format is valid and the name is available (or equals the current name). */
  isValid: boolean;
  /** Availability lookup result; null until checked or when not applicable. */
  isAvailable: boolean | null;
  /** An availability lookup is currently in flight. */
  isChecking: boolean;
  /** Human-readable validation/availability error, or null. */
  error: string | null;
};
|
||||
|
||||
export function useUsernameValidation(username: string, currentUsername?: string) {
|
||||
const [state, setState] = useState<UsernameValidationState>({
|
||||
isValid: false,
|
||||
isAvailable: null,
|
||||
isChecking: false,
|
||||
error: null,
|
||||
});
|
||||
|
||||
const debouncedUsername = useDebounce(username, 500);
|
||||
|
||||
const checkUsernameAvailability = useCallback(async (normalizedUsername: string) => {
|
||||
try {
|
||||
const { data, error } = await supabase
|
||||
.from('profiles')
|
||||
.select('username')
|
||||
.eq('username', normalizedUsername)
|
||||
.maybeSingle();
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
const isAvailable = !data;
|
||||
setState({
|
||||
isValid: isAvailable,
|
||||
isAvailable,
|
||||
isChecking: false,
|
||||
error: isAvailable ? null : 'Username is already taken',
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
setState({
|
||||
isValid: false,
|
||||
isAvailable: null,
|
||||
isChecking: false,
|
||||
error: 'Error checking username availability',
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!debouncedUsername || debouncedUsername === currentUsername) {
|
||||
setState({
|
||||
isValid: debouncedUsername === currentUsername,
|
||||
isAvailable: null,
|
||||
isChecking: false,
|
||||
error: null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate format first
|
||||
const validation = usernameSchema.safeParse(debouncedUsername);
|
||||
if (!validation.success) {
|
||||
setState({
|
||||
isValid: false,
|
||||
isAvailable: null,
|
||||
isChecking: false,
|
||||
error: validation.error.issues[0].message,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check availability
|
||||
setState(prev => ({ ...prev, isChecking: true, error: null }));
|
||||
|
||||
checkUsernameAvailability(validation.data);
|
||||
}, [debouncedUsername, currentUsername, checkUsernameAvailability]);
|
||||
|
||||
return state;
|
||||
}
|
||||
76
src-old/hooks/useVersionCheck.ts
Normal file
76
src-old/hooks/useVersionCheck.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
// App version - automatically updated during build
|
||||
const APP_VERSION = import.meta.env.VITE_APP_VERSION || 'dev';
|
||||
const VERSION_CHECK_INTERVAL = 5 * 60 * 1000; // Check every 5 minutes
|
||||
|
||||
/**
|
||||
* Monitors for new app deployments and prompts user to refresh
|
||||
*/
|
||||
export function useVersionCheck() {
|
||||
const [newVersionAvailable, setNewVersionAvailable] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
// Don't run in development
|
||||
if (import.meta.env.DEV) {
|
||||
return;
|
||||
}
|
||||
|
||||
const checkVersion = async () => {
|
||||
try {
|
||||
// Fetch the current index.html with cache bypass
|
||||
const response = await fetch('/', {
|
||||
method: 'HEAD',
|
||||
cache: 'no-cache',
|
||||
headers: {
|
||||
'Cache-Control': 'no-cache, no-store, must-revalidate',
|
||||
'Pragma': 'no-cache',
|
||||
},
|
||||
});
|
||||
|
||||
// Check ETag or Last-Modified to detect changes
|
||||
const etag = response.headers.get('ETag');
|
||||
const lastModified = response.headers.get('Last-Modified');
|
||||
|
||||
const currentFingerprint = `${etag}-${lastModified}`;
|
||||
const storedFingerprint = sessionStorage.getItem('app-version-fingerprint');
|
||||
|
||||
if (storedFingerprint && storedFingerprint !== currentFingerprint) {
|
||||
// New version detected
|
||||
setNewVersionAvailable(true);
|
||||
|
||||
toast.info('New version available', {
|
||||
description: 'A new version of ThrillWiki is available. Please refresh to update.',
|
||||
duration: 30000, // Show for 30 seconds
|
||||
action: {
|
||||
label: 'Refresh Now',
|
||||
onClick: () => window.location.reload(),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Store current fingerprint
|
||||
if (!storedFingerprint) {
|
||||
sessionStorage.setItem('app-version-fingerprint', currentFingerprint);
|
||||
}
|
||||
} catch (error) {
|
||||
// Silently fail - version check is non-critical
|
||||
console.debug('Version check failed:', error);
|
||||
}
|
||||
};
|
||||
|
||||
// Initial check after 1 minute (give time for user to settle in)
|
||||
const initialTimer = setTimeout(checkVersion, 60000);
|
||||
|
||||
// Then check periodically
|
||||
const interval = setInterval(checkVersion, VERSION_CHECK_INTERVAL);
|
||||
|
||||
return () => {
|
||||
clearTimeout(initialTimer);
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, []);
|
||||
|
||||
return { newVersionAvailable };
|
||||
}
|
||||
56
src-old/hooks/useVersionComparison.ts
Normal file
56
src-old/hooks/useVersionComparison.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
import { supabase } from '@/lib/supabaseClient';
|
||||
import type { EntityType, VersionDiff } from '@/types/versioning';
|
||||
import { handleError } from '@/lib/errorHandler';
|
||||
|
||||
/**
|
||||
* Hook to compare two versions of an entity and get the diff
|
||||
* Uses the relational version tables for type-safe comparison
|
||||
*/
|
||||
export function useVersionComparison(
|
||||
entityType: EntityType,
|
||||
fromVersionId: string | null,
|
||||
toVersionId: string | null
|
||||
) {
|
||||
const [diff, setDiff] = useState<VersionDiff | null>(null);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!fromVersionId || !toVersionId || !entityType) {
|
||||
setDiff(null);
|
||||
return;
|
||||
}
|
||||
|
||||
async function fetchDiff() {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
// Use the database function to get diff
|
||||
const { data, error: rpcError } = await supabase.rpc('get_version_diff', {
|
||||
p_entity_type: entityType,
|
||||
p_from_version_id: fromVersionId || '',
|
||||
p_to_version_id: toVersionId || ''
|
||||
});
|
||||
|
||||
if (rpcError) throw rpcError;
|
||||
|
||||
setDiff(data as VersionDiff);
|
||||
} catch (err) {
|
||||
handleError(err, {
|
||||
action: 'Compare Versions',
|
||||
metadata: { entityType, fromVersionId, toVersionId }
|
||||
});
|
||||
setError(err instanceof Error ? err.message : 'Failed to compare versions');
|
||||
setDiff(null);
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}
|
||||
|
||||
fetchDiff();
|
||||
}, [entityType, fromVersionId, toVersionId]);
|
||||
|
||||
return { diff, loading, error };
|
||||
}
|
||||
Reference in New Issue
Block a user