Compare commits

...

2 Commits

Author SHA1 Message Date
gpt-engineer-app[bot]
791205210f Fix superuser release locks RPC 2025-11-05 01:21:55 +00:00
gpt-engineer-app[bot]
f750763c63 Fix: Implement database schema and code updates 2025-11-05 01:20:30 +00:00
5 changed files with 288 additions and 28 deletions

View File

@@ -12,11 +12,15 @@ interface EditHistoryRecord {
id: string;
item_id: string;
edited_at: string;
previous_data: Record<string, unknown>;
new_data: Record<string, unknown>;
edit_reason: string | null;
changed_fields: string[];
profiles?: {
field_changes?: Array<{
id: string;
field_name: string;
old_value: string | null;
new_value: string | null;
}>;
editor?: {
username: string;
avatar_url?: string | null;
} | null;
@@ -44,11 +48,15 @@ export function EditHistoryAccordion({ submissionId }: EditHistoryAccordionProps
id,
item_id,
edited_at,
previous_data,
new_data,
edit_reason,
changed_fields,
profiles:edited_by (
field_changes:item_field_changes(
id,
field_name,
old_value,
new_value
),
editor:profiles!item_edit_history_edited_by_fkey(
username,
avatar_url
)
@@ -111,19 +119,30 @@ export function EditHistoryAccordion({ submissionId }: EditHistoryAccordionProps
<div className="space-y-4">
<ScrollArea className="h-[400px] pr-4">
<div className="space-y-3">
{editHistory.map((entry: EditHistoryRecord) => (
{editHistory.map((entry: EditHistoryRecord) => {
// Transform relational field_changes into beforeData/afterData objects
const beforeData: Record<string, unknown> = {};
const afterData: Record<string, unknown> = {};
entry.field_changes?.forEach(change => {
beforeData[change.field_name] = change.old_value;
afterData[change.field_name] = change.new_value;
});
return (
<EditHistoryEntry
key={entry.id}
editId={entry.id}
editorName={entry.profiles?.username || 'Unknown User'}
editorAvatar={entry.profiles?.avatar_url || undefined}
editorName={entry.editor?.username || 'Unknown User'}
editorAvatar={entry.editor?.avatar_url || undefined}
timestamp={entry.edited_at}
changedFields={entry.changed_fields || []}
editReason={entry.edit_reason || undefined}
beforeData={entry.previous_data}
afterData={entry.new_data}
beforeData={beforeData}
afterData={afterData}
/>
))}
);
})}
</div>
</ScrollArea>

View File

@@ -1495,20 +1495,26 @@ export type Database = {
}
item_edit_history: {
Row: {
changed_fields: string[] | null
edit_reason: string | null
edited_at: string
editor_id: string
edited_by: string
id: string
item_id: string
}
Insert: {
changed_fields?: string[] | null
edit_reason?: string | null
edited_at?: string
editor_id: string
edited_by: string
id?: string
item_id: string
}
Update: {
changed_fields?: string[] | null
edit_reason?: string | null
edited_at?: string
editor_id?: string
edited_by?: string
id?: string
item_id?: string
}
@@ -1522,6 +1528,41 @@ export type Database = {
},
]
}
item_field_changes: {
Row: {
created_at: string | null
edit_history_id: string
field_name: string
id: string
new_value: string | null
old_value: string | null
}
Insert: {
created_at?: string | null
edit_history_id: string
field_name: string
id?: string
new_value?: string | null
old_value?: string | null
}
Update: {
created_at?: string | null
edit_history_id?: string
field_name?: string
id?: string
new_value?: string | null
old_value?: string | null
}
Relationships: [
{
foreignKeyName: "item_field_changes_edit_history_id_fkey"
columns: ["edit_history_id"]
isOneToOne: false
referencedRelation: "item_edit_history"
referencedColumns: ["id"]
},
]
}
list_items: {
Row: {
created_at: string | null

View File

@@ -1290,13 +1290,37 @@ export async function editSubmissionItem(
if (updateError) throw updateError;
// Phase 4: Record edit history
const { error: historyError } = await supabase
const { data: historyData, error: historyError } = await supabase
.from('item_edit_history')
.insert({
item_id: itemId,
editor_id: userId,
changes: changes,
edited_by: userId,
changed_fields: Object.keys(changes),
edit_reason: 'Direct edit by moderator',
})
.select('id')
.single();
// Insert field changes relationally (NO JSON!)
if (!historyError && historyData) {
const fieldChanges = Object.entries(changes).map(([fieldName, change]: [string, any]) => ({
edit_history_id: historyData.id,
field_name: fieldName,
old_value: String(change.old ?? ''),
new_value: String(change.new ?? ''),
}));
const { error: fieldChangesError } = await supabase
.from('item_field_changes')
.insert(fieldChanges);
if (fieldChangesError) {
handleNonCriticalError(fieldChangesError, {
action: 'Record Field Changes',
metadata: { editHistoryId: historyData.id }
});
}
}
if (historyError) {
handleNonCriticalError(historyError, {
@@ -1435,9 +1459,17 @@ export async function fetchEditHistory(itemId: string) {
.from('item_edit_history')
.select(`
id,
changes,
item_id,
edited_at,
editor:profiles!item_edit_history_editor_id_fkey (
edit_reason,
changed_fields,
field_changes:item_field_changes(
id,
field_name,
old_value,
new_value
),
editor:profiles!item_edit_history_edited_by_fkey(
user_id,
username,
display_name,

View File

@@ -0,0 +1,103 @@
-- Fix item_edit_history table schema to support proper edit tracking.
-- Adds the columns the application layer now writes (edit_reason, changed_fields)
-- and creates a relational child table for per-field changes (NO JSON!).
-- Add missing columns to item_edit_history (IF NOT EXISTS makes this re-runnable)
ALTER TABLE public.item_edit_history
ADD COLUMN IF NOT EXISTS edit_reason TEXT,
ADD COLUMN IF NOT EXISTS changed_fields TEXT[] DEFAULT '{}';
-- Create relational table for field-level changes (adheres to NO JSON policy).
-- One row per edited field; old/new values are stringified by the caller.
CREATE TABLE IF NOT EXISTS public.item_field_changes (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- ON DELETE CASCADE: field rows are meaningless without their parent edit record
edit_history_id UUID NOT NULL REFERENCES item_edit_history(id) ON DELETE CASCADE,
field_name TEXT NOT NULL,
old_value TEXT,
new_value TEXT,
created_at TIMESTAMPTZ DEFAULT NOW()
);
-- Index the FK so the nested select in fetchEditHistory stays fast
CREATE INDEX IF NOT EXISTS idx_field_changes_edit_history ON item_field_changes(edit_history_id);
-- Rename editor_id to edited_by for consistency with the rest of the schema.
-- NOTE(review): RENAME COLUMN has no IF EXISTS guard, so unlike the statements
-- above, re-running this migration will fail here — confirm the migration
-- runner applies each file exactly once.
ALTER TABLE public.item_edit_history
RENAME COLUMN editor_id TO edited_by;
-- RLS for item_field_changes: only moderators may read or insert rows
-- (inserts happen via editSubmissionItem, executed as a moderator session).
ALTER TABLE public.item_field_changes ENABLE ROW LEVEL SECURITY;
CREATE POLICY "Moderators view item field changes"
ON public.item_field_changes
FOR SELECT
USING (is_moderator(auth.uid()));
CREATE POLICY "System inserts item field changes"
ON public.item_field_changes
FOR INSERT
WITH CHECK (is_moderator(auth.uid()));
-- Fix superuser_release_all_locks function to use SECURITY DEFINER.
-- Releases every active submission lock held by any moderator; returns the
-- number of rows unlocked and writes an audit entry via log_admin_action.
CREATE OR REPLACE FUNCTION public.superuser_release_all_locks(p_superuser_id uuid)
RETURNS integer
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path TO 'public'
AS $function$
DECLARE
  v_is_superuser BOOLEAN;
  v_released_count INTEGER;
  v_released_locks JSONB;
BEGIN
  -- Verify caller is actually a superuser (SECURITY DEFINER bypasses RLS,
  -- so this check is the only gate).
  SELECT EXISTS (
    SELECT 1 FROM user_roles
    WHERE user_id = p_superuser_id
    AND role = 'superuser'
  ) INTO v_is_superuser;
  IF NOT v_is_superuser THEN
    RAISE EXCEPTION 'Unauthorized: Only superusers can release all locks';
  END IF;
  -- Capture the locks the UPDATE below will release, for the audit log.
  -- BUG FIX: this predicate previously omitted the status filter, so the
  -- audit could list locks on submissions the UPDATE never touched and
  -- disagree with v_released_count. It now mirrors the UPDATE exactly.
  -- (Both statements run in one transaction; under READ COMMITTED a
  -- concurrent commit between them could still skew the audit — acceptable
  -- for logging purposes.)
  SELECT jsonb_agg(
    jsonb_build_object(
      'submission_id', id,
      'assigned_to', assigned_to,
      'locked_until', locked_until,
      'submission_type', submission_type
    )
  ) INTO v_released_locks
  FROM content_submissions
  WHERE assigned_to IS NOT NULL
  AND locked_until > NOW()
  AND status IN ('pending', 'partially_approved');
  -- Release all active locks on submissions still awaiting review
  UPDATE content_submissions
  SET
    assigned_to = NULL,
    assigned_at = NULL,
    locked_until = NULL
  WHERE assigned_to IS NOT NULL
  AND locked_until > NOW()
  AND status IN ('pending', 'partially_approved');
  GET DIAGNOSTICS v_released_count = ROW_COUNT;
  -- Log the bulk release only when something was actually released
  IF v_released_count > 0 THEN
    PERFORM log_admin_action(
      p_superuser_id,
      NULL,
      'submission_locks_bulk_released',
      jsonb_build_object(
        'released_count', v_released_count,
        'released_locks', v_released_locks,
        'bulk_operation', true
      )
    );
  END IF;
  RETURN v_released_count;
END;
$function$;

View File

@@ -0,0 +1,65 @@
-- Fix search_path security issue in superuser_release_all_locks function.
-- Schema-qualified version; supersedes the earlier definition in this changeset.
CREATE OR REPLACE FUNCTION public.superuser_release_all_locks(p_superuser_id uuid)
RETURNS integer
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = 'public', 'auth' -- Immutable search path; 'auth' kept for auth.uid() resolution — TODO confirm it is required
AS $function$
DECLARE
  v_is_superuser BOOLEAN;
  v_released_count INTEGER;
  v_released_locks JSONB;
BEGIN
  -- Verify caller is actually a superuser (SECURITY DEFINER bypasses RLS,
  -- so this check is the only gate).
  SELECT EXISTS (
    SELECT 1 FROM public.user_roles
    WHERE user_id = p_superuser_id
    AND role = 'superuser'
  ) INTO v_is_superuser;
  IF NOT v_is_superuser THEN
    RAISE EXCEPTION 'Unauthorized: Only superusers can release all locks';
  END IF;
  -- Capture the locks the UPDATE below will release, for the audit log.
  -- BUG FIX: this predicate previously omitted the status filter, so the
  -- audit could list locks on submissions the UPDATE never touched and
  -- disagree with v_released_count. It now mirrors the UPDATE exactly.
  SELECT jsonb_agg(
    jsonb_build_object(
      'submission_id', id,
      'assigned_to', assigned_to,
      'locked_until', locked_until,
      'submission_type', submission_type
    )
  ) INTO v_released_locks
  FROM public.content_submissions
  WHERE assigned_to IS NOT NULL
  AND locked_until > NOW()
  AND status IN ('pending', 'partially_approved');
  -- Release all active locks on submissions still awaiting review
  UPDATE public.content_submissions
  SET
    assigned_to = NULL,
    assigned_at = NULL,
    locked_until = NULL
  WHERE assigned_to IS NOT NULL
  AND locked_until > NOW()
  AND status IN ('pending', 'partially_approved');
  GET DIAGNOSTICS v_released_count = ROW_COUNT;
  -- Log the bulk release only when something was actually released
  IF v_released_count > 0 THEN
    PERFORM public.log_admin_action(
      p_superuser_id,
      NULL,
      'submission_locks_bulk_released',
      jsonb_build_object(
        'released_count', v_released_count,
        'released_locks', v_released_locks,
        'bulk_operation', true
      )
    );
  END IF;
  RETURN v_released_count;
END;
$function$;