Merge branch 'dev' into main
.github/workflows/schema-validation.yml (new file, 186 lines, vendored)
@@ -0,0 +1,186 @@
name: Schema Validation

on:
  pull_request:
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  push:
    branches:
      - main
      - develop
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  workflow_dispatch: # Allow manual triggering

jobs:
  validate-schema:
    name: Validate Database Schema
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run schema validation script
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🔍 Running schema validation checks..."
          npm run validate-schema

      - name: Run Playwright schema validation tests
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🧪 Running integration tests..."
          npx playwright test schema-validation --reporter=list

      - name: Upload test results
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: schema-validation-results
          path: |
            playwright-report/
            test-results/
          retention-days: 7

      - name: Comment PR with validation results
        if: failure() && github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `## ❌ Schema Validation Failed

            The schema validation checks have detected inconsistencies in your database changes.

            **Common issues:**
            - Missing fields in submission tables
            - Mismatched data types between tables
            - Missing version metadata fields
            - Invalid column names (e.g., \`ride_type\` in \`rides\` table)

            **Next steps:**
            1. Review the failed tests in the Actions log
            2. Check the [Schema Reference documentation](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/docs/submission-pipeline/SCHEMA_REFERENCE.md)
            3. Fix the identified issues
            4. Push your fixes to re-run validation

            **Need help?** Consult the [Integration Tests README](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/tests/integration/README.md).`
            })

  migration-safety-check:
    name: Migration Safety Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check for breaking changes in migrations
        run: |
          echo "🔍 Checking for potentially breaking migration patterns..."

          # Check if any migrations contain DROP COLUMN
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "DROP COLUMN"; then
            echo "⚠️ Warning: Migration contains DROP COLUMN"
            echo "::warning::Migration contains DROP COLUMN - ensure data migration plan exists"
          fi

          # Check if any migrations alter NOT NULL constraints
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "ALTER COLUMN.*NOT NULL"; then
            echo "⚠️ Warning: Migration alters NOT NULL constraints"
            echo "::warning::Migration alters NOT NULL constraints - ensure data backfill is complete"
          fi

          # Check if any migrations rename columns
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "RENAME COLUMN"; then
            echo "⚠️ Warning: Migration renames columns"
            echo "::warning::Migration renames columns - ensure all code references are updated"
          fi

      - name: Validate migration file naming
        run: |
          echo "🔍 Validating migration file names..."

          # Check that all migration files follow the timestamp pattern
          for file in supabase/migrations/*.sql; do
            if [[ ! $(basename "$file") =~ ^[0-9]{14}_ ]]; then
              echo "❌ Invalid migration filename: $(basename "$file")"
              echo "::error::Migration files must start with a 14-digit timestamp (YYYYMMDDHHMMSS)"
              exit 1
            fi
          done

          echo "✅ All migration filenames are valid"

  documentation-check:
    name: Documentation Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check if schema docs need updating
        run: |
          echo "📚 Checking if schema documentation is up to date..."

          # Check if migrations changed but SCHEMA_REFERENCE.md didn't
          MIGRATIONS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "supabase/migrations/" || true)
          SCHEMA_DOCS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "docs/submission-pipeline/SCHEMA_REFERENCE.md" || true)

          if [ "$MIGRATIONS_CHANGED" -gt 0 ] && [ "$SCHEMA_DOCS_CHANGED" -eq 0 ]; then
            echo "⚠️ Warning: Migrations were changed but SCHEMA_REFERENCE.md was not updated"
            echo "::warning::Consider updating docs/submission-pipeline/SCHEMA_REFERENCE.md to reflect schema changes"
          else
            echo "✅ Documentation check passed"
          fi

      - name: Comment PR with documentation reminder
        if: success()
        uses: actions/github-script@v7
        with:
          script: |
            const migrationsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('supabase/migrations/');
            const docsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('docs/submission-pipeline/SCHEMA_REFERENCE.md');

            if (migrationsChanged && !docsChanged) {
              github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: `## 📚 Documentation Reminder

            This PR includes database migrations but doesn't update the schema reference documentation.

            **If you added/modified fields**, please update:
            - \`docs/submission-pipeline/SCHEMA_REFERENCE.md\`

            **If this is a minor change** (e.g., fixing typos, adding indexes), you can ignore this message.`
              })
            }

docs/submission-pipeline/SCHEMA_REFERENCE.md (new file, 636 lines)
@@ -0,0 +1,636 @@
# Submission Pipeline Schema Reference

**Critical Document**: This reference maps all entity types to their exact database schema fields across the entire submission pipeline to prevent schema mismatches.

**Last Updated**: 2025-11-08
**Status**: ✅ All schemas audited and verified

---

## Table of Contents

1. [Overview](#overview)
2. [Parks](#parks)
3. [Rides](#rides)
4. [Companies](#companies)
5. [Ride Models](#ride-models)
6. [Photos](#photos)
7. [Timeline Events](#timeline-events)
8. [Critical Functions Reference](#critical-functions-reference)
9. [Common Pitfalls](#common-pitfalls)
10. [Validation Checklist](#validation-checklist)
11. [Maintenance](#maintenance)
12. [Related Documentation](#related-documentation)

---

## Overview

### Pipeline Flow

```
User Input → *_submissions table → submission_items → Moderation →
process_approval_transaction → create/update_entity_from_submission →
Main entity table → Version trigger → *_versions table
```
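
To make the flow concrete, here is a minimal sketch of the first stage: staging a ride submission. The IDs are hypothetical, and the exact columns of `content_submissions` are an assumption; only the `ride_submissions` fields are documented in this reference.

```sql
-- Hypothetical IDs throughout; only the *_submissions fields are documented here.
-- 1. A user submission is staged, never written to the main table directly:
INSERT INTO ride_submissions (submission_id, name, slug, park_id, category, status)
VALUES (
  '11111111-1111-1111-1111-111111111111',  -- FK → content_submissions
  'Sky Serpent', 'sky-serpent',
  '22222222-2222-2222-2222-222222222222',  -- FK → parks
  'roller_coaster', 'under_construction'
);
-- 2. After moderation, process_approval_transaction (see Critical Functions
--    Reference) promotes the row into rides, and the version trigger writes
--    the corresponding ride_versions row.
```
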
### Entity Types

- `park` - Theme parks and amusement parks
- `ride` - Individual rides and attractions
- `company` - Used for: `manufacturer`, `operator`, `designer`, `property_owner`
- `ride_model` - Ride model templates
- `photo` - Entity photos
- `timeline_event` - Historical events

---

## Parks

### Main Table: `parks`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_type` (text, NOT NULL) - Values: `theme_park`, `amusement_park`, `water_park`, etc.
- `status` (text, NOT NULL) - Values: `operating`, `closed`, `under_construction`, etc.

**Optional Fields:**
- `description` (text)
- `location_id` (uuid, FK → locations)
- `operator_id` (uuid, FK → companies)
- `property_owner_id` (uuid, FK → companies)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text) - Values: `year`, `month`, `day`
- `closing_date_precision` (text)
- `website_url` (text)
- `phone` (text)
- `email` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `park_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields like `id`, timestamps)

**Additional Fields:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `temp_location_data` (jsonb) - For pending location creation

### Version Table: `park_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `park_id` (uuid, NOT NULL, FK → parks)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL) - Values: `created`, `updated`, `restored`
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Rides

### Main Table: `rides`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_id` (uuid, NOT NULL, FK → parks)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
  - Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`
- `status` (text, NOT NULL)
  - Values: `operating`, `closed`, `under_construction`, `sbno`, etc.

**⚠️ IMPORTANT: The `rides` table does NOT have a `ride_type` column!**
- `ride_type` only exists in the `ride_models` table
- Using `ride_type` in rides updates will cause a "column does not exist" error

**Optional Relationship Fields:**
- `manufacturer_id` (uuid, FK → companies)
- `designer_id` (uuid, FK → companies)
- `ride_model_id` (uuid, FK → ride_models)

**Optional Descriptive Fields:**
- `description` (text)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text)
- `closing_date_precision` (text)

**Optional Technical Fields:**
- `height_requirement` (integer) - Height requirement in cm
- `age_requirement` (integer)
- `max_speed_kmh` (numeric)
- `duration_seconds` (integer)
- `capacity_per_hour` (integer)
- `max_g_force` (numeric)
- `inversions` (integer) - Number of inversions
- `length_meters` (numeric)
- `max_height_meters` (numeric)
- `drop_height_meters` (numeric)

**Category-Specific Fields:**

*Roller Coasters:*
- `ride_sub_type` (text)
- `coaster_type` (text)
- `seating_type` (text)
- `intensity_level` (text)
- `track_material` (text)
- `support_material` (text)
- `propulsion_method` (text)

*Water Rides:*
- `water_depth_cm` (integer)
- `splash_height_meters` (numeric)
- `wetness_level` (text)
- `flume_type` (text)
- `boat_capacity` (integer)

*Dark Rides:*
- `theme_name` (text)
- `story_description` (text)
- `show_duration_seconds` (integer)
- `animatronics_count` (integer)
- `projection_type` (text)
- `ride_system` (text)
- `scenes_count` (integer)

*Flat Rides:*
- `rotation_type` (text)
- `motion_pattern` (text)
- `platform_count` (integer)
- `swing_angle_degrees` (numeric)
- `rotation_speed_rpm` (numeric)
- `arm_length_meters` (numeric)
- `max_height_reached_meters` (numeric)

*Kids Rides:*
- `min_age` (integer)
- `max_age` (integer)
- `educational_theme` (text)
- `character_theme` (text)

*Transport:*
- `transport_type` (text)
- `route_length_meters` (numeric)
- `stations_count` (integer)
- `vehicle_capacity` (integer)
- `vehicles_count` (integer)
- `round_trip_duration_seconds` (integer)

**Image Fields:**
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
- `image_url` (text) - Legacy field

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `ride_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `ride_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `ride_id` (uuid, NOT NULL, FK → rides)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

**⚠️ Field Name Differences (Version Table vs Main Table)** (see the query sketch below):
- `height_requirement_cm` in versions → `height_requirement` in rides
- `gforce_max` in versions → `max_g_force` in rides
- `inversions_count` in versions → `inversions` in rides
- `height_meters` in versions → `max_height_meters` in rides
- `drop_meters` in versions → `drop_height_meters` in rides
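
When reading ride history, the version columns must be aliased back to the main-table names. A minimal sketch, using only the columns and mapping documented above:

```sql
-- Read the current version of a ride using main-table column names.
-- The aliases follow the mapping above; is_current marks the latest version.
SELECT
  height_requirement_cm AS height_requirement,
  gforce_max            AS max_g_force,
  inversions_count      AS inversions,
  height_meters         AS max_height_meters,
  drop_meters           AS drop_height_meters
FROM ride_versions
WHERE ride_id = $1
  AND is_current = true;
```
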
---

## Companies

**Used For**: `manufacturer`, `operator`, `designer`, `property_owner`

### Main Table: `companies`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `company_type` (text, NOT NULL)
  - Values: `manufacturer`, `operator`, `designer`, `property_owner`

**Optional Fields:**
- `description` (text)
- `person_type` (text, default: 'company')
  - Values: `company`, `individual`
- `founded_year` (integer)
- `founded_date` (date)
- `founded_date_precision` (text)
- `headquarters_location` (text)
- `website_url` (text)
- `logo_url` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `company_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `company_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `company_id` (uuid, NOT NULL, FK → companies)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Ride Models

### Main Table: `ride_models`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `manufacturer_id` (uuid, NOT NULL, FK → companies)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
  - Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`

**Optional Fields:**
- `ride_type` (text) ⚠️ **This field exists in ride_models but NOT in rides**
  - More specific classification than category
  - Example: category = `roller_coaster`, ride_type = `inverted_coaster`
- `description` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `installations_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `ride_model_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `ride_model_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `ride_model_id` (uuid, NOT NULL, FK → ride_models)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Photos

### Main Table: `photos`

**Required Fields:**
- `id` (uuid, PK)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `uploader_id` (uuid, NOT NULL, FK → auth.users)

**Optional Fields:**
- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer, default: 0)
- `is_primary` (boolean, default: false)
- `status` (text, default: 'active')

**Metadata Fields:**
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `photo_submissions`

**Required Fields:**
- `id` (uuid, PK)
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)

**Optional Fields:**
- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer)

**Note**: Photos do NOT have version tables - they are immutable after approval.

---

## Timeline Events

### Main Table: `entity_timeline_events`

**Required Fields:**
- `id` (uuid, PK)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `event_type` (text, NOT NULL)
  - Values: `opening`, `closing`, `relocation`, `renovation`, `name_change`, `ownership_change`, etc.
- `title` (text, NOT NULL)
- `event_date` (date, NOT NULL)

**Optional Fields:**
- `description` (text)
- `event_date_precision` (text, default: 'day')
- `from_value` (text)
- `to_value` (text)
- `from_entity_id` (uuid)
- `to_entity_id` (uuid)
- `from_location_id` (uuid)
- `to_location_id` (uuid)
- `is_public` (boolean, default: true)
- `display_order` (integer, default: 0)

**Approval Fields:**
- `created_by` (uuid, FK → auth.users)
- `approved_by` (uuid, FK → auth.users)
- `submission_id` (uuid, FK → content_submissions)

**Metadata Fields:**
- `created_at` (timestamptz)
- `updated_at` (timestamptz)

### Submission Table: `timeline_event_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

**Note**: Timeline events do NOT have version tables.

---

## Critical Functions Reference

### 1. `create_entity_from_submission`

**Purpose**: Creates new entities from approved submissions

**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Entity data from submission
- `p_created_by` (uuid) - User who created it
- `p_submission_id` (uuid) - Source submission

**Critical Requirements**:
- ✅ MUST extract `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides (doesn't exist)
- ✅ MUST use `ride_type` for ride_models (does exist)
- ✅ MUST handle all required NOT NULL fields

**Returns**: `uuid` - New entity ID
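
A sketch of a direct call, following the parameter order above. The IDs are hypothetical, and in practice this function is invoked by the approval transaction rather than by hand:

```sql
-- Hypothetical IDs; normally called from process_approval_transaction.
SELECT create_entity_from_submission(
  'ride',                                  -- p_entity_type
  '{"name": "Sky Serpent", "slug": "sky-serpent",
    "park_id": "22222222-2222-2222-2222-222222222222",
    "category": "roller_coaster", "status": "operating"}'::jsonb,  -- p_data
  '33333333-3333-3333-3333-333333333333',  -- p_created_by
  '11111111-1111-1111-1111-111111111111'   -- p_submission_id
);
```
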
### 2. `update_entity_from_submission`

**Purpose**: Updates existing entities from approved edits

**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Updated entity data
- `p_entity_id` (uuid) - Existing entity ID
- `p_changed_by` (uuid) - User who changed it

**Critical Requirements**:
- ✅ MUST use COALESCE to preserve existing values
- ✅ MUST include `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides
- ✅ MUST use `ride_type` for ride_models
- ✅ MUST update `updated_at` timestamp

**Returns**: `uuid` - Updated entity ID
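
An equivalent sketch for an edit (hypothetical IDs). Partial `p_data` is the norm here, which is why the COALESCE requirement matters; see Common Pitfalls #4:

```sql
-- Hypothetical IDs; only the provided keys should overwrite existing values.
SELECT update_entity_from_submission(
  'ride',                                                    -- p_entity_type
  '{"description": "Refurbished for the 2025 season"}'::jsonb,  -- p_data (partial)
  '44444444-4444-4444-4444-444444444444',                    -- p_entity_id
  '33333333-3333-3333-3333-333333333333'                     -- p_changed_by
);
```
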
### 3. `process_approval_transaction`

**Purpose**: Atomic transaction for selective approval

**Parameters**:
- `p_submission_id` (uuid)
- `p_item_ids` (uuid[]) - Specific items to approve
- `p_moderator_id` (uuid)
- `p_change_reason` (text)

**Critical Requirements**:
- ✅ MUST validate all item dependencies first
- ✅ MUST extract correct fields from submission tables
- ✅ MUST set session variables for triggers
- ✅ MUST handle rollback on any error

**Called By**: Edge function `process-selective-approval`
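
From the edge function's side, the call reduces to a single RPC. A sketch with hypothetical IDs, following the parameter order above:

```sql
-- Hypothetical IDs; approves two items out of a multi-item submission.
SELECT process_approval_transaction(
  '11111111-1111-1111-1111-111111111111',  -- p_submission_id
  ARRAY[
    '55555555-5555-5555-5555-555555555555',
    '66666666-6666-6666-6666-666666666666'
  ]::uuid[],                               -- p_item_ids
  '77777777-7777-7777-7777-777777777777',  -- p_moderator_id
  'Approved park and its first ride'       -- p_change_reason
);
```
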
### 4. `create_submission_with_items`

**Purpose**: Creates multi-item submissions atomically

**Parameters**:
- `p_submission_id` (uuid)
- `p_entity_type` (text)
- `p_action_type` (text) - `create` or `edit`
- `p_items` (jsonb) - Array of submission items
- `p_user_id` (uuid)

**Critical Requirements**:
- ✅ MUST resolve dependencies in order
- ✅ MUST validate all required fields per entity type
- ✅ MUST link items to submission correctly
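
A sketch of the submission side. The IDs are hypothetical, and the exact shape of each `p_items` element is an assumption based on the fields documented above:

```sql
-- Hypothetical IDs; the per-item payload shape is an assumption.
SELECT create_submission_with_items(
  '11111111-1111-1111-1111-111111111111',  -- p_submission_id
  'ride',                                  -- p_entity_type
  'create',                                -- p_action_type
  '[{"name": "Sky Serpent", "slug": "sky-serpent",
     "park_id": "22222222-2222-2222-2222-222222222222",
     "category": "roller_coaster", "status": "operating"}]'::jsonb,  -- p_items
  '33333333-3333-3333-3333-333333333333'   -- p_user_id
);
```
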
---

## Common Pitfalls

### 1. ❌ Using `ride_type` for rides

```sql
-- WRONG
UPDATE rides SET ride_type = 'inverted_coaster' WHERE id = $1;
-- ERROR: column "ride_type" does not exist

-- CORRECT
UPDATE rides SET category = 'roller_coaster' WHERE id = $1;
```

### 2. ❌ Missing `category` field

```sql
-- WRONG - Missing required category
INSERT INTO rides (name, slug, park_id, status) VALUES (...);
-- ERROR: null value violates not-null constraint

-- CORRECT
INSERT INTO rides (name, slug, park_id, category, status) VALUES (..., 'roller_coaster', ...);
```

### 3. ❌ Wrong column names in version tables

```sql
-- WRONG
SELECT height_requirement FROM ride_versions WHERE ride_id = $1;
-- ERROR: column "height_requirement" does not exist

-- CORRECT
SELECT height_requirement_cm FROM ride_versions WHERE ride_id = $1;
```

### 4. ❌ Forgetting COALESCE in updates

```sql
-- WRONG - Overwrites fields with NULL
UPDATE rides SET
  name = (p_data->>'name'),
  description = (p_data->>'description')
WHERE id = $1;

-- CORRECT - Preserves existing values if not provided
UPDATE rides SET
  name = COALESCE(p_data->>'name', name),
  description = COALESCE(p_data->>'description', description)
WHERE id = $1;
```

### 5. ❌ Not handling submission_id in version triggers

```sql
-- WRONG - Version doesn't link back to submission
INSERT INTO ride_versions (ride_id, ...) VALUES (...);

-- CORRECT - Trigger must read session variable
v_submission_id := current_setting('app.submission_id', true)::uuid;
INSERT INTO ride_versions (ride_id, submission_id, ...) VALUES (..., v_submission_id, ...);
```
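
Putting pitfall 5 into context, here is a minimal trigger-function sketch. The function and trigger names are illustrative, and the real trigger copies every ride column; only a few are shown:

```sql
-- Minimal sketch; names and the copied column list are illustrative.
CREATE OR REPLACE FUNCTION record_ride_version() RETURNS trigger AS $$
DECLARE
  v_submission_id uuid;
BEGIN
  -- The second argument `true` makes current_setting return NULL instead of
  -- erroring when the approval transaction did not set the variable.
  v_submission_id := current_setting('app.submission_id', true)::uuid;

  -- Retire the previous current version before inserting the new one.
  UPDATE ride_versions SET is_current = false WHERE ride_id = NEW.id;

  INSERT INTO ride_versions (
    ride_id, version_number, change_type, is_current, submission_id,
    name, slug, park_id, category, status  -- ...plus the remaining columns
  )
  SELECT NEW.id,
         COALESCE(MAX(version_number), 0) + 1,
         (CASE WHEN TG_OP = 'INSERT' THEN 'created' ELSE 'updated' END)::version_change_type,
         true, v_submission_id,
         NEW.name, NEW.slug, NEW.park_id, NEW.category, NEW.status
  FROM ride_versions
  WHERE ride_id = NEW.id;

  RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER ride_version_trigger
AFTER INSERT OR UPDATE ON rides
FOR EACH ROW EXECUTE FUNCTION record_ride_version();
```
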
---

## Validation Checklist

Before deploying any submission pipeline changes:

- [ ] All entity tables have matching submission tables
- [ ] All required NOT NULL fields are included in CREATE functions
- [ ] All required NOT NULL fields are included in UPDATE functions
- [ ] `category` is extracted for rides and ride_models
- [ ] `ride_type` is NOT used for rides
- [ ] `ride_type` IS used for ride_models
- [ ] COALESCE is used for all UPDATE statements
- [ ] Version table column name differences are handled
- [ ] Session variables are set for version triggers
- [ ] Foreign key relationships are validated
- [ ] Dependency resolution works correctly
- [ ] Error handling and rollback logic is present

---

## Maintenance

**When adding new entity types:**

1. Create main table with all fields
2. Create matching submission table + `submission_id` FK
3. Create version table with all fields + version metadata
4. Add case to `create_entity_from_submission`
5. Add case to `update_entity_from_submission`
6. Add case to `process_approval_transaction`
7. Add case to `create_submission_with_items`
8. Create version trigger for main table
9. Update this documentation
10. Run full test suite

**When modifying schemas** (see the migration sketch below):

1. Check if the field exists in ALL three tables (main, submission, version)
2. Update ALL three tables in the migration
3. Update ALL functions that reference the field
4. Update this documentation
5. Test create, update, and rollback flows
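
As a sketch of step 2, a single migration adding a hypothetical nullable column to all three tables:

```sql
-- Hypothetical column; nullable, so no backfill or DEFAULT is needed.
ALTER TABLE parks            ADD COLUMN tagline text;
ALTER TABLE park_submissions ADD COLUMN tagline text;
ALTER TABLE park_versions    ADD COLUMN tagline text;
```
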
---

## Related Documentation

- [Submission Pipeline Overview](./README.md)
- [Versioning System](../versioning/README.md)
- [Moderation Workflow](../moderation/README.md)
- [Migration Guide](../versioning/MIGRATION.md)

docs/submission-pipeline/VALIDATION_SETUP.md (new file, 402 lines)
@@ -0,0 +1,402 @@
# Schema Validation Setup Guide

This guide explains how to set up and use the automated schema validation tools to prevent field mismatches in the submission pipeline.

## Overview

The validation system consists of three layers:

1. **Pre-migration Script** - Quick validation before deploying migrations
2. **Integration Tests** - Comprehensive Playwright tests for CI/CD
3. **GitHub Actions** - Automated checks on every pull request

## Quick Start

### 1. Add NPM Scripts

Add these scripts to your `package.json`:

```json
{
  "scripts": {
    "validate-schema": "tsx scripts/validate-schema.ts",
    "test:schema": "playwright test schema-validation",
    "test:schema:ui": "playwright test schema-validation --ui",
    "pre-migrate": "npm run validate-schema"
  }
}
```

### 2. Environment Variables

Create a `.env.test` file:

```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**⚠️ Important**: Never commit this file! Add it to `.gitignore`:

```gitignore
.env.test
.env.local
```

### 3. Install Dependencies

If not already installed:

```bash
npm install --save-dev @supabase/supabase-js @playwright/test tsx
```

## Using the Validation Tools

### Pre-Migration Validation Script

**When to use**: Before applying any database migration

**Run manually:**
```bash
npm run validate-schema
```

**What it checks:**
- ✅ Submission tables match main tables
- ✅ Version tables have all required fields
- ✅ Critical fields are correct (e.g., `category` vs `ride_type`)
- ✅ Database functions exist and are accessible

**Example output:**
```
🔍 Starting schema validation...

Submission Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: submission table matches main table
✅ Rides: submission table matches main table
✅ Companies: submission table matches main table
✅ Ride Models: submission table matches main table

Version Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: version table has all fields
✅ Rides: version table has all fields
✅ Companies: version table has all fields
✅ Ride Models: version table has all fields

Critical Fields:
────────────────────────────────────────────────────────────────────────────────
✅ rides table does NOT have ride_type column
✅ rides table has category column
✅ ride_models has both category and ride_type

Functions:
────────────────────────────────────────────────────────────────────────────────
✅ create_entity_from_submission exists and is accessible
✅ update_entity_from_submission exists and is accessible
✅ process_approval_transaction exists and is accessible

════════════════════════════════════════════════════════════════════════════════
Total: 15 passed, 0 failed
════════════════════════════════════════════════════════════════════════════════

✅ All schema validations passed. Safe to deploy.
```

### Integration Tests

**When to use**: In CI/CD, before merging PRs, after major changes

**Run all tests:**
```bash
npm run test:schema
```

**Run in UI mode (for debugging):**
```bash
npm run test:schema:ui
```

**Run a specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**What it tests:**
- All pre-migration script checks PLUS:
- Field-by-field data type comparison
- NOT NULL constraint validation
- Foreign key existence checks
- Known field name variations (e.g., `height_requirement_cm` vs `height_requirement`)

### GitHub Actions (Automated)

**Automatically runs on:**
- Every pull request that touches:
  - `supabase/migrations/**`
  - `src/lib/moderation/**`
  - `supabase/functions/**`
- Pushes to `main` or `develop` branches
- Manual workflow dispatch

**What it does:**
1. Runs the validation script
2. Runs the integration tests
3. Checks for breaking migration patterns
4. Validates migration file naming
5. Comments on PRs with helpful guidance if tests fail

## Workflow Examples

### Before Creating a Migration

```bash
# 1. Make schema changes locally
# 2. Validate before creating the migration
npm run validate-schema

# 3. If validation passes, create the migration
supabase db diff -f add_new_field

# 4. Run validation again
npm run validate-schema

# 5. Commit and push
git add .
git commit -m "Add new field to rides table"
git push
```

### After Modifying Entity Schemas

```bash
# 1. Modified the rides table schema
# 2. Run the full test suite
npm run test:schema

# 3. Check a specific validation
npx playwright test schema-validation --grep "rides"

# 4. Fix any issues
# 5. Re-run the tests
npm run test:schema
```

### During Code Review

**PR Author:**
1. Ensure all validation tests pass locally
2. Push changes
3. Wait for GitHub Actions to complete
4. Address any automated feedback

**Reviewer:**
1. Check that GitHub Actions passed
2. Review schema changes in migrations
3. Verify documentation was updated
4. Approve if all checks pass

## Common Issues and Solutions

### Issue: "Missing fields" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Missing fields: category
```

**Cause**: The field was added to the main table but not to the submission table.

**Solution:**
```sql
-- In your migration file (if the table already contains rows, add a DEFAULT
-- or backfill first, or the NOT NULL constraint will fail)
ALTER TABLE ride_submissions ADD COLUMN category TEXT NOT NULL;
```

### Issue: "Type mismatch" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Type mismatches: max_speed_kmh: main=numeric, submission=integer
```

**Cause**: Data types don't match between tables.

**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions
ALTER COLUMN max_speed_kmh TYPE NUMERIC USING max_speed_kmh::numeric;
```

### Issue: "Column does not exist" in Production

**Symptom**: Approval fails with `column "category" does not exist`

**Immediate action:**
1. Run the validation script to identify the issue
2. Create an emergency migration to add the missing field
3. Deploy immediately
4. Update functions if needed

**Prevention**: Always run validation before deploying.

### Issue: Tests Pass Locally but Fail in CI

**Possible causes:**
- Different database state in CI vs local
- Missing environment variables
- Outdated schema in the test database

**Solution:**
```bash
# Pull the latest schema
supabase db pull

# Reset the local database
supabase db reset

# Re-run the tests
npm run test:schema
```

## Best Practices

### ✅ Do's

- ✅ Run the validation script before every migration
- ✅ Run the integration tests before merging PRs
- ✅ Update all three tables when adding fields (main, submission, version)
- ✅ Document field name variations in the tests
- ✅ Check GitHub Actions results before merging
- ✅ Keep SCHEMA_REFERENCE.md up to date

### ❌ Don'ts

- ❌ Don't skip validation "because it's a small change"
- ❌ Don't add fields to only the main tables
- ❌ Don't ignore failing tests
- ❌ Don't bypass CI checks
- ❌ Don't commit service role keys
- ❌ Don't modify submission pipeline functions without testing

## Continuous Integration Setup

### GitHub Secrets

Add to your repository secrets:

```
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**Steps:**
1. Go to repository Settings → Secrets and variables → Actions
2. Click "New repository secret"
3. Name: `SUPABASE_SERVICE_ROLE_KEY`
4. Value: your service role key from the Supabase dashboard
5. Save

### Branch Protection Rules

Recommended settings:

```
Branch: main
✓ Require status checks to pass before merging
  ✓ validate-schema (Schema Validation)
  ✓ migration-safety-check (Migration Safety Check)
✓ Require branches to be up to date before merging
```

## Troubleshooting

### Script Won't Run

**Error:** `tsx: command not found`

**Solution:**
```bash
npm install -g tsx
# or
npx tsx scripts/validate-schema.ts
```

### Authentication Errors

**Error:** `Invalid API key`

**Solution:**
1. Check that `.env.test` has the correct service role key
2. Verify the key has not expired
3. Ensure the environment variable is loaded:
```bash
source .env.test
npm run validate-schema
```

### Tests Timeout

**Error:** Tests time out after 30 seconds

**Solution:**
```bash
# Increase the timeout
npx playwright test schema-validation --timeout=60000
```

## Maintenance

### Adding New Entity Types

When adding a new entity type (e.g., `events`):

1. **Update the validation script:**
```typescript
// In scripts/validate-schema.ts
await validateSubmissionTable('events', 'event_submissions', 'Events');
await validateVersionTable('events', 'event_versions', 'Events');
```

2. **Update the integration tests:**
```typescript
// In tests/integration/schema-validation.test.ts
test('events: submission table matches main table schema', async () => {
  // Add test logic
});
```

3. **Update the documentation:**
- `docs/submission-pipeline/SCHEMA_REFERENCE.md`
- This file (`VALIDATION_SETUP.md`)

### Updating Field Mappings

When version tables use different field names:

```typescript
// In both the script and the tests
const fieldMapping: { [key: string]: string } = {
  'new_main_field': 'version_field_name',
};
```

## Related Documentation

- [Schema Reference](./SCHEMA_REFERENCE.md) - Complete field mappings
- [Integration Tests README](../../tests/integration/README.md) - Detailed test documentation
- [Submission Pipeline](./README.md) - Pipeline overview
- [Versioning System](../versioning/README.md) - Version table details

## Support

**Questions?** Check the documentation above or review existing migration files.

**Found a bug in validation?** Open an issue with:
- Expected behavior
- Actual behavior
- Validation script output
- Database schema snippets

scripts/validate-schema.ts (new file, 332 lines)
@@ -0,0 +1,332 @@
|
|||||||
|
#!/usr/bin/env tsx
|
||||||
|
/**
|
||||||
|
* Schema Validation Script
|
||||||
|
*
|
||||||
|
* Pre-migration validation script that checks schema consistency
|
||||||
|
* across the submission pipeline before deploying changes.
|
||||||
|
*
|
||||||
|
* Usage:
|
||||||
|
* npm run validate-schema
|
||||||
|
* or
|
||||||
|
* tsx scripts/validate-schema.ts
|
||||||
|
*
|
||||||
|
* Exit codes:
|
||||||
|
* 0 = All validations passed
|
||||||
|
* 1 = Validation failures detected
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createClient } from '@supabase/supabase-js';
|
||||||
|
|
||||||
|
const SUPABASE_URL = 'https://ydvtmnrszybqnbcqbdcy.supabase.co';
|
||||||
|
const SUPABASE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;
|
||||||
|
|
||||||
|
if (!SUPABASE_KEY) {
|
||||||
|
console.error('❌ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);
|
||||||
|
|
||||||
|
interface ValidationResult {
|
||||||
|
category: string;
|
||||||
|
test: string;
|
||||||
|
passed: boolean;
|
||||||
|
message?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const results: ValidationResult[] = [];
|
||||||
|
|
||||||
|
async function getTableColumns(tableName: string): Promise<Set<string>> {
|
||||||
|
const { data, error } = await supabase
|
||||||
|
.from('information_schema.columns' as any)
|
||||||
|
.select('column_name')
|
||||||
|
.eq('table_schema', 'public')
|
||||||
|
.eq('table_name', tableName);
|
||||||
|
|
||||||
|
if (error) throw error;
|
||||||
|
|
||||||
|
return new Set(data?.map((row: any) => row.column_name) || []);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function validateSubmissionTable(
|
||||||
|
mainTable: string,
|
||||||
|
submissionTable: string,
|
||||||
|
entityName: string
|
||||||
|
): Promise<void> {
|
||||||
|
const mainColumns = await getTableColumns(mainTable);
|
||||||
|
const submissionColumns = await getTableColumns(submissionTable);
|
||||||
|
|
||||||
|
const excludedFields = new Set([
|
||||||
|
'id', 'created_at', 'updated_at', 'is_test_data',
|
||||||
|
'view_count_all', 'view_count_30d', 'view_count_7d',
|
||||||
|
'average_rating', 'review_count', 'installations_count',
|
||||||
|
]);
|
||||||
|
|
||||||
|
const missingFields: string[] = [];
|
||||||
|
|
||||||
|
for (const field of mainColumns) {
|
||||||
|
if (excludedFields.has(field)) continue;
|
||||||
|
if (!submissionColumns.has(field)) {
|
||||||
|
missingFields.push(field);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (missingFields.length === 0) {
|
||||||
|
results.push({
|
||||||
|
category: 'Submission Tables',
|
||||||
|
test: `${entityName}: submission table matches main table`,
|
||||||
|
passed: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
results.push({
|
||||||
|
category: 'Submission Tables',
|
||||||
|
test: `${entityName}: submission table matches main table`,
|
||||||
|
passed: false,
|
||||||
|
message: `Missing fields: ${missingFields.join(', ')}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function validateVersionTable(
|
||||||
|
mainTable: string,
|
||||||
|
versionTable: string,
|
||||||
|
entityName: string
|
||||||
|
): Promise<void> {
|
||||||
|
const mainColumns = await getTableColumns(mainTable);
|
||||||
|
const versionColumns = await getTableColumns(versionTable);
|
||||||
|
|
||||||
|
const excludedFields = new Set([
|
||||||
|
'id', 'created_at', 'updated_at', 'is_test_data',
|
||||||
|
'view_count_all', 'view_count_30d', 'view_count_7d',
|
||||||
|
'average_rating', 'review_count', 'installations_count',
|
||||||
|
]);
|
||||||
|
|
||||||
|
const fieldMapping: { [key: string]: string } = {
|
||||||
|
'height_requirement': 'height_requirement_cm',
|
||||||
|
'max_g_force': 'gforce_max',
|
||||||
|
'inversions': 'inversions_count',
|
||||||
|
'max_height_meters': 'height_meters',
|
||||||
|
'drop_height_meters': 'drop_meters',
|
||||||
|
};
|
||||||
|
|
||||||
|
const requiredVersionFields = new Set([
|
||||||
|
'version_id', 'version_number', 'change_type', 'change_reason',
|
||||||
|
'is_current', 'created_by', 'submission_id', 'is_test_data',
|
||||||
|
]);
|
||||||
|
|
||||||
|
const missingMainFields: string[] = [];
|
||||||
|
const missingVersionFields: string[] = [];
|
||||||
|
|
||||||
|
// Check main table fields exist in version table
|
||||||
|
for (const field of mainColumns) {
|
||||||
|
if (excludedFields.has(field)) continue;
|
||||||
|
|
||||||
|
const mappedField = fieldMapping[field] || field;
|
||||||
|
if (!versionColumns.has(field) && !versionColumns.has(mappedField)) {
|
||||||
|
missingMainFields.push(field);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check version metadata fields exist
|
||||||
|
for (const field of requiredVersionFields) {
|
||||||
|
if (!versionColumns.has(field)) {
|
||||||
|
missingVersionFields.push(field);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (missingMainFields.length === 0 && missingVersionFields.length === 0) {
|
||||||
|
results.push({
|
||||||
|
category: 'Version Tables',
|
||||||
|
test: `${entityName}: version table has all fields`,
|
||||||
|
passed: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const messages: string[] = [];
|
||||||
|
if (missingMainFields.length > 0) {
|
||||||
|
messages.push(`Missing main fields: ${missingMainFields.join(', ')}`);
|
||||||
|
}
|
||||||
|
if (missingVersionFields.length > 0) {
|
||||||
|
messages.push(`Missing version fields: ${missingVersionFields.join(', ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
results.push({
|
||||||
|
category: 'Version Tables',
|
||||||
|
test: `${entityName}: version table has all fields`,
|
||||||
|
passed: false,
|
||||||
|
message: messages.join('; '),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function validateCriticalFields(): Promise<void> {
|
||||||
|
const ridesColumns = await getTableColumns('rides');
|
||||||
|
const rideModelsColumns = await getTableColumns('ride_models');
|
||||||
|
|
||||||
|
// Rides should NOT have ride_type
|
||||||
|
if (!ridesColumns.has('ride_type')) {
|
||||||
|
results.push({
|
||||||
|
category: 'Critical Fields',
|
||||||
|
test: 'rides table does NOT have ride_type column',
|
||||||
|
passed: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
results.push({
|
||||||
|
category: 'Critical Fields',
|
||||||
|
test: 'rides table does NOT have ride_type column',
|
||||||
|
passed: false,
|
||||||
|
message: 'rides table incorrectly has ride_type column',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rides MUST have category
|
||||||
|
if (ridesColumns.has('category')) {
|
||||||
|
results.push({
|
||||||
|
category: 'Critical Fields',
|
||||||
|
test: 'rides table has category column',
|
||||||
|
passed: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
results.push({
|
||||||
|
category: 'Critical Fields',
|
||||||
|
test: 'rides table has category column',
|
||||||
|
passed: false,
|
||||||
|
message: 'rides table is missing required category column',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ride models must have both category and ride_type
|
||||||
|
if (rideModelsColumns.has('category') && rideModelsColumns.has('ride_type')) {
|
||||||
|
results.push({
|
||||||
|
category: 'Critical Fields',
|
||||||
|
test: 'ride_models has both category and ride_type',
|
||||||
|
passed: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
const missing: string[] = [];
|
||||||
|
if (!rideModelsColumns.has('category')) missing.push('category');
|
||||||
|
if (!rideModelsColumns.has('ride_type')) missing.push('ride_type');
|
||||||
|
|
||||||
|
results.push({
|
||||||
|
category: 'Critical Fields',
|
||||||
|
test: 'ride_models has both category and ride_type',
|
||||||
|
passed: false,
|
||||||
|
message: `ride_models is missing: ${missing.join(', ')}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function validateFunctions(): Promise<void> {
|
||||||
|
const functionsToCheck = [
|
||||||
|
'create_entity_from_submission',
|
||||||
|
'update_entity_from_submission',
|
||||||
|
'process_approval_transaction',
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const funcName of functionsToCheck) {
|
||||||
|
try {
|
||||||
|
const { data, error } = await supabase
|
||||||
|
.rpc('pg_catalog.pg_function_is_visible' as any, {
|
||||||
|
funcid: `public.${funcName}`::any
|
||||||
|
} as any);
|
||||||
|
|
||||||
|
if (!error) {
|
||||||
|
results.push({
|
||||||
|
category: 'Functions',
|
||||||
|
test: `${funcName} exists and is accessible`,
|
||||||
|
passed: true,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
results.push({
|
||||||
|
category: 'Functions',
|
||||||
|
test: `${funcName} exists and is accessible`,
|
||||||
|
passed: false,
|
||||||
|
message: error.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
results.push({
|
||||||
|
category: 'Functions',
|
||||||
|
test: `${funcName} exists and is accessible`,
|
||||||
|
passed: false,
|
||||||
|
message: err instanceof Error ? err.message : String(err),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function printResults(): void {
  console.log('\n' + '='.repeat(80));
  console.log('Schema Validation Results');
  console.log('='.repeat(80) + '\n');

  const categories = [...new Set(results.map(r => r.category))];
  let totalPassed = 0;
  let totalFailed = 0;

  for (const category of categories) {
    const categoryResults = results.filter(r => r.category === category);
    const passed = categoryResults.filter(r => r.passed).length;
    const failed = categoryResults.filter(r => !r.passed).length;

    console.log(`\n${category}:`);
    console.log('-'.repeat(80));

    for (const result of categoryResults) {
      const icon = result.passed ? '✅' : '❌';
      console.log(`${icon} ${result.test}`);
      if (result.message) {
        console.log(`   └─ ${result.message}`);
      }
    }

    totalPassed += passed;
    totalFailed += failed;
  }

  console.log('\n' + '='.repeat(80));
  console.log(`Total: ${totalPassed} passed, ${totalFailed} failed`);
  console.log('='.repeat(80) + '\n');
}

async function main(): Promise<void> {
  console.log('🔍 Starting schema validation...\n');

  try {
    // Validate submission tables
    await validateSubmissionTable('parks', 'park_submissions', 'Parks');
    await validateSubmissionTable('rides', 'ride_submissions', 'Rides');
    await validateSubmissionTable('companies', 'company_submissions', 'Companies');
    await validateSubmissionTable('ride_models', 'ride_model_submissions', 'Ride Models');

    // Validate version tables
    await validateVersionTable('parks', 'park_versions', 'Parks');
    await validateVersionTable('rides', 'ride_versions', 'Rides');
    await validateVersionTable('companies', 'company_versions', 'Companies');
    await validateVersionTable('ride_models', 'ride_model_versions', 'Ride Models');

    // Validate critical fields
    await validateCriticalFields();

    // Validate functions
    await validateFunctions();

    // Print results
    printResults();

    // Exit with appropriate code
    const hasFailures = results.some(r => !r.passed);
    if (hasFailures) {
      console.error('❌ Schema validation failed. Please fix the issues above before deploying.\n');
      process.exit(1);
    } else {
      console.log('✅ All schema validations passed. Safe to deploy.\n');
      process.exit(0);
    }
  } catch (error) {
    console.error('❌ Fatal error during validation:');
    console.error(error);
    process.exit(1);
  }
}

main();
@@ -73,6 +73,7 @@ const AdminContact = lazy(() => import("./pages/admin/AdminContact"));
 const AdminEmailSettings = lazy(() => import("./pages/admin/AdminEmailSettings"));
 const ErrorMonitoring = lazy(() => import("./pages/admin/ErrorMonitoring"));
 const ErrorLookup = lazy(() => import("./pages/admin/ErrorLookup"));
+const TraceViewer = lazy(() => import("./pages/admin/TraceViewer"));

 // User routes (lazy-loaded)
 const Profile = lazy(() => import("./pages/Profile"));
@@ -387,6 +388,14 @@ function AppContent(): React.JSX.Element {
             </AdminErrorBoundary>
           }
         />
+        <Route
+          path="/admin/trace-viewer"
+          element={
+            <AdminErrorBoundary section="Trace Viewer">
+              <TraceViewer />
+            </AdminErrorBoundary>
+          }
+        />

         {/* Utility routes - lazy loaded */}
         <Route path="/force-logout" element={<ForceLogout />} />
@@ -1,5 +1,6 @@
 import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog';
 import { Badge } from '@/components/ui/badge';
+import { Button } from '@/components/ui/button';
 import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
 import { Card, CardContent } from '@/components/ui/card';
 import { format } from 'date-fns';
@@ -196,6 +197,27 @@ export function ApprovalFailureModal({ failure, onClose }: ApprovalFailureModalP
           </Card>
         </TabsContent>
       </Tabs>
+
+      <div className="flex justify-end gap-2 mt-4">
+        {failure.request_id && (
+          <>
+            <Button
+              variant="outline"
+              size="sm"
+              onClick={() => window.open(`/admin/error-monitoring?tab=edge-functions&requestId=${failure.request_id}`, '_blank')}
+            >
+              View Edge Logs
+            </Button>
+            <Button
+              variant="outline"
+              size="sm"
+              onClick={() => window.open(`/admin/error-monitoring?tab=traces&traceId=${failure.request_id}`, '_blank')}
+            >
+              View Full Trace
+            </Button>
+          </>
+        )}
+      </div>
     </DialogContent>
   </Dialog>
 );
161
src/components/admin/CorrelatedLogsView.tsx
Normal file
@@ -0,0 +1,161 @@
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Loader2, Clock } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface CorrelatedLogsViewProps {
  requestId: string;
  traceId?: string;
}

interface TimelineEvent {
  timestamp: Date;
  type: 'error' | 'edge' | 'database' | 'approval';
  message: string;
  severity?: string;
  metadata?: Record<string, any>;
}

export function CorrelatedLogsView({ requestId, traceId }: CorrelatedLogsViewProps) {
  const { data: events, isLoading } = useQuery({
    queryKey: ['correlated-logs', requestId, traceId],
    queryFn: async () => {
      const events: TimelineEvent[] = [];

      // Fetch application error
      const { data: error } = await supabase
        .from('request_metadata')
        .select('*')
        .eq('request_id', requestId)
        .single();
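      // Note: `error` here is the request_metadata row (an application-error record),
      // not a Supabase error object; .single() yields null data when no row matches,
      // which the check below treats as "no application error for this request".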

      if (error) {
        events.push({
          timestamp: new Date(error.created_at),
          type: 'error',
          message: error.error_message || 'Unknown error',
          severity: error.error_type || undefined,
          metadata: {
            endpoint: error.endpoint,
            method: error.method,
            status_code: error.status_code,
          },
        });
      }

      // Fetch approval metrics
      const { data: approval } = await supabase
        .from('approval_transaction_metrics')
        .select('*')
        .eq('request_id', requestId)
        .maybeSingle();

      if (approval && approval.created_at) {
        events.push({
          timestamp: new Date(approval.created_at),
          type: 'approval',
          message: approval.success ? 'Approval successful' : (approval.error_message || 'Approval failed'),
          severity: approval.success ? 'success' : 'error',
          metadata: {
            items_count: approval.items_count,
            duration_ms: approval.duration_ms || undefined,
          },
        });
      }

      // TODO: Fetch edge function logs (requires Management API access)
      // TODO: Fetch database logs (requires analytics API access)

      // Sort chronologically
      events.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());

      return events;
    },
  });

  const getTypeColor = (type: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (type) {
      case 'error': return 'destructive';
      case 'approval': return 'destructive';
      case 'edge': return 'default';
      case 'database': return 'secondary';
      default: return 'outline';
    }
  };

  if (isLoading) {
    return (
      <div className="flex items-center justify-center py-12">
        <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
      </div>
    );
  }

  if (!events || events.length === 0) {
    return (
      <Card>
        <CardContent className="pt-6">
          <p className="text-center text-muted-foreground">
            No correlated logs found for this request.
          </p>
        </CardContent>
      </Card>
    );
  }

  return (
    <Card>
      <CardHeader>
        <CardTitle className="text-lg flex items-center gap-2">
          <Clock className="w-5 h-5" />
          Timeline for Request {requestId.slice(0, 8)}
        </CardTitle>
      </CardHeader>
      <CardContent>
        <div className="relative space-y-4">
          {/* Timeline line */}
          <div className="absolute left-6 top-0 bottom-0 w-0.5 bg-border" />

          {events.map((event, index) => (
            <div key={index} className="relative pl-14">
              {/* Timeline dot */}
              <div className="absolute left-[18px] top-2 w-4 h-4 rounded-full bg-background border-2 border-primary" />

              <Card>
                <CardContent className="pt-4">
                  <div className="space-y-2">
                    <div className="flex items-center gap-2">
                      <Badge variant={getTypeColor(event.type)}>
                        {event.type.toUpperCase()}
                      </Badge>
                      {event.severity && (
                        <Badge variant="outline" className="text-xs">
                          {event.severity}
                        </Badge>
                      )}
                      <span className="text-xs text-muted-foreground">
                        {format(event.timestamp, 'HH:mm:ss.SSS')}
                      </span>
                    </div>
                    <p className="text-sm">{event.message}</p>
                    {event.metadata && Object.keys(event.metadata).length > 0 && (
                      <div className="text-xs text-muted-foreground space-y-1">
                        {Object.entries(event.metadata).map(([key, value]) => (
                          <div key={key}>
                            <span className="font-medium">{key}:</span> {String(value)}
                          </div>
                        ))}
                      </div>
                    )}
                  </div>
                </CardContent>
              </Card>
            </div>
          ))}
        </div>
      </CardContent>
    </Card>
  );
}
172
src/components/admin/DatabaseLogs.tsx
Normal file
@@ -0,0 +1,172 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Loader2, Search, ChevronDown, ChevronRight } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface DatabaseLog {
  id: string;
  timestamp: number;
  identifier: string;
  error_severity: string;
  event_message: string;
}

export function DatabaseLogs() {
  const [searchTerm, setSearchTerm] = useState('');
  const [severity, setSeverity] = useState<string>('all');
  const [timeRange, setTimeRange] = useState<'1h' | '24h' | '7d'>('24h');
  const [expandedLog, setExpandedLog] = useState<string | null>(null);

  const { data: logs, isLoading } = useQuery({
    queryKey: ['database-logs', severity, timeRange],
    queryFn: async () => {
      // For now, return empty array as we need proper permissions for analytics query
      // In production, this would use Supabase Analytics API
      // const hoursAgo = timeRange === '1h' ? 1 : timeRange === '24h' ? 24 : 168;
      // const startTime = Date.now() * 1000 - (hoursAgo * 60 * 60 * 1000 * 1000);

      return [] as DatabaseLog[];
    },
    refetchInterval: 30000,
  });

  const filteredLogs = logs?.filter(log => {
    if (searchTerm && !log.event_message.toLowerCase().includes(searchTerm.toLowerCase())) {
      return false;
    }
    return true;
  }) || [];

  const getSeverityColor = (severity: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (severity.toUpperCase()) {
      case 'ERROR': return 'destructive';
      case 'WARNING': return 'destructive';
      case 'NOTICE': return 'default';
      case 'LOG': return 'secondary';
      default: return 'outline';
    }
  };

  const isSpanLog = (message: string) => {
    return message.includes('SPAN:') || message.includes('SPAN_EVENT:');
  };
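  // Assumption: the SPAN:/SPAN_EVENT: prefixes are the markers emitted by the
  // trace-instrumented database functions, so matching rows double as trace output.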

  const toggleExpand = (logId: string) => {
    setExpandedLog(expandedLog === logId ? null : logId);
  };

  return (
    <div className="space-y-4">
      <div className="flex flex-col md:flex-row gap-4">
        <div className="flex-1">
          <div className="relative">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search database logs..."
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              className="pl-10"
            />
          </div>
        </div>
        <Select value={severity} onValueChange={setSeverity}>
          <SelectTrigger className="w-[150px]">
            <SelectValue placeholder="Severity" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Levels</SelectItem>
            <SelectItem value="ERROR">Error</SelectItem>
            <SelectItem value="WARNING">Warning</SelectItem>
            <SelectItem value="NOTICE">Notice</SelectItem>
            <SelectItem value="LOG">Log</SelectItem>
          </SelectContent>
        </Select>
        <Select value={timeRange} onValueChange={(v) => setTimeRange(v as any)}>
          <SelectTrigger className="w-[120px]">
            <SelectValue />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="1h">Last Hour</SelectItem>
            <SelectItem value="24h">Last 24h</SelectItem>
            <SelectItem value="7d">Last 7 Days</SelectItem>
          </SelectContent>
        </Select>
      </div>

      {isLoading ? (
        <div className="flex items-center justify-center py-12">
          <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
        </div>
      ) : filteredLogs.length === 0 ? (
        <Card>
          <CardContent className="pt-6">
            <p className="text-center text-muted-foreground">
              No database logs found for the selected criteria.
            </p>
          </CardContent>
        </Card>
      ) : (
        <div className="space-y-2">
          {filteredLogs.map((log) => (
            <Card key={log.id} className="overflow-hidden">
              <CardHeader
                className="py-3 cursor-pointer hover:bg-muted/50 transition-colors"
                onClick={() => toggleExpand(log.id)}
              >
                <div className="flex items-center justify-between">
                  <div className="flex items-center gap-3">
                    {expandedLog === log.id ? (
                      <ChevronDown className="w-4 h-4 text-muted-foreground" />
                    ) : (
                      <ChevronRight className="w-4 h-4 text-muted-foreground" />
                    )}
                    <Badge variant={getSeverityColor(log.error_severity)}>
                      {log.error_severity}
                    </Badge>
                    {isSpanLog(log.event_message) && (
                      <Badge variant="outline" className="text-xs">
                        TRACE
                      </Badge>
                    )}
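                    {/* log.timestamp arrives in microseconds; dividing by 1000 yields
                        the millisecond value that date-fns format() expects */}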
                    <span className="text-sm text-muted-foreground">
                      {format(log.timestamp / 1000, 'HH:mm:ss.SSS')}
                    </span>
                  </div>
                  <span className="text-sm truncate max-w-[500px]">
                    {log.event_message.slice(0, 100)}
                    {log.event_message.length > 100 && '...'}
                  </span>
                </div>
              </CardHeader>
              {expandedLog === log.id && (
                <CardContent className="pt-0 pb-4 border-t">
                  <div className="space-y-2 mt-4">
                    <div>
                      <span className="text-xs text-muted-foreground">Full Message:</span>
                      <pre className="text-xs font-mono mt-1 whitespace-pre-wrap break-all">
                        {log.event_message}
                      </pre>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Timestamp:</span>
                      <p className="text-sm">{format(log.timestamp / 1000, 'PPpp')}</p>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Identifier:</span>
                      <p className="text-sm font-mono">{log.identifier}</p>
                    </div>
                  </div>
                </CardContent>
              )}
            </Card>
          ))}
        </div>
      )}
    </div>
  );
}
168
src/components/admin/EdgeFunctionLogs.tsx
Normal file
@@ -0,0 +1,168 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Loader2, Search, ChevronDown, ChevronRight } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface EdgeFunctionLog {
  id: string;
  timestamp: number;
  event_type: string;
  event_message: string;
  function_id: string;
  level: string;
}

const FUNCTION_NAMES = [
  'detect-location',
  'process-selective-approval',
  'process-selective-rejection',
];

export function EdgeFunctionLogs() {
  const [selectedFunction, setSelectedFunction] = useState<string>('all');
  const [searchTerm, setSearchTerm] = useState('');
  const [timeRange, setTimeRange] = useState<'1h' | '24h' | '7d'>('24h');
  const [expandedLog, setExpandedLog] = useState<string | null>(null);

  const { data: logs, isLoading } = useQuery({
    queryKey: ['edge-function-logs', selectedFunction, timeRange],
    queryFn: async () => {
      // Query Supabase edge function logs
      // Note: This uses the analytics endpoint which requires specific permissions
      const hoursAgo = timeRange === '1h' ? 1 : timeRange === '24h' ? 24 : 168;
      const startTime = Date.now() - (hoursAgo * 60 * 60 * 1000);
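      // hoursAgo/startTime are computed but not yet consumed; the intent (presumably)
      // is to bound the Management API query once that integration is wired up.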

      // For now, return the logs from context as an example
      // In production, this would call the Supabase Management API
      const allLogs: EdgeFunctionLog[] = [];

      return allLogs;
    },
    refetchInterval: 30000, // Refresh every 30 seconds
  });

  const filteredLogs = logs?.filter(log => {
    if (searchTerm && !log.event_message.toLowerCase().includes(searchTerm.toLowerCase())) {
      return false;
    }
    return true;
  }) || [];

  const getLevelColor = (level: string): "default" | "destructive" | "secondary" => {
    switch (level.toLowerCase()) {
      case 'error': return 'destructive';
      case 'warn': return 'destructive';
      case 'info': return 'default';
      default: return 'secondary';
    }
  };

  const toggleExpand = (logId: string) => {
    setExpandedLog(expandedLog === logId ? null : logId);
  };

  return (
    <div className="space-y-4">
      <div className="flex flex-col md:flex-row gap-4">
        <div className="flex-1">
          <div className="relative">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search logs..."
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              className="pl-10"
            />
          </div>
        </div>
        <Select value={selectedFunction} onValueChange={setSelectedFunction}>
          <SelectTrigger className="w-[200px]">
            <SelectValue placeholder="Select function" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Functions</SelectItem>
            {FUNCTION_NAMES.map(name => (
              <SelectItem key={name} value={name}>{name}</SelectItem>
            ))}
          </SelectContent>
        </Select>
        <Select value={timeRange} onValueChange={(v) => setTimeRange(v as any)}>
          <SelectTrigger className="w-[120px]">
            <SelectValue />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="1h">Last Hour</SelectItem>
            <SelectItem value="24h">Last 24h</SelectItem>
            <SelectItem value="7d">Last 7 Days</SelectItem>
          </SelectContent>
        </Select>
      </div>

      {isLoading ? (
        <div className="flex items-center justify-center py-12">
          <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
        </div>
      ) : filteredLogs.length === 0 ? (
        <Card>
          <CardContent className="pt-6">
            <p className="text-center text-muted-foreground">
              No edge function logs found. Logs will appear here when edge functions are invoked.
            </p>
          </CardContent>
        </Card>
      ) : (
        <div className="space-y-2">
          {filteredLogs.map((log) => (
            <Card key={log.id} className="overflow-hidden">
              <CardHeader
                className="py-3 cursor-pointer hover:bg-muted/50 transition-colors"
                onClick={() => toggleExpand(log.id)}
              >
                <div className="flex items-center justify-between">
                  <div className="flex items-center gap-3">
                    {expandedLog === log.id ? (
                      <ChevronDown className="w-4 h-4 text-muted-foreground" />
                    ) : (
                      <ChevronRight className="w-4 h-4 text-muted-foreground" />
                    )}
                    <Badge variant={getLevelColor(log.level)}>
                      {log.level}
                    </Badge>
                    <span className="text-sm text-muted-foreground">
                      {format(log.timestamp, 'HH:mm:ss.SSS')}
                    </span>
                    <Badge variant="outline" className="text-xs">
                      {log.event_type}
                    </Badge>
                  </div>
                  <span className="text-sm truncate max-w-[400px]">
                    {log.event_message}
                  </span>
                </div>
              </CardHeader>
              {expandedLog === log.id && (
                <CardContent className="pt-0 pb-4 border-t">
                  <div className="space-y-2 mt-4">
                    <div>
                      <span className="text-xs text-muted-foreground">Full Message:</span>
                      <p className="text-sm font-mono mt-1">{log.event_message}</p>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Timestamp:</span>
                      <p className="text-sm">{format(log.timestamp, 'PPpp')}</p>
                    </div>
                  </div>
                </CardContent>
              )}
            </Card>
          ))}
        </div>
      )}
    </div>
  );
}
@@ -222,12 +222,30 @@ ${error.error_stack ? `Stack Trace:\n${error.error_stack}` : ''}
         </TabsContent>
       </Tabs>

-      <div className="flex justify-end gap-2">
-        <Button variant="outline" onClick={copyErrorReport}>
-          <Copy className="w-4 h-4 mr-2" />
-          Copy Report
-        </Button>
-        <Button onClick={onClose}>Close</Button>
+      <div className="flex justify-between items-center">
+        <div className="flex gap-2">
+          <Button
+            variant="outline"
+            size="sm"
+            onClick={() => window.open(`/admin/error-monitoring?tab=edge-functions&requestId=${error.request_id}`, '_blank')}
+          >
+            View Edge Logs
+          </Button>
+          <Button
+            variant="outline"
+            size="sm"
+            onClick={() => window.open(`/admin/error-monitoring?tab=database&requestId=${error.request_id}`, '_blank')}
+          >
+            View DB Logs
+          </Button>
+        </div>
+        <div className="flex gap-2">
+          <Button variant="outline" onClick={copyErrorReport}>
+            <Copy className="w-4 h-4 mr-2" />
+            Copy Report
+          </Button>
+          <Button onClick={onClose}>Close</Button>
+        </div>
       </div>
     </DialogContent>
   </Dialog>
203
src/components/admin/UnifiedLogSearch.tsx
Normal file
@@ -0,0 +1,203 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Button } from '@/components/ui/button';
import { Badge } from '@/components/ui/badge';
import { Search, Loader2, ExternalLink } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface SearchResult {
  type: 'error' | 'approval' | 'edge' | 'database';
  id: string;
  timestamp: string;
  message: string;
  severity?: string;
  metadata?: Record<string, any>;
}

interface UnifiedLogSearchProps {
  onNavigate: (tab: string, filters: Record<string, string>) => void;
}

export function UnifiedLogSearch({ onNavigate }: UnifiedLogSearchProps) {
  const [searchQuery, setSearchQuery] = useState('');
  const [searchTerm, setSearchTerm] = useState('');

  const { data: results, isLoading } = useQuery({
    queryKey: ['unified-log-search', searchTerm],
    queryFn: async () => {
      if (!searchTerm) return [];

      const results: SearchResult[] = [];

      // Search application errors
      const { data: errors } = await supabase
        .from('request_metadata')
        .select('request_id, created_at, error_type, error_message')
        .or(`request_id.ilike.%${searchTerm}%,error_message.ilike.%${searchTerm}%`)
        .order('created_at', { ascending: false })
        .limit(10);
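      // Caveat: interpolating raw input into these .or() filters means a term containing
      // commas or parentheses can break (or change) the PostgREST filter expression;
      // escaping the search term first would harden both of these queries.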

      if (errors) {
        results.push(...errors.map(e => ({
          type: 'error' as const,
          id: e.request_id,
          timestamp: e.created_at,
          message: e.error_message || 'Unknown error',
          severity: e.error_type || undefined,
        })));
      }

      // Search approval failures
      const { data: approvals } = await supabase
        .from('approval_transaction_metrics')
        .select('id, created_at, error_message, request_id')
        .eq('success', false)
        .or(`request_id.ilike.%${searchTerm}%,error_message.ilike.%${searchTerm}%`)
        .order('created_at', { ascending: false })
        .limit(10);

      if (approvals) {
        results.push(...approvals
          .filter(a => a.created_at)
          .map(a => ({
            type: 'approval' as const,
            id: a.id,
            timestamp: a.created_at!,
            message: a.error_message || 'Approval failed',
            metadata: { request_id: a.request_id },
          })));
      }

      // Sort by timestamp
      results.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());

      return results;
    },
    enabled: !!searchTerm,
  });

  const handleSearch = () => {
    setSearchTerm(searchQuery);
  };

  const getTypeColor = (type: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (type) {
      case 'error': return 'destructive';
      case 'approval': return 'destructive';
      case 'edge': return 'default';
      case 'database': return 'secondary';
      default: return 'outline';
    }
  };

  const getTypeLabel = (type: string) => {
    switch (type) {
      case 'error': return 'Application Error';
      case 'approval': return 'Approval Failure';
      case 'edge': return 'Edge Function';
      case 'database': return 'Database Log';
      default: return type;
    }
  };

  const handleResultClick = (result: SearchResult) => {
    switch (result.type) {
      case 'error':
        onNavigate('errors', { requestId: result.id });
        break;
      case 'approval':
        onNavigate('approvals', { failureId: result.id });
        break;
      case 'edge':
        onNavigate('edge-functions', { search: result.message });
        break;
      case 'database':
        onNavigate('database', { search: result.message });
        break;
    }
  };

  return (
    <Card>
      <CardHeader>
        <CardTitle className="text-lg">Unified Log Search</CardTitle>
      </CardHeader>
      <CardContent className="space-y-4">
        <div className="flex gap-2">
          <div className="relative flex-1">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search across all logs (request ID, error message, trace ID...)"
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
              className="pl-10"
            />
          </div>
          <Button onClick={handleSearch} disabled={!searchQuery || isLoading}>
            {isLoading ? (
              <Loader2 className="w-4 h-4 animate-spin" />
            ) : (
              <Search className="w-4 h-4" />
            )}
          </Button>
        </div>

        {searchTerm && (
          <div className="space-y-2">
            {isLoading ? (
              <div className="flex items-center justify-center py-8">
                <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
              </div>
            ) : results && results.length > 0 ? (
              <>
                <div className="text-sm text-muted-foreground">
                  Found {results.length} results
                </div>
                {results.map((result) => (
                  <Card
                    key={`${result.type}-${result.id}`}
                    className="cursor-pointer hover:bg-muted/50 transition-colors"
                    onClick={() => handleResultClick(result)}
                  >
                    <CardContent className="pt-4 pb-3">
                      <div className="flex items-start justify-between gap-4">
                        <div className="flex-1 space-y-1">
                          <div className="flex items-center gap-2">
                            <Badge variant={getTypeColor(result.type)}>
                              {getTypeLabel(result.type)}
                            </Badge>
                            {result.severity && (
                              <Badge variant="outline" className="text-xs">
                                {result.severity}
                              </Badge>
                            )}
                            <span className="text-xs text-muted-foreground">
                              {format(new Date(result.timestamp), 'PPp')}
                            </span>
                          </div>
                          <p className="text-sm line-clamp-2">{result.message}</p>
                          <code className="text-xs text-muted-foreground">
                            {result.id.slice(0, 16)}...
                          </code>
                        </div>
                        <ExternalLink className="w-4 h-4 text-muted-foreground flex-shrink-0" />
                      </div>
                    </CardContent>
                  </Card>
                ))}
              </>
            ) : (
              <p className="text-center text-muted-foreground py-8">
                No results found for "{searchTerm}"
              </p>
            )}
          </div>
        )}
      </CardContent>
    </Card>
  );
}
@@ -49,7 +49,7 @@ export function AdminSidebar() {
     icon: ScrollText,
   },
   {
-    title: 'Error Monitoring',
+    title: 'Monitoring & Logs',
     url: '/admin/error-monitoring',
     icon: AlertTriangle,
   },
@@ -52,6 +52,31 @@ export function UppyPhotoSubmissionUpload({
   const { user } = useAuth();
   const { toast } = useToast();
+
+  /**
+   * ✅ CRITICAL FIX: Cleanup orphaned Cloudflare images
+   * Called when DB transaction fails after successful uploads
+   */
+  const cleanupOrphanedImages = async (imageIds: string[]) => {
+    if (imageIds.length === 0) return;
+
+    logger.warn('Cleaning up orphaned images', { count: imageIds.length });
+
+    try {
+      await Promise.allSettled(
+        imageIds.map(id =>
+          invokeWithTracking('upload-image', { action: 'delete', imageId: id }, user?.id)
+        )
+      );
+      logger.info('Orphaned images cleaned up', { count: imageIds.length });
+    } catch (error) {
+      // Non-blocking cleanup - log but don't fail
+      logger.error('Failed to cleanup orphaned images', {
+        error: getErrorMessage(error),
+        imageIds
+      });
+    }
+  };

   const handleFilesSelected = (files: File[]) => {
     // Convert files to photo objects with object URLs for preview
     const newPhotos: PhotoWithCaption[] = files.map((file, index) => ({
@@ -424,6 +449,22 @@ export function UppyPhotoSubmissionUpload({
       throw photoSubmissionError || new Error("Failed to create photo submission");
     }
+
+    // ✅ CRITICAL FIX: Create submission_items record for moderation queue
+    const { error: submissionItemError } = await supabase
+      .from('submission_items')
+      .insert({
+        submission_id: submissionData.id,
+        item_type: 'photo',
+        action_type: 'create',
+        status: 'pending',
+        order_index: 0,
+        photo_submission_id: photoSubmissionData.id
+      });
+
+    if (submissionItemError) {
+      throw submissionItemError;
+    }

     // Insert only successful photo items
     const photoItems = successfulPhotos.map((photo, index) => ({
       photo_submission_id: photoSubmissionData.id,
@@ -527,6 +568,13 @@ export function UppyPhotoSubmissionUpload({
   } catch (error: unknown) {
     const errorMsg = sanitizeErrorMessage(error);
+
+    // ✅ CRITICAL FIX: Cleanup orphaned images on failure
+    if (orphanedCloudflareIds.length > 0) {
+      cleanupOrphanedImages(orphanedCloudflareIds).catch(() => {
+        // Non-blocking - log already handled in cleanupOrphanedImages
+      });
+    }

     logger.error('Photo submission failed', {
       error: errorMsg,
       photoCount: photos.length,
@@ -306,75 +306,6 @@ export function useModerationActions(config: ModerationActionsConfig): Moderatio
       action: 'approved' | 'rejected';
       moderatorNotes?: string;
     }) => {
-      // Handle photo submissions
-      if (action === 'approved' && item.submission_type === 'photo') {
-        const { data: photoSubmission, error: fetchError } = await supabase
-          .from('photo_submissions')
-          .select(`
-            *,
-            items:photo_submission_items(*),
-            submission:content_submissions!inner(user_id)
-          `)
-          .eq('submission_id', item.id)
-          .single();
-
-        // Add explicit error handling
-        if (fetchError) {
-          throw new Error(`Failed to fetch photo submission: ${fetchError.message}`);
-        }
-
-        if (!photoSubmission) {
-          throw new Error('Photo submission not found');
-        }
-
-        // Type assertion with validation
-        const typedPhotoSubmission = photoSubmission as {
-          id: string;
-          entity_id: string;
-          entity_type: string;
-          items: Array<{
-            id: string;
-            cloudflare_image_id: string;
-            cloudflare_image_url: string;
-            caption?: string;
-            title?: string;
-            date_taken?: string;
-            date_taken_precision?: string;
-            order_index: number;
-          }>;
-          submission: { user_id: string };
-        };
-
-        // Validate required fields
-        if (!typedPhotoSubmission.items || typedPhotoSubmission.items.length === 0) {
-          throw new Error('No photo items found in submission');
-        }
-
-        const { data: existingPhotos } = await supabase
-          .from('photos')
-          .select('id')
-          .eq('submission_id', item.id);
-
-        if (!existingPhotos || existingPhotos.length === 0) {
-          const photoRecords = typedPhotoSubmission.items.map((photoItem) => ({
-            entity_id: typedPhotoSubmission.entity_id,
-            entity_type: typedPhotoSubmission.entity_type,
-            cloudflare_image_id: photoItem.cloudflare_image_id,
-            cloudflare_image_url: photoItem.cloudflare_image_url,
-            title: photoItem.title || null,
-            caption: photoItem.caption || null,
-            date_taken: photoItem.date_taken || null,
-            order_index: photoItem.order_index,
-            submission_id: item.id,
-            submitted_by: typedPhotoSubmission.submission?.user_id,
-            approved_by: user?.id,
-            approved_at: new Date().toISOString(),
-          }));
-
-          await supabase.from('photos').insert(photoRecords);
-        }
-      }
-
       // Check for submission items
       const { data: submissionItems } = await supabase
         .from('submission_items')
@@ -443,15 +374,61 @@ export function useModerationActions(config: ModerationActionsConfig): Moderatio
       });
       return;
     } else if (action === 'rejected') {
-      await supabase
-        .from('submission_items')
-        .update({
-          status: 'rejected',
-          rejection_reason: moderatorNotes || 'Parent submission rejected',
-          updated_at: new Date().toISOString(),
-        })
-        .eq('submission_id', item.id)
-        .eq('status', 'pending');
+      // Use atomic rejection transaction for submission items
+      const {
+        data,
+        error,
+        requestId,
+        attempts,
+        cached,
+        conflictRetries
+      } = await invokeWithResilience(
+        'process-selective-rejection',
+        {
+          itemIds: submissionItems.map((i) => i.id),
+          submissionId: item.id,
+          rejectionReason: moderatorNotes || 'Parent submission rejected',
+        },
+        'rejection',
+        submissionItems.map((i) => i.id),
+        config.user?.id,
+        3, // Max 3 conflict retries
+        30000 // 30s timeout
+      );
+
+      // Log retry attempts
+      if (attempts && attempts > 1) {
+        logger.log(`Rejection succeeded after ${attempts} network retries`, {
+          submissionId: item.id,
+          requestId,
+        });
+      }
+
+      if (conflictRetries && conflictRetries > 0) {
+        logger.log(`Resolved 409 conflict after ${conflictRetries} retries`, {
+          submissionId: item.id,
+          requestId,
+          cached: !!cached,
+        });
+      }
+
+      if (error) {
+        // Enhance error with context for better UI feedback
+        if (is409Conflict(error)) {
+          throw new Error(
+            'This rejection is being processed by another request. Please wait and try again if it does not complete.'
+          );
+        }
+        throw error;
+      }
+
+      toast({
+        title: cached ? 'Cached Result' : 'Submission Rejected',
+        description: cached
+          ? `Returned cached result for ${submissionItems.length} item(s)`
+          : `Successfully rejected ${submissionItems.length} item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
+      });
+      return;
     }
   }
@@ -6345,16 +6345,52 @@ export type Database = {
       monitor_ban_attempts: { Args: never; Returns: undefined }
       monitor_failed_submissions: { Args: never; Returns: undefined }
       monitor_slow_approvals: { Args: never; Returns: undefined }
-      process_approval_transaction: {
-        Args: {
-          p_item_ids: string[]
-          p_moderator_id: string
-          p_request_id?: string
-          p_submission_id: string
-          p_submitter_id: string
-        }
-        Returns: Json
-      }
+      process_approval_transaction:
+        | {
+            Args: {
+              p_item_ids: string[]
+              p_moderator_id: string
+              p_request_id?: string
+              p_submission_id: string
+              p_submitter_id: string
+            }
+            Returns: Json
+          }
+        | {
+            Args: {
+              p_item_ids: string[]
+              p_moderator_id: string
+              p_parent_span_id?: string
+              p_request_id?: string
+              p_submission_id: string
+              p_submitter_id: string
+              p_trace_id?: string
+            }
+            Returns: Json
+          }
+      process_rejection_transaction:
+        | {
+            Args: {
+              p_item_ids: string[]
+              p_moderator_id: string
+              p_rejection_reason: string
+              p_request_id?: string
+              p_submission_id: string
+            }
+            Returns: Json
+          }
+        | {
+            Args: {
+              p_item_ids: string[]
+              p_moderator_id: string
+              p_parent_span_id?: string
+              p_rejection_reason: string
+              p_request_id?: string
+              p_submission_id: string
+              p_trace_id?: string
+            }
+            Returns: Json
+          }
       release_expired_locks: { Args: never; Returns: number }
       release_submission_lock: {
         Args: { moderator_id: string; submission_id: string }
@@ -33,7 +33,7 @@ export async function invokeWithTracking<T = any>(
   timeout: number = 30000,
   retryOptions?: Partial<RetryOptions>,
   customHeaders?: Record<string, string>
-): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number }> {
+): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number; traceId?: string }> {
   // Configure retry options with defaults
   const effectiveRetryOptions: RetryOptions = {
     maxAttempts: retryOptions?.maxAttempts ?? 3,
@@ -75,11 +75,30 @@ export async function invokeWithTracking<T = any>(
   const controller = new AbortController();
   const timeoutId = setTimeout(() => controller.abort(), timeout);
+
+  // Generate W3C Trace Context header
+  const effectiveTraceId = context.traceId || crypto.randomUUID();
+  const spanId = crypto.randomUUID();
+  const traceparent = `00-${effectiveTraceId}-${spanId}-01`;
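+  // traceparent layout: version ("00") - trace-id - parent-id - flags ("01" = sampled).
+  // One caveat: the W3C spec expects a 32-hex-char trace-id and a 16-hex-char parent-id,
+  // so raw UUIDs (36 chars, dashed) may be rejected by strict parsers; stripping the
+  // dashes and shortening the span id would make these values fully conformant.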
+
+  // Add breadcrumb with trace context
+  breadcrumb.apiCall(
+    `/functions/${functionName}`,
+    'POST',
+    undefined
+  );
+
   try {
     const { data, error } = await supabase.functions.invoke<T>(functionName, {
-      body: { ...payload, clientRequestId: context.requestId },
+      body: {
+        ...payload,
+        clientRequestId: context.requestId,
+        traceId: effectiveTraceId,
+      },
       signal: controller.signal,
-      headers: customHeaders,
+      headers: {
+        ...customHeaders,
+        'traceparent': traceparent,
+      },
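+      // The trace id travels both in the request body (traceId) and in the standard
+      // traceparent header, so the edge function can correlate via either one.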
     });

     clearTimeout(timeoutId);
@@ -103,7 +122,15 @@ export async function invokeWithTracking<T = any>(
       }
     );

-    return { data: result, error: null, requestId, duration, attempts: attemptCount, status: 200 };
+    return {
+      data: result,
+      error: null,
+      requestId,
+      duration,
+      attempts: attemptCount,
+      status: 200,
+      traceId: effectiveTraceId,
+    };
   } catch (error: unknown) {
     // Handle AbortError specifically
     if (error instanceof Error && error.name === 'AbortError') {
@@ -117,20 +144,22 @@ export async function invokeWithTracking<T = any>(
         duration: timeout,
         attempts: attemptCount,
         status: 408,
+        traceId: undefined,
       };
     }

     const errorMessage = getErrorMessage(error);
     return {
       data: null,
       error: { message: errorMessage, status: (error as any)?.status },
       requestId: 'unknown',
       duration: 0,
       attempts: attemptCount,
       status: (error as any)?.status,
+      traceId: undefined,
     };
   }
 }

 /**
  * Invoke multiple edge functions in parallel with batch tracking

@@ -57,126 +57,6 @@ export interface ModerationActionResult {
   shouldRemoveFromQueue: boolean;
 }
-
-/**
- * Configuration for photo approval
- */
-interface PhotoApprovalConfig {
-  submissionId: string;
-  moderatorId: string;
-  moderatorNotes?: string;
-}
-
-/**
- * Approve a photo submission
- *
- * Creates photo records in the database and updates submission status.
- * Handles both new approvals and re-approvals (where photos already exist).
- *
- * @param supabase - Supabase client
- * @param config - Photo approval configuration
- * @returns Action result with success status and message
- */
-export async function approvePhotoSubmission(
-  supabase: SupabaseClient,
-  config: PhotoApprovalConfig
-): Promise<ModerationActionResult> {
-  try {
-    // Fetch photo submission from relational tables
-    const { data: photoSubmission, error: fetchError } = await supabase
-      .from('photo_submissions')
-      .select(`
-        *,
-        items:photo_submission_items(*),
-        submission:content_submissions!inner(user_id, status)
-      `)
-      .eq('submission_id', config.submissionId)
-      .single();
-
-    if (fetchError || !photoSubmission) {
-      throw new Error('Failed to fetch photo submission data');
-    }
-
-    if (!photoSubmission.items || photoSubmission.items.length === 0) {
-      throw new Error('No photos found in submission');
-    }
-
-    // Check if photos already exist for this submission (re-approval case)
-    const { data: existingPhotos } = await supabase
-      .from('photos')
-      .select('id')
-      .eq('submission_id', config.submissionId);
-
-    if (!existingPhotos || existingPhotos.length === 0) {
-      // Create new photo records from photo_submission_items
-      const photoRecords = photoSubmission.items.map((item: any) => ({
-        entity_id: photoSubmission.entity_id,
-        entity_type: photoSubmission.entity_type,
-        cloudflare_image_id: item.cloudflare_image_id,
-        cloudflare_image_url: item.cloudflare_image_url,
-        title: item.title || null,
-        caption: item.caption || null,
-        date_taken: item.date_taken || null,
-        order_index: item.order_index,
-        submission_id: photoSubmission.submission_id,
-        submitted_by: photoSubmission.submission?.user_id,
-        approved_by: config.moderatorId,
-        approved_at: new Date().toISOString(),
-      }));
-
-      const { error: insertError } = await supabase
-        .from('photos')
-        .insert(photoRecords);
-
-      if (insertError) {
-        throw insertError;
-      }
-    }
-
-    // Update submission status
-    const { error: updateError } = await supabase
-      .from('content_submissions')
-      .update({
-        status: 'approved' as const,
-        reviewer_id: config.moderatorId,
-        reviewed_at: new Date().toISOString(),
-        reviewer_notes: config.moderatorNotes,
-      })
-      .eq('id', config.submissionId);
-
-    if (updateError) {
-      throw updateError;
-    }
-
-    return {
-      success: true,
-      message: `Successfully approved and published ${photoSubmission.items.length} photo(s)`,
-      shouldRemoveFromQueue: true,
-    };
-  } catch (error: unknown) {
-    handleError(error, {
-      action: 'Approve Photo Submission',
-      userId: config.moderatorId,
-      metadata: { submissionId: config.submissionId }
-    });
-    return {
-      success: false,
-      message: 'Failed to approve photo submission',
-      error: error instanceof Error ? error : new Error(getErrorMessage(error)),
-      shouldRemoveFromQueue: false,
-    };
-  }
-}
-
-/**
- * Approve a submission with submission_items
- *
- * Uses the edge function to process all pending submission items.
- *
- * @param supabase - Supabase client
- * @param submissionId - Submission ID
- * @param itemIds - Array of item IDs to approve
- * @returns Action result
- */
 /**
  * Approve submission items using atomic transaction RPC.
  *
@@ -238,194 +118,6 @@ export async function approveSubmissionItems(
   }
 }
-
-/**
- * Reject a submission with submission_items
- *
- * Cascades rejection to all pending items.
- *
- * @param supabase - Supabase client
- * @param submissionId - Submission ID
- * @param rejectionReason - Reason for rejection
- * @returns Action result
- */
-export async function rejectSubmissionItems(
-  supabase: SupabaseClient,
-  submissionId: string,
-  rejectionReason?: string
-): Promise<ModerationActionResult> {
-  try {
-    const { error: rejectError } = await supabase
-      .from('submission_items')
-      .update({
-        status: 'rejected' as const,
-        rejection_reason: rejectionReason || 'Parent submission rejected',
-        updated_at: new Date().toISOString(),
-      })
-      .eq('submission_id', submissionId)
-      .eq('status', 'pending');
-
-    if (rejectError) {
-      handleError(rejectError, {
-        action: 'Reject Submission Items (Cascade)',
-        metadata: { submissionId }
-      });
-    }
-
-    return {
-      success: true,
-      message: 'Submission items rejected',
-      shouldRemoveFromQueue: false, // Parent rejection will handle removal
-    };
-  } catch (error: unknown) {
-    handleError(error, {
-      action: 'Reject Submission Items',
-      metadata: { submissionId }
-    });
-    return {
-      success: false,
-      message: 'Failed to reject submission items',
-      error: error instanceof Error ? error : new Error(getErrorMessage(error)),
-      shouldRemoveFromQueue: false,
-    };
-  }
-}
-
-/**
- * Configuration for standard moderation actions
- */
-export interface ModerationConfig {
-  item: ModerationItem;
-  action: 'approved' | 'rejected';
-  moderatorId: string;
-  moderatorNotes?: string;
-}
-
-/**
- * Perform a standard moderation action (approve/reject)
- *
- * Updates the submission or review status in the database.
- * Handles both content_submissions and reviews.
- *
- * @param supabase - Supabase client
- * @param config - Moderation configuration
- * @returns Action result
- */
-export async function performModerationAction(
-  supabase: SupabaseClient,
-  config: ModerationConfig
-): Promise<ModerationActionResult> {
-  const { item, action, moderatorId, moderatorNotes } = config;
-
-  try {
-    // Handle photo submissions specially
-    if (
-      action === 'approved' &&
-      item.type === 'content_submission' &&
-      item.submission_type === 'photo'
-    ) {
-      return await approvePhotoSubmission(supabase, {
-        submissionId: item.id,
-        moderatorId,
-        moderatorNotes,
-      });
-    }
-
-    // Check if this submission has submission_items
-    if (item.type === 'content_submission') {
-      const { data: submissionItems, error: itemsError } = await supabase
-        .from('submission_items')
-        .select('id, status')
-        .eq('submission_id', item.id)
-        .in('status', ['pending', 'rejected']);
-
-      if (!itemsError && submissionItems && submissionItems.length > 0) {
-        if (action === 'approved') {
-          return await approveSubmissionItems(
-            supabase,
-            item.id,
-            submissionItems.map(i => i.id)
-          );
-        } else if (action === 'rejected') {
-          await rejectSubmissionItems(supabase, item.id, moderatorNotes);
-        }
-      }
-    }
-
-    // Standard moderation flow - Build update object with type-appropriate fields
-    let error: any = null;
-    let data: any = null;
-
-    // Use type-safe table queries based on item type
-    if (item.type === 'review') {
-      const reviewUpdate: {
-        moderation_status: 'approved' | 'rejected' | 'pending';
-        moderated_at: string;
-        moderated_by: string;
-        reviewer_notes?: string;
-      } = {
-        moderation_status: action,
-        moderated_at: new Date().toISOString(),
-        moderated_by: moderatorId,
-        ...(moderatorNotes && { reviewer_notes: moderatorNotes }),
-      };
-
-      const result = await createTableQuery('reviews')
-        .update(reviewUpdate)
-        .eq('id', item.id)
-        .select();
-      error = result.error;
-      data = result.data;
-    } else {
-      const submissionUpdate: {
-        status: 'approved' | 'rejected' | 'pending';
-        reviewed_at: string;
-        reviewer_id: string;
-        reviewer_notes?: string;
-      } = {
-        status: action,
-        reviewed_at: new Date().toISOString(),
-        reviewer_id: moderatorId,
-        ...(moderatorNotes && { reviewer_notes: moderatorNotes }),
|
|
||||||
};
|
|
||||||
|
|
||||||
const result = await createTableQuery('content_submissions')
|
|
||||||
.update(submissionUpdate)
|
|
||||||
.eq('id', item.id)
|
|
||||||
.select();
|
|
||||||
error = result.error;
|
|
||||||
data = result.data;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (error) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if the update actually affected any rows
|
|
||||||
if (!data || data.length === 0) {
|
|
||||||
throw new Error(
|
|
||||||
'Failed to update item - no rows affected. You might not have permission to moderate this content.'
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
message: `Content ${action}`,
|
|
||||||
shouldRemoveFromQueue: action === 'approved' || action === 'rejected',
|
|
||||||
};
|
|
||||||
} catch (error: unknown) {
|
|
||||||
handleError(error, {
|
|
||||||
action: `${config.action === 'approved' ? 'Approve' : 'Reject'} Content`,
|
|
||||||
userId: config.moderatorId,
|
|
||||||
metadata: { itemType: item.type, itemId: item.id }
|
|
||||||
});
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
message: `Failed to ${config.action} content`,
|
|
||||||
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
|
|
||||||
shouldRemoveFromQueue: false,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Configuration for submission deletion
|
* Configuration for submission deletion
|
||||||
|
|||||||
@@ -28,16 +28,12 @@ export type { ResolvedEntityNames } from './entities';
 
 // Moderation actions
 export {
-  approvePhotoSubmission,
   approveSubmissionItems,
-  rejectSubmissionItems,
-  performModerationAction,
   deleteSubmission,
 } from './actions';
 
 export type {
   ModerationActionResult,
-  ModerationConfig,
   DeleteSubmissionConfig,
 } from './actions';
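With this change, approvePhotoSubmission, rejectSubmissionItems, performModerationAction, and ModerationConfig leave the public moderation API; approval and rejection now flow through the edge functions added later in this diff. A minimal sketch of the surviving import surface (the '@/lib/moderation' alias is an assumption inferred from the index.ts path, not something this commit shows):

// Hypothetical consumer after this commit: only the retained exports remain importable.
import { approveSubmissionItems, deleteSubmission } from '@/lib/moderation';
import type { ModerationActionResult, DeleteSubmissionConfig } from '@/lib/moderation';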
150
src/lib/spanVisualizer.ts
Normal file
@@ -0,0 +1,150 @@
/**
 * Span Visualizer
 * Reconstructs span trees from logs for debugging distributed traces
 */

import type { Span } from '@/types/tracing';

export interface SpanTree {
  span: Span;
  children: SpanTree[];
  totalDuration: number;
  selfDuration: number;
}

/**
 * Build span tree from flat span logs
 */
export function buildSpanTree(spans: Span[]): SpanTree | null {
  const spanMap = new Map<string, Span>();
  const childrenMap = new Map<string, Span[]>();

  // Index spans
  for (const span of spans) {
    spanMap.set(span.spanId, span);

    if (span.parentSpanId) {
      if (!childrenMap.has(span.parentSpanId)) {
        childrenMap.set(span.parentSpanId, []);
      }
      childrenMap.get(span.parentSpanId)!.push(span);
    }
  }

  // Find root span
  const rootSpan = spans.find(s => !s.parentSpanId);
  if (!rootSpan) return null;

  // Build tree recursively
  function buildTree(span: Span): SpanTree {
    const children = childrenMap.get(span.spanId) || [];
    const childTrees = children.map(buildTree);

    const totalDuration = span.duration || 0;
    const childrenDuration = childTrees.reduce((sum, child) => sum + child.totalDuration, 0);
    const selfDuration = totalDuration - childrenDuration;

    return {
      span,
      children: childTrees,
      totalDuration,
      selfDuration,
    };
  }

  return buildTree(rootSpan);
}

/**
 * Format span tree as ASCII art
 */
export function formatSpanTree(tree: SpanTree, indent: number = 0): string {
  const prefix = ' '.repeat(indent);
  const status = tree.span.status === 'error' ? '❌' : tree.span.status === 'ok' ? '✅' : '⏳';
  const line = `${prefix}${status} ${tree.span.name} (${tree.span.duration}ms / self: ${tree.selfDuration}ms)`;

  const childLines = tree.children.map(child => formatSpanTree(child, indent + 1));

  return [line, ...childLines].join('\n');
}

/**
 * Calculate span statistics for a tree
 */
export function calculateSpanStats(tree: SpanTree): {
  totalSpans: number;
  errorCount: number;
  maxDepth: number;
  totalDuration: number;
  criticalPath: string[];
} {
  let totalSpans = 0;
  let errorCount = 0;
  let maxDepth = 0;

  function traverse(node: SpanTree, depth: number) {
    totalSpans++;
    if (node.span.status === 'error') errorCount++;
    maxDepth = Math.max(maxDepth, depth);

    node.children.forEach(child => traverse(child, depth + 1));
  }

  traverse(tree, 0);

  // Find critical path (longest duration path)
  function findCriticalPath(node: SpanTree): string[] {
    if (node.children.length === 0) {
      return [node.span.name];
    }

    const longestChild = node.children.reduce((longest, child) =>
      child.totalDuration > longest.totalDuration ? child : longest
    );

    return [node.span.name, ...findCriticalPath(longestChild)];
  }

  return {
    totalSpans,
    errorCount,
    maxDepth,
    totalDuration: tree.totalDuration,
    criticalPath: findCriticalPath(tree),
  };
}

/**
 * Extract all events from a span tree
 */
export function extractAllEvents(tree: SpanTree): Array<{
  spanName: string;
  eventName: string;
  timestamp: number;
  attributes?: Record<string, unknown>;
}> {
  const events: Array<{
    spanName: string;
    eventName: string;
    timestamp: number;
    attributes?: Record<string, unknown>;
  }> = [];

  function traverse(node: SpanTree) {
    node.span.events.forEach(event => {
      events.push({
        spanName: node.span.name,
        eventName: event.name,
        timestamp: event.timestamp,
        attributes: event.attributes,
      });
    });

    node.children.forEach(child => traverse(child));
  }

  traverse(tree);

  // Sort by timestamp
  return events.sort((a, b) => a.timestamp - b.timestamp);
}
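To make the intended use of this new module concrete, here is a small sketch; the span values are made up, the calls are the exports above:

// Two invented spans: a root SERVER span and a DATABASE child on the same trace.
import { buildSpanTree, formatSpanTree, calculateSpanStats } from '@/lib/spanVisualizer';
import type { Span } from '@/types/tracing';

const spans: Span[] = [
  { spanId: 'a', traceId: 't1', name: 'handler', kind: 'SERVER', startTime: 0, endTime: 50, duration: 50, attributes: {}, events: [], status: 'ok' },
  { spanId: 'b', traceId: 't1', parentSpanId: 'a', name: 'rpc', kind: 'DATABASE', startTime: 5, endTime: 45, duration: 40, attributes: {}, events: [], status: 'ok' },
];

const tree = buildSpanTree(spans);
if (tree) {
  console.log(formatSpanTree(tree));     // "✅ handler (50ms / self: 10ms)" with the child indented below
  console.log(calculateSpanStats(tree)); // { totalSpans: 2, errorCount: 0, maxDepth: 1, totalDuration: 50, criticalPath: ['handler', 'rpc'] }
}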
@@ -1,6 +1,7 @@
 import { supabase } from '@/lib/supabaseClient';
 import { handleError, handleNonCriticalError, getErrorMessage } from './errorHandler';
 import { extractCloudflareImageId } from './cloudflareImageUtils';
+import { invokeWithTracking } from './edgeFunctionTracking';
 
 // Core submission item interface with dependencies
 // NOTE: item_data and original_data use `unknown` because they contain dynamic structures
@@ -1367,32 +1368,24 @@ export async function rejectSubmissionItems(
     }
   }
 
-  // Update all items to rejected status
-  const updates = Array.from(itemsToReject).map(async (itemId) => {
-    const { error } = await supabase
-      .from('submission_items')
-      .update({
-        status: 'rejected' as const,
-        rejection_reason: reason,
-        updated_at: new Date().toISOString(),
-      })
-      .eq('id', itemId);
-
-    if (error) {
-      handleNonCriticalError(error, {
-        action: 'Reject Submission Item',
-        metadata: { itemId }
-      });
-      throw error;
-    }
-  });
-
-  await Promise.all(updates);
-
-  // Update parent submission status
   const submissionId = items[0]?.submission_id;
-  if (submissionId) {
-    await updateSubmissionStatusAfterRejection(submissionId);
-  }
+  if (!submissionId) {
+    throw new Error('Cannot reject items: missing submission ID');
+  }
+
+  // Use atomic edge function for rejection
+  const { data, error } = await invokeWithTracking(
+    'process-selective-rejection',
+    {
+      itemIds: Array.from(itemsToReject),
+      submissionId,
+      rejectionReason: reason,
+    },
+    userId
+  );
+
+  if (error) {
+    throw new Error(`Failed to reject items: ${error.message}`);
+  }
 }
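One thing to note: the call above passes only itemIds, submissionId, and rejectionReason, while the RejectionRequest interface in the new edge function (later in this diff) also expects an idempotencyKey; presumably invokeWithTracking supplies it. Purely to illustrate the idea, a sketch of deriving a deterministic key so that retries of the same rejection hit the idempotency cache; the helper name and hashing choice are assumptions, not part of this commit:

// Hypothetical helper: derive a stable idempotency key from the rejection inputs.
// Same submission + same item set + same reason => same key across retries.
async function deriveIdempotencyKey(
  submissionId: string,
  itemIds: string[],
  reason: string
): Promise<string> {
  const canonical = JSON.stringify({ submissionId, itemIds: [...itemIds].sort(), reason });
  const digest = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(canonical));
  // Hex-encode the digest to get a compact, URL-safe key
  return Array.from(new Uint8Array(digest))
    .map(b => b.toString(16).padStart(2, '0'))
    .join('');
}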
@@ -13,6 +13,10 @@ import { ErrorDetailsModal } from '@/components/admin/ErrorDetailsModal';
 import { ApprovalFailureModal } from '@/components/admin/ApprovalFailureModal';
 import { ErrorAnalytics } from '@/components/admin/ErrorAnalytics';
 import { PipelineHealthAlerts } from '@/components/admin/PipelineHealthAlerts';
+import { EdgeFunctionLogs } from '@/components/admin/EdgeFunctionLogs';
+import { DatabaseLogs } from '@/components/admin/DatabaseLogs';
+import { UnifiedLogSearch } from '@/components/admin/UnifiedLogSearch';
+import TraceViewer from './TraceViewer';
 import { format } from 'date-fns';
 
 // Helper to calculate date threshold for filtering
@@ -59,6 +63,14 @@ export default function ErrorMonitoring() {
   const [searchTerm, setSearchTerm] = useState('');
   const [errorTypeFilter, setErrorTypeFilter] = useState<string>('all');
   const [dateRange, setDateRange] = useState<'1h' | '24h' | '7d' | '30d'>('24h');
+  const [activeTab, setActiveTab] = useState('errors');
+
+  const handleNavigate = (tab: string, filters: Record<string, string>) => {
+    setActiveTab(tab);
+    if (filters.requestId) {
+      setSearchTerm(filters.requestId);
+    }
+  };
 
   // Fetch recent errors
   const { data: errors, isLoading, refetch, isFetching } = useQuery({
@@ -170,8 +182,8 @@ export default function ErrorMonitoring() {
     <div className="space-y-6">
       <div className="flex justify-between items-center">
         <div>
-          <h1 className="text-3xl font-bold tracking-tight">Error Monitoring</h1>
-          <p className="text-muted-foreground">Track and analyze application errors</p>
+          <h1 className="text-3xl font-bold tracking-tight">Monitoring & Logs</h1>
+          <p className="text-muted-foreground">Unified monitoring hub for errors, logs, and distributed traces</p>
         </div>
         <RefreshButton
           onRefresh={async () => { await refetch(); }}
@@ -181,17 +193,23 @@ export default function ErrorMonitoring() {
         />
       </div>
 
+      {/* Unified Log Search */}
+      <UnifiedLogSearch onNavigate={handleNavigate} />
+
       {/* Pipeline Health Alerts */}
       <PipelineHealthAlerts />
 
       {/* Analytics Section */}
       <ErrorAnalytics errorSummary={errorSummary} approvalMetrics={approvalMetrics} />
 
-      {/* Tabs for Errors and Approval Failures */}
-      <Tabs defaultValue="errors" className="w-full">
-        <TabsList>
+      {/* Tabs for All Log Types */}
+      <Tabs value={activeTab} onValueChange={setActiveTab} className="w-full">
+        <TabsList className="grid w-full grid-cols-5">
           <TabsTrigger value="errors">Application Errors</TabsTrigger>
           <TabsTrigger value="approvals">Approval Failures</TabsTrigger>
+          <TabsTrigger value="edge-functions">Edge Functions</TabsTrigger>
+          <TabsTrigger value="database">Database Logs</TabsTrigger>
+          <TabsTrigger value="traces">Distributed Traces</TabsTrigger>
         </TabsList>
 
         <TabsContent value="errors" className="space-y-4">
@@ -350,6 +368,18 @@ export default function ErrorMonitoring() {
             </CardContent>
           </Card>
         </TabsContent>
 
+        <TabsContent value="edge-functions">
+          <EdgeFunctionLogs />
+        </TabsContent>
+
+        <TabsContent value="database">
+          <DatabaseLogs />
+        </TabsContent>
+
+        <TabsContent value="traces">
+          <TraceViewer />
+        </TabsContent>
+
       </Tabs>
     </div>
255
src/pages/admin/TraceViewer.tsx
Normal file
@@ -0,0 +1,255 @@
import { useState } from 'react';
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Button } from '@/components/ui/button';
import { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '@/components/ui/accordion';
import { Alert, AlertDescription } from '@/components/ui/alert';
import { Badge } from '@/components/ui/badge';
import { buildSpanTree, formatSpanTree, calculateSpanStats, extractAllEvents } from '@/lib/spanVisualizer';
import type { Span } from '@/types/tracing';
import type { SpanTree } from '@/lib/spanVisualizer';

/**
 * Admin Trace Viewer
 *
 * Visual tool for debugging distributed traces across the approval pipeline.
 * Reconstructs and displays span hierarchies from edge function logs.
 */
export default function TraceViewer() {
  const [traceId, setTraceId] = useState('');
  const [spans, setSpans] = useState<Span[]>([]);
  const [tree, setTree] = useState<SpanTree | null>(null);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const loadTrace = async () => {
    if (!traceId.trim()) {
      setError('Please enter a trace ID');
      return;
    }

    setIsLoading(true);
    setError(null);

    try {
      // TODO: Replace with actual edge function log query
      // This would need an edge function that queries Supabase logs
      // For now, using mock data structure
      const mockSpans: Span[] = [
        {
          spanId: 'root-1',
          traceId,
          name: 'process-selective-approval',
          kind: 'SERVER',
          startTime: Date.now() - 5000,
          endTime: Date.now(),
          duration: 5000,
          attributes: {
            'http.method': 'POST',
            'user.id': 'user-123',
            'submission.id': 'sub-456',
          },
          events: [
            { timestamp: Date.now() - 4900, name: 'authentication_start' },
            { timestamp: Date.now() - 4800, name: 'authentication_success' },
            { timestamp: Date.now() - 4700, name: 'validation_complete' },
          ],
          status: 'ok',
        },
        {
          spanId: 'child-1',
          traceId,
          parentSpanId: 'root-1',
          name: 'process_approval_transaction',
          kind: 'DATABASE',
          startTime: Date.now() - 4500,
          endTime: Date.now() - 500,
          duration: 4000,
          attributes: {
            'db.operation': 'rpc',
            'submission.id': 'sub-456',
          },
          events: [
            { timestamp: Date.now() - 4400, name: 'rpc_call_start' },
            { timestamp: Date.now() - 600, name: 'rpc_call_success' },
          ],
          status: 'ok',
        },
      ];

      setSpans(mockSpans);
      const builtTree = buildSpanTree(mockSpans);
      setTree(builtTree);

      if (!builtTree) {
        setError('No root span found for this trace ID');
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load trace');
    } finally {
      setIsLoading(false);
    }
  };

  const stats = tree ? calculateSpanStats(tree) : null;
  const events = tree ? extractAllEvents(tree) : [];

  return (
    <div className="container mx-auto p-6 space-y-6">
      <div>
        <h1 className="text-3xl font-bold mb-2">Distributed Trace Viewer</h1>
        <p className="text-muted-foreground">
          Debug moderation pipeline execution by visualizing span hierarchies
        </p>
      </div>

      <Card>
        <CardHeader>
          <CardTitle>Load Trace</CardTitle>
          <CardDescription>
            Enter a trace ID from edge function logs to visualize the execution tree
          </CardDescription>
        </CardHeader>
        <CardContent>
          <div className="flex gap-2">
            <Input
              value={traceId}
              onChange={(e) => setTraceId(e.target.value)}
              placeholder="Enter trace ID (e.g., abc-123-def-456)"
              className="flex-1"
            />
            <Button onClick={loadTrace} disabled={isLoading}>
              {isLoading ? 'Loading...' : 'Load Trace'}
            </Button>
          </div>

          {error && (
            <Alert variant="destructive" className="mt-4">
              <AlertDescription>{error}</AlertDescription>
            </Alert>
          )}
        </CardContent>
      </Card>

      {tree && stats && (
        <>
          <Card>
            <CardHeader>
              <CardTitle>Trace Statistics</CardTitle>
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
                <div>
                  <div className="text-sm text-muted-foreground">Total Duration</div>
                  <div className="text-2xl font-bold">{stats.totalDuration}ms</div>
                </div>
                <div>
                  <div className="text-sm text-muted-foreground">Total Spans</div>
                  <div className="text-2xl font-bold">{stats.totalSpans}</div>
                </div>
                <div>
                  <div className="text-sm text-muted-foreground">Max Depth</div>
                  <div className="text-2xl font-bold">{stats.maxDepth}</div>
                </div>
                <div>
                  <div className="text-sm text-muted-foreground">Errors</div>
                  <div className="text-2xl font-bold text-destructive">{stats.errorCount}</div>
                </div>
              </div>

              <div className="mt-4">
                <div className="text-sm text-muted-foreground mb-2">Critical Path (Longest Duration):</div>
                <div className="flex gap-2 flex-wrap">
                  {stats.criticalPath.map((spanName, i) => (
                    <Badge key={i} variant="secondary">
                      {spanName}
                    </Badge>
                  ))}
                </div>
              </div>
            </CardContent>
          </Card>

          <Card>
            <CardHeader>
              <CardTitle>Span Tree</CardTitle>
              <CardDescription>
                Hierarchical view of span execution with timing breakdown
              </CardDescription>
            </CardHeader>
            <CardContent>
              <pre className="bg-muted p-4 rounded-lg overflow-x-auto text-sm">
                {formatSpanTree(tree)}
              </pre>
            </CardContent>
          </Card>

          <Card>
            <CardHeader>
              <CardTitle>Events Timeline</CardTitle>
              <CardDescription>
                Chronological list of all events across all spans
              </CardDescription>
            </CardHeader>
            <CardContent>
              <div className="space-y-2">
                {events.map((event, i) => (
                  <div key={i} className="flex gap-2 text-sm border-l-2 border-primary pl-4 py-1">
                    <Badge variant="outline">{event.spanName}</Badge>
                    <span className="text-muted-foreground">→</span>
                    <span className="font-medium">{event.eventName}</span>
                    <span className="text-muted-foreground ml-auto">
                      {new Date(event.timestamp).toISOString()}
                    </span>
                  </div>
                ))}
              </div>
            </CardContent>
          </Card>

          <Card>
            <CardHeader>
              <CardTitle>Span Details</CardTitle>
              <CardDescription>
                Detailed breakdown of each span with attributes and events
              </CardDescription>
            </CardHeader>
            <CardContent>
              <Accordion type="single" collapsible className="w-full">
                {spans.map((span) => (
                  <AccordionItem key={span.spanId} value={span.spanId}>
                    <AccordionTrigger>
                      <div className="flex items-center gap-2">
                        <Badge variant={span.status === 'error' ? 'destructive' : 'default'}>
                          {span.kind}
                        </Badge>
                        <span>{span.name}</span>
                        <span className="text-muted-foreground ml-2">
                          ({span.duration}ms)
                        </span>
                      </div>
                    </AccordionTrigger>
                    <AccordionContent>
                      <pre className="bg-muted p-4 rounded-lg overflow-x-auto text-xs">
                        {JSON.stringify(span, null, 2)}
                      </pre>
                    </AccordionContent>
                  </AccordionItem>
                ))}
              </Accordion>
            </CardContent>
          </Card>
        </>
      )}

      {!tree && !isLoading && !error && (
        <Alert>
          <AlertDescription>
            Enter a trace ID to visualize the distributed trace. You can find trace IDs in edge function logs
            under the "Span completed" messages.
          </AlertDescription>
        </Alert>
      )}
    </div>
  );
}
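The loadTrace above is stubbed with mock spans (see the TODO). One plausible shape for the real query, assuming a purpose-built edge function, here hypothetically named query-trace-logs, that returns { spans: Span[] } parsed from "Span completed" log lines matching the given trace ID; the function name and the supabase client import are assumptions, nothing below exists in this commit:

// Hypothetical replacement for the mock block inside loadTrace().
// Assumes: import { supabase } from '@/lib/supabaseClient';
const { data, error: fnError } = await supabase.functions.invoke('query-trace-logs', {
  body: { traceId },
});
if (fnError) throw fnError;
const fetchedSpans: Span[] = data?.spans ?? [];
setSpans(fetchedSpans);
setTree(buildSpanTree(fetchedSpans));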
35
src/types/tracing.ts
Normal file
@@ -0,0 +1,35 @@
/**
 * Distributed Tracing Types
 * Mirrors the types defined in edge function logger
 */

export interface Span {
  spanId: string;
  traceId: string;
  parentSpanId?: string;
  name: string;
  kind: 'SERVER' | 'CLIENT' | 'INTERNAL' | 'DATABASE';
  startTime: number;
  endTime?: number;
  duration?: number;
  attributes: Record<string, unknown>;
  events: SpanEvent[];
  status: 'ok' | 'error' | 'unset';
  error?: {
    type: string;
    message: string;
    stack?: string;
  };
}

export interface SpanEvent {
  timestamp: number;
  name: string;
  attributes?: Record<string, unknown>;
}

export interface SpanContext {
  traceId: string;
  spanId: string;
  traceFlags?: number;
}
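For reference, a minimal object satisfying these interfaces; every value here is invented:

import type { Span } from '@/types/tracing';

// A completed 120ms DATABASE span with one event, linked to a parent span.
const example: Span = {
  spanId: 'span-2',
  traceId: 'trace-1',
  parentSpanId: 'span-1',
  name: 'process_approval_transaction',
  kind: 'DATABASE',
  startTime: 1700000000000,
  endTime: 1700000000120,
  duration: 120,
  attributes: { 'db.operation': 'rpc' },
  events: [{ timestamp: 1700000000050, name: 'rpc_call_start' }],
  status: 'ok',
};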
@@ -47,6 +47,9 @@ verify_jwt = true
 [functions.process-selective-approval]
 verify_jwt = false
 
+[functions.process-selective-rejection]
+verify_jwt = false
+
 [functions.send-escalation-notification]
 verify_jwt = true
@@ -14,7 +14,39 @@ interface LogContext {
   [key: string]: unknown;
 }
 
-// Request tracking utilities
+// Span types for distributed tracing
+export interface Span {
+  spanId: string;
+  traceId: string;
+  parentSpanId?: string;
+  name: string;
+  kind: 'SERVER' | 'CLIENT' | 'INTERNAL' | 'DATABASE';
+  startTime: number;
+  endTime?: number;
+  duration?: number;
+  attributes: Record<string, unknown>;
+  events: SpanEvent[];
+  status: 'ok' | 'error' | 'unset';
+  error?: {
+    type: string;
+    message: string;
+    stack?: string;
+  };
+}
+
+export interface SpanEvent {
+  timestamp: number;
+  name: string;
+  attributes?: Record<string, unknown>;
+}
+
+export interface SpanContext {
+  traceId: string;
+  spanId: string;
+  traceFlags?: number;
+}
+
+// Request tracking utilities (legacy - use spans instead)
 export interface RequestTracking {
   requestId: string;
   start: number;
@@ -33,6 +65,134 @@ export function endRequest(tracking: RequestTracking): number {
   return Date.now() - tracking.start;
 }
 
+// ============================================================================
+// Span Lifecycle Functions
+// ============================================================================
+
+/**
+ * Start a new span
+ */
+export function startSpan(
+  name: string,
+  kind: Span['kind'],
+  parentSpan?: SpanContext,
+  attributes?: Record<string, unknown>
+): Span {
+  const traceId = parentSpan?.traceId || crypto.randomUUID();
+
+  return {
+    spanId: crypto.randomUUID(),
+    traceId,
+    parentSpanId: parentSpan?.spanId,
+    name,
+    kind,
+    startTime: Date.now(),
+    attributes: attributes || {},
+    events: [],
+    status: 'unset',
+  };
+}
+
+/**
+ * End a span with final status
+ */
+export function endSpan(span: Span, status?: 'ok' | 'error', error?: Error): Span {
+  span.endTime = Date.now();
+  span.duration = span.endTime - span.startTime;
+  span.status = status || 'ok';
+
+  if (error) {
+    span.error = {
+      type: error.name,
+      message: error.message,
+      stack: error.stack,
+    };
+  }
+
+  return span;
+}
+
+/**
+ * Add event to span
+ */
+export function addSpanEvent(
+  span: Span,
+  name: string,
+  attributes?: Record<string, unknown>
+): void {
+  span.events.push({
+    timestamp: Date.now(),
+    name,
+    attributes,
+  });
+}
+
+/**
+ * Set span attributes
+ */
+export function setSpanAttributes(
+  span: Span,
+  attributes: Record<string, unknown>
+): void {
+  span.attributes = { ...span.attributes, ...attributes };
+}
+
+/**
+ * Extract span context for propagation
+ */
+export function getSpanContext(span: Span): SpanContext {
+  return {
+    traceId: span.traceId,
+    spanId: span.spanId,
+  };
+}
+
+/**
+ * Extract span context from HTTP headers (W3C Trace Context)
+ */
+export function extractSpanContextFromHeaders(headers: Headers): SpanContext | undefined {
+  const traceparent = headers.get('traceparent');
+  if (!traceparent) return undefined;
+
+  // Parse W3C traceparent: version-traceId-spanId-flags
+  const parts = traceparent.split('-');
+  if (parts.length !== 4) return undefined;
+
+  return {
+    traceId: parts[1],
+    spanId: parts[2],
+    traceFlags: parseInt(parts[3], 16),
+  };
+}
+
+/**
+ * Inject span context into headers
+ */
+export function injectSpanContextIntoHeaders(spanContext: SpanContext): Record<string, string> {
+  return {
+    'traceparent': `00-${spanContext.traceId}-${spanContext.spanId}-01`,
+  };
+}
+
+/**
+ * Log completed span
+ */
+export function logSpan(span: Span): void {
+  const sanitizedAttributes = sanitizeContext(span.attributes);
+  const sanitizedEvents = span.events.map(e => ({
+    ...e,
+    attributes: e.attributes ? sanitizeContext(e.attributes) : undefined,
+  }));
+
+  edgeLogger.info('Span completed', {
+    span: {
+      ...span,
+      attributes: sanitizedAttributes,
+      events: sanitizedEvents,
+    },
+  });
+}
+
 // Fields that should never be logged
 const SENSITIVE_FIELDS = [
   'password',
@@ -52,7 +212,7 @@ const SENSITIVE_FIELDS = [
 /**
  * Sanitize context to remove sensitive data
  */
-function sanitizeContext(context: LogContext): LogContext {
+export function sanitizeContext(context: LogContext): LogContext {
   const sanitized: LogContext = {};
 
   for (const [key, value] of Object.entries(context)) {
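Taken together, extractSpanContextFromHeaders and injectSpanContextIntoHeaders give W3C-style propagation across function hops. A short sketch of the round trip, using only the functions added above; the downstream URL is a placeholder and `req` is assumed to be the incoming Request. One caveat worth noting: the parser splits traceparent on '-' and requires exactly four parts, so the UUID-format IDs this logger generates (which themselves contain dashes) will not round-trip unless the dashes are stripped first.

// In an edge function: continue the caller's trace, then propagate to a downstream call.
const parent = extractSpanContextFromHeaders(req.headers); // undefined if no traceparent header
const span = startSpan('fan-out-example', 'CLIENT', parent);

const res = await fetch('https://example.com/downstream', { // placeholder URL
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    ...injectSpanContextIntoHeaders(getSpanContext(span)), // adds traceparent: 00-<traceId>-<spanId>-01
  },
  body: JSON.stringify({ ok: true }),
});

endSpan(span, res.ok ? 'ok' : 'error');
logSpan(span);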
@@ -2,6 +2,17 @@ import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { corsHeaders } from './cors.ts';
 import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
+import {
+  edgeLogger,
+  startSpan,
+  endSpan,
+  addSpanEvent,
+  setSpanAttributes,
+  getSpanContext,
+  logSpan,
+  extractSpanContextFromHeaders,
+  type Span
+} from '../_shared/logger.ts';
 
 const SUPABASE_URL = Deno.env.get('SUPABASE_URL') || 'https://api.thrillwiki.com';
 const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY')!;
@@ -22,13 +33,29 @@ const handler = async (req: Request) => {
     });
   }
 
-  // Generate request ID for tracking
-  const requestId = crypto.randomUUID();
+  // Extract parent span context from headers (if present)
+  const parentSpanContext = extractSpanContextFromHeaders(req.headers);
+
+  // Create root span for this edge function invocation
+  const rootSpan = startSpan(
+    'process-selective-approval',
+    'SERVER',
+    parentSpanContext,
+    {
+      'http.method': 'POST',
+      'function.name': 'process-selective-approval',
+    }
+  );
+  const requestId = rootSpan.spanId;
 
   try {
     // STEP 1: Authentication
+    addSpanEvent(rootSpan, 'authentication_start');
     const authHeader = req.headers.get('Authorization');
     if (!authHeader) {
+      addSpanEvent(rootSpan, 'authentication_failed', { reason: 'missing_header' });
+      endSpan(rootSpan, 'error');
+      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Missing Authorization header' }),
         {
@@ -47,6 +74,14 @@ const handler = async (req: Request) => {
 
     const { data: { user }, error: authError } = await supabase.auth.getUser();
     if (authError || !user) {
+      addSpanEvent(rootSpan, 'authentication_failed', { error: authError?.message });
+      edgeLogger.warn('Authentication failed', {
+        requestId,
+        error: authError?.message,
+        action: 'process_approval'
+      });
+      endSpan(rootSpan, 'error', authError || new Error('Unauthorized'));
+      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Unauthorized' }),
         {
@@ -59,13 +94,34 @@ const handler = async (req: Request) => {
       );
     }
 
-    console.log(`[${requestId}] Approval request from moderator ${user.id}`);
+    setSpanAttributes(rootSpan, { 'user.id': user.id });
+    addSpanEvent(rootSpan, 'authentication_success');
+    edgeLogger.info('Approval request received', {
+      requestId,
+      moderatorId: user.id,
+      action: 'process_approval'
+    });
 
     // STEP 2: Parse request
+    addSpanEvent(rootSpan, 'validation_start');
     const body: ApprovalRequest = await req.json();
     const { submissionId, itemIds, idempotencyKey } = body;
 
     if (!submissionId || !itemIds || itemIds.length === 0) {
+      addSpanEvent(rootSpan, 'validation_failed', {
+        hasSubmissionId: !!submissionId,
+        hasItemIds: !!itemIds,
+        itemCount: itemIds?.length || 0,
+      });
+      edgeLogger.warn('Invalid request payload', {
+        requestId,
+        hasSubmissionId: !!submissionId,
+        hasItemIds: !!itemIds,
+        itemCount: itemIds?.length || 0,
+        action: 'process_approval'
+      });
+      endSpan(rootSpan, 'error');
+      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Missing required fields: submissionId, itemIds' }),
         {
@@ -78,7 +134,21 @@ const handler = async (req: Request) => {
       );
     }
 
+    setSpanAttributes(rootSpan, {
+      'submission.id': submissionId,
+      'submission.item_count': itemIds.length,
+      'idempotency.key': idempotencyKey,
+    });
+    addSpanEvent(rootSpan, 'validation_complete');
+    edgeLogger.info('Request validated', {
+      requestId,
+      submissionId,
+      itemCount: itemIds.length,
+      action: 'process_approval'
+    });
 
     // STEP 3: Idempotency check
+    addSpanEvent(rootSpan, 'idempotency_check_start');
     const { data: existingKey } = await supabase
       .from('submission_idempotency_keys')
       .select('*')
@@ -86,7 +156,16 @@ const handler = async (req: Request) => {
       .single();
 
     if (existingKey?.status === 'completed') {
-      console.log(`[${requestId}] Idempotency key already processed, returning cached result`);
+      addSpanEvent(rootSpan, 'idempotency_cache_hit');
+      setSpanAttributes(rootSpan, { 'cache.hit': true });
+      edgeLogger.info('Idempotency cache hit', {
+        requestId,
+        idempotencyKey,
+        cached: true,
+        action: 'process_approval'
+      });
+      endSpan(rootSpan, 'ok');
+      logSpan(rootSpan);
       return new Response(
         JSON.stringify(existingKey.result_data),
         {
@@ -108,7 +187,15 @@ const handler = async (req: Request) => {
       .single();
 
     if (submissionError || !submission) {
-      console.error(`[${requestId}] Submission not found:`, submissionError);
+      addSpanEvent(rootSpan, 'submission_fetch_failed', { error: submissionError?.message });
+      edgeLogger.error('Submission not found', {
+        requestId,
+        submissionId,
+        error: submissionError?.message,
+        action: 'process_approval'
+      });
+      endSpan(rootSpan, 'error', submissionError || new Error('Submission not found'));
+      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Submission not found' }),
         {
@@ -123,7 +210,13 @@ const handler = async (req: Request) => {
 
     // STEP 5: Verify moderator can approve this submission
     if (submission.assigned_to && submission.assigned_to !== user.id) {
-      console.error(`[${requestId}] Submission locked by another moderator`);
+      edgeLogger.warn('Lock conflict', {
+        requestId,
+        submissionId,
+        lockedBy: submission.assigned_to,
+        attemptedBy: user.id,
+        action: 'process_approval'
+      });
       return new Response(
         JSON.stringify({ error: 'Submission is locked by another moderator' }),
         {
@@ -137,7 +230,13 @@ const handler = async (req: Request) => {
     }
 
     if (!['pending', 'partially_approved'].includes(submission.status)) {
-      console.error(`[${requestId}] Invalid submission status: ${submission.status}`);
+      edgeLogger.warn('Invalid submission status', {
+        requestId,
+        submissionId,
+        currentStatus: submission.status,
+        expectedStatuses: ['pending', 'partially_approved'],
+        action: 'process_approval'
+      });
       return new Response(
         JSON.stringify({ error: 'Submission already processed' }),
         {
@@ -150,17 +249,58 @@ const handler = async (req: Request) => {
       );
     }
 
-    // STEP 6: Register idempotency key as processing
+    // STEP 6: Register idempotency key as processing (atomic upsert)
+    // ✅ CRITICAL FIX: Use ON CONFLICT to prevent race conditions
     if (!existingKey) {
-      await supabase.from('submission_idempotency_keys').insert({
-        idempotency_key: idempotencyKey,
-        submission_id: submissionId,
-        moderator_id: user.id,
-        status: 'processing'
-      });
+      const { data: insertedKey, error: idempotencyError } = await supabase
+        .from('submission_idempotency_keys')
+        .insert({
+          idempotency_key: idempotencyKey,
+          submission_id: submissionId,
+          moderator_id: user.id,
+          status: 'processing'
+        })
+        .select()
+        .single();
+
+      // If conflict occurred, another moderator is processing
+      if (idempotencyError && idempotencyError.code === '23505') {
+        edgeLogger.warn('Idempotency key conflict - another request processing', {
+          requestId,
+          idempotencyKey,
+          moderatorId: user.id
+        });
+        return new Response(
+          JSON.stringify({ error: 'Another moderator is processing this submission' }),
+          { status: 409, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
+        );
+      }
+
+      if (idempotencyError) {
+        throw idempotencyError;
+      }
     }
 
-    console.log(`[${requestId}] Calling process_approval_transaction RPC`);
+    // Create child span for RPC transaction
+    const rpcSpan = startSpan(
+      'process_approval_transaction',
+      'DATABASE',
+      getSpanContext(rootSpan),
+      {
+        'db.operation': 'rpc',
+        'db.function': 'process_approval_transaction',
+        'submission.id': submissionId,
+        'submission.item_count': itemIds.length,
+      }
+    );
+
+    addSpanEvent(rpcSpan, 'rpc_call_start');
+    edgeLogger.info('Calling approval transaction RPC', {
+      requestId,
+      submissionId,
+      itemCount: itemIds.length,
+      action: 'process_approval'
+    });
 
     // ============================================================================
     // STEP 7: Call RPC function with deadlock retry logic
@@ -178,7 +318,9 @@ const handler = async (req: Request) => {
           p_item_ids: itemIds,
           p_moderator_id: user.id,
          p_submitter_id: submission.user_id,
-          p_request_id: requestId
+          p_request_id: requestId,
+          p_trace_id: rootSpan.traceId,
+          p_parent_span_id: rpcSpan.spanId
         }
       );
 
@@ -187,6 +329,10 @@ const handler = async (req: Request) => {
 
       if (!rpcError) {
        // Success!
+        addSpanEvent(rpcSpan, 'rpc_call_success', {
+          'result.status': data?.status,
+          'items.processed': itemIds.length,
+        });
         break;
       }
 
@@ -194,23 +340,51 @@ const handler = async (req: Request) => {
       if (rpcError.code === '40P01' || rpcError.code === '40001') {
         retryCount++;
         if (retryCount > MAX_DEADLOCK_RETRIES) {
-          console.error(`[${requestId}] Max deadlock retries exceeded`);
+          addSpanEvent(rpcSpan, 'max_retries_exceeded', { attempt: retryCount });
+          edgeLogger.error('Max deadlock retries exceeded', {
+            requestId,
+            submissionId,
+            attempt: retryCount,
+            action: 'process_approval'
+          });
           break;
         }
 
         const backoffMs = 100 * Math.pow(2, retryCount);
-        console.log(`[${requestId}] Deadlock detected, retrying in ${backoffMs}ms (attempt ${retryCount}/${MAX_DEADLOCK_RETRIES})`);
+        addSpanEvent(rpcSpan, 'deadlock_retry', { attempt: retryCount, backoffMs });
+        edgeLogger.warn('Deadlock detected, retrying', {
+          requestId,
+          attempt: retryCount,
+          maxAttempts: MAX_DEADLOCK_RETRIES,
+          backoffMs,
+          action: 'process_approval'
+        });
         await new Promise(r => setTimeout(r, backoffMs));
         continue;
       }
 
       // Non-retryable error, break immediately
+      addSpanEvent(rpcSpan, 'rpc_call_failed', {
+        error: rpcError.message,
+        errorCode: rpcError.code
+      });
       break;
     }
 
     if (rpcError) {
       // Transaction failed - EVERYTHING rolled back automatically by PostgreSQL
-      console.error(`[${requestId}] Approval transaction failed:`, rpcError);
+      endSpan(rpcSpan, 'error', rpcError);
+      logSpan(rpcSpan);
+
+      edgeLogger.error('Transaction failed', {
+        requestId,
+        duration: rpcSpan.duration,
+        submissionId,
+        error: rpcError.message,
+        errorCode: rpcError.code,
+        retries: retryCount,
+        action: 'process_approval'
+      });
 
       // Update idempotency key to failed
       try {
@@ -223,10 +397,19 @@ const handler = async (req: Request) => {
           })
           .eq('idempotency_key', idempotencyKey);
       } catch (updateError) {
-        console.error(`[${requestId}] Failed to update idempotency key to failed:`, updateError);
+        edgeLogger.warn('Failed to update idempotency key', {
+          requestId,
+          idempotencyKey,
+          status: 'failed',
+          error: updateError instanceof Error ? updateError.message : String(updateError),
+          action: 'process_approval'
+        });
         // Non-blocking - continue with error response even if idempotency update fails
       }
 
+      endSpan(rootSpan, 'error', rpcError);
+      logSpan(rootSpan);
+
       return new Response(
         JSON.stringify({
           error: 'Approval transaction failed',
@@ -244,7 +427,24 @@ const handler = async (req: Request) => {
       );
     }
 
-    console.log(`[${requestId}] Transaction completed successfully:`, result);
+    // RPC succeeded
+    endSpan(rpcSpan, 'ok');
+    logSpan(rpcSpan);
+
+    setSpanAttributes(rootSpan, {
+      'result.status': result?.status,
+      'result.final_status': result?.status,
+      'retries': retryCount,
+    });
+    edgeLogger.info('Transaction completed successfully', {
+      requestId,
+      duration: rpcSpan.duration,
+      submissionId,
+      itemCount: itemIds.length,
+      retries: retryCount,
+      newStatus: result?.status,
+      action: 'process_approval'
+    });
 
     // STEP 8: Success - update idempotency key
     try {
@@ -257,10 +457,19 @@ const handler = async (req: Request) => {
         })
         .eq('idempotency_key', idempotencyKey);
     } catch (updateError) {
-      console.error(`[${requestId}] Failed to update idempotency key to completed:`, updateError);
+      edgeLogger.warn('Failed to update idempotency key', {
+        requestId,
+        idempotencyKey,
+        status: 'completed',
+        error: updateError instanceof Error ? updateError.message : String(updateError),
+        action: 'process_approval'
+      });
       // Non-blocking - transaction succeeded, so continue with success response
     }
 
+    endSpan(rootSpan, 'ok');
+    logSpan(rootSpan);
+
     return new Response(
       JSON.stringify(result),
       {
@@ -274,7 +483,16 @@ const handler = async (req: Request) => {
     );
 
   } catch (error) {
-    console.error(`[${requestId}] Unexpected error:`, error);
+    endSpan(rootSpan, 'error', error instanceof Error ? error : new Error(String(error)));
+    logSpan(rootSpan);
+
+    edgeLogger.error('Unexpected error', {
+      requestId,
+      duration: rootSpan.duration,
+      error: error instanceof Error ? error.message : String(error),
+      stack: error instanceof Error ? error.stack : undefined,
+      action: 'process_approval'
+    });
     return new Response(
       JSON.stringify({
         error: 'Internal server error',
|
|||||||
4
supabase/functions/process-selective-rejection/cors.ts
Normal file
4
supabase/functions/process-selective-rejection/cors.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
export const corsHeaders = {
|
||||||
|
'Access-Control-Allow-Origin': '*',
|
||||||
|
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
|
||||||
|
};
|
||||||
516
supabase/functions/process-selective-rejection/index.ts
Normal file
@@ -0,0 +1,516 @@
|
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
|
||||||
|
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
|
||||||
|
import { corsHeaders } from './cors.ts';
|
||||||
|
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
|
||||||
|
import {
|
||||||
|
edgeLogger,
|
||||||
|
startSpan,
|
||||||
|
endSpan,
|
||||||
|
addSpanEvent,
|
||||||
|
setSpanAttributes,
|
||||||
|
getSpanContext,
|
||||||
|
logSpan,
|
||||||
|
extractSpanContextFromHeaders,
|
||||||
|
type Span
|
||||||
|
} from '../_shared/logger.ts';
|
||||||
|
|
||||||
|
const SUPABASE_URL = Deno.env.get('SUPABASE_URL') || 'https://api.thrillwiki.com';
|
||||||
|
const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY')!;
|
||||||
|
|
||||||
|
interface RejectionRequest {
|
||||||
|
submissionId: string;
|
||||||
|
itemIds: string[];
|
||||||
|
rejectionReason: string;
|
||||||
|
idempotencyKey: string;
|
||||||
|
}

// Main handler function
const handler = async (req: Request) => {
  // Handle CORS preflight requests
  if (req.method === 'OPTIONS') {
    return new Response(null, {
      status: 204,
      headers: corsHeaders
    });
  }

  // Extract parent span context from headers (if present)
  const parentSpanContext = extractSpanContextFromHeaders(req.headers);

  // Create root span for this edge function invocation
  const rootSpan = startSpan(
    'process-selective-rejection',
    'SERVER',
    parentSpanContext,
    {
      'http.method': 'POST',
      'function.name': 'process-selective-rejection',
    }
  );
  const requestId = rootSpan.spanId;

  try {
    // STEP 1: Authentication
    addSpanEvent(rootSpan, 'authentication_start');
    const authHeader = req.headers.get('Authorization');
    if (!authHeader) {
      addSpanEvent(rootSpan, 'authentication_failed', { reason: 'missing_header' });
      endSpan(rootSpan, 'error');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Missing Authorization header' }),
        {
          status: 401,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    const supabase = createClient(SUPABASE_URL, SUPABASE_ANON_KEY, {
      global: { headers: { Authorization: authHeader } }
    });

    const { data: { user }, error: authError } = await supabase.auth.getUser();
    if (authError || !user) {
      addSpanEvent(rootSpan, 'authentication_failed', { error: authError?.message });
      edgeLogger.warn('Authentication failed', {
        requestId,
        error: authError?.message,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error', authError || new Error('Unauthorized'));
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Unauthorized' }),
        {
          status: 401,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    setSpanAttributes(rootSpan, { 'user.id': user.id });
    addSpanEvent(rootSpan, 'authentication_success');
    edgeLogger.info('Rejection request received', {
      requestId,
      moderatorId: user.id,
      action: 'process_rejection'
    });

    // STEP 2: Parse request
    addSpanEvent(rootSpan, 'validation_start');
    const body: RejectionRequest = await req.json();
    const { submissionId, itemIds, rejectionReason, idempotencyKey } = body;

    if (!submissionId || !itemIds || itemIds.length === 0 || !rejectionReason) {
      addSpanEvent(rootSpan, 'validation_failed', {
        hasSubmissionId: !!submissionId,
        hasItemIds: !!itemIds,
        itemCount: itemIds?.length || 0,
        hasReason: !!rejectionReason,
      });
      edgeLogger.warn('Invalid request payload', {
        requestId,
        hasSubmissionId: !!submissionId,
        hasItemIds: !!itemIds,
        itemCount: itemIds?.length || 0,
        hasReason: !!rejectionReason,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Missing required fields: submissionId, itemIds, rejectionReason' }),
        {
          status: 400,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    setSpanAttributes(rootSpan, {
      'submission.id': submissionId,
      'submission.item_count': itemIds.length,
      'idempotency.key': idempotencyKey,
    });
    addSpanEvent(rootSpan, 'validation_complete');
    edgeLogger.info('Request validated', {
      requestId,
      submissionId,
      itemCount: itemIds.length,
      action: 'process_rejection'
    });

    // STEP 3: Idempotency check
    addSpanEvent(rootSpan, 'idempotency_check_start');
    const { data: existingKey } = await supabase
      .from('submission_idempotency_keys')
      .select('*')
      .eq('idempotency_key', idempotencyKey)
      .single();
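    // Note: .single() errors when no row matches; only `data` is destructured
    // above, so a missing key simply leaves existingKey undefined here.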

    if (existingKey?.status === 'completed') {
      addSpanEvent(rootSpan, 'idempotency_cache_hit');
      setSpanAttributes(rootSpan, { 'cache.hit': true });
      edgeLogger.info('Idempotency cache hit', {
        requestId,
        idempotencyKey,
        cached: true,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'ok');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify(existingKey.result_data),
        {
          status: 200,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json',
            'X-Cache-Status': 'HIT'
          }
        }
      );
    }

    // STEP 4: Fetch submission to get submitter_id
    const { data: submission, error: submissionError } = await supabase
      .from('content_submissions')
      .select('user_id, status, assigned_to')
      .eq('id', submissionId)
      .single();

    if (submissionError || !submission) {
      addSpanEvent(rootSpan, 'submission_fetch_failed', { error: submissionError?.message });
      edgeLogger.error('Submission not found', {
        requestId,
        submissionId,
        error: submissionError?.message,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error', submissionError || new Error('Submission not found'));
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Submission not found' }),
        {
          status: 404,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    // STEP 5: Verify moderator can reject this submission
    if (submission.assigned_to && submission.assigned_to !== user.id) {
      edgeLogger.warn('Lock conflict', {
        requestId,
        submissionId,
        lockedBy: submission.assigned_to,
        attemptedBy: user.id,
        action: 'process_rejection'
      });
      return new Response(
        JSON.stringify({ error: 'Submission is locked by another moderator' }),
        {
          status: 409,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    if (!['pending', 'partially_approved'].includes(submission.status)) {
      edgeLogger.warn('Invalid submission status', {
        requestId,
        submissionId,
        currentStatus: submission.status,
        expectedStatuses: ['pending', 'partially_approved'],
        action: 'process_rejection'
      });
      return new Response(
        JSON.stringify({ error: 'Submission already processed' }),
        {
          status: 400,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    // STEP 6: Register idempotency key as processing (insert guarded by unique constraint)
    // ✅ CRITICAL FIX: a concurrent duplicate insert fails on the unique key
    // (error 23505), preventing race conditions
    if (!existingKey) {
      const { data: insertedKey, error: idempotencyError } = await supabase
        .from('submission_idempotency_keys')
        .insert({
          idempotency_key: idempotencyKey,
          submission_id: submissionId,
          moderator_id: user.id,
          status: 'processing'
        })
        .select()
        .single();

      // If conflict occurred, another moderator is processing
      if (idempotencyError && idempotencyError.code === '23505') {
        edgeLogger.warn('Idempotency key conflict - another request processing', {
          requestId,
          idempotencyKey,
          moderatorId: user.id
        });
        return new Response(
          JSON.stringify({ error: 'Another moderator is processing this submission' }),
          { status: 409, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
        );
      }

      if (idempotencyError) {
        throw idempotencyError;
      }
    }

    // Create child span for RPC transaction
    const rpcSpan = startSpan(
      'process_rejection_transaction',
      'DATABASE',
      getSpanContext(rootSpan),
      {
        'db.operation': 'rpc',
        'db.function': 'process_rejection_transaction',
        'submission.id': submissionId,
        'submission.item_count': itemIds.length,
      }
    );

    addSpanEvent(rpcSpan, 'rpc_call_start');
    edgeLogger.info('Calling rejection transaction RPC', {
      requestId,
      submissionId,
      itemCount: itemIds.length,
      action: 'process_rejection'
    });

    // ============================================================================
    // STEP 7: Call RPC function with deadlock retry logic
    // ============================================================================
    let retryCount = 0;
    const MAX_DEADLOCK_RETRIES = 3;
    let result: any = null;
    let rpcError: any = null;

    while (retryCount <= MAX_DEADLOCK_RETRIES) {
      const { data, error } = await supabase.rpc(
        'process_rejection_transaction',
        {
          p_submission_id: submissionId,
          p_item_ids: itemIds,
          p_moderator_id: user.id,
          p_rejection_reason: rejectionReason,
          p_request_id: requestId,
          p_trace_id: rootSpan.traceId,
          p_parent_span_id: rpcSpan.spanId
        }
      );

      result = data;
      rpcError = error;

      if (!rpcError) {
        // Success!
        addSpanEvent(rpcSpan, 'rpc_call_success', {
          'result.status': data?.status,
          'items.processed': itemIds.length,
        });
        break;
      }

      // Check for deadlock (40P01) or serialization failure (40001)
      if (rpcError.code === '40P01' || rpcError.code === '40001') {
        retryCount++;
        if (retryCount > MAX_DEADLOCK_RETRIES) {
          addSpanEvent(rpcSpan, 'max_retries_exceeded', { attempt: retryCount });
          edgeLogger.error('Max deadlock retries exceeded', {
            requestId,
            submissionId,
            attempt: retryCount,
            action: 'process_rejection'
          });
          break;
        }

        const backoffMs = 100 * Math.pow(2, retryCount);
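        // Exponential backoff: 200ms, 400ms, 800ms across retry attempts 1-3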
        addSpanEvent(rpcSpan, 'deadlock_retry', { attempt: retryCount, backoffMs });
        edgeLogger.warn('Deadlock detected, retrying', {
          requestId,
          attempt: retryCount,
          maxAttempts: MAX_DEADLOCK_RETRIES,
          backoffMs,
          action: 'process_rejection'
        });
        await new Promise(r => setTimeout(r, backoffMs));
        continue;
      }

      // Non-retryable error, break immediately
      addSpanEvent(rpcSpan, 'rpc_call_failed', {
        error: rpcError.message,
        errorCode: rpcError.code
      });
      break;
    }

    if (rpcError) {
      // Transaction failed - EVERYTHING rolled back automatically by PostgreSQL
      endSpan(rpcSpan, 'error', rpcError);
      logSpan(rpcSpan);

      edgeLogger.error('Transaction failed', {
        requestId,
        duration: rpcSpan.duration,
        submissionId,
        error: rpcError.message,
        errorCode: rpcError.code,
        retries: retryCount,
        action: 'process_rejection'
      });

      // Update idempotency key to failed
      try {
        await supabase
          .from('submission_idempotency_keys')
          .update({
            status: 'failed',
            error_message: rpcError.message,
            completed_at: new Date().toISOString()
          })
          .eq('idempotency_key', idempotencyKey);
      } catch (updateError) {
        edgeLogger.warn('Failed to update idempotency key', {
          requestId,
          idempotencyKey,
          status: 'failed',
          error: updateError instanceof Error ? updateError.message : String(updateError),
          action: 'process_rejection'
        });
        // Non-blocking - continue with error response even if idempotency update fails
      }

      endSpan(rootSpan, 'error', rpcError);
      logSpan(rootSpan);

      return new Response(
        JSON.stringify({
          error: 'Rejection transaction failed',
          message: rpcError.message,
          details: rpcError.details,
          retries: retryCount
        }),
        {
          status: 500,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    // RPC succeeded
    endSpan(rpcSpan, 'ok');
    logSpan(rpcSpan);

    setSpanAttributes(rootSpan, {
      'result.status': result?.status,
      'result.final_status': result?.status,
      'retries': retryCount,
    });
    edgeLogger.info('Transaction completed successfully', {
      requestId,
      duration: rpcSpan.duration,
      submissionId,
      itemCount: itemIds.length,
      retries: retryCount,
      newStatus: result?.status,
      action: 'process_rejection'
    });

    // STEP 8: Success - update idempotency key
    try {
      await supabase
        .from('submission_idempotency_keys')
        .update({
          status: 'completed',
          result_data: result,
          completed_at: new Date().toISOString()
        })
        .eq('idempotency_key', idempotencyKey);
    } catch (updateError) {
      edgeLogger.warn('Failed to update idempotency key', {
        requestId,
        idempotencyKey,
        status: 'completed',
        error: updateError instanceof Error ? updateError.message : String(updateError),
        action: 'process_rejection'
      });
      // Non-blocking - transaction succeeded, so continue with success response
    }

    endSpan(rootSpan, 'ok');
    logSpan(rootSpan);

    return new Response(
      JSON.stringify(result),
      {
        status: 200,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json',
          'X-Request-Id': requestId
        }
      }
    );

  } catch (error) {
    endSpan(rootSpan, 'error', error instanceof Error ? error : new Error(String(error)));
    logSpan(rootSpan);

    edgeLogger.error('Unexpected error', {
      requestId,
      duration: rootSpan.duration,
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
      action: 'process_rejection'
    });
    return new Response(
      JSON.stringify({
        error: 'Internal server error',
        message: error instanceof Error ? error.message : 'Unknown error'
      }),
      {
        status: 500,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json'
        }
      }
    );
  }
};

// Apply rate limiting: 10 requests per minute per IP (standard tier)
serve(withRateLimit(handler, rateLimiters.standard, corsHeaders));
@@ -0,0 +1,159 @@
-- ============================================================================
-- CRITICAL: Add Atomic Rejection Transaction RPC
-- ============================================================================
-- This migration creates process_rejection_transaction to ensure atomic
-- rejection of submission items with proper audit logging and status updates.
--
-- Features:
-- - Atomic updates to submission_items.status = 'rejected'
-- - Sets rejection_reason for each item
-- - Updates parent submission status (rejected or partially_approved)
-- - Logs to moderation_audit_log
-- - Releases lock (assigned_to = NULL, locked_until = NULL)
-- - Returns transaction result
-- ============================================================================

CREATE OR REPLACE FUNCTION process_rejection_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_rejection_reason TEXT,
  p_request_id TEXT DEFAULT NULL
)
RETURNS JSONB
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_start_time TIMESTAMPTZ;
  v_result JSONB;
  v_rejected_count INTEGER := 0;
  v_final_status TEXT;
  v_some_pending BOOLEAN := FALSE;
BEGIN
  v_start_time := clock_timestamp();

  RAISE NOTICE '[%] Starting atomic rejection transaction for submission %',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    p_submission_id;

  -- ========================================================================
  -- STEP 1: Set session variables (transaction-scoped)
  -- ========================================================================
  PERFORM set_config('app.moderator_id', p_moderator_id::text, true);

  -- ========================================================================
  -- STEP 2: Validate submission ownership and lock status
  -- ========================================================================
  IF NOT EXISTS (
    SELECT 1 FROM content_submissions
    WHERE id = p_submission_id
      AND (assigned_to = p_moderator_id OR assigned_to IS NULL)
      AND status IN ('pending', 'partially_approved')
  ) THEN
    RAISE EXCEPTION 'Submission not found, locked by another moderator, or already processed'
      USING ERRCODE = '42501';
  END IF;

  -- ========================================================================
  -- STEP 3: Update all items to rejected atomically
  -- ========================================================================
  UPDATE submission_items
  SET
    status = 'rejected',
    rejection_reason = p_rejection_reason,
    updated_at = NOW()
  WHERE id = ANY(p_item_ids)
    AND submission_id = p_submission_id
    AND status IN ('pending', 'rejected');

  GET DIAGNOSTICS v_rejected_count = ROW_COUNT;

  RAISE NOTICE '[%] Rejected % items',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    v_rejected_count;

  -- ========================================================================
  -- STEP 4: Determine final submission status
  -- ========================================================================
  -- Check if any items are still pending
  SELECT EXISTS(
    SELECT 1 FROM submission_items
    WHERE submission_id = p_submission_id
      AND status = 'pending'
  ) INTO v_some_pending;

  -- Set final status
  v_final_status := CASE
    WHEN v_some_pending THEN 'partially_approved'
    WHEN EXISTS(
      SELECT 1 FROM submission_items
      WHERE submission_id = p_submission_id
        AND status = 'approved'
    ) THEN 'partially_approved'
    ELSE 'rejected'
  END;

  -- ========================================================================
  -- STEP 5: Update parent submission
  -- ========================================================================
  UPDATE content_submissions
  SET
    status = v_final_status,
    reviewer_id = p_moderator_id,
    reviewed_at = NOW(),
    assigned_to = NULL,
    locked_until = NULL,
    reviewer_notes = p_rejection_reason
  WHERE id = p_submission_id;

  -- ========================================================================
  -- STEP 6: Log to moderation_audit_log
  -- ========================================================================
  INSERT INTO moderation_audit_log (
    submission_id,
    moderator_id,
    action,
    details,
    created_at
  ) VALUES (
    p_submission_id,
    p_moderator_id,
    'rejection',
    jsonb_build_object(
      'item_ids', p_item_ids,
      'rejection_reason', p_rejection_reason,
      'rejected_count', v_rejected_count,
      'final_status', v_final_status,
      'request_id', p_request_id
    ),
    NOW()
  );

  -- ========================================================================
  -- STEP 7: Build result
  -- ========================================================================
  v_result := jsonb_build_object(
    'success', TRUE,
    'rejected_count', v_rejected_count,
    'submission_status', v_final_status,
    'duration_ms', EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000
  );

  -- Clear session variables
  PERFORM set_config('app.moderator_id', '', true);

  RAISE NOTICE '[%] Rejection transaction completed in %ms',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000;

  RETURN v_result;
END;
$$;

-- Grant execute permissions
GRANT EXECUTE ON FUNCTION process_rejection_transaction TO authenticated;

COMMENT ON FUNCTION process_rejection_transaction IS
  'Atomic rejection transaction with audit logging and lock release';
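
-- Example invocation (illustrative; the UUID values below are hypothetical):
--   SELECT process_rejection_transaction(
--     '00000000-0000-0000-0000-000000000001'::uuid,          -- submission id
--     ARRAY['00000000-0000-0000-0000-000000000002']::uuid[], -- item ids
--     '00000000-0000-0000-0000-000000000003'::uuid,          -- moderator id
--     'Duplicate of an existing entry',                      -- rejection reason
--     'req-example'                                          -- request id
--   );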
@@ -0,0 +1,172 @@
-- Fix create_submission_with_items to remove temp_location_data reference
-- This column was dropped but the function still references it, causing park submissions to fail

DROP FUNCTION IF EXISTS public.create_submission_with_items(uuid, text, text, jsonb, uuid);

CREATE OR REPLACE FUNCTION public.create_submission_with_items(
  p_submission_id uuid,
  p_entity_type text,
  p_action_type text,
  p_items jsonb,
  p_user_id uuid
)
RETURNS uuid
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path TO 'public'
AS $function$
DECLARE
  v_item JSONB;
  v_item_type TEXT;
  v_item_data JSONB;
  v_depends_on INTEGER;
  v_order_index INTEGER;
  v_created_ids UUID[] := ARRAY[]::UUID[];
  v_submission_item_id UUID;
  v_entity_submission_id UUID;
BEGIN
  -- Loop through items array
  FOR v_item IN SELECT * FROM jsonb_array_elements(p_items)
  LOOP
    v_item_type := v_item->>'item_type';
    v_item_data := v_item->'item_data';
    v_depends_on := (v_item->>'depends_on')::INTEGER;
    v_order_index := (v_item->>'order_index')::INTEGER;

    -- Resolve dependency references
    IF v_depends_on IS NOT NULL THEN
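      -- depends_on is treated as a 0-based index into the items array;
      -- PostgreSQL arrays are 1-based, hence the +1 lookup below.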
      v_item_data := v_item_data || jsonb_build_object(
        v_item->>'dependency_field',
        v_created_ids[v_depends_on + 1]
      );
    END IF;

    -- Create submission based on entity type
    IF v_item_type = 'park' THEN
      INSERT INTO park_submissions (
        submission_id, name, slug, description, park_type, status,
        opening_date, opening_date_precision, closing_date, closing_date_precision,
        location_id, operator_id, property_owner_id,
        website_url, phone, email,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        v_item_data->>'park_type',
        v_item_data->>'status',
        (v_item_data->>'opening_date')::DATE,
        v_item_data->>'opening_date_precision',
        (v_item_data->>'closing_date')::DATE,
        v_item_data->>'closing_date_precision',
        (v_item_data->>'location_id')::UUID,
        (v_item_data->>'operator_id')::UUID,
        (v_item_data->>'property_owner_id')::UUID,
        v_item_data->>'website_url',
        v_item_data->>'phone',
        v_item_data->>'email',
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSIF v_item_type = 'ride' THEN
      INSERT INTO ride_submissions (
        submission_id, name, slug, description, category, status,
        opening_date, opening_date_precision, closing_date, closing_date_precision,
        park_id, manufacturer_id, designer_id, ride_model_id,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        v_item_data->>'category',
        v_item_data->>'status',
        (v_item_data->>'opening_date')::DATE,
        v_item_data->>'opening_date_precision',
        (v_item_data->>'closing_date')::DATE,
        v_item_data->>'closing_date_precision',
        (v_item_data->>'park_id')::UUID,
        (v_item_data->>'manufacturer_id')::UUID,
        (v_item_data->>'designer_id')::UUID,
        (v_item_data->>'ride_model_id')::UUID,
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSIF v_item_type IN ('manufacturer', 'operator', 'designer', 'property_owner') THEN
      INSERT INTO company_submissions (
        submission_id, name, slug, description, company_type,
        founded_year, headquarters_location, website_url,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        v_item_type,
        (v_item_data->>'founded_year')::INTEGER,
        v_item_data->>'headquarters_location',
        v_item_data->>'website_url',
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSIF v_item_type = 'ride_model' THEN
      INSERT INTO ride_model_submissions (
        submission_id, name, slug, description, manufacturer_id, category,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        (v_item_data->>'manufacturer_id')::UUID,
        v_item_data->>'category',
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSE
      RAISE EXCEPTION 'Unsupported item type: %', v_item_type;
    END IF;

    -- Create submission_item record linking to the entity submission
    INSERT INTO submission_items (
      submission_id,
      item_type,
      action_type,
      order_index,
      depends_on,
      park_submission_id,
      ride_submission_id,
      company_submission_id,
      ride_model_submission_id
    ) VALUES (
      p_submission_id,
      v_item_type,
      p_action_type,
      v_order_index,
      CASE WHEN v_depends_on IS NOT NULL THEN v_created_ids[v_depends_on + 1] ELSE NULL END,
      CASE WHEN v_item_type = 'park' THEN v_entity_submission_id ELSE NULL END,
      CASE WHEN v_item_type = 'ride' THEN v_entity_submission_id ELSE NULL END,
      CASE WHEN v_item_type IN ('manufacturer', 'operator', 'designer', 'property_owner') THEN v_entity_submission_id ELSE NULL END,
      CASE WHEN v_item_type = 'ride_model' THEN v_entity_submission_id ELSE NULL END
    ) RETURNING id INTO v_submission_item_id;

    -- Track created submission item IDs in order for dependency resolution
    v_created_ids := array_append(v_created_ids, v_submission_item_id);
  END LOOP;

  RETURN p_submission_id;
END;
$function$;
@@ -0,0 +1,227 @@
-- Add distributed tracing support to RPC functions
-- Adds trace_id and parent_span_id parameters for span context propagation
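-- Span data is surfaced via RAISE NOTICE lines prefixed SPAN:/SPAN_EVENT:
-- (see below), which log scrapers can correlate with the edge function's trace IDs.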

-- Update process_approval_transaction to accept trace context
CREATE OR REPLACE FUNCTION process_approval_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_submitter_id UUID,
  p_request_id TEXT DEFAULT NULL,
  p_trace_id TEXT DEFAULT NULL,
  p_parent_span_id TEXT DEFAULT NULL
)
RETURNS jsonb
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_item submission_items;
  v_approved_count INTEGER := 0;
  v_total_items INTEGER;
  v_new_status TEXT;
  v_entity_id UUID;
  v_all_items_processed BOOLEAN;
BEGIN
  -- Log span start with trace context
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_approval_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
      gen_random_uuid()::text,
      p_trace_id,
      p_parent_span_id,
      extract(epoch from clock_timestamp()) * 1000,
      p_submission_id,
      array_length(p_item_ids, 1);
  END IF;

  -- Get total items for this submission
  SELECT COUNT(*) INTO v_total_items
  FROM submission_items
  WHERE submission_id = p_submission_id;

  -- Process each item
  FOREACH v_item IN ARRAY (
    SELECT ARRAY_AGG(si ORDER BY si.order_index)
    FROM submission_items si
    WHERE si.id = ANY(p_item_ids)
  )
  LOOP
    -- Log item processing span event
    IF p_trace_id IS NOT NULL THEN
      RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "process_item", "timestamp": %, "attributes": {"item.id": "%", "item.type": "%", "item.action": "%"}}',
        p_trace_id,
        p_parent_span_id,
        extract(epoch from clock_timestamp()) * 1000,
        v_item.id,
        v_item.item_type,
        v_item.action;
    END IF;

    -- Create or update entity based on item type
    IF v_item.item_type = 'park' THEN
      IF v_item.action = 'create' THEN
        -- Log entity creation
        IF p_trace_id IS NOT NULL THEN
          RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "name": "create_entity_park", "timestamp": %, "attributes": {"action": "create"}}',
            p_trace_id,
            extract(epoch from clock_timestamp()) * 1000;
        END IF;

        v_entity_id := create_entity_from_submission('park', v_item.id, p_submitter_id, p_request_id);
      ELSIF v_item.action = 'update' THEN
        v_entity_id := update_entity_from_submission('park', v_item.id, v_item.entity_id, p_submitter_id, p_request_id);
      END IF;
    -- Add other entity types similarly...
    END IF;

    -- Update item status
    UPDATE submission_items
    SET
      status = 'approved',
      processed_at = NOW(),
      processed_by = p_moderator_id,
      entity_id = v_entity_id
    WHERE id = v_item.id;

    v_approved_count := v_approved_count + 1;
  END LOOP;

  -- Determine final submission status
  SELECT
    COUNT(*) = array_length(p_item_ids, 1)
  INTO v_all_items_processed
  FROM submission_items
  WHERE submission_id = p_submission_id
    AND status IN ('approved', 'rejected');

  IF v_all_items_processed THEN
    v_new_status := 'approved';
  ELSE
    v_new_status := 'partially_approved';
  END IF;

  -- Update submission status
  UPDATE content_submissions
  SET
    status = v_new_status,
    processed_at = CASE WHEN v_new_status = 'approved' THEN NOW() ELSE processed_at END,
    assigned_to = NULL,
    lock_expires_at = NULL
  WHERE id = p_submission_id;

  -- Log completion
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "name": "transaction_complete", "timestamp": %, "attributes": {"items_processed": %, "new_status": "%"}}',
      p_trace_id,
      extract(epoch from clock_timestamp()) * 1000,
      v_approved_count,
      v_new_status;
  END IF;

  RETURN jsonb_build_object(
    'success', true,
    'status', v_new_status,
    'approved_count', v_approved_count,
    'total_items', v_total_items
  );
END;
$$;

-- Update process_rejection_transaction similarly
CREATE OR REPLACE FUNCTION process_rejection_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_rejection_reason TEXT,
  p_request_id TEXT DEFAULT NULL,
  p_trace_id TEXT DEFAULT NULL,
  p_parent_span_id TEXT DEFAULT NULL
)
RETURNS jsonb
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_rejected_count INTEGER := 0;
  v_total_items INTEGER;
  v_new_status TEXT;
  v_all_items_processed BOOLEAN;
BEGIN
  -- Log span start
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_rejection_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
      gen_random_uuid()::text,
      p_trace_id,
      p_parent_span_id,
      extract(epoch from clock_timestamp()) * 1000,
      p_submission_id,
      array_length(p_item_ids, 1);
  END IF;

  -- Get total items
  SELECT COUNT(*) INTO v_total_items
  FROM submission_items
  WHERE submission_id = p_submission_id;

  -- Reject items
  UPDATE submission_items
  SET
    status = 'rejected',
    rejection_reason = p_rejection_reason,
    processed_at = NOW(),
    processed_by = p_moderator_id
  WHERE id = ANY(p_item_ids);

  GET DIAGNOSTICS v_rejected_count = ROW_COUNT;

  -- Check if all items processed
  SELECT
    COUNT(*) = (SELECT COUNT(*) FROM submission_items WHERE submission_id = p_submission_id)
  INTO v_all_items_processed
  FROM submission_items
  WHERE submission_id = p_submission_id
    AND status IN ('approved', 'rejected');

  IF v_all_items_processed THEN
    -- Check if any items were approved
    SELECT EXISTS(
      SELECT 1 FROM submission_items
      WHERE submission_id = p_submission_id AND status = 'approved'
    ) INTO v_all_items_processed;

    v_new_status := CASE
      WHEN v_all_items_processed THEN 'partially_approved'
      ELSE 'rejected'
    END;
  ELSE
    v_new_status := 'partially_approved';
  END IF;

  -- Update submission
  UPDATE content_submissions
  SET
    status = v_new_status,
    processed_at = CASE WHEN v_new_status = 'rejected' THEN NOW() ELSE processed_at END,
    assigned_to = NULL,
    lock_expires_at = NULL
  WHERE id = p_submission_id;

  -- Log completion
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "name": "rejection_complete", "timestamp": %, "attributes": {"items_rejected": %, "new_status": "%"}}',
      p_trace_id,
      extract(epoch from clock_timestamp()) * 1000,
      v_rejected_count,
      v_new_status;
  END IF;

  RETURN jsonb_build_object(
    'success', true,
    'status', v_new_status,
    'rejected_count', v_rejected_count,
    'total_items', v_total_items
  );
END;
$$;
245
tests/integration/README.md
Normal file
@@ -0,0 +1,245 @@
# Integration Tests

This directory contains integration tests for the ThrillWiki submission pipeline and data integrity.

## Schema Validation Tests

**File**: `schema-validation.test.ts`

### Purpose

Automated tests that validate schema consistency across the entire submission pipeline:

- **Submission Tables**: Ensures submission tables match their corresponding main entity tables
- **Version Tables**: Validates version tables have all main table fields plus version metadata
- **Critical Fields**: Checks for known problematic fields (e.g., `ride_type` vs `category`)
- **Function Alignment**: Verifies critical database functions exist and are accessible

### Why This Matters

The submission pipeline depends on exact schema alignment between:
1. Main entity tables (`parks`, `rides`, `companies`, `ride_models`)
2. Submission tables (`park_submissions`, `ride_submissions`, etc.)
3. Version tables (`park_versions`, `ride_versions`, etc.)

**Without these tests**, schema mismatches can cause:
- ❌ Approval failures with cryptic "column does not exist" errors
- ❌ Data loss when fields are missing from submission tables
- ❌ Version history corruption when fields don't match
- ❌ Production incidents that are difficult to debug

**With these tests**, we catch issues:
- ✅ During development, before they reach production
- ✅ In CI/CD, preventing bad migrations from deploying
- ✅ Immediately after schema changes, with clear error messages

### Test Categories

#### 1. Entity Table Validation
Compares main entity tables with their submission counterparts:
```typescript
parks ↔ park_submissions
rides ↔ ride_submissions
companies ↔ company_submissions
ride_models ↔ ride_model_submissions
```

**Checks**:
- All fields from main table exist in submission table (except excluded metadata)
- Data types match exactly
- Required fields are marked NOT NULL in both

#### 2. Version Table Validation
Ensures version tables have complete field coverage:
```typescript
parks → park_versions
rides → ride_versions
companies → company_versions
ride_models → ride_model_versions
```

**Checks**:
- All main table fields exist (accounting for known name variations)
- Version metadata fields are present (`version_id`, `version_number`, etc.)
- Change tracking fields are properly defined

#### 3. Critical Field Validation
Tests specific known problem areas:

**Critical Test Cases** (a sketch of one such check follows this list):
- ✅ `rides` table does NOT have `ride_type` (prevents "column does not exist" error)
- ✅ `rides` table DOES have `category` as NOT NULL
- ✅ `ride_models` table has BOTH `category` and `ride_type`
- ✅ All entities have required base fields (`id`, `name`, `slug`, etc.)
- ✅ All submission tables have `submission_id` foreign key
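
Assuming the `getTableSchema` helper from `schema-validation.test.ts` is in scope, one of these checks looks roughly like:

```typescript
// Sketch of a critical-field check (test name mirrors the expected output below)
test('rides table does NOT have ride_type column', async () => {
  const schema = await getTableSchema('rides');
  // ride_type must not exist on rides; category must exist and be NOT NULL
  expect(schema['ride_type']).toBeUndefined();
  expect(schema['category']).toBeDefined();
  expect(schema['category'].is_nullable).toBe('NO');
});
```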

#### 4. Function Alignment
Validates critical database functions:
- `create_entity_from_submission`
- `update_entity_from_submission`
- `process_approval_transaction`

#### 5. Field Name Variations
Documents and validates known column name differences:
```typescript
ride_versions.height_requirement_cm ↔ rides.height_requirement
ride_versions.gforce_max ↔ rides.max_g_force
ride_versions.inversions_count ↔ rides.inversions
ride_versions.height_meters ↔ rides.max_height_meters
ride_versions.drop_meters ↔ rides.drop_height_meters
```

### Running the Tests

**Run all schema validation tests:**
```bash
npm run test:schema
```

**Run specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**Run in UI mode for debugging:**
```bash
npx playwright test schema-validation --ui
```

**Generate detailed report:**
```bash
npx playwright test schema-validation --reporter=html
```

### Environment Setup

These tests require:
- `SUPABASE_SERVICE_ROLE_KEY` environment variable
- Access to the Supabase project database
- Playwright test runner

**Example `.env.test`:**
```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

### Expected Output

**✅ All passing (healthy schema):**
```
✓ parks: submission table matches main table schema (245ms)
✓ rides: submission table matches main table schema (198ms)
✓ companies: submission table matches main table schema (187ms)
✓ ride_models: submission table matches main table schema (203ms)
✓ park_versions: has all main table fields plus version metadata (256ms)
✓ ride_versions: has all main table fields plus version metadata (234ms)
✓ rides table does NOT have ride_type column (145ms)
✓ rides table DOES have category column (NOT NULL) (152ms)
```

**❌ Failure example (schema mismatch):**
```
✕ rides: submission table matches main table schema (203ms)

  Error: ride_submissions is missing fields: category

  Expected: 0
  Received: 1
```

### Continuous Integration

Add to your CI/CD pipeline:

```yaml
# .github/workflows/test.yml
- name: Run Schema Validation Tests
  run: npm run test:schema
  env:
    SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
```

This prevents schema mismatches from reaching production.

### When to Run

**Always run these tests:**
- ✅ After any database migration
- ✅ Before deploying submission pipeline changes
- ✅ After modifying entity schemas
- ✅ When adding new entity types
- ✅ In CI/CD for every pull request

**Especially critical after:**
- Adding/removing columns from entity tables
- Modifying data types
- Changing NOT NULL constraints
- Updating database functions

### Maintenance

**When adding new entity types:**
1. Add validation tests for the new entity
2. Add tests for submission table
3. Add tests for version table (if applicable)
4. Update this README

**When schema changes are intentional:**
1. Review failing tests carefully
2. Update `EXCLUDED_FIELDS` or `VERSION_METADATA_FIELDS` if needed
3. Document any new field name variations in `normalizeColumnName()` (see the sketch below)
4. Update `docs/submission-pipeline/SCHEMA_REFERENCE.md`
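
Registering a new variation is just an extra entry in the mapping inside `normalizeColumnName()` (sketch; the `speed_kmh`/`max_speed_kmh` pair is a hypothetical example):

```typescript
function normalizeColumnName(name: string): string {
  const mapping: { [key: string]: string } = {
    'height_requirement_cm': 'height_requirement',
    'gforce_max': 'max_g_force',
    // Hypothetical new variation: version column → main table column
    'speed_kmh': 'max_speed_kmh',
  };
  return mapping[name] || name;
}
```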

### Debugging Failed Tests

**"Missing fields" error** (a quick diff sketch follows this list):
1. Check if field was recently added to main table
2. Verify migration added it to submission table too
3. Run migration to add missing field
4. Re-run tests
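
A one-off script reusing the helpers from `schema-validation.test.ts` can pinpoint the gap (sketch; assumes `getTableSchema` and `EXCLUDED_FIELDS` are in scope):

```typescript
// Columns present in the main table but absent from its submission table
const main = await getTableSchema('rides');
const sub = await getTableSchema('ride_submissions');
const missing = Object.keys(main).filter(
  (col) => !EXCLUDED_FIELDS.includes(col) && !sub[col]
);
console.log('Missing from ride_submissions:', missing);
```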

**"Type mismatch" error:**
1. Compare data types in both tables
2. Check for accidental type change in migration
3. Fix type inconsistency
4. Re-run tests

**"Column does not exist" in production:**
1. Run schema validation tests immediately
2. Identify which table is missing the field
3. Create emergency migration to add field
4. Deploy with high priority

### Related Documentation

- [Schema Reference](../../docs/submission-pipeline/SCHEMA_REFERENCE.md) - Complete field mappings
- [Submission Pipeline](../../docs/submission-pipeline/README.md) - Pipeline overview
- [Versioning System](../../docs/versioning/README.md) - Version table details
- [Moderation Workflow](../../docs/moderation/README.md) - Approval process

---

## Other Integration Tests

### Moderation Security Tests

**File**: `moderation-security.test.ts`

Tests role validation, lock enforcement, and rate limiting in the moderation system.

**Run:**
```bash
npx playwright test moderation-security
```

---

## Contributing

When adding new integration tests:
1. Follow existing test structure
2. Use descriptive test names
3. Add comments explaining what's being tested
4. Update this README
5. Ensure tests are idempotent (can run multiple times)
6. Clean up test data after completion
545
tests/integration/schema-validation.test.ts
Normal file
@@ -0,0 +1,545 @@
import { test, expect } from '@playwright/test';
import { createClient } from '@supabase/supabase-js';

/**
 * Schema Validation Tests
 *
 * These tests validate that submission tables, version tables, and main entity tables
 * have consistent schemas to prevent field mismatches during the approval pipeline.
 *
 * Critical validations:
 * 1. Submission tables must have all fields from main tables (except auto-generated)
 * 2. Version tables must have all fields from main tables plus version metadata
 * 3. Critical functions must reference correct column names
 * 4. Required NOT NULL fields must be present in all tables
 */

const supabase = createClient(
  'https://ydvtmnrszybqnbcqbdcy.supabase.co',
  process.env.SUPABASE_SERVICE_ROLE_KEY || ''
);

interface ColumnDefinition {
  column_name: string;
  data_type: string;
  is_nullable: string;
  column_default: string | null;
}

interface TableSchema {
  [columnName: string]: ColumnDefinition;
}

// Fields that are expected to be different or missing in submission tables
const EXCLUDED_FIELDS = [
  'id', // Submission tables have their own ID
  'created_at', // Managed differently in submissions
  'updated_at', // Managed differently in submissions
  'view_count_all', // Calculated fields not in submissions
  'view_count_30d',
  'view_count_7d',
  'average_rating',
  'review_count',
  'installations_count', // Only for ride_models
  'is_test_data', // Test data flag
];

// Version-specific metadata fields (expected to be extra in version tables)
const VERSION_METADATA_FIELDS = [
  'version_id',
  'version_number',
  'change_type',
  'change_reason',
  'is_current',
  'created_by',
  'created_at',
  'submission_id',
  'is_test_data',
];

async function getTableSchema(tableName: string): Promise<TableSchema> {
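  // The `as any` cast below is needed because information_schema views are
  // not part of the generated Supabase database types.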
|
||||||
|
const { data, error } = await supabase
|
||||||
|
.from('information_schema.columns' as any)
|
||||||
|
.select('column_name, data_type, is_nullable, column_default')
|
||||||
|
.eq('table_schema', 'public')
|
||||||
|
.eq('table_name', tableName);
|
||||||
|
|
||||||
|
if (error) throw error;
|
||||||
|
|
||||||
|
const schema: TableSchema = {};
|
||||||
|
data?.forEach((col: any) => {
|
||||||
|
schema[col.column_name] = col;
|
||||||
|
});
|
||||||
|
|
||||||
|
return schema;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeColumnName(name: string): string {
|
||||||
|
// Handle known version table variations
|
||||||
|
const mapping: { [key: string]: string } = {
|
||||||
|
'height_requirement_cm': 'height_requirement',
|
||||||
|
'gforce_max': 'max_g_force',
|
||||||
|
'inversions_count': 'inversions',
|
||||||
|
'height_meters': 'max_height_meters',
|
||||||
|
'drop_meters': 'drop_height_meters',
|
||||||
|
};
|
||||||
|
|
||||||
|
return mapping[name] || name;
|
||||||
|
}
|
||||||
|
|
||||||
|
test.describe('Schema Validation - Entity Tables', () => {
|
||||||
|
test('parks: submission table matches main table schema', async () => {
|
||||||
|
const mainSchema = await getTableSchema('parks');
|
||||||
|
const submissionSchema = await getTableSchema('park_submissions');
|
||||||
|
|
||||||
|
const mismatches: string[] = [];
|
||||||
|
const missingFields: string[] = [];
|
||||||
|
|
||||||
|
// Check each field in main table exists in submission table
|
||||||
|
for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
|
||||||
|
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||||
|
|
||||||
|
if (!submissionSchema[fieldName]) {
|
||||||
|
missingFields.push(fieldName);
|
||||||
|
} else {
|
||||||
|
// Check data type matches
|
||||||
|
const mainType = fieldDef.data_type;
|
||||||
|
const submissionType = submissionSchema[fieldName].data_type;
|
||||||
|
|
||||||
|
if (mainType !== submissionType) {
|
||||||
|
mismatches.push(
|
||||||
|
`${fieldName}: main=${mainType}, submission=${submissionType}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(missingFields,
|
||||||
|
`park_submissions is missing fields: ${missingFields.join(', ')}`
|
||||||
|
).toHaveLength(0);
|
||||||
|
|
||||||
|
expect(mismatches,
|
||||||
|
`park_submissions has type mismatches: ${mismatches.join('; ')}`
|
||||||
|
).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('rides: submission table matches main table schema', async () => {
|
||||||
|
const mainSchema = await getTableSchema('rides');
|
||||||
|
const submissionSchema = await getTableSchema('ride_submissions');
|
||||||
|
|
||||||
|
const mismatches: string[] = [];
|
||||||
|
const missingFields: string[] = [];
|
||||||
|
|
||||||
|
for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
|
||||||
|
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||||
|
|
||||||
|
if (!submissionSchema[fieldName]) {
|
||||||
|
missingFields.push(fieldName);
|
||||||
|
} else {
|
||||||
|
const mainType = fieldDef.data_type;
|
||||||
|
const submissionType = submissionSchema[fieldName].data_type;
|
||||||
|
|
||||||
|
if (mainType !== submissionType) {
|
||||||
|
mismatches.push(
|
||||||
|
`${fieldName}: main=${mainType}, submission=${submissionType}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(missingFields,
|
||||||
|
`ride_submissions is missing fields: ${missingFields.join(', ')}`
|
||||||
|
).toHaveLength(0);
|
||||||
|
|
||||||
|
expect(mismatches,
|
||||||
|
`ride_submissions has type mismatches: ${mismatches.join('; ')}`
|
||||||
|
).toHaveLength(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
  test('companies: submission table matches main table schema', async () => {
    const mainSchema = await getTableSchema('companies');
    const submissionSchema = await getTableSchema('company_submissions');

    const mismatches: string[] = [];
    const missingFields: string[] = [];

    for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      if (!submissionSchema[fieldName]) {
        missingFields.push(fieldName);
      } else {
        const mainType = fieldDef.data_type;
        const submissionType = submissionSchema[fieldName].data_type;

        if (mainType !== submissionType) {
          mismatches.push(
            `${fieldName}: main=${mainType}, submission=${submissionType}`
          );
        }
      }
    }

    expect(missingFields,
      `company_submissions is missing fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(mismatches,
      `company_submissions has type mismatches: ${mismatches.join('; ')}`
    ).toHaveLength(0);
  });
  test('ride_models: submission table matches main table schema', async () => {
    const mainSchema = await getTableSchema('ride_models');
    const submissionSchema = await getTableSchema('ride_model_submissions');

    const mismatches: string[] = [];
    const missingFields: string[] = [];

    for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      if (!submissionSchema[fieldName]) {
        missingFields.push(fieldName);
      } else {
        const mainType = fieldDef.data_type;
        const submissionType = submissionSchema[fieldName].data_type;

        if (mainType !== submissionType) {
          mismatches.push(
            `${fieldName}: main=${mainType}, submission=${submissionType}`
          );
        }
      }
    }

    expect(missingFields,
      `ride_model_submissions is missing fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(mismatches,
      `ride_model_submissions has type mismatches: ${mismatches.join('; ')}`
    ).toHaveLength(0);
  });
});
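// Version tables must mirror their main table (allowing for the renamed
// columns handled by normalizeColumnName) and must also carry every column
// listed in VERSION_METADATA_FIELDS.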
test.describe('Schema Validation - Version Tables', () => {
  test('park_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('parks');
    const versionSchema = await getTableSchema('park_versions');

    const missingFields: string[] = [];

    // Check all main table fields exist in version table
    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    // Check all version metadata fields exist
    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `park_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `park_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });
  test('ride_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('rides');
    const versionSchema = await getTableSchema('ride_versions');

    const missingFields: string[] = [];

    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `ride_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `ride_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });
  test('company_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('companies');
    const versionSchema = await getTableSchema('company_versions');

    const missingFields: string[] = [];

    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `company_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `company_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });
  test('ride_model_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('ride_models');
    const versionSchema = await getTableSchema('ride_model_versions');

    const missingFields: string[] = [];

    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `ride_model_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `ride_model_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });
});
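// Targeted checks for individual columns the submission pipeline depends on:
// `category` is required on rides, while `ride_type` exists only on
// ride_models.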
test.describe('Schema Validation - Critical Fields', () => {
  test('rides table does NOT have ride_type column', async () => {
    const ridesSchema = await getTableSchema('rides');

    expect(ridesSchema['ride_type']).toBeUndefined();
  });

  test('rides table DOES have category column (NOT NULL)', async () => {
    const ridesSchema = await getTableSchema('rides');

    expect(ridesSchema['category']).toBeDefined();
    expect(ridesSchema['category'].is_nullable).toBe('NO');
  });

  test('ride_models table DOES have both category and ride_type columns', async () => {
    const rideModelsSchema = await getTableSchema('ride_models');

    expect(rideModelsSchema['category']).toBeDefined();
    expect(rideModelsSchema['category'].is_nullable).toBe('NO');
    expect(rideModelsSchema['ride_type']).toBeDefined();
  });

  test('all entity tables have required base fields', async () => {
    const requiredFields = ['id', 'name', 'slug', 'created_at', 'updated_at'];
    const tables = ['parks', 'rides', 'companies', 'ride_models'];

    for (const table of tables) {
      const schema = await getTableSchema(table);

      for (const field of requiredFields) {
        expect(schema[field],
          `${table} is missing required field: ${field}`
        ).toBeDefined();
      }
    }
  });
  test('all submission tables have submission_id foreign key', async () => {
    const submissionTables = [
      'park_submissions',
      'ride_submissions',
      'company_submissions',
      'ride_model_submissions',
      'photo_submissions',
    ];

    for (const table of submissionTables) {
      const schema = await getTableSchema(table);

      expect(schema['submission_id'],
        `${table} is missing submission_id foreign key`
      ).toBeDefined();
      expect(schema['submission_id'].is_nullable).toBe('NO');
    }
  });
  test('all version tables have version metadata fields', async () => {
    const versionTables = [
      'park_versions',
      'ride_versions',
      'company_versions',
      'ride_model_versions',
    ];

    const requiredVersionFields = [
      'version_id',
      'version_number',
      'change_type',
      'is_current',
    ];

    for (const table of versionTables) {
      const schema = await getTableSchema(table);

      for (const field of requiredVersionFields) {
        expect(schema[field],
          `${table} is missing required version field: ${field}`
        ).toBeDefined();
      }
    }
  });
});
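// Smoke checks that the approval-pipeline database functions exist. These
// call Postgres catalog helpers through supabase.rpc(), which assumes those
// catalog functions are reachable with the service-role key used in CI.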
test.describe('Schema Validation - Function Parameter Alignment', () => {
  test('verify create_entity_from_submission function exists', async () => {
    const { error } = await supabase
      .rpc('pg_get_functiondef', {
        // Function name passed as text; Postgres resolves it to an OID
        funcid: 'create_entity_from_submission',
      } as any)
      .single();

    // Function should exist (will error if not)
    expect(error).toBeNull();
  });

  test('verify update_entity_from_submission function exists', async () => {
    const { error } = await supabase
      .rpc('pg_get_functiondef', {
        funcid: 'update_entity_from_submission',
      } as any)
      .single();

    // Function should exist (will error if not)
    expect(error).toBeNull();
  });

  test('verify process_approval_transaction function exists', async () => {
    const { data } = await supabase.rpc('pg_catalog.pg_function_is_visible', {
      funcid: 'process_approval_transaction',
    } as any);

    // Function should be visible
    expect(data).toBeTruthy();
  });
});
test.describe('Schema Validation - Known Field Name Variations', () => {
  test('ride_versions uses height_requirement_cm instead of height_requirement', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['height_requirement_cm']).toBeDefined();
    expect(versionSchema['height_requirement']).toBeUndefined();
  });

  test('ride_versions uses gforce_max instead of max_g_force', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['gforce_max']).toBeDefined();
    expect(versionSchema['max_g_force']).toBeUndefined();
  });

  test('ride_versions uses inversions_count instead of inversions', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['inversions_count']).toBeDefined();
    expect(versionSchema['inversions']).toBeUndefined();
  });

  test('ride_versions uses height_meters instead of max_height_meters', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['height_meters']).toBeDefined();
    expect(versionSchema['max_height_meters']).toBeUndefined();
  });

  test('ride_versions uses drop_meters instead of drop_height_meters', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['drop_meters']).toBeDefined();
    expect(versionSchema['drop_height_meters']).toBeUndefined();
  });
});
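// submission_items ties a composite submission to its per-entity child rows
// via the *_submission_id foreign keys checked below; depends_on orders the
// items into a dependency chain.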
test.describe('Schema Validation - Submission Items', () => {
  test('submission_items has all required foreign key columns', async () => {
    const schema = await getTableSchema('submission_items');

    const requiredFKs = [
      'submission_id',
      'park_submission_id',
      'ride_submission_id',
      'company_submission_id',
      'ride_model_submission_id',
      'photo_submission_id',
      'timeline_event_submission_id',
      'depends_on', // For dependency chain
    ];

    for (const fk of requiredFKs) {
      expect(schema[fk],
        `submission_items is missing FK: ${fk}`
      ).toBeDefined();
    }
  });

  test('submission_items has required metadata fields', async () => {
    const schema = await getTableSchema('submission_items');

    const requiredFields = [
      'item_type',
      'action_type',
      'status',
      'order_index',
    ];

    for (const field of requiredFields) {
      expect(schema[field],
        `submission_items is missing field: ${field}`
      ).toBeDefined();
    }
  });
});