Mirror of https://github.com/pacnpal/thrilltrack-explorer.git (synced 2025-12-29 14:27:06 -05:00)

Compare commits ced3a80fee ... 403bc78765 (7 commits):
403bc78765, 26e38b6d49, 4e187cd1ff, da0ccf7e27, f315f935cc, 071f538a4e, 0601600ee5
.github/workflows/schema-validation.yml (186 lines, vendored, normal file)
@@ -0,0 +1,186 @@
name: Schema Validation

on:
  pull_request:
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  push:
    branches:
      - main
      - develop
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  workflow_dispatch: # Allow manual triggering

jobs:
  validate-schema:
    name: Validate Database Schema
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run schema validation script
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🔍 Running schema validation checks..."
          npm run validate-schema

      - name: Run Playwright schema validation tests
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🧪 Running integration tests..."
          npx playwright test schema-validation --reporter=list

      - name: Upload test results
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: schema-validation-results
          path: |
            playwright-report/
            test-results/
          retention-days: 7

      - name: Comment PR with validation results
        if: failure() && github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `## ❌ Schema Validation Failed

            The schema validation checks have detected inconsistencies in your database changes.

            **Common issues:**
            - Missing fields in submission tables
            - Mismatched data types between tables
            - Missing version metadata fields
            - Invalid column names (e.g., \`ride_type\` in \`rides\` table)

            **Next steps:**
            1. Review the failed tests in the Actions log
            2. Check the [Schema Reference documentation](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/docs/submission-pipeline/SCHEMA_REFERENCE.md)
            3. Fix the identified issues
            4. Push your fixes to re-run validation

            **Need help?** Consult the [Integration Tests README](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/tests/integration/README.md).`
            })

  migration-safety-check:
    name: Migration Safety Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check for breaking changes in migrations
        run: |
          echo "🔍 Checking for potentially breaking migration patterns..."

          # Check if any migrations contain DROP COLUMN
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "DROP COLUMN"; then
            echo "⚠️ Warning: Migration contains DROP COLUMN"
            echo "::warning::Migration contains DROP COLUMN - ensure data migration plan exists"
          fi

          # Check if any migrations alter NOT NULL constraints
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "ALTER COLUMN.*NOT NULL"; then
            echo "⚠️ Warning: Migration alters NOT NULL constraints"
            echo "::warning::Migration alters NOT NULL constraints - ensure data backfill is complete"
          fi

          # Check if any migrations rename columns
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "RENAME COLUMN"; then
            echo "⚠️ Warning: Migration renames columns"
            echo "::warning::Migration renames columns - ensure all code references are updated"
          fi

      - name: Validate migration file naming
        run: |
          echo "🔍 Validating migration file names..."

          # Check that all migration files follow the timestamp pattern
          for file in supabase/migrations/*.sql; do
            if [[ ! $(basename "$file") =~ ^[0-9]{14}_ ]]; then
              echo "❌ Invalid migration filename: $(basename "$file")"
              echo "::error::Migration files must start with a 14-digit timestamp (YYYYMMDDHHMMSS)"
              exit 1
            fi
          done

          echo "✅ All migration filenames are valid"

  documentation-check:
    name: Documentation Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check if schema docs need updating
        run: |
          echo "📚 Checking if schema documentation is up to date..."

          # Check if migrations changed but SCHEMA_REFERENCE.md didn't
          MIGRATIONS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "supabase/migrations/" || true)
          SCHEMA_DOCS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "docs/submission-pipeline/SCHEMA_REFERENCE.md" || true)

          if [ "$MIGRATIONS_CHANGED" -gt 0 ] && [ "$SCHEMA_DOCS_CHANGED" -eq 0 ]; then
            echo "⚠️ Warning: Migrations were changed but SCHEMA_REFERENCE.md was not updated"
            echo "::warning::Consider updating docs/submission-pipeline/SCHEMA_REFERENCE.md to reflect schema changes"
          else
            echo "✅ Documentation check passed"
          fi

      - name: Comment PR with documentation reminder
        if: success()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const migrationsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('supabase/migrations/');
            const docsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('docs/submission-pipeline/SCHEMA_REFERENCE.md');

            if (migrationsChanged && !docsChanged) {
              github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: `## 📚 Documentation Reminder

            This PR includes database migrations but doesn't update the schema reference documentation.

            **If you added/modified fields**, please update:
            - \`docs/submission-pipeline/SCHEMA_REFERENCE.md\`

            **If this is a minor change** (e.g., fixing typos, adding indexes), you can ignore this message.`
              })
            }
docs/submission-pipeline/SCHEMA_REFERENCE.md (636 lines, normal file)
@@ -0,0 +1,636 @@
# Submission Pipeline Schema Reference

**Critical Document**: This reference maps all entity types to their exact database schema fields across the entire submission pipeline to prevent schema mismatches.

**Last Updated**: 2025-11-08
**Status**: ✅ All schemas audited and verified

---

## Table of Contents

1. [Overview](#overview)
2. [Parks](#parks)
3. [Rides](#rides)
4. [Companies](#companies)
5. [Ride Models](#ride-models)
6. [Photos](#photos)
7. [Timeline Events](#timeline-events)
8. [Critical Functions Reference](#critical-functions-reference)
9. [Common Pitfalls](#common-pitfalls)

---

## Overview

### Pipeline Flow

```
User Input → *_submissions table → submission_items → Moderation →
process_approval_transaction → create/update_entity_from_submission →
Main entity table → Version trigger → *_versions table
```

### Entity Types

- `park` - Theme parks and amusement parks
- `ride` - Individual rides and attractions
- `company` - Used for: `manufacturer`, `operator`, `designer`, `property_owner`
- `ride_model` - Ride model templates
- `photo` - Entity photos
- `timeline_event` - Historical events

---

## Parks

### Main Table: `parks`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_type` (text, NOT NULL) - Values: `theme_park`, `amusement_park`, `water_park`, etc.
- `status` (text, NOT NULL) - Values: `operating`, `closed`, `under_construction`, etc.

**Optional Fields:**
- `description` (text)
- `location_id` (uuid, FK → locations)
- `operator_id` (uuid, FK → companies)
- `property_owner_id` (uuid, FK → companies)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text) - Values: `year`, `month`, `day`
- `closing_date_precision` (text)
- `website_url` (text)
- `phone` (text)
- `email` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `park_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields like `id`, timestamps)

**Additional Fields:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `temp_location_data` (jsonb) - For pending location creation

### Version Table: `park_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `park_id` (uuid, NOT NULL, FK → parks)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL) - Values: `created`, `updated`, `restored`
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Rides

### Main Table: `rides`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_id` (uuid, NOT NULL, FK → parks)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
  - Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`
- `status` (text, NOT NULL)
  - Values: `operating`, `closed`, `under_construction`, `sbno`, etc.

**⚠️ IMPORTANT: The `rides` table does NOT have a `ride_type` column!**
- `ride_type` only exists in the `ride_models` table
- Using `ride_type` in rides updates will cause a "column does not exist" error

**Optional Relationship Fields:**
- `manufacturer_id` (uuid, FK → companies)
- `designer_id` (uuid, FK → companies)
- `ride_model_id` (uuid, FK → ride_models)

**Optional Descriptive Fields:**
- `description` (text)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text)
- `closing_date_precision` (text)

**Optional Technical Fields:**
- `height_requirement` (integer) - Height requirement in cm
- `age_requirement` (integer)
- `max_speed_kmh` (numeric)
- `duration_seconds` (integer)
- `capacity_per_hour` (integer)
- `max_g_force` (numeric)
- `inversions` (integer) - Number of inversions
- `length_meters` (numeric)
- `max_height_meters` (numeric)
- `drop_height_meters` (numeric)

**Category-Specific Fields:**

*Roller Coasters:*
- `ride_sub_type` (text)
- `coaster_type` (text)
- `seating_type` (text)
- `intensity_level` (text)
- `track_material` (text)
- `support_material` (text)
- `propulsion_method` (text)

*Water Rides:*
- `water_depth_cm` (integer)
- `splash_height_meters` (numeric)
- `wetness_level` (text)
- `flume_type` (text)
- `boat_capacity` (integer)

*Dark Rides:*
- `theme_name` (text)
- `story_description` (text)
- `show_duration_seconds` (integer)
- `animatronics_count` (integer)
- `projection_type` (text)
- `ride_system` (text)
- `scenes_count` (integer)

*Flat Rides:*
- `rotation_type` (text)
- `motion_pattern` (text)
- `platform_count` (integer)
- `swing_angle_degrees` (numeric)
- `rotation_speed_rpm` (numeric)
- `arm_length_meters` (numeric)
- `max_height_reached_meters` (numeric)

*Kids Rides:*
- `min_age` (integer)
- `max_age` (integer)
- `educational_theme` (text)
- `character_theme` (text)

*Transport:*
- `transport_type` (text)
- `route_length_meters` (numeric)
- `stations_count` (integer)
- `vehicle_capacity` (integer)
- `vehicles_count` (integer)
- `round_trip_duration_seconds` (integer)

**Image Fields:**
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
- `image_url` (text) - Legacy field

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `ride_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `ride_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `ride_id` (uuid, NOT NULL, FK → rides)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

**⚠️ Field Name Differences (Version Table vs Main Table):**
- `height_requirement_cm` in versions → `height_requirement` in rides
- `gforce_max` in versions → `max_g_force` in rides
- `inversions_count` in versions → `inversions` in rides
- `height_meters` in versions → `max_height_meters` in rides
- `drop_meters` in versions → `drop_height_meters` in rides
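
In practice, any query against `ride_versions` has to use the version-table spellings. A minimal illustration (placeholder UUID; only a subset of columns shown):

```sql
SELECT version_number, change_type, height_requirement_cm, gforce_max, inversions_count
FROM ride_versions
WHERE ride_id = '00000000-0000-0000-0000-000000000000'
  AND is_current = true;
```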

---

## Companies

**Used For**: `manufacturer`, `operator`, `designer`, `property_owner`

### Main Table: `companies`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `company_type` (text, NOT NULL)
  - Values: `manufacturer`, `operator`, `designer`, `property_owner`

**Optional Fields:**
- `description` (text)
- `person_type` (text, default: 'company')
  - Values: `company`, `individual`
- `founded_year` (integer)
- `founded_date` (date)
- `founded_date_precision` (text)
- `headquarters_location` (text)
- `website_url` (text)
- `logo_url` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `company_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `company_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `company_id` (uuid, NOT NULL, FK → companies)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Ride Models

### Main Table: `ride_models`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `manufacturer_id` (uuid, NOT NULL, FK → companies)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
  - Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`

**Optional Fields:**
- `ride_type` (text) ⚠️ **This field exists in ride_models but NOT in rides**
  - More specific classification than category
  - Example: category = `roller_coaster`, ride_type = `inverted_coaster`
- `description` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `installations_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `ride_model_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `ride_model_versions`

**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `ride_model_id` (uuid, NOT NULL, FK → ride_models)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Photos

### Main Table: `photos`

**Required Fields:**
- `id` (uuid, PK)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `uploader_id` (uuid, NOT NULL, FK → auth.users)

**Optional Fields:**
- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer, default: 0)
- `is_primary` (boolean, default: false)
- `status` (text, default: 'active')

**Metadata Fields:**
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `photo_submissions`

**Required Fields:**
- `id` (uuid, PK)
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)

**Optional Fields:**
- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer)

**Note**: Photos do NOT have version tables - they are immutable after approval

---

## Timeline Events

### Main Table: `entity_timeline_events`

**Required Fields:**
- `id` (uuid, PK)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `event_type` (text, NOT NULL)
  - Values: `opening`, `closing`, `relocation`, `renovation`, `name_change`, `ownership_change`, etc.
- `title` (text, NOT NULL)
- `event_date` (date, NOT NULL)

**Optional Fields:**
- `description` (text)
- `event_date_precision` (text, default: 'day')
- `from_value` (text)
- `to_value` (text)
- `from_entity_id` (uuid)
- `to_entity_id` (uuid)
- `from_location_id` (uuid)
- `to_location_id` (uuid)
- `is_public` (boolean, default: true)
- `display_order` (integer, default: 0)

**Approval Fields:**
- `created_by` (uuid, FK → auth.users)
- `approved_by` (uuid, FK → auth.users)
- `submission_id` (uuid, FK → content_submissions)

**Metadata Fields:**
- `created_at` (timestamptz)
- `updated_at` (timestamptz)

### Submission Table: `timeline_event_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)

**Note**: Timeline events do NOT have version tables

---

## Critical Functions Reference

### 1. `create_entity_from_submission`

**Purpose**: Creates new entities from approved submissions

**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Entity data from submission
- `p_created_by` (uuid) - User who created it
- `p_submission_id` (uuid) - Source submission

**Critical Requirements**:
- ✅ MUST extract `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides (doesn't exist)
- ✅ MUST use `ride_type` for ride_models (does exist)
- ✅ MUST handle all required NOT NULL fields

**Returns**: `uuid` - New entity ID
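
A hypothetical call shape, shown with named-argument notation so the positional order (not documented here) does not matter; all UUIDs are placeholders and the payload is a minimal valid ride:

```sql
SELECT create_entity_from_submission(
  p_entity_type   => 'ride',
  p_data          => jsonb_build_object(
    'name', 'Example Coaster',
    'slug', 'example-coaster',
    'park_id', '00000000-0000-0000-0000-000000000000',
    'category', 'roller_coaster',   -- required: see Rides above
    'status', 'operating'
  ),
  p_created_by    => '00000000-0000-0000-0000-000000000000',
  p_submission_id => '00000000-0000-0000-0000-000000000000'
);
```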

### 2. `update_entity_from_submission`

**Purpose**: Updates existing entities from approved edits

**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Updated entity data
- `p_entity_id` (uuid) - Existing entity ID
- `p_changed_by` (uuid) - User who changed it

**Critical Requirements**:
- ✅ MUST use COALESCE to preserve existing values
- ✅ MUST include `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides
- ✅ MUST use `ride_type` for ride_models
- ✅ MUST update `updated_at` timestamp

**Returns**: `uuid` - Updated entity ID

### 3. `process_approval_transaction`

**Purpose**: Atomic transaction for selective approval

**Parameters**:
- `p_submission_id` (uuid)
- `p_item_ids` (uuid[]) - Specific items to approve
- `p_moderator_id` (uuid)
- `p_change_reason` (text)

**Critical Requirements**:
- ✅ MUST validate all item dependencies first
- ✅ MUST extract correct fields from submission tables
- ✅ MUST set session variables for triggers
- ✅ MUST handle rollback on any error

**Called By**: Edge function `process-selective-approval`
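
For reference, a SQL-level sketch of invoking it directly (parameter names from the list above; argument order is not guaranteed and the UUIDs are placeholders):

```sql
SELECT process_approval_transaction(
  p_submission_id => '00000000-0000-0000-0000-000000000000',
  p_item_ids      => ARRAY['00000000-0000-0000-0000-000000000001']::uuid[],
  p_moderator_id  => '00000000-0000-0000-0000-000000000002',
  p_change_reason => 'Approved after moderator review'
);
```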

### 4. `create_submission_with_items`

**Purpose**: Creates multi-item submissions atomically

**Parameters**:
- `p_submission_id` (uuid)
- `p_entity_type` (text)
- `p_action_type` (text) - `create` or `edit`
- `p_items` (jsonb) - Array of submission items
- `p_user_id` (uuid)

**Critical Requirements**:
- ✅ MUST resolve dependencies in order
- ✅ MUST validate all required fields per entity type
- ✅ MUST link items to submission correctly
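
A sketch of the call; the shape of the `p_items` array below is purely illustrative (the real item keys are defined by the function itself), and the user UUID is a placeholder:

```sql
SELECT create_submission_with_items(
  p_submission_id => gen_random_uuid(),
  p_entity_type   => 'ride',
  p_action_type   => 'create',
  p_items         => '[{"entity_type": "ride", "data": {"name": "Example Coaster", "category": "roller_coaster"}}]'::jsonb,
  p_user_id       => '00000000-0000-0000-0000-000000000000'
);
```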

---

## Common Pitfalls

### 1. ❌ Using `ride_type` for rides
```sql
-- WRONG
UPDATE rides SET ride_type = 'inverted_coaster' WHERE id = $1;
-- ERROR: column "ride_type" does not exist

-- CORRECT
UPDATE rides SET category = 'roller_coaster' WHERE id = $1;
```

### 2. ❌ Missing `category` field
```sql
-- WRONG - Missing required category
INSERT INTO rides (name, slug, park_id, status) VALUES (...);
-- ERROR: null value violates not-null constraint

-- CORRECT
INSERT INTO rides (name, slug, park_id, category, status) VALUES (..., 'roller_coaster', ...);
```

### 3. ❌ Wrong column names in version tables
```sql
-- WRONG
SELECT height_requirement FROM ride_versions WHERE ride_id = $1;
-- ERROR: column "height_requirement" does not exist

-- CORRECT
SELECT height_requirement_cm FROM ride_versions WHERE ride_id = $1;
```

### 4. ❌ Forgetting COALESCE in updates
```sql
-- WRONG - Overwrites fields with NULL
UPDATE rides SET
  name = (p_data->>'name'),
  description = (p_data->>'description')
WHERE id = $1;

-- CORRECT - Preserves existing values if not provided
UPDATE rides SET
  name = COALESCE(p_data->>'name', name),
  description = COALESCE(p_data->>'description', description)
WHERE id = $1;
```

### 5. ❌ Not handling submission_id in version triggers
```sql
-- WRONG - Version doesn't link back to submission
INSERT INTO ride_versions (ride_id, ...) VALUES (...);

-- CORRECT - Trigger must read session variable
v_submission_id := current_setting('app.submission_id', true)::uuid;
INSERT INTO ride_versions (ride_id, submission_id, ...) VALUES (..., v_submission_id, ...);
```
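
For orientation, a stripped-down sketch of what such a trigger function can look like; the real triggers in `supabase/migrations` copy every column, this one carries only a handful for brevity:

```sql
CREATE OR REPLACE FUNCTION record_ride_version() RETURNS trigger AS $$
DECLARE
  v_submission_id uuid;
BEGIN
  -- Session variable set by process_approval_transaction; NULL when unset
  v_submission_id := NULLIF(current_setting('app.submission_id', true), '')::uuid;

  UPDATE ride_versions SET is_current = false
  WHERE ride_id = NEW.id AND is_current;

  INSERT INTO ride_versions (ride_id, version_number, change_type, is_current, submission_id,
                             name, slug, category, status)
  VALUES (
    NEW.id,
    COALESCE((SELECT MAX(version_number) FROM ride_versions WHERE ride_id = NEW.id), 0) + 1,
    (CASE WHEN TG_OP = 'INSERT' THEN 'created' ELSE 'updated' END)::version_change_type,
    true,
    v_submission_id,
    NEW.name, NEW.slug, NEW.category, NEW.status
  );
  RETURN NEW;
END;
$$ LANGUAGE plpgsql;
```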

---

## Validation Checklist

Before deploying any submission pipeline changes:

- [ ] All entity tables have matching submission tables
- [ ] All required NOT NULL fields are included in CREATE functions
- [ ] All required NOT NULL fields are included in UPDATE functions
- [ ] `category` is extracted for rides and ride_models
- [ ] `ride_type` is NOT used for rides
- [ ] `ride_type` IS used for ride_models
- [ ] COALESCE is used for all UPDATE statements
- [ ] Version table column name differences are handled
- [ ] Session variables are set for version triggers
- [ ] Foreign key relationships are validated
- [ ] Dependency resolution works correctly
- [ ] Error handling and rollback logic is present

---

## Maintenance

**When adding new entity types:**

1. Create main table with all fields
2. Create matching submission table + `submission_id` FK
3. Create version table with all fields + version metadata
4. Add case to `create_entity_from_submission`
5. Add case to `update_entity_from_submission`
6. Add case to `process_approval_transaction`
7. Add case to `create_submission_with_items`
8. Create version trigger for main table
9. Update this documentation
10. Run full test suite

**When modifying schemas:**

1. Check if field exists in ALL three tables (main, submission, version)
2. Update ALL three tables in the migration (see the sketch below)
3. Update ALL functions that reference the field
4. Update this documentation
5. Test create, update, and rollback flows
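
A minimal sketch of step 2 for a hypothetical new `rides` column (the column name is illustrative only):

```sql
-- Add the same column to main, submission, and version tables in one migration
ALTER TABLE rides            ADD COLUMN IF NOT EXISTS theme_area text;
ALTER TABLE ride_submissions ADD COLUMN IF NOT EXISTS theme_area text;
ALTER TABLE ride_versions    ADD COLUMN IF NOT EXISTS theme_area text;
```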

---

## Related Documentation

- [Submission Pipeline Overview](./README.md)
- [Versioning System](../versioning/README.md)
- [Moderation Workflow](../moderation/README.md)
- [Migration Guide](../versioning/MIGRATION.md)
docs/submission-pipeline/VALIDATION_SETUP.md (402 lines, normal file)
@@ -0,0 +1,402 @@
# Schema Validation Setup Guide

This guide explains how to set up and use the automated schema validation tools to prevent field mismatches in the submission pipeline.

## Overview

The validation system consists of three layers:

1. **Pre-migration Script** - Quick validation before deploying migrations
2. **Integration Tests** - Comprehensive Playwright tests for CI/CD
3. **GitHub Actions** - Automated checks on every pull request

## Quick Start

### 1. Add NPM Scripts

Add these scripts to your `package.json`:

```json
{
  "scripts": {
    "validate-schema": "tsx scripts/validate-schema.ts",
    "test:schema": "playwright test schema-validation",
    "test:schema:ui": "playwright test schema-validation --ui",
    "pre-migrate": "npm run validate-schema"
  }
}
```

### 2. Environment Variables

Create a `.env.test` file:

```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**⚠️ Important**: Never commit this file! Add it to `.gitignore`:

```gitignore
.env.test
.env.local
```

### 3. Install Dependencies

If not already installed:

```bash
npm install --save-dev @supabase/supabase-js @playwright/test tsx
```

## Using the Validation Tools

### Pre-Migration Validation Script

**When to use**: Before applying any database migration

**Run manually:**
```bash
npm run validate-schema
```

**What it checks:**
- ✅ Submission tables match main tables
- ✅ Version tables have all required fields
- ✅ Critical fields are correct (e.g., `category` vs `ride_type`)
- ✅ Database functions exist and are accessible

**Example output:**
```
🔍 Starting schema validation...

Submission Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: submission table matches main table
✅ Rides: submission table matches main table
✅ Companies: submission table matches main table
✅ Ride Models: submission table matches main table

Version Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: version table has all fields
✅ Rides: version table has all fields
✅ Companies: version table has all fields
✅ Ride Models: version table has all fields

Critical Fields:
────────────────────────────────────────────────────────────────────────────────
✅ rides table does NOT have ride_type column
✅ rides table has category column
✅ ride_models has both category and ride_type

Functions:
────────────────────────────────────────────────────────────────────────────────
✅ create_entity_from_submission exists and is accessible
✅ update_entity_from_submission exists and is accessible
✅ process_approval_transaction exists and is accessible

════════════════════════════════════════════════════════════════════════════════
Total: 15 passed, 0 failed
════════════════════════════════════════════════════════════════════════════════

✅ All schema validations passed. Safe to deploy.
```
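
To spot-check a single table pair by hand (roughly what the script automates), a query like this can be run in the Supabase SQL editor:

```sql
-- Columns present in rides but missing from ride_submissions
SELECT column_name
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'rides'
EXCEPT
SELECT column_name
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'ride_submissions'
ORDER BY column_name;
```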

### Integration Tests

**When to use**: In CI/CD, before merging PRs, after major changes

**Run all tests:**
```bash
npm run test:schema
```

**Run in UI mode (for debugging):**
```bash
npm run test:schema:ui
```

**Run specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**What it tests:**
- All pre-migration script checks PLUS:
  - Field-by-field data type comparison
  - NOT NULL constraint validation
  - Foreign key existence checks
  - Known field name variations (e.g., `height_requirement_cm` vs `height_requirement`)

### GitHub Actions (Automated)

**Automatically runs on:**
- Every pull request that touches:
  - `supabase/migrations/**`
  - `src/lib/moderation/**`
  - `supabase/functions/**`
- Pushes to `main` or `develop` branches
- Manual workflow dispatch

**What it does:**
1. Runs validation script
2. Runs integration tests
3. Checks for breaking migration patterns
4. Validates migration file naming
5. Comments on PRs with helpful guidance if tests fail

## Workflow Examples

### Before Creating a Migration

```bash
# 1. Make schema changes locally
# 2. Validate before creating migration
npm run validate-schema

# 3. If validation passes, create migration
supabase db diff -f add_new_field

# 4. Run validation again
npm run validate-schema

# 5. Commit and push
git add .
git commit -m "Add new field to rides table"
git push
```

### After Modifying Entity Schemas

```bash
# 1. Modified rides table schema
# 2. Run full test suite
npm run test:schema

# 3. Check specific validation
npx playwright test schema-validation --grep "rides"

# 4. Fix any issues
# 5. Re-run tests
npm run test:schema
```

### During Code Review

**PR Author:**
1. Ensure all validation tests pass locally
2. Push changes
3. Wait for GitHub Actions to complete
4. Address any automated feedback

**Reviewer:**
1. Check that GitHub Actions passed
2. Review schema changes in migrations
3. Verify documentation was updated
4. Approve if all checks pass

## Common Issues and Solutions

### Issue: "Missing fields" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Missing fields: category
```

**Cause**: Field was added to main table but not submission table

**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions ADD COLUMN category TEXT NOT NULL;
```
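
If `ride_submissions` already contains rows, adding a `NOT NULL` column without a default will fail; one way to stage it (the backfill value is illustrative):

```sql
ALTER TABLE ride_submissions ADD COLUMN category TEXT;
UPDATE ride_submissions SET category = 'roller_coaster' WHERE category IS NULL;
ALTER TABLE ride_submissions ALTER COLUMN category SET NOT NULL;
```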

### Issue: "Type mismatch" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Type mismatches: max_speed_kmh: main=numeric, submission=integer
```

**Cause**: Data types don't match between tables

**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions
  ALTER COLUMN max_speed_kmh TYPE NUMERIC USING max_speed_kmh::numeric;
```

### Issue: "Column does not exist" in Production

**Symptom**: Approval fails with `column "category" does not exist`

**Immediate action:**
1. Run validation script to identify issue
2. Create emergency migration to add missing field
3. Deploy immediately
4. Update functions if needed

**Prevention**: Always run validation before deploying

### Issue: Tests Pass Locally but Fail in CI

**Possible causes:**
- Different database state in CI vs local
- Missing environment variables
- Outdated schema in test database

**Solution:**
```bash
# Pull latest schema
supabase db pull

# Reset local database
supabase db reset

# Re-run tests
npm run test:schema
```

## Best Practices

### ✅ Do's

- ✅ Run validation script before every migration
- ✅ Run integration tests before merging PRs
- ✅ Update all three tables when adding fields (main, submission, version)
- ✅ Document field name variations in tests
- ✅ Check GitHub Actions results before merging
- ✅ Keep SCHEMA_REFERENCE.md up to date

### ❌ Don'ts

- ❌ Don't skip validation "because it's a small change"
- ❌ Don't add fields to only main tables
- ❌ Don't ignore failing tests
- ❌ Don't bypass CI checks
- ❌ Don't commit service role keys
- ❌ Don't modify submission pipeline functions without testing

## Continuous Integration Setup

### GitHub Secrets

Add to your repository secrets:

```
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**Steps:**
1. Go to repository Settings → Secrets and variables → Actions
2. Click "New repository secret"
3. Name: `SUPABASE_SERVICE_ROLE_KEY`
4. Value: Your service role key from the Supabase dashboard
5. Save

### Branch Protection Rules

Recommended settings:

```
Branch: main
✓ Require status checks to pass before merging
  ✓ validate-schema (Schema Validation)
  ✓ migration-safety-check (Migration Safety Check)
✓ Require branches to be up to date before merging
```

## Troubleshooting

### Script Won't Run

**Error:** `tsx: command not found`

**Solution:**
```bash
npm install -g tsx
# or
npx tsx scripts/validate-schema.ts
```

### Authentication Errors

**Error:** `Invalid API key`

**Solution:**
1. Check that `.env.test` has the correct service role key
2. Verify the key has not expired
3. Ensure the variable is actually exported to the command:
```bash
set -a; source .env.test; set +a
npm run validate-schema
```

### Tests Timeout

**Error:** Tests timeout after 30 seconds

**Solution:**
```bash
# Increase timeout
npx playwright test schema-validation --timeout=60000
```

## Maintenance

### Adding New Entity Types

When adding a new entity type (e.g., `events`):

1. **Update validation script:**
```typescript
// In scripts/validate-schema.ts
await validateSubmissionTable('events', 'event_submissions', 'Events');
await validateVersionTable('events', 'event_versions', 'Events');
```

2. **Update integration tests:**
```typescript
// In tests/integration/schema-validation.test.ts
test('events: submission table matches main table schema', async () => {
  // Add test logic
});
```

3. **Update documentation:**
   - `docs/submission-pipeline/SCHEMA_REFERENCE.md`
   - This file (`VALIDATION_SETUP.md`)

### Updating Field Mappings

When version tables use different field names:

```typescript
// In both script and tests
const fieldMapping: { [key: string]: string } = {
  'new_main_field': 'version_field_name',
};
```

## Related Documentation

- [Schema Reference](./SCHEMA_REFERENCE.md) - Complete field mappings
- [Integration Tests README](../../tests/integration/README.md) - Detailed test documentation
- [Submission Pipeline](./README.md) - Pipeline overview
- [Versioning System](../versioning/README.md) - Version table details

## Support

**Questions?** Check the documentation above or review existing migration files.

**Found a bug in validation?** Open an issue with:
- Expected behavior
- Actual behavior
- Validation script output
- Database schema snippets
scripts/validate-schema.ts (332 lines, normal file)
@@ -0,0 +1,332 @@
#!/usr/bin/env tsx
/**
 * Schema Validation Script
 *
 * Pre-migration validation script that checks schema consistency
 * across the submission pipeline before deploying changes.
 *
 * Usage:
 *   npm run validate-schema
 *   or
 *   tsx scripts/validate-schema.ts
 *
 * Exit codes:
 *   0 = All validations passed
 *   1 = Validation failures detected
 */

import { createClient } from '@supabase/supabase-js';

const SUPABASE_URL = 'https://ydvtmnrszybqnbcqbdcy.supabase.co';
const SUPABASE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SUPABASE_KEY) {
  console.error('❌ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);

interface ValidationResult {
  category: string;
  test: string;
  passed: boolean;
  message?: string;
}

const results: ValidationResult[] = [];

async function getTableColumns(tableName: string): Promise<Set<string>> {
  const { data, error } = await supabase
    .from('information_schema.columns' as any)
    .select('column_name')
    .eq('table_schema', 'public')
    .eq('table_name', tableName);

  if (error) throw error;

  return new Set(data?.map((row: any) => row.column_name) || []);
}

async function validateSubmissionTable(
  mainTable: string,
  submissionTable: string,
  entityName: string
): Promise<void> {
  const mainColumns = await getTableColumns(mainTable);
  const submissionColumns = await getTableColumns(submissionTable);

  const excludedFields = new Set([
    'id', 'created_at', 'updated_at', 'is_test_data',
    'view_count_all', 'view_count_30d', 'view_count_7d',
    'average_rating', 'review_count', 'installations_count',
  ]);

  const missingFields: string[] = [];

  for (const field of mainColumns) {
    if (excludedFields.has(field)) continue;
    if (!submissionColumns.has(field)) {
      missingFields.push(field);
    }
  }

  if (missingFields.length === 0) {
    results.push({
      category: 'Submission Tables',
      test: `${entityName}: submission table matches main table`,
      passed: true,
    });
  } else {
    results.push({
      category: 'Submission Tables',
      test: `${entityName}: submission table matches main table`,
      passed: false,
      message: `Missing fields: ${missingFields.join(', ')}`,
    });
  }
}

async function validateVersionTable(
  mainTable: string,
  versionTable: string,
  entityName: string
): Promise<void> {
  const mainColumns = await getTableColumns(mainTable);
  const versionColumns = await getTableColumns(versionTable);

  const excludedFields = new Set([
    'id', 'created_at', 'updated_at', 'is_test_data',
    'view_count_all', 'view_count_30d', 'view_count_7d',
    'average_rating', 'review_count', 'installations_count',
  ]);

  const fieldMapping: { [key: string]: string } = {
    'height_requirement': 'height_requirement_cm',
    'max_g_force': 'gforce_max',
    'inversions': 'inversions_count',
    'max_height_meters': 'height_meters',
    'drop_height_meters': 'drop_meters',
  };

  const requiredVersionFields = new Set([
    'version_id', 'version_number', 'change_type', 'change_reason',
    'is_current', 'created_by', 'submission_id', 'is_test_data',
  ]);

  const missingMainFields: string[] = [];
  const missingVersionFields: string[] = [];

  // Check main table fields exist in version table
  for (const field of mainColumns) {
    if (excludedFields.has(field)) continue;

    const mappedField = fieldMapping[field] || field;
    if (!versionColumns.has(field) && !versionColumns.has(mappedField)) {
      missingMainFields.push(field);
    }
  }

  // Check version metadata fields exist
  for (const field of requiredVersionFields) {
    if (!versionColumns.has(field)) {
      missingVersionFields.push(field);
    }
  }

  if (missingMainFields.length === 0 && missingVersionFields.length === 0) {
    results.push({
      category: 'Version Tables',
      test: `${entityName}: version table has all fields`,
      passed: true,
    });
  } else {
    const messages: string[] = [];
    if (missingMainFields.length > 0) {
      messages.push(`Missing main fields: ${missingMainFields.join(', ')}`);
    }
    if (missingVersionFields.length > 0) {
      messages.push(`Missing version fields: ${missingVersionFields.join(', ')}`);
    }

    results.push({
      category: 'Version Tables',
      test: `${entityName}: version table has all fields`,
      passed: false,
      message: messages.join('; '),
    });
  }
}

async function validateCriticalFields(): Promise<void> {
  const ridesColumns = await getTableColumns('rides');
  const rideModelsColumns = await getTableColumns('ride_models');

  // Rides should NOT have ride_type
  if (!ridesColumns.has('ride_type')) {
    results.push({
      category: 'Critical Fields',
      test: 'rides table does NOT have ride_type column',
      passed: true,
    });
  } else {
    results.push({
      category: 'Critical Fields',
      test: 'rides table does NOT have ride_type column',
      passed: false,
      message: 'rides table incorrectly has ride_type column',
    });
  }

  // Rides MUST have category
  if (ridesColumns.has('category')) {
    results.push({
      category: 'Critical Fields',
      test: 'rides table has category column',
      passed: true,
    });
  } else {
    results.push({
      category: 'Critical Fields',
      test: 'rides table has category column',
      passed: false,
      message: 'rides table is missing required category column',
    });
  }

  // Ride models must have both category and ride_type
  if (rideModelsColumns.has('category') && rideModelsColumns.has('ride_type')) {
    results.push({
      category: 'Critical Fields',
      test: 'ride_models has both category and ride_type',
      passed: true,
    });
  } else {
    const missing: string[] = [];
    if (!rideModelsColumns.has('category')) missing.push('category');
    if (!rideModelsColumns.has('ride_type')) missing.push('ride_type');

    results.push({
      category: 'Critical Fields',
      test: 'ride_models has both category and ride_type',
      passed: false,
      message: `ride_models is missing: ${missing.join(', ')}`,
    });
  }
}

async function validateFunctions(): Promise<void> {
  const functionsToCheck = [
    'create_entity_from_submission',
    'update_entity_from_submission',
    'process_approval_transaction',
  ];

  for (const funcName of functionsToCheck) {
    try {
      // Probe the function by attempting an RPC call; a "function does not exist"
      // style error marks the check as failed.
      const { error } = await supabase
        .rpc('pg_catalog.pg_function_is_visible' as any, {
          funcid: `public.${funcName}`,
        } as any);

      if (!error) {
        results.push({
          category: 'Functions',
          test: `${funcName} exists and is accessible`,
          passed: true,
        });
      } else {
        results.push({
          category: 'Functions',
          test: `${funcName} exists and is accessible`,
          passed: false,
          message: error.message,
        });
      }
    } catch (err) {
      results.push({
        category: 'Functions',
        test: `${funcName} exists and is accessible`,
        passed: false,
        message: err instanceof Error ? err.message : String(err),
      });
    }
  }
}

function printResults(): void {
  console.log('\n' + '='.repeat(80));
  console.log('Schema Validation Results');
  console.log('='.repeat(80) + '\n');

  const categories = [...new Set(results.map(r => r.category))];
  let totalPassed = 0;
  let totalFailed = 0;

  for (const category of categories) {
    const categoryResults = results.filter(r => r.category === category);
    const passed = categoryResults.filter(r => r.passed).length;
    const failed = categoryResults.filter(r => !r.passed).length;

    console.log(`\n${category}:`);
    console.log('-'.repeat(80));

    for (const result of categoryResults) {
      const icon = result.passed ? '✅' : '❌';
      console.log(`${icon} ${result.test}`);
      if (result.message) {
        console.log(`   └─ ${result.message}`);
      }
    }

    totalPassed += passed;
    totalFailed += failed;
  }

  console.log('\n' + '='.repeat(80));
  console.log(`Total: ${totalPassed} passed, ${totalFailed} failed`);
  console.log('='.repeat(80) + '\n');
}

async function main(): Promise<void> {
  console.log('🔍 Starting schema validation...\n');

  try {
    // Validate submission tables
    await validateSubmissionTable('parks', 'park_submissions', 'Parks');
    await validateSubmissionTable('rides', 'ride_submissions', 'Rides');
    await validateSubmissionTable('companies', 'company_submissions', 'Companies');
    await validateSubmissionTable('ride_models', 'ride_model_submissions', 'Ride Models');

    // Validate version tables
    await validateVersionTable('parks', 'park_versions', 'Parks');
    await validateVersionTable('rides', 'ride_versions', 'Rides');
    await validateVersionTable('companies', 'company_versions', 'Companies');
    await validateVersionTable('ride_models', 'ride_model_versions', 'Ride Models');

    // Validate critical fields
    await validateCriticalFields();

    // Validate functions
    await validateFunctions();

    // Print results
    printResults();

    // Exit with appropriate code
    const hasFailures = results.some(r => !r.passed);
    if (hasFailures) {
      console.error('❌ Schema validation failed. Please fix the issues above before deploying.\n');
      process.exit(1);
    } else {
      console.log('✅ All schema validations passed. Safe to deploy.\n');
      process.exit(0);
    }
  } catch (error) {
    console.error('❌ Fatal error during validation:');
    console.error(error);
    process.exit(1);
  }
}

main();
@@ -6422,10 +6422,10 @@ export type Database = {
      }
      update_entity_from_submission: {
        Args: {
          p_changed_by: string
          p_data: Json
          p_entity_id: string
          p_entity_type: string
          p_updated_by: string
        }
        Returns: string
      }
@@ -0,0 +1,109 @@
-- Fix update_entity_from_submission function with correct category fields
-- This migration corrects two critical bugs:
-- 1. Removes non-existent 'ride_type' column reference for rides
-- 2. Adds missing 'category' field for both rides and ride_models

DROP FUNCTION IF EXISTS update_entity_from_submission(TEXT, JSONB, UUID, UUID);

CREATE OR REPLACE FUNCTION update_entity_from_submission(
  p_entity_type TEXT,
  p_data JSONB,
  p_entity_id UUID,
  p_changed_by UUID
) RETURNS UUID
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path TO 'public'
AS $$
BEGIN
  CASE p_entity_type
    WHEN 'park' THEN
      UPDATE parks SET
        name = COALESCE(p_data->>'name', name),
        slug = COALESCE(p_data->>'slug', slug),
        description = COALESCE(p_data->>'description', description),
        park_type = COALESCE(p_data->>'park_type', park_type),
        status = COALESCE(p_data->>'status', status),
        location_id = COALESCE((p_data->>'location_id')::UUID, location_id),
        operator_id = COALESCE((p_data->>'operator_id')::UUID, operator_id),
        property_owner_id = COALESCE((p_data->>'property_owner_id')::UUID, property_owner_id),
        opening_date = COALESCE((p_data->>'opening_date')::DATE, opening_date),
        closing_date = COALESCE((p_data->>'closing_date')::DATE, closing_date),
        opening_date_precision = COALESCE(p_data->>'opening_date_precision', opening_date_precision),
        closing_date_precision = COALESCE(p_data->>'closing_date_precision', closing_date_precision),
        website_url = COALESCE(p_data->>'website_url', website_url),
        phone = COALESCE(p_data->>'phone', phone),
        email = COALESCE(p_data->>'email', email),
        banner_image_url = COALESCE(p_data->>'banner_image_url', banner_image_url),
        banner_image_id = COALESCE(p_data->>'banner_image_id', banner_image_id),
        card_image_url = COALESCE(p_data->>'card_image_url', card_image_url),
        card_image_id = COALESCE(p_data->>'card_image_id', card_image_id),
        updated_at = NOW()
      WHERE id = p_entity_id;

    WHEN 'ride' THEN
      UPDATE rides SET
        name = COALESCE(p_data->>'name', name),
        slug = COALESCE(p_data->>'slug', slug),
        park_id = COALESCE((p_data->>'park_id')::UUID, park_id),
        category = COALESCE(p_data->>'category', category),
        status = COALESCE(p_data->>'status', status),
        manufacturer_id = COALESCE((p_data->>'manufacturer_id')::UUID, manufacturer_id),
        designer_id = COALESCE((p_data->>'designer_id')::UUID, designer_id),
        ride_model_id = COALESCE((p_data->>'ride_model_id')::UUID, ride_model_id),
        opening_date = COALESCE((p_data->>'opening_date')::DATE, opening_date),
        closing_date = COALESCE((p_data->>'closing_date')::DATE, closing_date),
        opening_date_precision = COALESCE(p_data->>'opening_date_precision', opening_date_precision),
        closing_date_precision = COALESCE(p_data->>'closing_date_precision', closing_date_precision),
        description = COALESCE(p_data->>'description', description),
        banner_image_url = COALESCE(p_data->>'banner_image_url', banner_image_url),
        banner_image_id = COALESCE(p_data->>'banner_image_id', banner_image_id),
        card_image_url = COALESCE(p_data->>'card_image_url', card_image_url),
        card_image_id = COALESCE(p_data->>'card_image_id', card_image_id),
        updated_at = NOW()
      WHERE id = p_entity_id;

    WHEN 'company' THEN
      UPDATE companies SET
        name = COALESCE(p_data->>'name', name),
        slug = COALESCE(p_data->>'slug', slug),
        description = COALESCE(p_data->>'description', description),
        company_type = COALESCE(p_data->>'company_type', company_type),
        person_type = COALESCE(p_data->>'person_type', person_type),
        founded_year = COALESCE((p_data->>'founded_year')::INTEGER, founded_year),
        founded_date = COALESCE((p_data->>'founded_date')::DATE, founded_date),
        founded_date_precision = COALESCE(p_data->>'founded_date_precision', founded_date_precision),
        headquarters_location = COALESCE(p_data->>'headquarters_location', headquarters_location),
        website_url = COALESCE(p_data->>'website_url', website_url),
        logo_url = COALESCE(p_data->>'logo_url', logo_url),
        banner_image_url = COALESCE(p_data->>'banner_image_url', banner_image_url),
        banner_image_id = COALESCE(p_data->>'banner_image_id', banner_image_id),
        card_image_url = COALESCE(p_data->>'card_image_url', card_image_url),
        card_image_id = COALESCE(p_data->>'card_image_id', card_image_id),
        updated_at = NOW()
      WHERE id = p_entity_id;

    WHEN 'ride_model' THEN
      UPDATE ride_models SET
        name = COALESCE(p_data->>'name', name),
        slug = COALESCE(p_data->>'slug', slug),
        manufacturer_id = COALESCE((p_data->>'manufacturer_id')::UUID, manufacturer_id),
        category = COALESCE(p_data->>'category', category),
        ride_type = COALESCE(p_data->>'ride_type', ride_type),
        description = COALESCE(p_data->>'description', description),
        banner_image_url = COALESCE(p_data->>'banner_image_url', banner_image_url),
        banner_image_id = COALESCE(p_data->>'banner_image_id', banner_image_id),
        card_image_url = COALESCE(p_data->>'card_image_url', card_image_url),
        card_image_id = COALESCE(p_data->>'card_image_id', card_image_id),
        updated_at = NOW()
      WHERE id = p_entity_id;

    ELSE
      RAISE EXCEPTION 'Unsupported entity type: %', p_entity_type;
  END CASE;

  RETURN p_entity_id;
END;
$$;

GRANT EXECUTE ON FUNCTION update_entity_from_submission TO authenticated;
|
||||
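In normal operation `update_entity_from_submission` is invoked from `process_approval_transaction`, but a direct RPC call is a quick way to smoke-test the corrected signature. A minimal sketch, assuming the standard supabase-js client; the environment variable names and the sample `category` value are illustrative, while the parameter names come from the function definition above:

```typescript
// Sketch: direct RPC smoke test for the corrected function.
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(
  process.env.SUPABASE_URL ?? '',
  process.env.SUPABASE_SERVICE_ROLE_KEY ?? ''
);

async function smokeTestRideUpdate(rideId: string, moderatorId: string) {
  const { data, error } = await supabase.rpc('update_entity_from_submission', {
    p_entity_type: 'ride',
    // 'category' is the field the fix adds; the value here is illustrative.
    p_data: { category: 'roller_coaster', description: 'Updated description' },
    p_entity_id: rideId,
    p_changed_by: moderatorId,
  });

  if (error) throw error;
  return data; // UUID of the updated ride
}
```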
@@ -0,0 +1,485 @@
|
||||
-- ============================================================================
|
||||
-- CRITICAL FIX: Add missing `category` field to RPC SELECT query
|
||||
-- ============================================================================
|
||||
-- Bug: The process_approval_transaction function reads ride and ride_model
|
||||
-- data but doesn't SELECT the category field, causing NULL to be passed
|
||||
-- to create_entity_from_submission, which violates NOT NULL constraints.
|
||||
--
|
||||
-- This will cause ALL ride and ride_model approvals to fail with:
|
||||
-- "ERROR: null value in column "category" violates not-null constraint"
|
||||
-- ============================================================================
|
||||
|
||||
-- Drop and recreate with category fields in SELECT
|
||||
DO $$
|
||||
DECLARE
|
||||
func_rec RECORD;
|
||||
BEGIN
|
||||
FOR func_rec IN
|
||||
SELECT oid::regprocedure::text as func_signature
|
||||
FROM pg_proc
|
||||
WHERE proname = 'process_approval_transaction'
|
||||
AND pg_function_is_visible(oid)
|
||||
LOOP
|
||||
EXECUTE format('DROP FUNCTION IF EXISTS %s CASCADE', func_rec.func_signature);
|
||||
END LOOP;
|
||||
END $$;
|
||||
|
||||
CREATE FUNCTION process_approval_transaction(
|
||||
p_submission_id UUID,
|
||||
p_item_ids UUID[],
|
||||
p_moderator_id UUID,
|
||||
p_submitter_id UUID,
|
||||
p_request_id TEXT DEFAULT NULL
|
||||
)
|
||||
RETURNS JSONB
|
||||
LANGUAGE plpgsql
|
||||
SECURITY DEFINER
|
||||
SET search_path = public
|
||||
AS $$
|
||||
DECLARE
|
||||
v_start_time TIMESTAMPTZ;
|
||||
v_result JSONB;
|
||||
v_item RECORD;
|
||||
v_item_data JSONB;
|
||||
v_resolved_refs JSONB;
|
||||
v_entity_id UUID;
|
||||
v_approval_results JSONB[] := ARRAY[]::JSONB[];
|
||||
v_final_status TEXT;
|
||||
v_all_approved BOOLEAN := TRUE;
|
||||
v_some_approved BOOLEAN := FALSE;
|
||||
v_items_processed INTEGER := 0;
|
||||
BEGIN
|
||||
v_start_time := clock_timestamp();
|
||||
|
||||
RAISE NOTICE '[%] Starting atomic approval transaction for submission %',
|
||||
COALESCE(p_request_id, 'NO_REQUEST_ID'),
|
||||
p_submission_id;
|
||||
|
||||
-- ========================================================================
|
||||
-- STEP 1: Set session variables (transaction-scoped with is_local=true)
|
||||
-- ========================================================================
|
||||
PERFORM set_config('app.current_user_id', p_submitter_id::text, true);
|
||||
PERFORM set_config('app.submission_id', p_submission_id::text, true);
|
||||
PERFORM set_config('app.moderator_id', p_moderator_id::text, true);
|
||||
|
||||
-- ========================================================================
|
||||
-- STEP 2: Validate submission ownership and lock status
|
||||
-- ========================================================================
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM content_submissions
|
||||
WHERE id = p_submission_id
|
||||
AND (assigned_to = p_moderator_id OR assigned_to IS NULL)
|
||||
AND status IN ('pending', 'partially_approved')
|
||||
) THEN
|
||||
RAISE EXCEPTION 'Submission not found, locked by another moderator, or already processed'
|
||||
USING ERRCODE = '42501';
|
||||
END IF;
|
||||
|
||||
-- ========================================================================
|
||||
-- STEP 3: Process each item sequentially within this transaction
|
||||
-- ========================================================================
|
||||
FOR v_item IN
|
||||
SELECT
|
||||
si.*,
|
||||
ps.name as park_name,
|
||||
ps.slug as park_slug,
|
||||
ps.description as park_description,
|
||||
ps.park_type,
|
||||
ps.status as park_status,
|
||||
ps.location_id,
|
||||
ps.operator_id,
|
||||
ps.property_owner_id,
|
||||
ps.opening_date as park_opening_date,
|
||||
ps.closing_date as park_closing_date,
|
||||
ps.opening_date_precision as park_opening_date_precision,
|
||||
ps.closing_date_precision as park_closing_date_precision,
|
||||
ps.website_url as park_website_url,
|
||||
ps.phone as park_phone,
|
||||
ps.email as park_email,
|
||||
ps.banner_image_url as park_banner_image_url,
|
||||
ps.banner_image_id as park_banner_image_id,
|
||||
ps.card_image_url as park_card_image_url,
|
||||
ps.card_image_id as park_card_image_id,
|
||||
psl.name as location_name,
|
||||
psl.street_address as location_street_address,
|
||||
psl.city as location_city,
|
||||
psl.state_province as location_state_province,
|
||||
psl.country as location_country,
|
||||
psl.postal_code as location_postal_code,
|
||||
psl.latitude as location_latitude,
|
||||
psl.longitude as location_longitude,
|
||||
psl.timezone as location_timezone,
|
||||
psl.display_name as location_display_name,
|
||||
rs.name as ride_name,
|
||||
rs.slug as ride_slug,
|
||||
rs.park_id as ride_park_id,
|
||||
rs.category as ride_category,
|
||||
rs.ride_type,
|
||||
rs.status as ride_status,
|
||||
rs.manufacturer_id,
|
||||
rs.ride_model_id,
|
||||
rs.opening_date as ride_opening_date,
|
||||
rs.closing_date as ride_closing_date,
|
||||
rs.opening_date_precision as ride_opening_date_precision,
|
||||
rs.closing_date_precision as ride_closing_date_precision,
|
||||
rs.description as ride_description,
|
||||
rs.banner_image_url as ride_banner_image_url,
|
||||
rs.banner_image_id as ride_banner_image_id,
|
||||
rs.card_image_url as ride_card_image_url,
|
||||
rs.card_image_id as ride_card_image_id,
|
||||
cs.name as company_name,
|
||||
cs.slug as company_slug,
|
||||
cs.description as company_description,
|
||||
cs.website_url as company_website_url,
|
||||
cs.founded_year,
|
||||
cs.banner_image_url as company_banner_image_url,
|
||||
cs.banner_image_id as company_banner_image_id,
|
||||
cs.card_image_url as company_card_image_url,
|
||||
cs.card_image_id as company_card_image_id,
|
||||
rms.name as ride_model_name,
|
||||
rms.slug as ride_model_slug,
|
||||
rms.manufacturer_id as ride_model_manufacturer_id,
|
||||
rms.category as ride_model_category,
|
||||
rms.ride_type as ride_model_ride_type,
|
||||
rms.description as ride_model_description,
|
||||
rms.banner_image_url as ride_model_banner_image_url,
|
||||
rms.banner_image_id as ride_model_banner_image_id,
|
||||
rms.card_image_url as ride_model_card_image_url,
|
||||
rms.card_image_id as ride_model_card_image_id,
|
||||
tes.entity_type as timeline_entity_type,
|
||||
tes.entity_id as timeline_entity_id,
|
||||
tes.event_type as timeline_event_type,
|
||||
tes.event_date as timeline_event_date,
|
||||
tes.event_date_precision as timeline_event_date_precision,
|
||||
tes.title as timeline_title,
|
||||
tes.description as timeline_description,
|
||||
tes.from_value as timeline_from_value,
|
||||
tes.to_value as timeline_to_value,
|
||||
tes.from_entity_id as timeline_from_entity_id,
|
||||
tes.to_entity_id as timeline_to_entity_id,
|
||||
tes.from_location_id as timeline_from_location_id,
|
||||
tes.to_location_id as timeline_to_location_id
|
||||
FROM submission_items si
|
||||
LEFT JOIN park_submissions ps ON si.park_submission_id = ps.id
|
||||
LEFT JOIN park_submission_locations psl ON ps.id = psl.park_submission_id
|
||||
LEFT JOIN ride_submissions rs ON si.ride_submission_id = rs.id
|
||||
LEFT JOIN company_submissions cs ON si.company_submission_id = cs.id
|
||||
LEFT JOIN ride_model_submissions rms ON si.ride_model_submission_id = rms.id
|
||||
LEFT JOIN timeline_event_submissions tes ON si.timeline_event_submission_id = tes.id
|
||||
WHERE si.id = ANY(p_item_ids)
|
||||
ORDER BY si.order_index, si.created_at
|
||||
LOOP
|
||||
BEGIN
|
||||
v_items_processed := v_items_processed + 1;
|
||||
|
||||
-- Build item data based on entity type
|
||||
IF v_item.item_type = 'park' THEN
|
||||
v_item_data := jsonb_build_object(
|
||||
'name', v_item.park_name,
|
||||
'slug', v_item.park_slug,
|
||||
'description', v_item.park_description,
|
||||
'park_type', v_item.park_type,
|
||||
'status', v_item.park_status,
|
||||
'location_id', v_item.location_id,
|
||||
'operator_id', v_item.operator_id,
|
||||
'property_owner_id', v_item.property_owner_id,
|
||||
'opening_date', v_item.park_opening_date,
|
||||
'closing_date', v_item.park_closing_date,
|
||||
'opening_date_precision', v_item.park_opening_date_precision,
|
||||
'closing_date_precision', v_item.park_closing_date_precision,
|
||||
'website_url', v_item.park_website_url,
|
||||
'phone', v_item.park_phone,
|
||||
'email', v_item.park_email,
|
||||
'banner_image_url', v_item.park_banner_image_url,
|
||||
'banner_image_id', v_item.park_banner_image_id,
|
||||
'card_image_url', v_item.park_card_image_url,
|
||||
'card_image_id', v_item.park_card_image_id,
|
||||
'location_name', v_item.location_name,
|
||||
'location_street_address', v_item.location_street_address,
|
||||
'location_city', v_item.location_city,
|
||||
'location_state_province', v_item.location_state_province,
|
||||
'location_country', v_item.location_country,
|
||||
'location_postal_code', v_item.location_postal_code,
|
||||
'location_latitude', v_item.location_latitude,
|
||||
'location_longitude', v_item.location_longitude,
|
||||
'location_timezone', v_item.location_timezone,
|
||||
'location_display_name', v_item.location_display_name
|
||||
);
|
||||
ELSIF v_item.item_type = 'ride' THEN
|
||||
v_item_data := jsonb_build_object(
|
||||
'name', v_item.ride_name,
|
||||
'slug', v_item.ride_slug,
|
||||
'park_id', v_item.ride_park_id,
|
||||
'category', v_item.ride_category,
|
||||
'ride_type', v_item.ride_type,
|
||||
'status', v_item.ride_status,
|
||||
'manufacturer_id', v_item.manufacturer_id,
|
||||
'ride_model_id', v_item.ride_model_id,
|
||||
'opening_date', v_item.ride_opening_date,
|
||||
'closing_date', v_item.ride_closing_date,
|
||||
'opening_date_precision', v_item.ride_opening_date_precision,
|
||||
'closing_date_precision', v_item.ride_closing_date_precision,
|
||||
'description', v_item.ride_description,
|
||||
'banner_image_url', v_item.ride_banner_image_url,
|
||||
'banner_image_id', v_item.ride_banner_image_id,
|
||||
'card_image_url', v_item.ride_card_image_url,
|
||||
'card_image_id', v_item.ride_card_image_id
|
||||
);
|
||||
ELSIF v_item.item_type IN ('manufacturer', 'operator', 'property_owner', 'designer') THEN
|
||||
v_item_data := jsonb_build_object(
|
||||
'name', v_item.company_name,
|
||||
'slug', v_item.company_slug,
|
||||
'description', v_item.company_description,
|
||||
'website_url', v_item.company_website_url,
|
||||
'founded_year', v_item.founded_year,
|
||||
'banner_image_url', v_item.company_banner_image_url,
|
||||
'banner_image_id', v_item.company_banner_image_id,
|
||||
'card_image_url', v_item.company_card_image_url,
|
||||
'card_image_id', v_item.company_card_image_id
|
||||
);
|
||||
ELSIF v_item.item_type = 'ride_model' THEN
|
||||
v_item_data := jsonb_build_object(
|
||||
'name', v_item.ride_model_name,
|
||||
'slug', v_item.ride_model_slug,
|
||||
'manufacturer_id', v_item.ride_model_manufacturer_id,
|
||||
'category', v_item.ride_model_category,
|
||||
'ride_type', v_item.ride_model_ride_type,
|
||||
'description', v_item.ride_model_description,
|
||||
'banner_image_url', v_item.ride_model_banner_image_url,
|
||||
'banner_image_id', v_item.ride_model_banner_image_id,
|
||||
'card_image_url', v_item.ride_model_card_image_url,
|
||||
'card_image_id', v_item.ride_model_card_image_id
|
||||
);
|
||||
ELSIF v_item.item_type IN ('timeline_event', 'milestone') THEN
|
||||
v_item_data := jsonb_build_object(
|
||||
'entity_type', v_item.timeline_entity_type,
|
||||
'entity_id', v_item.timeline_entity_id,
|
||||
'event_type', v_item.timeline_event_type,
|
||||
'event_date', v_item.timeline_event_date,
|
||||
'event_date_precision', v_item.timeline_event_date_precision,
|
||||
'title', v_item.timeline_title,
|
||||
'description', v_item.timeline_description,
|
||||
'from_value', v_item.timeline_from_value,
|
||||
'to_value', v_item.timeline_to_value,
|
||||
'from_entity_id', v_item.timeline_from_entity_id,
|
||||
'to_entity_id', v_item.timeline_to_entity_id,
|
||||
'from_location_id', v_item.timeline_from_location_id,
|
||||
'to_location_id', v_item.timeline_to_location_id
|
||||
);
|
||||
ELSE
|
||||
RAISE EXCEPTION 'Unsupported item_type: %', v_item.item_type;
|
||||
END IF;
|
||||
|
||||
-- ======================================================================
|
||||
-- Resolve temp refs and update v_item_data with actual entity IDs
|
||||
-- ======================================================================
|
||||
v_resolved_refs := resolve_temp_refs_for_item(v_item.id, p_submission_id);
|
||||
|
||||
IF v_resolved_refs IS NOT NULL AND jsonb_typeof(v_resolved_refs) = 'object' THEN
|
||||
IF v_item.item_type = 'park' THEN
|
||||
IF v_resolved_refs ? 'operator' AND (v_item_data->>'operator_id') IS NULL THEN
|
||||
v_item_data := v_item_data || jsonb_build_object('operator_id', v_resolved_refs->>'operator');
|
||||
RAISE NOTICE 'Resolved park.operator_id → %', v_resolved_refs->>'operator';
|
||||
END IF;
|
||||
IF v_resolved_refs ? 'property_owner' AND (v_item_data->>'property_owner_id') IS NULL THEN
|
||||
v_item_data := v_item_data || jsonb_build_object('property_owner_id', v_resolved_refs->>'property_owner');
|
||||
RAISE NOTICE 'Resolved park.property_owner_id → %', v_resolved_refs->>'property_owner';
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF v_item.item_type = 'ride' THEN
|
||||
IF v_resolved_refs ? 'park' AND (v_item_data->>'park_id') IS NULL THEN
|
||||
v_item_data := v_item_data || jsonb_build_object('park_id', v_resolved_refs->>'park');
|
||||
RAISE NOTICE 'Resolved ride.park_id → %', v_resolved_refs->>'park';
|
||||
END IF;
|
||||
IF v_resolved_refs ? 'manufacturer' AND (v_item_data->>'manufacturer_id') IS NULL THEN
|
||||
v_item_data := v_item_data || jsonb_build_object('manufacturer_id', v_resolved_refs->>'manufacturer');
|
||||
RAISE NOTICE 'Resolved ride.manufacturer_id → %', v_resolved_refs->>'manufacturer';
|
||||
END IF;
|
||||
IF v_resolved_refs ? 'ride_model' AND (v_item_data->>'ride_model_id') IS NULL THEN
|
||||
v_item_data := v_item_data || jsonb_build_object('ride_model_id', v_resolved_refs->>'ride_model');
|
||||
RAISE NOTICE 'Resolved ride.ride_model_id → %', v_resolved_refs->>'ride_model';
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF v_item.item_type = 'ride_model' THEN
|
||||
IF v_resolved_refs ? 'manufacturer' AND (v_item_data->>'manufacturer_id') IS NULL THEN
|
||||
v_item_data := v_item_data || jsonb_build_object('manufacturer_id', v_resolved_refs->>'manufacturer');
|
||||
RAISE NOTICE 'Resolved ride_model.manufacturer_id → %', v_resolved_refs->>'manufacturer';
|
||||
END IF;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- Execute action based on action_type (now with resolved foreign keys)
|
||||
IF v_item.action_type = 'create' THEN
|
||||
v_entity_id := create_entity_from_submission(
|
||||
v_item.item_type,
|
||||
v_item_data,
|
||||
p_submitter_id
|
||||
);
|
||||
ELSIF v_item.action_type = 'update' THEN
|
||||
v_entity_id := update_entity_from_submission(
|
||||
v_item.item_type,
|
||||
v_item_data,
|
||||
v_item.target_entity_id,
|
||||
p_submitter_id
|
||||
);
|
||||
ELSIF v_item.action_type = 'delete' THEN
|
||||
PERFORM delete_entity_from_submission(
|
||||
v_item.item_type,
|
||||
v_item.target_entity_id,
|
||||
p_submitter_id
|
||||
);
|
||||
v_entity_id := v_item.target_entity_id;
|
||||
ELSE
|
||||
RAISE EXCEPTION 'Unknown action_type: %', v_item.action_type;
|
||||
END IF;
|
||||
|
||||
UPDATE submission_items
|
||||
SET
|
||||
status = 'approved',
|
||||
approved_entity_id = v_entity_id,
|
||||
updated_at = NOW()
|
||||
WHERE id = v_item.id;
|
||||
|
||||
v_approval_results := array_append(
|
||||
v_approval_results,
|
||||
jsonb_build_object(
|
||||
'itemId', v_item.id,
|
||||
'entityId', v_entity_id,
|
||||
'itemType', v_item.item_type,
|
||||
'actionType', v_item.action_type,
|
||||
'success', true
|
||||
)
|
||||
);
|
||||
|
||||
v_some_approved := TRUE;
|
||||
|
||||
RAISE NOTICE '[%] Approved item % (type=%, action=%, entityId=%)',
|
||||
COALESCE(p_request_id, 'NO_REQUEST_ID'),
|
||||
v_item.id,
|
||||
v_item.item_type,
|
||||
v_item.action_type,
|
||||
v_entity_id;
|
||||
|
||||
EXCEPTION WHEN OTHERS THEN
|
||||
RAISE WARNING '[%] Item % failed: % (SQLSTATE: %)',
|
||||
COALESCE(p_request_id, 'NO_REQUEST_ID'),
|
||||
v_item.id,
|
||||
SQLERRM,
|
||||
SQLSTATE;
|
||||
|
||||
UPDATE submission_items
|
||||
SET
|
||||
status = 'rejected',
|
||||
rejection_reason = SQLERRM,
|
||||
updated_at = NOW()
|
||||
WHERE id = v_item.id;
|
||||
|
||||
v_approval_results := array_append(
|
||||
v_approval_results,
|
||||
jsonb_build_object(
|
||||
'itemId', v_item.id,
|
||||
'itemType', v_item.item_type,
|
||||
'actionType', v_item.action_type,
|
||||
'success', false,
|
||||
'error', SQLERRM
|
||||
)
|
||||
);
|
||||
|
||||
v_all_approved := FALSE;
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
v_final_status := CASE
|
||||
WHEN v_all_approved THEN 'approved'
|
||||
WHEN v_some_approved THEN 'partially_approved'
|
||||
ELSE 'rejected'
|
||||
END;
|
||||
|
||||
UPDATE content_submissions
|
||||
SET
|
||||
status = v_final_status,
|
||||
reviewer_id = p_moderator_id,
|
||||
reviewed_at = NOW(),
|
||||
assigned_to = NULL,
|
||||
locked_until = NULL
|
||||
WHERE id = p_submission_id;
|
||||
|
||||
INSERT INTO approval_transaction_metrics (
|
||||
submission_id,
|
||||
moderator_id,
|
||||
submitter_id,
|
||||
items_count,
|
||||
duration_ms,
|
||||
success,
|
||||
request_id
|
||||
) VALUES (
|
||||
p_submission_id,
|
||||
p_moderator_id,
|
||||
p_submitter_id,
|
||||
array_length(p_item_ids, 1),
|
||||
EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000,
|
||||
v_all_approved,
|
||||
p_request_id
|
||||
);
|
||||
|
||||
v_result := jsonb_build_object(
|
||||
'success', TRUE,
|
||||
'results', to_jsonb(v_approval_results),
|
||||
'submissionStatus', v_final_status,
|
||||
'itemsProcessed', v_items_processed,
|
||||
'allApproved', v_all_approved,
|
||||
'someApproved', v_some_approved
|
||||
);
|
||||
|
||||
PERFORM set_config('app.current_user_id', '', true);
|
||||
PERFORM set_config('app.submission_id', '', true);
|
||||
PERFORM set_config('app.moderator_id', '', true);
|
||||
|
||||
RAISE NOTICE '[%] Transaction completed successfully in %ms',
|
||||
COALESCE(p_request_id, 'NO_REQUEST_ID'),
|
||||
EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000;
|
||||
|
||||
RETURN v_result;
|
||||
|
||||
EXCEPTION WHEN OTHERS THEN
|
||||
RAISE WARNING '[%] Transaction failed, rolling back: % (SQLSTATE: %)',
|
||||
COALESCE(p_request_id, 'NO_REQUEST_ID'),
|
||||
SQLERRM,
|
||||
SQLSTATE;
|
||||
|
||||
INSERT INTO approval_transaction_metrics (
|
||||
submission_id,
|
||||
moderator_id,
|
||||
submitter_id,
|
||||
items_count,
|
||||
duration_ms,
|
||||
success,
|
||||
rollback_triggered,
|
||||
error_message,
|
||||
request_id
|
||||
) VALUES (
|
||||
p_submission_id,
|
||||
p_moderator_id,
|
||||
p_submitter_id,
|
||||
array_length(p_item_ids, 1),
|
||||
EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000,
|
||||
FALSE,
|
||||
TRUE,
|
||||
SQLERRM,
|
||||
p_request_id
|
||||
);
|
||||
|
||||
PERFORM set_config('app.current_user_id', '', true);
|
||||
PERFORM set_config('app.submission_id', '', true);
|
||||
PERFORM set_config('app.moderator_id', '', true);
|
||||
|
||||
RAISE;
|
||||
END;
|
||||
$$;
|
||||
|
||||
GRANT EXECUTE ON FUNCTION process_approval_transaction TO authenticated;
|
||||
|
||||
COMMENT ON FUNCTION process_approval_transaction IS
|
||||
'Fixed: Now correctly reads and passes category field for rides and ride_models';
|
||||
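For reference, a server-side caller (for example an edge function) would invoke the transaction and unpack its JSONB result roughly like this — a minimal sketch, assuming the standard supabase-js client; the parameter names and result keys come from the function above, while the client setup and `crypto.randomUUID()` request id are illustrative:

```typescript
import { createClient } from '@supabase/supabase-js';

const supabase = createClient(
  process.env.SUPABASE_URL ?? '',
  process.env.SUPABASE_SERVICE_ROLE_KEY ?? ''
);

interface ApprovalItemResult {
  itemId: string;
  entityId?: string;
  itemType: string;
  actionType: string;
  success: boolean;
  error?: string;
}

async function approveSubmission(
  submissionId: string,
  itemIds: string[],
  moderatorId: string,
  submitterId: string
) {
  const { data, error } = await supabase.rpc('process_approval_transaction', {
    p_submission_id: submissionId,
    p_item_ids: itemIds,
    p_moderator_id: moderatorId,
    p_submitter_id: submitterId,
    p_request_id: crypto.randomUUID(),
  });

  if (error) throw error; // the whole transaction was rolled back

  console.log(`Submission status: ${data.submissionStatus}, items: ${data.itemsProcessed}`);
  const results = data.results as ApprovalItemResult[];
  return results.filter((r) => !r.success); // per-item failures, if any
}
```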
245
tests/integration/README.md
Normal file
@@ -0,0 +1,245 @@
|
||||
# Integration Tests
|
||||
|
||||
This directory contains integration tests for the ThrillWiki submission pipeline and data integrity.
|
||||
|
||||
## Schema Validation Tests
|
||||
|
||||
**File**: `schema-validation.test.ts`
|
||||
|
||||
### Purpose
|
||||
|
||||
Automated tests that validate schema consistency across the entire submission pipeline:
|
||||
|
||||
- **Submission Tables**: Ensures submission tables match their corresponding main entity tables
|
||||
- **Version Tables**: Validates version tables have all main table fields plus version metadata
|
||||
- **Critical Fields**: Checks for known problematic fields (e.g., `ride_type` vs `category`)
|
||||
- **Function Alignment**: Verifies critical database functions exist and are accessible
|
||||
|
||||
### Why This Matters
|
||||
|
||||
The submission pipeline depends on exact schema alignment between:
|
||||
1. Main entity tables (`parks`, `rides`, `companies`, `ride_models`)
|
||||
2. Submission tables (`park_submissions`, `ride_submissions`, etc.)
|
||||
3. Version tables (`park_versions`, `ride_versions`, etc.)
|
||||
|
||||
**Without these tests**, schema mismatches can cause:
|
||||
- ❌ Approval failures with cryptic "column does not exist" errors
|
||||
- ❌ Data loss when fields are missing from submission tables
|
||||
- ❌ Version history corruption when fields don't match
|
||||
- ❌ Production incidents that are difficult to debug
|
||||
|
||||
**With these tests**, we catch issues:
|
||||
- ✅ During development, before they reach production
|
||||
- ✅ In CI/CD, preventing bad migrations from deploying
|
||||
- ✅ Immediately after schema changes, with clear error messages
|
||||
|
||||
### Test Categories
|
||||
|
||||
#### 1. Entity Table Validation
|
||||
Compares main entity tables with their submission counterparts:
|
||||
```typescript
|
||||
parks ↔ park_submissions
|
||||
rides ↔ ride_submissions
|
||||
companies ↔ company_submissions
|
||||
ride_models ↔ ride_model_submissions
|
||||
```
|
||||
|
||||
**Checks** (see the sketch after this list):
|
||||
- All fields from main table exist in submission table (except excluded metadata)
|
||||
- Data types match exactly
|
||||
- Required fields are marked NOT NULL in both
|
||||
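Each per-entity test in `schema-validation.test.ts` repeats this comparison for one table pair. Conceptually it boils down to a helper like the following sketch; `getTableSchema()` and `EXCLUDED_FIELDS` exist in the test file, while the factored-out helper itself is only a suggestion:

```typescript
async function expectSubmissionMatchesMain(mainTable: string, submissionTable: string) {
  const mainSchema = await getTableSchema(mainTable);
  const submissionSchema = await getTableSchema(submissionTable);

  const missingFields: string[] = [];
  const mismatches: string[] = [];

  for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
    if (EXCLUDED_FIELDS.includes(fieldName)) continue;

    if (!submissionSchema[fieldName]) {
      missingFields.push(fieldName);
    } else if (fieldDef.data_type !== submissionSchema[fieldName].data_type) {
      mismatches.push(
        `${fieldName}: main=${fieldDef.data_type}, submission=${submissionSchema[fieldName].data_type}`
      );
    }
  }

  expect(missingFields, `${submissionTable} is missing fields: ${missingFields.join(', ')}`).toHaveLength(0);
  expect(mismatches, `${submissionTable} has type mismatches: ${mismatches.join('; ')}`).toHaveLength(0);
}
```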
|
||||
#### 2. Version Table Validation
|
||||
Ensures version tables have complete field coverage:
|
||||
```typescript
|
||||
parks → park_versions
|
||||
rides → ride_versions
|
||||
companies → company_versions
|
||||
ride_models → ride_model_versions
|
||||
```
|
||||
|
||||
**Checks**:
|
||||
- All main table fields exist (accounting for known name variations)
|
||||
- Version metadata fields are present (`version_id`, `version_number`, etc.)
|
||||
- Change tracking fields are properly defined
|
||||
|
||||
#### 3. Critical Field Validation
|
||||
Tests specific known problem areas:
|
||||
|
||||
**Critical Test Cases**:
|
||||
- ✅ `rides` table does NOT have `ride_type` (prevents "column does not exist" error)
|
||||
- ✅ `rides` table DOES have `category` as NOT NULL
|
||||
- ✅ `ride_models` table has BOTH `category` and `ride_type`
|
||||
- ✅ All entities have required base fields (`id`, `name`, `slug`, etc.)
|
||||
- ✅ All submission tables have `submission_id` foreign key
|
||||
|
||||
#### 4. Function Alignment
|
||||
Validates critical database functions:
|
||||
- `create_entity_from_submission`
|
||||
- `update_entity_from_submission`
|
||||
- `process_approval_transaction`
|
||||
|
||||
#### 5. Field Name Variations
|
||||
Documents and validates known column name differences:
|
||||
```typescript
|
||||
ride_versions.height_requirement_cm ↔ rides.height_requirement
|
||||
ride_versions.gforce_max ↔ rides.max_g_force
|
||||
ride_versions.inversions_count ↔ rides.inversions
|
||||
ride_versions.height_meters ↔ rides.max_height_meters
|
||||
ride_versions.drop_meters ↔ rides.drop_height_meters
|
||||
```
|
||||
|
||||
### Running the Tests
|
||||
|
||||
**Run all schema validation tests:**
|
||||
```bash
|
||||
npm run test:schema
|
||||
```
|
||||
|
||||
**Run specific test suite:**
|
||||
```bash
|
||||
npx playwright test schema-validation --grep "Entity Tables"
|
||||
```
|
||||
|
||||
**Run in UI mode for debugging:**
|
||||
```bash
|
||||
npx playwright test schema-validation --ui
|
||||
```
|
||||
|
||||
**Generate detailed report:**
|
||||
```bash
|
||||
npx playwright test schema-validation --reporter=html
|
||||
```
|
||||
|
||||
### Environment Setup
|
||||
|
||||
These tests require:
|
||||
- `SUPABASE_SERVICE_ROLE_KEY` environment variable
|
||||
- Access to the Supabase project database
|
||||
- Playwright test runner
|
||||
|
||||
**Example `.env.test`:**
|
||||
```env
|
||||
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
|
||||
```
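One way to make the variable available to the Playwright runner is to load the file from the config — a sketch, assuming `dotenv` is installed; this is not the project's actual `playwright.config.ts`:

```typescript
// playwright.config.ts (sketch): load .env.test before the tests run.
import { defineConfig } from '@playwright/test';
import * as dotenv from 'dotenv';

dotenv.config({ path: '.env.test' });

export default defineConfig({
  testDir: './tests/integration',
});
```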
|
||||
|
||||
### Expected Output
|
||||
|
||||
**✅ All passing (healthy schema):**
|
||||
```
|
||||
✓ parks: submission table matches main table schema (245ms)
|
||||
✓ rides: submission table matches main table schema (198ms)
|
||||
✓ companies: submission table matches main table schema (187ms)
|
||||
✓ ride_models: submission table matches main table schema (203ms)
|
||||
✓ park_versions: has all main table fields plus version metadata (256ms)
|
||||
✓ ride_versions: has all main table fields plus version metadata (234ms)
|
||||
✓ rides table does NOT have ride_type column (145ms)
|
||||
✓ rides table DOES have category column (NOT NULL) (152ms)
|
||||
```
|
||||
|
||||
**❌ Failure example (schema mismatch):**
|
||||
```
|
||||
✕ rides: submission table matches main table schema (203ms)
|
||||
|
||||
Error: ride_submissions is missing fields: category
|
||||
|
||||
Expected: 0
|
||||
Received: 1
|
||||
```
|
||||
|
||||
### Continuous Integration
|
||||
|
||||
Add to your CI/CD pipeline:
|
||||
|
||||
```yaml
|
||||
# .github/workflows/test.yml
|
||||
- name: Run Schema Validation Tests
|
||||
run: npm run test:schema
|
||||
env:
|
||||
SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
|
||||
```
|
||||
|
||||
This prevents schema mismatches from reaching production.
|
||||
|
||||
### When to Run
|
||||
|
||||
**Always run these tests:**
|
||||
- ✅ After any database migration
|
||||
- ✅ Before deploying submission pipeline changes
|
||||
- ✅ After modifying entity schemas
|
||||
- ✅ When adding new entity types
|
||||
- ✅ In CI/CD for every pull request
|
||||
|
||||
**Especially critical after:**
|
||||
- Adding/removing columns from entity tables
|
||||
- Modifying data types
|
||||
- Changing NOT NULL constraints
|
||||
- Updating database functions
|
||||
|
||||
### Maintenance
|
||||
|
||||
**When adding new entity types:**
|
||||
1. Add validation tests for the new entity
|
||||
2. Add tests for submission table
|
||||
3. Add tests for version table (if applicable)
|
||||
4. Update this README
|
||||
|
||||
**When schema changes are intentional:**
|
||||
1. Review failing tests carefully
|
||||
2. Update `EXCLUDED_FIELDS` or `VERSION_METADATA_FIELDS` if needed (see the example after this list)
|
||||
3. Document any new field name variations in `normalizeColumnName()`
|
||||
4. Update `docs/submission-pipeline/SCHEMA_REFERENCE.md`
|
||||
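For example, if a new calculated column were intentionally left out of the submission tables, step 2 would mean extending the exclusion list in `schema-validation.test.ts` (the column name below is hypothetical):

```typescript
const EXCLUDED_FIELDS = [
  'id',
  'created_at',
  'updated_at',
  // ...existing calculated and metadata fields...
  'view_count_90d', // hypothetical new calculated column, intentionally not submitted
];
```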
|
||||
### Debugging Failed Tests
|
||||
|
||||
**"Missing fields" error:**
|
||||
1. Check if field was recently added to main table
|
||||
2. Verify migration added it to submission table too
|
||||
3. Run migration to add missing field
|
||||
4. Re-run tests
|
||||
|
||||
**"Type mismatch" error:**
|
||||
1. Compare data types in both tables (see the schema-diff sketch after these steps)
|
||||
2. Check for accidental type change in migration
|
||||
3. Fix type inconsistency
|
||||
4. Re-run tests
|
||||
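To pin down exactly which column is missing or mismatched, a quick ad-hoc script can reuse the `getTableSchema()` helper from `schema-validation.test.ts` — a minimal sketch, assuming the same service-role client setup as the test file:

```typescript
async function diffSchemas(mainTable: string, submissionTable: string) {
  const main = await getTableSchema(mainTable);
  const submission = await getTableSchema(submissionTable);

  for (const [name, def] of Object.entries(main)) {
    const other = submission[name];
    if (!other) {
      console.log(`MISSING in ${submissionTable}: ${name} (${def.data_type})`);
    } else if (other.data_type !== def.data_type) {
      console.log(`TYPE MISMATCH on ${name}: ${def.data_type} vs ${other.data_type}`);
    }
  }
}

// e.g. await diffSchemas('rides', 'ride_submissions');
```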
|
||||
**"Column does not exist" in production:**
|
||||
1. Run schema validation tests immediately
|
||||
2. Identify which table is missing the field
|
||||
3. Create emergency migration to add field
|
||||
4. Deploy with high priority
|
||||
|
||||
### Related Documentation
|
||||
|
||||
- [Schema Reference](../../docs/submission-pipeline/SCHEMA_REFERENCE.md) - Complete field mappings
|
||||
- [Submission Pipeline](../../docs/submission-pipeline/README.md) - Pipeline overview
|
||||
- [Versioning System](../../docs/versioning/README.md) - Version table details
|
||||
- [Moderation Workflow](../../docs/moderation/README.md) - Approval process
|
||||
|
||||
---
|
||||
|
||||
## Other Integration Tests
|
||||
|
||||
### Moderation Security Tests
|
||||
|
||||
**File**: `moderation-security.test.ts`
|
||||
|
||||
Tests role validation, lock enforcement, and rate limiting in the moderation system.
|
||||
|
||||
**Run:**
|
||||
```bash
|
||||
npx playwright test moderation-security
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Contributing
|
||||
|
||||
When adding new integration tests:
|
||||
1. Follow existing test structure
|
||||
2. Use descriptive test names
|
||||
3. Add comments explaining what's being tested
|
||||
4. Update this README
|
||||
5. Ensure tests are idempotent (can run multiple times)
|
||||
6. Clean up test data after completion (see the sketch after this list)
|
||||
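A common way to satisfy points 5 and 6 is a shared teardown that deletes anything tagged as test data — a minimal sketch, assuming test rows set the `is_test_data` flag that exists on the entity tables; the table list and client setup are illustrative:

```typescript
import { test } from '@playwright/test';
import { createClient } from '@supabase/supabase-js';

// Service-role client, as in schema-validation.test.ts
const supabase = createClient(
  process.env.SUPABASE_URL ?? '',
  process.env.SUPABASE_SERVICE_ROLE_KEY ?? ''
);

// Idempotent cleanup: safe to run even if a previous run already removed the rows.
test.afterAll(async () => {
  for (const table of ['rides', 'parks', 'companies', 'ride_models']) {
    const { error } = await supabase.from(table).delete().eq('is_test_data', true);
    if (error) console.warn(`Cleanup of ${table} failed: ${error.message}`);
  }
});
```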
545
tests/integration/schema-validation.test.ts
Normal file
@@ -0,0 +1,545 @@
|
||||
import { test, expect } from '@playwright/test';
|
||||
import { createClient } from '@supabase/supabase-js';
|
||||
|
||||
/**
|
||||
* Schema Validation Tests
|
||||
*
|
||||
* These tests validate that submission tables, version tables, and main entity tables
|
||||
* have consistent schemas to prevent field mismatches during the approval pipeline.
|
||||
*
|
||||
* Critical validations:
|
||||
* 1. Submission tables must have all fields from main tables (except auto-generated)
|
||||
* 2. Version tables must have all fields from main tables plus version metadata
|
||||
* 3. Critical functions must reference correct column names
|
||||
* 4. Required NOT NULL fields must be present in all tables
|
||||
*/
|
||||
|
||||
const supabase = createClient(
|
||||
'https://ydvtmnrszybqnbcqbdcy.supabase.co',
|
||||
process.env.SUPABASE_SERVICE_ROLE_KEY || ''
|
||||
);
|
||||
|
||||
interface ColumnDefinition {
|
||||
column_name: string;
|
||||
data_type: string;
|
||||
is_nullable: string;
|
||||
column_default: string | null;
|
||||
}
|
||||
|
||||
interface TableSchema {
|
||||
[columnName: string]: ColumnDefinition;
|
||||
}
|
||||
|
||||
// Fields that are expected to be different or missing in submission tables
|
||||
const EXCLUDED_FIELDS = [
|
||||
'id', // Submission tables have their own ID
|
||||
'created_at', // Managed differently in submissions
|
||||
'updated_at', // Managed differently in submissions
|
||||
'view_count_all', // Calculated fields not in submissions
|
||||
'view_count_30d',
|
||||
'view_count_7d',
|
||||
'average_rating',
|
||||
'review_count',
|
||||
'installations_count', // Only for ride_models
|
||||
'is_test_data', // Test data flag
|
||||
];
|
||||
|
||||
// Version-specific metadata fields (expected to be extra in version tables)
|
||||
const VERSION_METADATA_FIELDS = [
|
||||
'version_id',
|
||||
'version_number',
|
||||
'change_type',
|
||||
'change_reason',
|
||||
'is_current',
|
||||
'created_by',
|
||||
'created_at',
|
||||
'submission_id',
|
||||
'is_test_data',
|
||||
];
|
||||
|
||||
async function getTableSchema(tableName: string): Promise<TableSchema> {
|
||||
const { data, error } = await supabase
|
||||
.from('information_schema.columns' as any)
|
||||
.select('column_name, data_type, is_nullable, column_default')
|
||||
.eq('table_schema', 'public')
|
||||
.eq('table_name', tableName);
|
||||
|
||||
if (error) throw error;
|
||||
|
||||
const schema: TableSchema = {};
|
||||
data?.forEach((col: any) => {
|
||||
schema[col.column_name] = col;
|
||||
});
|
||||
|
||||
return schema;
|
||||
}
|
||||
|
||||
function normalizeColumnName(name: string): string {
  // Map main-table column names to their known version-table equivalents,
  // since the version-table checks look up versionSchema[normalizeColumnName(mainField)]
  const mapping: { [key: string]: string } = {
    'height_requirement': 'height_requirement_cm',
    'max_g_force': 'gforce_max',
    'inversions': 'inversions_count',
    'max_height_meters': 'height_meters',
    'drop_height_meters': 'drop_meters',
  };

  return mapping[name] || name;
}
|
||||
|
||||
test.describe('Schema Validation - Entity Tables', () => {
|
||||
test('parks: submission table matches main table schema', async () => {
|
||||
const mainSchema = await getTableSchema('parks');
|
||||
const submissionSchema = await getTableSchema('park_submissions');
|
||||
|
||||
const mismatches: string[] = [];
|
||||
const missingFields: string[] = [];
|
||||
|
||||
// Check each field in main table exists in submission table
|
||||
for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
if (!submissionSchema[fieldName]) {
|
||||
missingFields.push(fieldName);
|
||||
} else {
|
||||
// Check data type matches
|
||||
const mainType = fieldDef.data_type;
|
||||
const submissionType = submissionSchema[fieldName].data_type;
|
||||
|
||||
if (mainType !== submissionType) {
|
||||
mismatches.push(
|
||||
`${fieldName}: main=${mainType}, submission=${submissionType}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`park_submissions is missing fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(mismatches,
|
||||
`park_submissions has type mismatches: ${mismatches.join('; ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('rides: submission table matches main table schema', async () => {
|
||||
const mainSchema = await getTableSchema('rides');
|
||||
const submissionSchema = await getTableSchema('ride_submissions');
|
||||
|
||||
const mismatches: string[] = [];
|
||||
const missingFields: string[] = [];
|
||||
|
||||
for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
if (!submissionSchema[fieldName]) {
|
||||
missingFields.push(fieldName);
|
||||
} else {
|
||||
const mainType = fieldDef.data_type;
|
||||
const submissionType = submissionSchema[fieldName].data_type;
|
||||
|
||||
if (mainType !== submissionType) {
|
||||
mismatches.push(
|
||||
`${fieldName}: main=${mainType}, submission=${submissionType}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`ride_submissions is missing fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(mismatches,
|
||||
`ride_submissions has type mismatches: ${mismatches.join('; ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('companies: submission table matches main table schema', async () => {
|
||||
const mainSchema = await getTableSchema('companies');
|
||||
const submissionSchema = await getTableSchema('company_submissions');
|
||||
|
||||
const mismatches: string[] = [];
|
||||
const missingFields: string[] = [];
|
||||
|
||||
for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
if (!submissionSchema[fieldName]) {
|
||||
missingFields.push(fieldName);
|
||||
} else {
|
||||
const mainType = fieldDef.data_type;
|
||||
const submissionType = submissionSchema[fieldName].data_type;
|
||||
|
||||
if (mainType !== submissionType) {
|
||||
mismatches.push(
|
||||
`${fieldName}: main=${mainType}, submission=${submissionType}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`company_submissions is missing fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(mismatches,
|
||||
`company_submissions has type mismatches: ${mismatches.join('; ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('ride_models: submission table matches main table schema', async () => {
|
||||
const mainSchema = await getTableSchema('ride_models');
|
||||
const submissionSchema = await getTableSchema('ride_model_submissions');
|
||||
|
||||
const mismatches: string[] = [];
|
||||
const missingFields: string[] = [];
|
||||
|
||||
for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
if (!submissionSchema[fieldName]) {
|
||||
missingFields.push(fieldName);
|
||||
} else {
|
||||
const mainType = fieldDef.data_type;
|
||||
const submissionType = submissionSchema[fieldName].data_type;
|
||||
|
||||
if (mainType !== submissionType) {
|
||||
mismatches.push(
|
||||
`${fieldName}: main=${mainType}, submission=${submissionType}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`ride_model_submissions is missing fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(mismatches,
|
||||
`ride_model_submissions has type mismatches: ${mismatches.join('; ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Schema Validation - Version Tables', () => {
|
||||
test('park_versions: has all main table fields plus version metadata', async () => {
|
||||
const mainSchema = await getTableSchema('parks');
|
||||
const versionSchema = await getTableSchema('park_versions');
|
||||
|
||||
const missingFields: string[] = [];
|
||||
|
||||
// Check all main table fields exist in version table
|
||||
for (const [fieldName] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
const normalizedName = normalizeColumnName(fieldName);
|
||||
if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
|
||||
missingFields.push(fieldName);
|
||||
}
|
||||
}
|
||||
|
||||
// Check all version metadata fields exist
|
||||
const missingMetadata: string[] = [];
|
||||
for (const metaField of VERSION_METADATA_FIELDS) {
|
||||
if (!versionSchema[metaField]) {
|
||||
missingMetadata.push(metaField);
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`park_versions is missing main table fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(missingMetadata,
|
||||
`park_versions is missing version metadata: ${missingMetadata.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('ride_versions: has all main table fields plus version metadata', async () => {
|
||||
const mainSchema = await getTableSchema('rides');
|
||||
const versionSchema = await getTableSchema('ride_versions');
|
||||
|
||||
const missingFields: string[] = [];
|
||||
|
||||
for (const [fieldName] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
const normalizedName = normalizeColumnName(fieldName);
|
||||
if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
|
||||
missingFields.push(fieldName);
|
||||
}
|
||||
}
|
||||
|
||||
const missingMetadata: string[] = [];
|
||||
for (const metaField of VERSION_METADATA_FIELDS) {
|
||||
if (!versionSchema[metaField]) {
|
||||
missingMetadata.push(metaField);
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`ride_versions is missing main table fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(missingMetadata,
|
||||
`ride_versions is missing version metadata: ${missingMetadata.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('company_versions: has all main table fields plus version metadata', async () => {
|
||||
const mainSchema = await getTableSchema('companies');
|
||||
const versionSchema = await getTableSchema('company_versions');
|
||||
|
||||
const missingFields: string[] = [];
|
||||
|
||||
for (const [fieldName] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
const normalizedName = normalizeColumnName(fieldName);
|
||||
if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
|
||||
missingFields.push(fieldName);
|
||||
}
|
||||
}
|
||||
|
||||
const missingMetadata: string[] = [];
|
||||
for (const metaField of VERSION_METADATA_FIELDS) {
|
||||
if (!versionSchema[metaField]) {
|
||||
missingMetadata.push(metaField);
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`company_versions is missing main table fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(missingMetadata,
|
||||
`company_versions is missing version metadata: ${missingMetadata.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
|
||||
test('ride_model_versions: has all main table fields plus version metadata', async () => {
|
||||
const mainSchema = await getTableSchema('ride_models');
|
||||
const versionSchema = await getTableSchema('ride_model_versions');
|
||||
|
||||
const missingFields: string[] = [];
|
||||
|
||||
for (const [fieldName] of Object.entries(mainSchema)) {
|
||||
if (EXCLUDED_FIELDS.includes(fieldName)) continue;
|
||||
|
||||
const normalizedName = normalizeColumnName(fieldName);
|
||||
if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
|
||||
missingFields.push(fieldName);
|
||||
}
|
||||
}
|
||||
|
||||
const missingMetadata: string[] = [];
|
||||
for (const metaField of VERSION_METADATA_FIELDS) {
|
||||
if (!versionSchema[metaField]) {
|
||||
missingMetadata.push(metaField);
|
||||
}
|
||||
}
|
||||
|
||||
expect(missingFields,
|
||||
`ride_model_versions is missing main table fields: ${missingFields.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
|
||||
expect(missingMetadata,
|
||||
`ride_model_versions is missing version metadata: ${missingMetadata.join(', ')}`
|
||||
).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Schema Validation - Critical Fields', () => {
|
||||
test('rides table does NOT have ride_type column', async () => {
|
||||
const ridesSchema = await getTableSchema('rides');
|
||||
|
||||
expect(ridesSchema['ride_type']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('rides table DOES have category column (NOT NULL)', async () => {
|
||||
const ridesSchema = await getTableSchema('rides');
|
||||
|
||||
expect(ridesSchema['category']).toBeDefined();
|
||||
expect(ridesSchema['category'].is_nullable).toBe('NO');
|
||||
});
|
||||
|
||||
test('ride_models table DOES have both category and ride_type columns', async () => {
|
||||
const rideModelsSchema = await getTableSchema('ride_models');
|
||||
|
||||
expect(rideModelsSchema['category']).toBeDefined();
|
||||
expect(rideModelsSchema['category'].is_nullable).toBe('NO');
|
||||
expect(rideModelsSchema['ride_type']).toBeDefined();
|
||||
});
|
||||
|
||||
test('all entity tables have required base fields', async () => {
|
||||
const requiredFields = ['id', 'name', 'slug', 'created_at', 'updated_at'];
|
||||
const tables = ['parks', 'rides', 'companies', 'ride_models'];
|
||||
|
||||
for (const table of tables) {
|
||||
const schema = await getTableSchema(table);
|
||||
|
||||
for (const field of requiredFields) {
|
||||
expect(schema[field],
|
||||
`${table} is missing required field: ${field}`
|
||||
).toBeDefined();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test('all submission tables have submission_id foreign key', async () => {
|
||||
const submissionTables = [
|
||||
'park_submissions',
|
||||
'ride_submissions',
|
||||
'company_submissions',
|
||||
'ride_model_submissions',
|
||||
'photo_submissions',
|
||||
];
|
||||
|
||||
for (const table of submissionTables) {
|
||||
const schema = await getTableSchema(table);
|
||||
|
||||
expect(schema['submission_id'],
|
||||
`${table} is missing submission_id foreign key`
|
||||
).toBeDefined();
|
||||
expect(schema['submission_id'].is_nullable).toBe('NO');
|
||||
}
|
||||
});
|
||||
|
||||
test('all version tables have version metadata fields', async () => {
|
||||
const versionTables = [
|
||||
'park_versions',
|
||||
'ride_versions',
|
||||
'company_versions',
|
||||
'ride_model_versions',
|
||||
];
|
||||
|
||||
const requiredVersionFields = [
|
||||
'version_id',
|
||||
'version_number',
|
||||
'change_type',
|
||||
'is_current',
|
||||
];
|
||||
|
||||
for (const table of versionTables) {
|
||||
const schema = await getTableSchema(table);
|
||||
|
||||
for (const field of requiredVersionFields) {
|
||||
expect(schema[field],
|
||||
`${table} is missing required version field: ${field}`
|
||||
).toBeDefined();
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Schema Validation - Function Parameter Alignment', () => {
|
||||
test('verify create_entity_from_submission function exists', async () => {
|
||||
const { data, error } = await supabase
|
||||
.rpc('pg_get_functiondef', {
|
||||
funcid: 'create_entity_from_submission' as any
|
||||
} as any)
|
||||
.single();
|
||||
|
||||
// Function should exist (will error if not)
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test('verify update_entity_from_submission function exists', async () => {
|
||||
const { data, error } = await supabase
|
||||
.rpc('pg_get_functiondef', {
|
||||
funcid: 'update_entity_from_submission' as any
|
||||
} as any)
|
||||
.single();
|
||||
|
||||
// Function should exist (will error if not)
|
||||
expect(error).toBeNull();
|
||||
});
|
||||
|
||||
test('verify process_approval_transaction function exists', async () => {
|
||||
const { data, error } = await supabase.rpc('pg_catalog.pg_function_is_visible', {
|
||||
funcid: 'process_approval_transaction' as any
|
||||
} as any);
|
||||
|
||||
// Function should be visible
|
||||
expect(data).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Schema Validation - Known Field Name Variations', () => {
|
||||
test('ride_versions uses height_requirement_cm instead of height_requirement', async () => {
|
||||
const versionSchema = await getTableSchema('ride_versions');
|
||||
|
||||
expect(versionSchema['height_requirement_cm']).toBeDefined();
|
||||
expect(versionSchema['height_requirement']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('ride_versions uses gforce_max instead of max_g_force', async () => {
|
||||
const versionSchema = await getTableSchema('ride_versions');
|
||||
|
||||
expect(versionSchema['gforce_max']).toBeDefined();
|
||||
expect(versionSchema['max_g_force']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('ride_versions uses inversions_count instead of inversions', async () => {
|
||||
const versionSchema = await getTableSchema('ride_versions');
|
||||
|
||||
expect(versionSchema['inversions_count']).toBeDefined();
|
||||
expect(versionSchema['inversions']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('ride_versions uses height_meters instead of max_height_meters', async () => {
|
||||
const versionSchema = await getTableSchema('ride_versions');
|
||||
|
||||
expect(versionSchema['height_meters']).toBeDefined();
|
||||
expect(versionSchema['max_height_meters']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('ride_versions uses drop_meters instead of drop_height_meters', async () => {
|
||||
const versionSchema = await getTableSchema('ride_versions');
|
||||
|
||||
expect(versionSchema['drop_meters']).toBeDefined();
|
||||
expect(versionSchema['drop_height_meters']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
test.describe('Schema Validation - Submission Items', () => {
|
||||
test('submission_items has all required foreign key columns', async () => {
|
||||
const schema = await getTableSchema('submission_items');
|
||||
|
||||
const requiredFKs = [
|
||||
'submission_id',
|
||||
'park_submission_id',
|
||||
'ride_submission_id',
|
||||
'company_submission_id',
|
||||
'ride_model_submission_id',
|
||||
'photo_submission_id',
|
||||
'timeline_event_submission_id',
|
||||
'depends_on', // For dependency chain
|
||||
];
|
||||
|
||||
for (const fk of requiredFKs) {
|
||||
expect(schema[fk],
|
||||
`submission_items is missing FK: ${fk}`
|
||||
).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
test('submission_items has required metadata fields', async () => {
|
||||
const schema = await getTableSchema('submission_items');
|
||||
|
||||
const requiredFields = [
|
||||
'item_type',
|
||||
'action_type',
|
||||
'status',
|
||||
'order_index',
|
||||
];
|
||||
|
||||
for (const field of requiredFields) {
|
||||
expect(schema[field],
|
||||
`submission_items is missing field: ${field}`
|
||||
).toBeDefined();
|
||||
}
|
||||
});
|
||||
});
|
||||