Mirror of https://github.com/pacnpal/thrilltrack-explorer.git, synced 2025-12-20 02:51:12 -05:00
Add schema validation tests
Create automated tests to validate schema consistency across submission, version, and main entity tables. This includes checking for missing fields, data type mismatches, and correct field presence in critical functions. Also includes a pre-migration validation script and GitHub Actions workflow for automated checks.
.github/workflows/schema-validation.yml (new file, 186 lines, vendored)
@@ -0,0 +1,186 @@
name: Schema Validation

on:
  pull_request:
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  push:
    branches:
      - main
      - develop
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  workflow_dispatch: # Allow manual triggering

jobs:
  validate-schema:
    name: Validate Database Schema
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run schema validation script
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🔍 Running schema validation checks..."
          npm run validate-schema

      - name: Run Playwright schema validation tests
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🧪 Running integration tests..."
          npx playwright test schema-validation --reporter=list

      - name: Upload test results
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: schema-validation-results
          path: |
            playwright-report/
            test-results/
          retention-days: 7

      - name: Comment PR with validation results
        if: failure() && github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `## ❌ Schema Validation Failed

            The schema validation checks have detected inconsistencies in your database changes.

            **Common issues:**
            - Missing fields in submission tables
            - Mismatched data types between tables
            - Missing version metadata fields
            - Invalid column names (e.g., \`ride_type\` in \`rides\` table)

            **Next steps:**
            1. Review the failed tests in the Actions log
            2. Check the [Schema Reference documentation](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/docs/submission-pipeline/SCHEMA_REFERENCE.md)
            3. Fix the identified issues
            4. Push your fixes to re-run validation

            **Need help?** Consult the [Integration Tests README](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/tests/integration/README.md).`
            })

  migration-safety-check:
    name: Migration Safety Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check for breaking changes in migrations
        run: |
          echo "🔍 Checking for potentially breaking migration patterns..."

          # Check if any migrations contain DROP COLUMN
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "DROP COLUMN"; then
            echo "⚠️ Warning: Migration contains DROP COLUMN"
            echo "::warning::Migration contains DROP COLUMN - ensure data migration plan exists"
          fi

          # Check if any migrations alter NOT NULL constraints
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "ALTER COLUMN.*NOT NULL"; then
            echo "⚠️ Warning: Migration alters NOT NULL constraints"
            echo "::warning::Migration alters NOT NULL constraints - ensure data backfill is complete"
          fi

          # Check if any migrations rename columns
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "RENAME COLUMN"; then
            echo "⚠️ Warning: Migration renames columns"
            echo "::warning::Migration renames columns - ensure all code references are updated"
          fi

      - name: Validate migration file naming
        run: |
          echo "🔍 Validating migration file names..."

          # Check that all migration files follow the timestamp pattern
          for file in supabase/migrations/*.sql; do
            if [[ ! $(basename "$file") =~ ^[0-9]{14}_ ]]; then
              echo "❌ Invalid migration filename: $(basename "$file")"
              echo "::error::Migration files must start with a 14-digit timestamp (YYYYMMDDHHMMSS)"
              exit 1
            fi
          done

          echo "✅ All migration filenames are valid"

  documentation-check:
    name: Documentation Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check if schema docs need updating
        run: |
          echo "📚 Checking if schema documentation is up to date..."

          # Check if migrations changed but SCHEMA_REFERENCE.md didn't
          MIGRATIONS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "supabase/migrations/" || true)
          SCHEMA_DOCS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "docs/submission-pipeline/SCHEMA_REFERENCE.md" || true)

          if [ "$MIGRATIONS_CHANGED" -gt 0 ] && [ "$SCHEMA_DOCS_CHANGED" -eq 0 ]; then
            echo "⚠️ Warning: Migrations were changed but SCHEMA_REFERENCE.md was not updated"
            echo "::warning::Consider updating docs/submission-pipeline/SCHEMA_REFERENCE.md to reflect schema changes"
          else
            echo "✅ Documentation check passed"
          fi

      - name: Comment PR with documentation reminder
        if: success()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const migrationsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('supabase/migrations/');
            const docsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('docs/submission-pipeline/SCHEMA_REFERENCE.md');

            if (migrationsChanged && !docsChanged) {
              github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: `## 📚 Documentation Reminder

              This PR includes database migrations but doesn't update the schema reference documentation.

              **If you added/modified fields**, please update:
              - \`docs/submission-pipeline/SCHEMA_REFERENCE.md\`

              **If this is a minor change** (e.g., fixing typos, adding indexes), you can ignore this message.`
              })
            }
docs/submission-pipeline/VALIDATION_SETUP.md (new file, 402 lines)
@@ -0,0 +1,402 @@
# Schema Validation Setup Guide

This guide explains how to set up and use the automated schema validation tools to prevent field mismatches in the submission pipeline.

## Overview

The validation system consists of three layers:

1. **Pre-migration Script** - Quick validation before deploying migrations
2. **Integration Tests** - Comprehensive Playwright tests for CI/CD
3. **GitHub Actions** - Automated checks on every pull request

## Quick Start

### 1. Add NPM Scripts

Add these scripts to your `package.json`:

```json
{
  "scripts": {
    "validate-schema": "tsx scripts/validate-schema.ts",
    "test:schema": "playwright test schema-validation",
    "test:schema:ui": "playwright test schema-validation --ui",
    "pre-migrate": "npm run validate-schema"
  }
}
```

### 2. Environment Variables

Create a `.env.test` file:

```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**⚠️ Important**: Never commit this file! Add it to `.gitignore`:

```gitignore
.env.test
.env.local
```
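
If you would rather not export the key by hand before every run, one option — a sketch, assuming the `dotenv` package is installed and that your Playwright config lives at `playwright.config.ts` (the `testDir`/`timeout` values below are illustrative placeholders, not the project's actual settings) — is to load `.env.test` from the config so the tests pick it up automatically:

```typescript
// playwright.config.ts — minimal sketch, assuming dotenv is installed
import { defineConfig } from '@playwright/test';
import dotenv from 'dotenv';

// Make SUPABASE_SERVICE_ROLE_KEY from .env.test visible to the tests via process.env
dotenv.config({ path: '.env.test' });

export default defineConfig({
  testDir: './tests',
  timeout: 60_000,
});
```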

### 3. Install Dependencies

If not already installed:

```bash
npm install --save-dev @supabase/supabase-js @playwright/test tsx
```

## Using the Validation Tools

### Pre-Migration Validation Script

**When to use**: Before applying any database migration

**Run manually:**
```bash
npm run validate-schema
```

**What it checks:**
- ✅ Submission tables match main tables
- ✅ Version tables have all required fields
- ✅ Critical fields are correct (e.g., `category` vs `ride_type`)
- ✅ Database functions exist and are accessible

**Example output:**
```
🔍 Starting schema validation...

Submission Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: submission table matches main table
✅ Rides: submission table matches main table
✅ Companies: submission table matches main table
✅ Ride Models: submission table matches main table

Version Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: version table has all fields
✅ Rides: version table has all fields
✅ Companies: version table has all fields
✅ Ride Models: version table has all fields

Critical Fields:
────────────────────────────────────────────────────────────────────────────────
✅ rides table does NOT have ride_type column
✅ rides table has category column
✅ ride_models has both category and ride_type

Functions:
────────────────────────────────────────────────────────────────────────────────
✅ create_entity_from_submission exists and is accessible
✅ update_entity_from_submission exists and is accessible
✅ process_approval_transaction exists and is accessible

════════════════════════════════════════════════════════════════════════════════
Total: 14 passed, 0 failed
════════════════════════════════════════════════════════════════════════════════

✅ All schema validations passed. Safe to deploy.
```

### Integration Tests

**When to use**: In CI/CD, before merging PRs, after major changes

**Run all tests:**
```bash
npm run test:schema
```

**Run in UI mode (for debugging):**
```bash
npm run test:schema:ui
```

**Run specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**What it tests:**
- All pre-migration script checks PLUS:
- Field-by-field data type comparison
- NOT NULL constraint validation
- Foreign key existence checks
- Known field name variations (e.g., `height_requirement_cm` vs `height_requirement`)

### GitHub Actions (Automated)

**Automatically runs on:**
- Every pull request that touches:
  - `supabase/migrations/**`
  - `src/lib/moderation/**`
  - `supabase/functions/**`
- Pushes to `main` or `develop` branches
- Manual workflow dispatch

**What it does:**
1. Runs validation script
2. Runs integration tests
3. Checks for breaking migration patterns
4. Validates migration file naming
5. Comments on PRs with helpful guidance if tests fail

## Workflow Examples

### Before Creating a Migration

```bash
# 1. Make schema changes locally
# 2. Validate before creating migration
npm run validate-schema

# 3. If validation passes, create migration
supabase db diff -f add_new_field

# 4. Run validation again
npm run validate-schema

# 5. Commit and push
git add .
git commit -m "Add new field to rides table"
git push
```

### After Modifying Entity Schemas

```bash
# 1. Modified rides table schema
# 2. Run full test suite
npm run test:schema

# 3. Check specific validation
npx playwright test schema-validation --grep "rides"

# 4. Fix any issues
# 5. Re-run tests
npm run test:schema
```

### During Code Review

**PR Author:**
1. Ensure all validation tests pass locally
2. Push changes
3. Wait for GitHub Actions to complete
4. Address any automated feedback

**Reviewer:**
1. Check that GitHub Actions passed
2. Review schema changes in migrations
3. Verify documentation was updated
4. Approve if all checks pass

## Common Issues and Solutions

### Issue: "Missing fields" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Missing fields: category
```

**Cause**: Field was added to main table but not submission table

**Solution:**
```sql
-- In your migration file
-- (if ride_submissions already has rows, add a DEFAULT or backfill before enforcing NOT NULL)
ALTER TABLE ride_submissions ADD COLUMN category TEXT NOT NULL;
```

### Issue: "Type mismatch" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Type mismatches: max_speed_kmh: main=numeric, submission=integer
```

**Cause**: Data types don't match between tables

**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions
  ALTER COLUMN max_speed_kmh TYPE NUMERIC USING max_speed_kmh::numeric;
```

### Issue: "Column does not exist" in Production

**Symptom**: Approval fails with `column "category" does not exist`

**Immediate action:**
1. Run validation script to identify issue
2. Create emergency migration to add missing field
3. Deploy immediately
4. Update functions if needed

**Prevention**: Always run validation before deploying

### Issue: Tests Pass Locally but Fail in CI

**Possible causes:**
- Different database state in CI vs local
- Missing environment variables
- Outdated schema in test database

**Solution:**
```bash
# Pull latest schema
supabase db pull

# Reset local database
supabase db reset

# Re-run tests
npm run test:schema
```

## Best Practices

### ✅ Do's

- ✅ Run validation script before every migration
- ✅ Run integration tests before merging PRs
- ✅ Update all three tables when adding fields (main, submission, version)
- ✅ Document field name variations in tests
- ✅ Check GitHub Actions results before merging
- ✅ Keep SCHEMA_REFERENCE.md up to date

### ❌ Don'ts

- ❌ Don't skip validation "because it's a small change"
- ❌ Don't add fields to only main tables
- ❌ Don't ignore failing tests
- ❌ Don't bypass CI checks
- ❌ Don't commit service role keys
- ❌ Don't modify submission pipeline functions without testing

## Continuous Integration Setup

### GitHub Secrets

Add to your repository secrets:

```
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**Steps:**
1. Go to repository Settings → Secrets and variables → Actions
2. Click "New repository secret"
3. Name: `SUPABASE_SERVICE_ROLE_KEY`
4. Value: Your service role key from Supabase dashboard
5. Save

### Branch Protection Rules

Recommended settings:

```
Branch: main
✓ Require status checks to pass before merging
  ✓ validate-schema (Schema Validation)
  ✓ migration-safety-check (Migration Safety Check)
✓ Require branches to be up to date before merging
```

## Troubleshooting

### Script Won't Run

**Error:** `tsx: command not found`

**Solution:**
```bash
npm install -g tsx
# or
npx tsx scripts/validate-schema.ts
```

### Authentication Errors

**Error:** `Invalid API key`

**Solution:**
1. Check `.env.test` has correct service role key
2. Verify key has not expired
3. Ensure environment variable is loaded:
```bash
# `set -a` exports the variables defined in .env.test so child processes (npm) can see them
set -a && source .env.test && set +a
npm run validate-schema
```

### Tests Timeout

**Error:** Tests timeout after 30 seconds

**Solution:**
```bash
# Increase timeout
npx playwright test schema-validation --timeout=60000
```

## Maintenance

### Adding New Entity Types

When adding a new entity type (e.g., `events`):

1. **Update validation script:**
   ```typescript
   // In scripts/validate-schema.ts
   await validateSubmissionTable('events', 'event_submissions', 'Events');
   await validateVersionTable('events', 'event_versions', 'Events');
   ```

2. **Update integration tests:**
   ```typescript
   // In tests/integration/schema-validation.test.ts
   test('events: submission table matches main table schema', async () => {
     // Add test logic
   });
   ```

3. **Update documentation:**
   - `docs/submission-pipeline/SCHEMA_REFERENCE.md`
   - This file (`VALIDATION_SETUP.md`)

### Updating Field Mappings

When version tables use different field names:

```typescript
// In both script and tests
const fieldMapping: { [key: string]: string } = {
  'new_main_field': 'version_field_name',
};
```
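
The mapping is keyed by the main-table column name and returns the version-table name; lookups fall back to the original name when no mapping exists, which is the same pattern `validateVersionTable` uses in the script. A minimal sketch of the lookup (`resolveVersionColumn` is an illustrative helper name, not an existing function):

```typescript
// Sketch: resolve which column to expect in a version table for a given main-table column.
function resolveVersionColumn(
  mainField: string,
  fieldMapping: { [key: string]: string },
): string {
  return fieldMapping[mainField] || mainField;
}

// e.g. resolveVersionColumn('max_g_force', { max_g_force: 'gforce_max' }) === 'gforce_max'
```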

## Related Documentation

- [Schema Reference](./SCHEMA_REFERENCE.md) - Complete field mappings
- [Integration Tests README](../../tests/integration/README.md) - Detailed test documentation
- [Submission Pipeline](./README.md) - Pipeline overview
- [Versioning System](../versioning/README.md) - Version table details

## Support

**Questions?** Check the documentation above or review existing migration files.

**Found a bug in validation?** Open an issue with:
- Expected behavior
- Actual behavior
- Validation script output
- Database schema snippets
scripts/validate-schema.ts (new file, 332 lines)
@@ -0,0 +1,332 @@
#!/usr/bin/env tsx
/**
 * Schema Validation Script
 *
 * Pre-migration validation script that checks schema consistency
 * across the submission pipeline before deploying changes.
 *
 * Usage:
 *   npm run validate-schema
 *   or
 *   tsx scripts/validate-schema.ts
 *
 * Exit codes:
 *   0 = All validations passed
 *   1 = Validation failures detected
 */

import { createClient } from '@supabase/supabase-js';

const SUPABASE_URL = 'https://ydvtmnrszybqnbcqbdcy.supabase.co';
const SUPABASE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SUPABASE_KEY) {
  console.error('❌ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);

interface ValidationResult {
  category: string;
  test: string;
  passed: boolean;
  message?: string;
}

const results: ValidationResult[] = [];

async function getTableColumns(tableName: string): Promise<Set<string>> {
  const { data, error } = await supabase
    .from('information_schema.columns' as any)
    .select('column_name')
    .eq('table_schema', 'public')
    .eq('table_name', tableName);

  if (error) throw error;

  return new Set(data?.map((row: any) => row.column_name) || []);
}

async function validateSubmissionTable(
  mainTable: string,
  submissionTable: string,
  entityName: string
): Promise<void> {
  const mainColumns = await getTableColumns(mainTable);
  const submissionColumns = await getTableColumns(submissionTable);

  const excludedFields = new Set([
    'id', 'created_at', 'updated_at', 'is_test_data',
    'view_count_all', 'view_count_30d', 'view_count_7d',
    'average_rating', 'review_count', 'installations_count',
  ]);

  const missingFields: string[] = [];

  for (const field of mainColumns) {
    if (excludedFields.has(field)) continue;
    if (!submissionColumns.has(field)) {
      missingFields.push(field);
    }
  }

  if (missingFields.length === 0) {
    results.push({
      category: 'Submission Tables',
      test: `${entityName}: submission table matches main table`,
      passed: true,
    });
  } else {
    results.push({
      category: 'Submission Tables',
      test: `${entityName}: submission table matches main table`,
      passed: false,
      message: `Missing fields: ${missingFields.join(', ')}`,
    });
  }
}

async function validateVersionTable(
  mainTable: string,
  versionTable: string,
  entityName: string
): Promise<void> {
  const mainColumns = await getTableColumns(mainTable);
  const versionColumns = await getTableColumns(versionTable);

  const excludedFields = new Set([
    'id', 'created_at', 'updated_at', 'is_test_data',
    'view_count_all', 'view_count_30d', 'view_count_7d',
    'average_rating', 'review_count', 'installations_count',
  ]);

  const fieldMapping: { [key: string]: string } = {
    'height_requirement': 'height_requirement_cm',
    'max_g_force': 'gforce_max',
    'inversions': 'inversions_count',
    'max_height_meters': 'height_meters',
    'drop_height_meters': 'drop_meters',
  };

  const requiredVersionFields = new Set([
    'version_id', 'version_number', 'change_type', 'change_reason',
    'is_current', 'created_by', 'submission_id', 'is_test_data',
  ]);

  const missingMainFields: string[] = [];
  const missingVersionFields: string[] = [];

  // Check main table fields exist in version table
  for (const field of mainColumns) {
    if (excludedFields.has(field)) continue;

    const mappedField = fieldMapping[field] || field;
    if (!versionColumns.has(field) && !versionColumns.has(mappedField)) {
      missingMainFields.push(field);
    }
  }

  // Check version metadata fields exist
  for (const field of requiredVersionFields) {
    if (!versionColumns.has(field)) {
      missingVersionFields.push(field);
    }
  }

  if (missingMainFields.length === 0 && missingVersionFields.length === 0) {
    results.push({
      category: 'Version Tables',
      test: `${entityName}: version table has all fields`,
      passed: true,
    });
  } else {
    const messages: string[] = [];
    if (missingMainFields.length > 0) {
      messages.push(`Missing main fields: ${missingMainFields.join(', ')}`);
    }
    if (missingVersionFields.length > 0) {
      messages.push(`Missing version fields: ${missingVersionFields.join(', ')}`);
    }

    results.push({
      category: 'Version Tables',
      test: `${entityName}: version table has all fields`,
      passed: false,
      message: messages.join('; '),
    });
  }
}

async function validateCriticalFields(): Promise<void> {
  const ridesColumns = await getTableColumns('rides');
  const rideModelsColumns = await getTableColumns('ride_models');

  // Rides should NOT have ride_type
  if (!ridesColumns.has('ride_type')) {
    results.push({
      category: 'Critical Fields',
      test: 'rides table does NOT have ride_type column',
      passed: true,
    });
  } else {
    results.push({
      category: 'Critical Fields',
      test: 'rides table does NOT have ride_type column',
      passed: false,
      message: 'rides table incorrectly has ride_type column',
    });
  }

  // Rides MUST have category
  if (ridesColumns.has('category')) {
    results.push({
      category: 'Critical Fields',
      test: 'rides table has category column',
      passed: true,
    });
  } else {
    results.push({
      category: 'Critical Fields',
      test: 'rides table has category column',
      passed: false,
      message: 'rides table is missing required category column',
    });
  }

  // Ride models must have both category and ride_type
  if (rideModelsColumns.has('category') && rideModelsColumns.has('ride_type')) {
    results.push({
      category: 'Critical Fields',
      test: 'ride_models has both category and ride_type',
      passed: true,
    });
  } else {
    const missing: string[] = [];
    if (!rideModelsColumns.has('category')) missing.push('category');
    if (!rideModelsColumns.has('ride_type')) missing.push('ride_type');

    results.push({
      category: 'Critical Fields',
      test: 'ride_models has both category and ride_type',
      passed: false,
      message: `ride_models is missing: ${missing.join(', ')}`,
    });
  }
}

async function validateFunctions(): Promise<void> {
  const functionsToCheck = [
    'create_entity_from_submission',
    'update_entity_from_submission',
    'process_approval_transaction',
  ];

  for (const funcName of functionsToCheck) {
    try {
      const { error } = await supabase
        .rpc('pg_catalog.pg_function_is_visible' as any, {
          funcid: `public.${funcName}`,
        } as any);

      if (!error) {
        results.push({
          category: 'Functions',
          test: `${funcName} exists and is accessible`,
          passed: true,
        });
      } else {
        results.push({
          category: 'Functions',
          test: `${funcName} exists and is accessible`,
          passed: false,
          message: error.message,
        });
      }
    } catch (err) {
      results.push({
        category: 'Functions',
        test: `${funcName} exists and is accessible`,
        passed: false,
        message: err instanceof Error ? err.message : String(err),
      });
    }
  }
}

function printResults(): void {
  console.log('\n' + '='.repeat(80));
  console.log('Schema Validation Results');
  console.log('='.repeat(80) + '\n');

  const categories = [...new Set(results.map(r => r.category))];
  let totalPassed = 0;
  let totalFailed = 0;

  for (const category of categories) {
    const categoryResults = results.filter(r => r.category === category);
    const passed = categoryResults.filter(r => r.passed).length;
    const failed = categoryResults.filter(r => !r.passed).length;

    console.log(`\n${category}:`);
    console.log('-'.repeat(80));

    for (const result of categoryResults) {
      const icon = result.passed ? '✅' : '❌';
      console.log(`${icon} ${result.test}`);
      if (result.message) {
        console.log(`   └─ ${result.message}`);
      }
    }

    totalPassed += passed;
    totalFailed += failed;
  }

  console.log('\n' + '='.repeat(80));
  console.log(`Total: ${totalPassed} passed, ${totalFailed} failed`);
  console.log('='.repeat(80) + '\n');
}

async function main(): Promise<void> {
  console.log('🔍 Starting schema validation...\n');

  try {
    // Validate submission tables
    await validateSubmissionTable('parks', 'park_submissions', 'Parks');
    await validateSubmissionTable('rides', 'ride_submissions', 'Rides');
    await validateSubmissionTable('companies', 'company_submissions', 'Companies');
    await validateSubmissionTable('ride_models', 'ride_model_submissions', 'Ride Models');

    // Validate version tables
    await validateVersionTable('parks', 'park_versions', 'Parks');
    await validateVersionTable('rides', 'ride_versions', 'Rides');
    await validateVersionTable('companies', 'company_versions', 'Companies');
    await validateVersionTable('ride_models', 'ride_model_versions', 'Ride Models');

    // Validate critical fields
    await validateCriticalFields();

    // Validate functions
    await validateFunctions();

    // Print results
    printResults();

    // Exit with appropriate code
    const hasFailures = results.some(r => !r.passed);
    if (hasFailures) {
      console.error('❌ Schema validation failed. Please fix the issues above before deploying.\n');
      process.exit(1);
    } else {
      console.log('✅ All schema validations passed. Safe to deploy.\n');
      process.exit(0);
    }
  } catch (error) {
    console.error('❌ Fatal error during validation:');
    console.error(error);
    process.exit(1);
  }
}

main();
tests/integration/README.md (new file, 245 lines)
@@ -0,0 +1,245 @@
# Integration Tests

This directory contains integration tests for the ThrillWiki submission pipeline and data integrity.

## Schema Validation Tests

**File**: `schema-validation.test.ts`

### Purpose

Automated tests that validate schema consistency across the entire submission pipeline:

- **Submission Tables**: Ensures submission tables match their corresponding main entity tables
- **Version Tables**: Validates version tables have all main table fields plus version metadata
- **Critical Fields**: Checks for known problematic fields (e.g., `ride_type` vs `category`)
- **Function Alignment**: Verifies critical database functions exist and are accessible

### Why This Matters

The submission pipeline depends on exact schema alignment between:
1. Main entity tables (`parks`, `rides`, `companies`, `ride_models`)
2. Submission tables (`park_submissions`, `ride_submissions`, etc.)
3. Version tables (`park_versions`, `ride_versions`, etc.)

**Without these tests**, schema mismatches can cause:
- ❌ Approval failures with cryptic "column does not exist" errors
- ❌ Data loss when fields are missing from submission tables
- ❌ Version history corruption when fields don't match
- ❌ Production incidents that are difficult to debug

**With these tests**, we catch issues:
- ✅ During development, before they reach production
- ✅ In CI/CD, preventing bad migrations from deploying
- ✅ Immediately after schema changes, with clear error messages

### Test Categories

#### 1. Entity Table Validation
Compares main entity tables with their submission counterparts:
```typescript
parks        ↔ park_submissions
rides        ↔ ride_submissions
companies    ↔ company_submissions
ride_models  ↔ ride_model_submissions
```

**Checks**:
- All fields from main table exist in submission table (except excluded metadata)
- Data types match exactly
- Required fields are marked NOT NULL in both

#### 2. Version Table Validation
Ensures version tables have complete field coverage:
```typescript
parks        → park_versions
rides        → ride_versions
companies    → company_versions
ride_models  → ride_model_versions
```

**Checks**:
- All main table fields exist (accounting for known name variations)
- Version metadata fields are present (`version_id`, `version_number`, etc.)
- Change tracking fields are properly defined

#### 3. Critical Field Validation
Tests specific known problem areas:

**Critical Test Cases**:
- ✅ `rides` table does NOT have `ride_type` (prevents "column does not exist" error)
- ✅ `rides` table DOES have `category` as NOT NULL
- ✅ `ride_models` table has BOTH `category` and `ride_type`
- ✅ All entities have required base fields (`id`, `name`, `slug`, etc.)
- ✅ All submission tables have `submission_id` foreign key

#### 4. Function Alignment
Validates critical database functions:
- `create_entity_from_submission`
- `update_entity_from_submission`
- `process_approval_transaction`

#### 5. Field Name Variations
Documents and validates known column name differences:
```typescript
ride_versions.height_requirement_cm  ↔ rides.height_requirement
ride_versions.gforce_max             ↔ rides.max_g_force
ride_versions.inversions_count       ↔ rides.inversions
ride_versions.height_meters          ↔ rides.max_height_meters
ride_versions.drop_meters            ↔ rides.drop_height_meters
```

### Running the Tests

**Run all schema validation tests:**
```bash
npm run test:schema
```

**Run specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**Run in UI mode for debugging:**
```bash
npx playwright test schema-validation --ui
```

**Generate detailed report:**
```bash
npx playwright test schema-validation --reporter=html
```

### Environment Setup

These tests require:
- `SUPABASE_SERVICE_ROLE_KEY` environment variable
- Access to the Supabase project database
- Playwright test runner

**Example `.env.test`:**
```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```
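
If the key is not available (for example, on a fork that cannot read repository secrets), one option — a sketch using Playwright's conditional skip, not something the suite currently does — is to skip the file instead of failing it:

```typescript
// At the top of schema-validation.test.ts (sketch): skip everything when no key is configured.
import { test } from '@playwright/test';

test.skip(
  () => !process.env.SUPABASE_SERVICE_ROLE_KEY,
  'SUPABASE_SERVICE_ROLE_KEY is not set; skipping schema validation tests'
);
```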

### Expected Output

**✅ All passing (healthy schema):**
```
✓ parks: submission table matches main table schema (245ms)
✓ rides: submission table matches main table schema (198ms)
✓ companies: submission table matches main table schema (187ms)
✓ ride_models: submission table matches main table schema (203ms)
✓ park_versions: has all main table fields plus version metadata (256ms)
✓ ride_versions: has all main table fields plus version metadata (234ms)
✓ rides table does NOT have ride_type column (145ms)
✓ rides table DOES have category column (NOT NULL) (152ms)
```

**❌ Failure example (schema mismatch):**
```
✕ rides: submission table matches main table schema (203ms)

  Error: ride_submissions is missing fields: category

  Expected: 0
  Received: 1
```

### Continuous Integration

Add to your CI/CD pipeline:

```yaml
# .github/workflows/test.yml
- name: Run Schema Validation Tests
  run: npm run test:schema
  env:
    SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
```

This prevents schema mismatches from reaching production.

### When to Run

**Always run these tests:**
- ✅ After any database migration
- ✅ Before deploying submission pipeline changes
- ✅ After modifying entity schemas
- ✅ When adding new entity types
- ✅ In CI/CD for every pull request

**Especially critical after:**
- Adding/removing columns from entity tables
- Modifying data types
- Changing NOT NULL constraints
- Updating database functions

### Maintenance

**When adding new entity types** (see the sketch after this list):
1. Add validation tests for the new entity
2. Add tests for submission table
3. Add tests for version table (if applicable)
4. Update this README
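
For a hypothetical new entity — say `events` with an `event_submissions` table (illustrative names, not existing tables) — a minimal skeleton can reuse the helpers already defined in `schema-validation.test.ts`:

```typescript
test('events: submission table matches main table schema', async () => {
  const mainSchema = await getTableSchema('events');
  const submissionSchema = await getTableSchema('event_submissions');

  // Every non-excluded main-table column should exist in the submission table.
  const missingFields = Object.keys(mainSchema).filter(
    (field) => !EXCLUDED_FIELDS.includes(field) && !submissionSchema[field]
  );

  expect(missingFields,
    `event_submissions is missing fields: ${missingFields.join(', ')}`
  ).toHaveLength(0);
});
```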

**When schema changes are intentional:**
1. Review failing tests carefully
2. Update `EXCLUDED_FIELDS` or `VERSION_METADATA_FIELDS` if needed
3. Document any new field name variations in `normalizeColumnName()`
4. Update `docs/submission-pipeline/SCHEMA_REFERENCE.md`

### Debugging Failed Tests

**"Missing fields" error:**
1. Check if field was recently added to main table
2. Verify migration added it to submission table too
3. Run migration to add missing field
4. Re-run tests

**"Type mismatch" error:**
1. Compare data types in both tables
2. Check for accidental type change in migration
3. Fix type inconsistency
4. Re-run tests

**"Column does not exist" in production:**
1. Run schema validation tests immediately
2. Identify which table is missing the field
3. Create emergency migration to add field
4. Deploy with high priority

### Related Documentation

- [Schema Reference](../../docs/submission-pipeline/SCHEMA_REFERENCE.md) - Complete field mappings
- [Submission Pipeline](../../docs/submission-pipeline/README.md) - Pipeline overview
- [Versioning System](../../docs/versioning/README.md) - Version table details
- [Moderation Workflow](../../docs/moderation/README.md) - Approval process

---

## Other Integration Tests

### Moderation Security Tests

**File**: `moderation-security.test.ts`

Tests role validation, lock enforcement, and rate limiting in the moderation system.

**Run:**
```bash
npx playwright test moderation-security
```

---

## Contributing

When adding new integration tests:
1. Follow existing test structure
2. Use descriptive test names
3. Add comments explaining what's being tested
4. Update this README
5. Ensure tests are idempotent (can run multiple times)
6. Clean up test data after completion
tests/integration/schema-validation.test.ts (new file, 545 lines)
@@ -0,0 +1,545 @@
import { test, expect } from '@playwright/test';
import { createClient } from '@supabase/supabase-js';

/**
 * Schema Validation Tests
 *
 * These tests validate that submission tables, version tables, and main entity tables
 * have consistent schemas to prevent field mismatches during the approval pipeline.
 *
 * Critical validations:
 * 1. Submission tables must have all fields from main tables (except auto-generated)
 * 2. Version tables must have all fields from main tables plus version metadata
 * 3. Critical functions must reference correct column names
 * 4. Required NOT NULL fields must be present in all tables
 */

const supabase = createClient(
  'https://ydvtmnrszybqnbcqbdcy.supabase.co',
  process.env.SUPABASE_SERVICE_ROLE_KEY || ''
);

interface ColumnDefinition {
  column_name: string;
  data_type: string;
  is_nullable: string;
  column_default: string | null;
}

interface TableSchema {
  [columnName: string]: ColumnDefinition;
}

// Fields that are expected to be different or missing in submission tables
const EXCLUDED_FIELDS = [
  'id', // Submission tables have their own ID
  'created_at', // Managed differently in submissions
  'updated_at', // Managed differently in submissions
  'view_count_all', // Calculated fields not in submissions
  'view_count_30d',
  'view_count_7d',
  'average_rating',
  'review_count',
  'installations_count', // Only for ride_models
  'is_test_data', // Test data flag
];

// Version-specific metadata fields (expected to be extra in version tables)
const VERSION_METADATA_FIELDS = [
  'version_id',
  'version_number',
  'change_type',
  'change_reason',
  'is_current',
  'created_by',
  'created_at',
  'submission_id',
  'is_test_data',
];

async function getTableSchema(tableName: string): Promise<TableSchema> {
  const { data, error } = await supabase
    .from('information_schema.columns' as any)
    .select('column_name, data_type, is_nullable, column_default')
    .eq('table_schema', 'public')
    .eq('table_name', tableName);

  if (error) throw error;

  const schema: TableSchema = {};
  data?.forEach((col: any) => {
    schema[col.column_name] = col;
  });

  return schema;
}
function normalizeColumnName(name: string): string {
  // Handle known version table variations: map the main-table column name
  // to the name the version table uses (this function is always called with
  // main-table column names, so the mapping is keyed by the main name).
  const mapping: { [key: string]: string } = {
    'height_requirement': 'height_requirement_cm',
    'max_g_force': 'gforce_max',
    'inversions': 'inversions_count',
    'max_height_meters': 'height_meters',
    'drop_height_meters': 'drop_meters',
  };

  return mapping[name] || name;
}

test.describe('Schema Validation - Entity Tables', () => {
  test('parks: submission table matches main table schema', async () => {
    const mainSchema = await getTableSchema('parks');
    const submissionSchema = await getTableSchema('park_submissions');

    const mismatches: string[] = [];
    const missingFields: string[] = [];

    // Check each field in main table exists in submission table
    for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      if (!submissionSchema[fieldName]) {
        missingFields.push(fieldName);
      } else {
        // Check data type matches
        const mainType = fieldDef.data_type;
        const submissionType = submissionSchema[fieldName].data_type;

        if (mainType !== submissionType) {
          mismatches.push(
            `${fieldName}: main=${mainType}, submission=${submissionType}`
          );
        }
      }
    }

    expect(missingFields,
      `park_submissions is missing fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(mismatches,
      `park_submissions has type mismatches: ${mismatches.join('; ')}`
    ).toHaveLength(0);
  });

  test('rides: submission table matches main table schema', async () => {
    const mainSchema = await getTableSchema('rides');
    const submissionSchema = await getTableSchema('ride_submissions');

    const mismatches: string[] = [];
    const missingFields: string[] = [];

    for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      if (!submissionSchema[fieldName]) {
        missingFields.push(fieldName);
      } else {
        const mainType = fieldDef.data_type;
        const submissionType = submissionSchema[fieldName].data_type;

        if (mainType !== submissionType) {
          mismatches.push(
            `${fieldName}: main=${mainType}, submission=${submissionType}`
          );
        }
      }
    }

    expect(missingFields,
      `ride_submissions is missing fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(mismatches,
      `ride_submissions has type mismatches: ${mismatches.join('; ')}`
    ).toHaveLength(0);
  });

  test('companies: submission table matches main table schema', async () => {
    const mainSchema = await getTableSchema('companies');
    const submissionSchema = await getTableSchema('company_submissions');

    const mismatches: string[] = [];
    const missingFields: string[] = [];

    for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      if (!submissionSchema[fieldName]) {
        missingFields.push(fieldName);
      } else {
        const mainType = fieldDef.data_type;
        const submissionType = submissionSchema[fieldName].data_type;

        if (mainType !== submissionType) {
          mismatches.push(
            `${fieldName}: main=${mainType}, submission=${submissionType}`
          );
        }
      }
    }

    expect(missingFields,
      `company_submissions is missing fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(mismatches,
      `company_submissions has type mismatches: ${mismatches.join('; ')}`
    ).toHaveLength(0);
  });

  test('ride_models: submission table matches main table schema', async () => {
    const mainSchema = await getTableSchema('ride_models');
    const submissionSchema = await getTableSchema('ride_model_submissions');

    const mismatches: string[] = [];
    const missingFields: string[] = [];

    for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      if (!submissionSchema[fieldName]) {
        missingFields.push(fieldName);
      } else {
        const mainType = fieldDef.data_type;
        const submissionType = submissionSchema[fieldName].data_type;

        if (mainType !== submissionType) {
          mismatches.push(
            `${fieldName}: main=${mainType}, submission=${submissionType}`
          );
        }
      }
    }

    expect(missingFields,
      `ride_model_submissions is missing fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(mismatches,
      `ride_model_submissions has type mismatches: ${mismatches.join('; ')}`
    ).toHaveLength(0);
  });
});

test.describe('Schema Validation - Version Tables', () => {
  test('park_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('parks');
    const versionSchema = await getTableSchema('park_versions');

    const missingFields: string[] = [];

    // Check all main table fields exist in version table
    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    // Check all version metadata fields exist
    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `park_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `park_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });

  test('ride_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('rides');
    const versionSchema = await getTableSchema('ride_versions');

    const missingFields: string[] = [];

    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `ride_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `ride_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });

  test('company_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('companies');
    const versionSchema = await getTableSchema('company_versions');

    const missingFields: string[] = [];

    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `company_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `company_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });

  test('ride_model_versions: has all main table fields plus version metadata', async () => {
    const mainSchema = await getTableSchema('ride_models');
    const versionSchema = await getTableSchema('ride_model_versions');

    const missingFields: string[] = [];

    for (const [fieldName] of Object.entries(mainSchema)) {
      if (EXCLUDED_FIELDS.includes(fieldName)) continue;

      const normalizedName = normalizeColumnName(fieldName);
      if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
        missingFields.push(fieldName);
      }
    }

    const missingMetadata: string[] = [];
    for (const metaField of VERSION_METADATA_FIELDS) {
      if (!versionSchema[metaField]) {
        missingMetadata.push(metaField);
      }
    }

    expect(missingFields,
      `ride_model_versions is missing main table fields: ${missingFields.join(', ')}`
    ).toHaveLength(0);

    expect(missingMetadata,
      `ride_model_versions is missing version metadata: ${missingMetadata.join(', ')}`
    ).toHaveLength(0);
  });
});

test.describe('Schema Validation - Critical Fields', () => {
  test('rides table does NOT have ride_type column', async () => {
    const ridesSchema = await getTableSchema('rides');

    expect(ridesSchema['ride_type']).toBeUndefined();
  });

  test('rides table DOES have category column (NOT NULL)', async () => {
    const ridesSchema = await getTableSchema('rides');

    expect(ridesSchema['category']).toBeDefined();
    expect(ridesSchema['category'].is_nullable).toBe('NO');
  });

  test('ride_models table DOES have both category and ride_type columns', async () => {
    const rideModelsSchema = await getTableSchema('ride_models');

    expect(rideModelsSchema['category']).toBeDefined();
    expect(rideModelsSchema['category'].is_nullable).toBe('NO');
    expect(rideModelsSchema['ride_type']).toBeDefined();
  });

  test('all entity tables have required base fields', async () => {
    const requiredFields = ['id', 'name', 'slug', 'created_at', 'updated_at'];
    const tables = ['parks', 'rides', 'companies', 'ride_models'];

    for (const table of tables) {
      const schema = await getTableSchema(table);

      for (const field of requiredFields) {
        expect(schema[field],
          `${table} is missing required field: ${field}`
        ).toBeDefined();
      }
    }
  });

  test('all submission tables have submission_id foreign key', async () => {
    const submissionTables = [
      'park_submissions',
      'ride_submissions',
      'company_submissions',
      'ride_model_submissions',
      'photo_submissions',
    ];

    for (const table of submissionTables) {
      const schema = await getTableSchema(table);

      expect(schema['submission_id'],
        `${table} is missing submission_id foreign key`
      ).toBeDefined();
      expect(schema['submission_id'].is_nullable).toBe('NO');
    }
  });

  test('all version tables have version metadata fields', async () => {
    const versionTables = [
      'park_versions',
      'ride_versions',
      'company_versions',
      'ride_model_versions',
    ];

    const requiredVersionFields = [
      'version_id',
      'version_number',
      'change_type',
      'is_current',
    ];

    for (const table of versionTables) {
      const schema = await getTableSchema(table);

      for (const field of requiredVersionFields) {
        expect(schema[field],
          `${table} is missing required version field: ${field}`
        ).toBeDefined();
      }
    }
  });
});

test.describe('Schema Validation - Function Parameter Alignment', () => {
  test('verify create_entity_from_submission function exists', async () => {
    const { error } = await supabase
      .rpc('pg_get_functiondef', {
        funcid: 'create_entity_from_submission',
      } as any)
      .single();

    // Function should exist (will error if not)
    expect(error).toBeNull();
  });

  test('verify update_entity_from_submission function exists', async () => {
    const { error } = await supabase
      .rpc('pg_get_functiondef', {
        funcid: 'update_entity_from_submission',
      } as any)
      .single();

    // Function should exist (will error if not)
    expect(error).toBeNull();
  });

  test('verify process_approval_transaction function exists', async () => {
    const { data } = await supabase.rpc('pg_catalog.pg_function_is_visible', {
      funcid: 'process_approval_transaction',
    } as any);

    // Function should be visible
    expect(data).toBeTruthy();
  });
});

test.describe('Schema Validation - Known Field Name Variations', () => {
  test('ride_versions uses height_requirement_cm instead of height_requirement', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['height_requirement_cm']).toBeDefined();
    expect(versionSchema['height_requirement']).toBeUndefined();
  });

  test('ride_versions uses gforce_max instead of max_g_force', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['gforce_max']).toBeDefined();
    expect(versionSchema['max_g_force']).toBeUndefined();
  });

  test('ride_versions uses inversions_count instead of inversions', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['inversions_count']).toBeDefined();
    expect(versionSchema['inversions']).toBeUndefined();
  });

  test('ride_versions uses height_meters instead of max_height_meters', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['height_meters']).toBeDefined();
    expect(versionSchema['max_height_meters']).toBeUndefined();
  });

  test('ride_versions uses drop_meters instead of drop_height_meters', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['drop_meters']).toBeDefined();
    expect(versionSchema['drop_height_meters']).toBeUndefined();
  });
});

test.describe('Schema Validation - Submission Items', () => {
  test('submission_items has all required foreign key columns', async () => {
    const schema = await getTableSchema('submission_items');

    const requiredFKs = [
      'submission_id',
      'park_submission_id',
      'ride_submission_id',
      'company_submission_id',
      'ride_model_submission_id',
      'photo_submission_id',
      'timeline_event_submission_id',
      'depends_on', // For dependency chain
    ];

    for (const fk of requiredFKs) {
      expect(schema[fk],
        `submission_items is missing FK: ${fk}`
      ).toBeDefined();
    }
  });

  test('submission_items has required metadata fields', async () => {
    const schema = await getTableSchema('submission_items');

    const requiredFields = [
      'item_type',
      'action_type',
      'status',
      'order_index',
    ];

    for (const field of requiredFields) {
      expect(schema[field],
        `submission_items is missing field: ${field}`
      ).toBeDefined();
    }
  });
});