mirror of https://github.com/pacnpal/thrilltrack-explorer.git
synced 2025-12-27 09:46:58 -05:00

Compare commits: 1f7e4bf81c...claude/aud — 1 commit (2c2a6c90f0)

.github/workflows/schema-validation.yml (vendored, 186 lines deleted)
@@ -1,186 +0,0 @@
name: Schema Validation

on:
  pull_request:
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  push:
    branches:
      - main
      - develop
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  workflow_dispatch: # Allow manual triggering

jobs:
  validate-schema:
    name: Validate Database Schema
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run schema validation script
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🔍 Running schema validation checks..."
          npm run validate-schema

      - name: Run Playwright schema validation tests
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🧪 Running integration tests..."
          npx playwright test schema-validation --reporter=list

      - name: Upload test results
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: schema-validation-results
          path: |
            playwright-report/
            test-results/
          retention-days: 7

      - name: Comment PR with validation results
        if: failure() && github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `## ❌ Schema Validation Failed

            The schema validation checks have detected inconsistencies in your database changes.

            **Common issues:**
            - Missing fields in submission tables
            - Mismatched data types between tables
            - Missing version metadata fields
            - Invalid column names (e.g., \`ride_type\` in \`rides\` table)

            **Next steps:**
            1. Review the failed tests in the Actions log
            2. Check the [Schema Reference documentation](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/docs/submission-pipeline/SCHEMA_REFERENCE.md)
            3. Fix the identified issues
            4. Push your fixes to re-run validation

            **Need help?** Consult the [Integration Tests README](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/tests/integration/README.md).`
            })

  migration-safety-check:
    name: Migration Safety Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check for breaking changes in migrations
        run: |
          echo "🔍 Checking for potentially breaking migration patterns..."

          # Check if any migrations contain DROP COLUMN
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "DROP COLUMN"; then
            echo "⚠️ Warning: Migration contains DROP COLUMN"
            echo "::warning::Migration contains DROP COLUMN - ensure data migration plan exists"
          fi

          # Check if any migrations alter NOT NULL constraints
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "ALTER COLUMN.*NOT NULL"; then
            echo "⚠️ Warning: Migration alters NOT NULL constraints"
            echo "::warning::Migration alters NOT NULL constraints - ensure data backfill is complete"
          fi

          # Check if any migrations rename columns
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "RENAME COLUMN"; then
            echo "⚠️ Warning: Migration renames columns"
            echo "::warning::Migration renames columns - ensure all code references are updated"
          fi

      - name: Validate migration file naming
        run: |
          echo "🔍 Validating migration file names..."

          # Check that all migration files follow the timestamp pattern
          for file in supabase/migrations/*.sql; do
            if [[ ! $(basename "$file") =~ ^[0-9]{14}_ ]]; then
              echo "❌ Invalid migration filename: $(basename "$file")"
              echo "::error::Migration files must start with a 14-digit timestamp (YYYYMMDDHHMMSS)"
              exit 1
            fi
          done

          echo "✅ All migration filenames are valid"

  documentation-check:
    name: Documentation Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check if schema docs need updating
        run: |
          echo "📚 Checking if schema documentation is up to date..."

          # Check if migrations changed but SCHEMA_REFERENCE.md didn't
          MIGRATIONS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "supabase/migrations/" || true)
          SCHEMA_DOCS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "docs/submission-pipeline/SCHEMA_REFERENCE.md" || true)

          if [ "$MIGRATIONS_CHANGED" -gt 0 ] && [ "$SCHEMA_DOCS_CHANGED" -eq 0 ]; then
            echo "⚠️ Warning: Migrations were changed but SCHEMA_REFERENCE.md was not updated"
            echo "::warning::Consider updating docs/submission-pipeline/SCHEMA_REFERENCE.md to reflect schema changes"
          else
            echo "✅ Documentation check passed"
          fi

      - name: Comment PR with documentation reminder
        if: success()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const migrationsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('supabase/migrations/');
            const docsChanged = (await exec.getExecOutput('git', ['diff', 'origin/main...HEAD', '--name-only'])).stdout.includes('docs/submission-pipeline/SCHEMA_REFERENCE.md');

            if (migrationsChanged && !docsChanged) {
              github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: `## 📚 Documentation Reminder

            This PR includes database migrations but doesn't update the schema reference documentation.

            **If you added/modified fields**, please update:
            - \`docs/submission-pipeline/SCHEMA_REFERENCE.md\`

            **If this is a minor change** (e.g., fixing typos, adding indexes), you can ignore this message.`
              })
            }

DJANGO_MIGRATION_AUDIT.md (new file, 963 lines)
@@ -0,0 +1,963 @@

# Django Migration Audit & Plan

**Date**: 2025-11-08
**Project**: ThrillTrack Explorer
**Objective**: Complete migration from Supabase to Django backend

## Executive Summary

This audit examines the current state of the Django migration for ThrillTrack Explorer, a comprehensive amusement park and roller coaster tracking platform. The migration is approximately **40% complete** in terms of core functionality.

**Key Findings:**
- ✅ Core entity models (Parks, Rides, Companies, RideModels) are implemented
- ✅ Photo/media system is implemented
- ✅ Versioning system is implemented
- ✅ Moderation workflow with FSM is implemented
- ✅ Basic API endpoints (~3,700 lines) are implemented
- ❌ Reviews system is NOT implemented
- ❌ User features (lists, credits, blocking) are NOT implemented
- ❌ Notifications system is NOT implemented (model file is empty)
- ❌ Admin features are NOT implemented
- ❌ 42 Edge Functions need migration to Django
- ❌ Blog/content features are NOT implemented
- ❌ Advanced submission features are partially missing

---

## 1. Database Schema Comparison

### 1.1 Core Entities - ✅ COMPLETE

| Entity | Supabase | Django | Status | Notes |
|--------|----------|--------|--------|-------|
| Companies | ✅ | ✅ | **DONE** | Includes manufacturers, operators, designers |
| Parks | ✅ | ✅ | **DONE** | Location tracking, operating status |
| Rides | ✅ | ✅ | **DONE** | Full specs, coaster stats |
| Ride Models | ✅ | ✅ | **DONE** | Manufacturer templates |
| Locations | ✅ | ✅ | **DONE** | Country, Subdivision, Locality hierarchy |

### 1.2 User & Profile - ⚠️ PARTIAL

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| User (auth.users) | ✅ | ✅ | **DONE** | Custom user model with OAuth, MFA |
| User Profile | ✅ (profiles) | ✅ (UserProfile) | **DONE** | Extended profile info |
| User Roles | ✅ (user_roles) | ✅ (UserRole) | **DONE** | admin/moderator/user |
| User Sessions | ✅ | ❌ | **MISSING** | Session tracking table |
| User Preferences | ✅ | ❌ | **MISSING** | Theme, notification settings |
| User Notification Preferences | ✅ | ❌ | **MISSING** | Per-channel notification prefs |
| User Blocks | ✅ | ❌ | **MISSING** | User blocking system |

### 1.3 User Content - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Reviews | ✅ (reviews) | ❌ | **MISSING** | Park & ride reviews |
| Review Photos | ✅ (review_photos) | ❌ | **MISSING** | Photos attached to reviews |
| Review Deletions | ✅ (review_deletions) | ❌ | **MISSING** | Soft delete tracking |
| User Ride Credits | ✅ (user_ride_credits) | ❌ | **MISSING** | Track rides users have been on |
| User Top Lists | ✅ (user_top_lists) | ❌ | **MISSING** | Custom ranked lists |
| List Items | ✅ (list_items) | ❌ | **MISSING** | Items within lists |
| User Top List Items | ✅ | ❌ | **MISSING** | Detailed list item data |

### 1.4 Media & Photos - ✅ COMPLETE

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Photos | ✅ | ✅ | **DONE** | CloudFlare Images integration |
| Photo Submissions | ✅ | ⚠️ | **PARTIAL** | Through moderation system |
| Generic Photo Relations | ✅ | ✅ | **DONE** | Photos attach to any entity |

### 1.5 Moderation & Submissions - ✅ MOSTLY COMPLETE

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Content Submissions | ✅ | ✅ | **DONE** | FSM-based workflow |
| Submission Items | ✅ | ✅ | **DONE** | Individual field changes |
| Moderation Locks | ✅ | ✅ | **DONE** | 15-minute review locks |
| Park Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Ride Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Company Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Ride Model Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Photo Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Submission Dependencies | ✅ | ❌ | **MISSING** | Track dependent submissions |
| Submission Idempotency Keys | ✅ | ❌ | **MISSING** | Prevent duplicate submissions |
| Submission Item Temp Refs | ✅ | ❌ | **MISSING** | Temporary reference handling |
| Conflict Resolutions | ✅ | ❌ | **MISSING** | Handle edit conflicts |

### 1.6 Versioning & History - ✅ MOSTLY COMPLETE

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Entity Versions | ✅ | ✅ | **DONE** | Generic version tracking |
| Version Diffs | ✅ | ⚠️ | **PARTIAL** | Stored in changed_fields JSON |
| Company Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Park Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Ride Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Ride Model Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Entity Versions Archive | ✅ | ❌ | **MISSING** | Old version archival |
| Item Edit History | ✅ | ❌ | **MISSING** | Detailed edit tracking |
| Item Field Changes | ✅ | ❌ | **MISSING** | Field-level change tracking |
| Entity Field History | ✅ | ❌ | **MISSING** | Historical field values |
| Entity Relationships History | ✅ | ❌ | **MISSING** | Track relationship changes |
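
The "Via generic EntityVersion" rows above rely on a single ContentType-keyed table (see section 7.1). A minimal sketch of that pattern follows — the field names here are illustrative assumptions, not the project's actual ones:

```python
# Illustrative sketch of generic, ContentType-keyed versioning. The audit
# notes the real EntityVersion lives in apps/versioning/models.py; nothing
# below is copied from it.
import uuid

from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models


class EntityVersion(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    # Generic relation: one table versions parks, rides, companies, ride models.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.UUIDField()
    entity = GenericForeignKey("content_type", "object_id")

    version_number = models.PositiveIntegerField()
    change_type = models.CharField(
        max_length=16,
        choices=[("created", "Created"), ("updated", "Updated"), ("restored", "Restored")],
    )
    # Diff payload; the audit notes diffs are currently stored as JSON.
    changed_fields = models.JSONField(default=dict)
    is_current = models.BooleanField(default=True)
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, null=True, on_delete=models.SET_NULL
    )
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        unique_together = [("content_type", "object_id", "version_number")]
        indexes = [models.Index(fields=["content_type", "object_id", "is_current"])]
```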

### 1.7 Ride-Specific Details - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Ride Coaster Stats | ✅ | ❌ | **MISSING** | Detailed coaster statistics |
| Ride Technical Specs | ✅ | ⚠️ | **PARTIAL** | Using JSONField, need dedicated table |
| Ride Water Details | ✅ | ❌ | **MISSING** | Water ride specifics |
| Ride Dark Details | ✅ | ❌ | **MISSING** | Dark ride specifics |
| Ride Flat Details | ✅ | ❌ | **MISSING** | Flat ride specifics |
| Ride Kiddie Details | ✅ | ❌ | **MISSING** | Kiddie ride specifics |
| Ride Transportation Details | ✅ | ❌ | **MISSING** | Transport ride specifics |
| Ride Former Names | ✅ | ❌ | **MISSING** | Historical ride names |
| Ride Name History | ✅ | ❌ | **MISSING** | Track name changes |
| Ride Model Technical Specs | ✅ | ❌ | **MISSING** | Model-specific specs |

### 1.8 Notifications - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Notification Channels | ✅ | ❌ | **MISSING** | Email, push, in-app channels |
| Notification Templates | ✅ | ❌ | **MISSING** | Template system |
| Notification Logs | ✅ | ❌ | **MISSING** | Delivery tracking |
| Notification Event Data | ✅ | ❌ | **MISSING** | Event-specific data |
| Notification Duplicate Stats | ✅ | ❌ | **MISSING** | Prevent duplicate notifications |

### 1.9 Admin & Audit - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Admin Settings | ✅ | ❌ | **MISSING** | System-wide settings |
| Admin Audit Log | ✅ | ❌ | **MISSING** | Admin action tracking |
| Admin Audit Details | ✅ | ❌ | **MISSING** | Detailed audit data |
| Moderation Audit Log | ✅ | ❌ | **MISSING** | Moderation action tracking |
| Moderation Audit Metadata | ✅ | ❌ | **MISSING** | Additional audit context |
| Profile Audit Log | ✅ | ❌ | **MISSING** | Profile change tracking |
| Profile Change Fields | ✅ | ❌ | **MISSING** | Field-level profile changes |

### 1.10 Timeline & Events - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Entity Timeline Events | ✅ | ❌ | **MISSING** | Significant entity events |
| Timeline Event Submissions | ✅ | ❌ | **MISSING** | User-submitted events |

### 1.11 Reports & Contact - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Reports | ✅ (reports table) | ❌ | **MISSING** | User reports/flagging |
| Contact Submissions | ✅ | ❌ | **MISSING** | Contact form submissions |
| Contact Email Threads | ✅ | ❌ | **MISSING** | Email thread tracking |
| Contact Rate Limits | ✅ | ❌ | **MISSING** | Prevent spam |

### 1.12 Historical Data - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Historical Parks | ✅ | ❌ | **MISSING** | Closed/defunct parks |
| Historical Rides | ✅ | ❌ | **MISSING** | Closed/defunct rides |
| Park Location History | ✅ | ❌ | **MISSING** | Track relocations |

### 1.13 Content & Blog - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Blog Posts | ✅ | ❌ | **MISSING** | Blog/news system |

### 1.14 System Tables - ❌ NOT IMPLEMENTED

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Entity Page Views | ✅ | ❌ | **MISSING** | Analytics/view tracking |
| Rate Limits | ✅ | ❌ | **MISSING** | API rate limiting |
| Account Deletion Requests | ✅ | ❌ | **MISSING** | GDPR compliance |
| Cleanup Job Log | ✅ | ❌ | **MISSING** | Maintenance job tracking |
| Orphaned Images | ✅ | ❌ | **MISSING** | Media cleanup |
| Orphaned Images Log | ✅ | ❌ | **MISSING** | Cleanup history |
| Test Data Registry | ✅ | ❌ | **MISSING** | Test data management |
| Approval Transaction Metrics | ✅ | ❌ | **MISSING** | Performance tracking |
| Request Metadata | ✅ | ❌ | **MISSING** | Request tracking |
| Request Breadcrumbs | ✅ | ❌ | **MISSING** | Request flow tracking |
| System Alerts | ✅ | ❌ | **MISSING** | System-wide alerts |

### 1.15 Park Operating Details - ⚠️ PARTIAL

| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Park Operating Hours | ✅ | ❌ | **MISSING** | Schedule by day/season |

---

## 2. API Endpoints Comparison

### 2.1 Implemented Endpoints - ✅ COMPLETE

| Category | Supabase | Django | Lines of Code | Status |
|----------|----------|--------|---------------|--------|
| Authentication | ✅ | ✅ | 596 | **DONE** - JWT, OAuth, MFA |
| Companies | ✅ | ✅ | 254 | **DONE** - CRUD + search |
| Ride Models | ✅ | ✅ | 247 | **DONE** - CRUD + search |
| Parks | ✅ | ✅ | 362 | **DONE** - CRUD + nearby search |
| Rides | ✅ | ✅ | 360 | **DONE** - CRUD + search |
| Photos | ✅ | ✅ | 600 | **DONE** - Upload + moderation |
| Moderation | ✅ | ✅ | 496 | **DONE** - Submission workflow |
| Versioning | ✅ | ✅ | 369 | **DONE** - History + diffs |
| Search | ✅ | ✅ | 438 | **DONE** - Full-text search |

**Total API Code**: ~3,725 lines across 9 endpoint modules

### 2.2 Missing Endpoints - ❌ NOT IMPLEMENTED

| Category | Required | Status | Priority |
|----------|----------|--------|----------|
| Reviews | ✅ | ❌ **MISSING** | **HIGH** |
| User Lists | ✅ | ❌ **MISSING** | **HIGH** |
| User Credits | ✅ | ❌ **MISSING** | **MEDIUM** |
| Notifications | ✅ | ❌ **MISSING** | **HIGH** |
| Admin | ✅ | ❌ **MISSING** | **MEDIUM** |
| Reports | ✅ | ❌ **MISSING** | **MEDIUM** |
| Contact | ✅ | ❌ **MISSING** | **LOW** |
| Blog | ✅ | ❌ **MISSING** | **LOW** |
| Analytics | ✅ | ❌ **MISSING** | **LOW** |
| Timeline Events | ✅ | ❌ **MISSING** | **LOW** |

---

## 3. Supabase Edge Functions Analysis

**Total Edge Functions**: 42 functions

### 3.1 Edge Function Categories

#### 3.1.1 Authentication & User Management (9 functions)
- ❌ `admin-delete-user` - Admin user deletion
- ❌ `cancel-account-deletion` - Cancel pending deletion
- ❌ `cancel-email-change` - Cancel email change
- ❌ `confirm-account-deletion` - Confirm account deletion
- ❌ `export-user-data` - GDPR data export
- ❌ `mfa-unenroll` - Disable MFA
- ❌ `process-oauth-profile` - OAuth profile sync
- ❌ `request-account-deletion` - Request account deletion
- ❌ `resend-deletion-code` - Resend deletion confirmation

**Migration Strategy**: Implement as Django management commands + API endpoints

#### 3.1.2 Notifications (11 functions)
- ❌ `create-novu-subscriber` - Create notification subscriber
- ❌ `migrate-novu-users` - Migrate notification users
- ❌ `notify-moderators-report` - Notify mods of reports
- ❌ `notify-moderators-submission` - Notify mods of submissions
- ❌ `notify-system-announcement` - System announcements
- ❌ `notify-user-submission-status` - Submission status updates
- ❌ `novu-webhook` - Webhook receiver
- ❌ `remove-novu-subscriber` - Remove subscriber
- ❌ `trigger-notification` - Generic notification trigger
- ❌ `update-novu-preferences` - Update notification prefs
- ❌ `update-novu-subscriber` - Update subscriber info

**Migration Strategy**: Replace Novu with Django + Celery + email/push service

#### 3.1.3 Moderation & Content (5 functions)
- ❌ `manage-moderator-topic` - Manage mod topics/assignments
- ❌ `process-selective-approval` - Selective item approval
- ❌ `send-escalation-notification` - Escalate to senior mods
- ❌ `sync-all-moderators-to-topic` - Sync mod assignments
- ❌ `check-transaction-status` - Transaction monitoring

**Migration Strategy**: Implement as Celery tasks + API endpoints

#### 3.1.4 Maintenance & Cleanup (5 functions)
- ❌ `cleanup-old-versions` - Version history cleanup
- ❌ `process-expired-bans` - Process ban expirations
- ❌ `process-scheduled-deletions` - Process scheduled deletions
- ❌ `run-cleanup-jobs` - General maintenance
- ❌ `scheduled-maintenance` - Scheduled maintenance tasks

**Migration Strategy**: Implement as Celery periodic tasks (a sketch follows below)
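
To make the strategy concrete, here is a minimal sketch of how one of these functions might land as a Celery periodic task. The task name, app path, retention window, and schedule are all illustrative assumptions, not code from the repository:

```python
# Hypothetical sketch of `cleanup-old-versions` re-homed as a Celery task.
from datetime import timedelta

from celery import shared_task
from django.utils import timezone


@shared_task
def cleanup_old_versions(days: int = 90) -> int:
    """Delete superseded entity versions older than `days` days."""
    from apps.versioning.models import EntityVersion  # assumed app path

    cutoff = timezone.now() - timedelta(days=days)
    deleted, _ = EntityVersion.objects.filter(
        is_current=False, created_at__lt=cutoff
    ).delete()
    return deleted


# Wired up in celery.py via Celery beat (schedule is illustrative):
#
# from celery.schedules import crontab
# app.conf.beat_schedule = {
#     "cleanup-old-versions": {
#         "task": "tasks.maintenance.cleanup_old_versions",
#         "schedule": crontab(hour=4, minute=0),
#     },
# }
```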

#### 3.1.5 Communication (5 functions)
- ❌ `merge-contact-tickets` - Merge duplicate tickets
- ❌ `receive-inbound-email` - Email receiver
- ❌ `send-admin-email-reply` - Admin email responses
- ❌ `send-contact-message` - Send contact message
- ❌ `send-password-added-email` - Password set notification

**Migration Strategy**: Implement with Django email backend

#### 3.1.6 Utilities (6 functions)
- ❌ `detect-location` - IP geolocation
- ❌ `seed-test-data` - Test data generation
- ❌ `sitemap` - Generate sitemap
- ❌ `upload-image` - Image upload to CloudFlare
- ❌ `validate-email` - Email validation
- ❌ `validate-email-backend` - Backend email validation

**Migration Strategy**: Mix of Celery tasks, management commands, and API endpoints

---

## 4. Frontend Feature Analysis

**Total Component Files**: 325 TypeScript/TSX files
**Component Directories**: 36 directories
**Page Directories**: 43 directories

### 4.1 Frontend Components Requiring Backend Support

Based on the directory structure, the following features need backend support:

#### ✅ Implemented in Django
- Companies (manufacturers, operators)
- Parks (listings, details, maps)
- Rides (listings, details, search)
- Moderation (submissions, approval workflow)
- Versioning (history, diffs)
- Photos (upload, gallery, moderation)
- Search (full-text, filters)
- Auth (login, register, OAuth, MFA)

#### ❌ Missing from Django
- **Reviews** (`src/components/reviews/`) - **HIGH PRIORITY**
- **User Lists** (`src/components/lists/`) - **HIGH PRIORITY**
- **Notifications** (`src/components/notifications/`) - **HIGH PRIORITY**
- **Profile** (full features in `src/components/profile/`) - **MEDIUM PRIORITY**
- **Analytics** (`src/components/analytics/`) - **LOW PRIORITY**
- **Blog** (`src/components/blog/`) - **LOW PRIORITY**
- **Contact** (`src/components/contact/`) - **LOW PRIORITY**
- **Settings** (full features in `src/components/settings/`) - **MEDIUM PRIORITY**
- **Timeline** (`src/components/timeline/`) - **LOW PRIORITY**
- **Designers** (`src/components/designers/`) - **LOW PRIORITY**
- **Park Owners** (`src/components/park-owners/`) - **LOW PRIORITY**
- **Operators** (`src/components/operators/`) - **MEDIUM PRIORITY**
- **Manufacturers** (`src/components/manufacturers/`) - **MEDIUM PRIORITY**

---

## 5. Critical Missing Features

### 5.1 HIGHEST PRIORITY (Core User Features)

#### Reviews System
**Impact**: Critical - core feature for users
**Tables Needed**:
- `reviews` - Main review table
- `review_photos` - Photo attachments
- `review_deletions` - Soft delete tracking

**API Endpoints Needed**:
- `POST /api/v1/reviews/` - Create review
- `GET /api/v1/reviews/` - List reviews
- `GET /api/v1/reviews/{id}/` - Get review
- `PATCH /api/v1/reviews/{id}/` - Update review
- `DELETE /api/v1/reviews/{id}/` - Delete review
- `POST /api/v1/reviews/{id}/helpful/` - Mark as helpful
- `GET /api/v1/parks/{id}/reviews/` - Park reviews
- `GET /api/v1/rides/{id}/reviews/` - Ride reviews

**Estimated Effort**: 2-3 days (a model sketch follows below)
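
As a starting point, a minimal sketch of the missing models, assuming a generic relation so one table can review both parks and rides; all names and fields are illustrative rather than taken from the Supabase schema:

```python
# Hypothetical sketch of apps/reviews/models.py; field names are assumptions.
import uuid

from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models


class Review(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)

    # One table reviews both parks and rides via a generic relation.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.UUIDField()
    entity = GenericForeignKey("content_type", "object_id")

    rating = models.PositiveSmallIntegerField(
        validators=[MinValueValidator(1), MaxValueValidator(10)]
    )
    title = models.CharField(max_length=200, blank=True)
    body = models.TextField()
    helpful_count = models.PositiveIntegerField(default=0)

    # Soft delete, mirroring the review_deletions tracking in Supabase.
    deleted_at = models.DateTimeField(null=True, blank=True)

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        unique_together = [("author", "content_type", "object_id")]


class ReviewPhoto(models.Model):
    review = models.ForeignKey(Review, related_name="photos", on_delete=models.CASCADE)
    photo = models.ForeignKey("media.Photo", on_delete=models.CASCADE)  # assumed app path
    caption = models.CharField(max_length=300, blank=True)
```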

#### User Lists System
**Impact**: Critical - popular feature for enthusiasts
**Tables Needed**:
- `user_top_lists` - List metadata
- `list_items` - List entries
- `user_top_list_items` - Extended item data

**API Endpoints Needed**:
- `POST /api/v1/lists/` - Create list
- `GET /api/v1/lists/` - List all lists
- `GET /api/v1/lists/{id}/` - Get list
- `PATCH /api/v1/lists/{id}/` - Update list
- `DELETE /api/v1/lists/{id}/` - Delete list
- `POST /api/v1/lists/{id}/items/` - Add item
- `DELETE /api/v1/lists/{id}/items/{item_id}/` - Remove item
- `PATCH /api/v1/lists/{id}/reorder/` - Reorder items

**Estimated Effort**: 2-3 days (a reorder-endpoint sketch follows below)
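
The only non-obvious endpoint here is the reorder one. A hedged sketch using django-ninja (the framework the audit notes the project already uses); the model names, `position` field, related names, and app paths are assumptions:

```python
# Hypothetical sketch of the reorder endpoint; names are illustrative.
from uuid import UUID

from django.db import transaction
from django.shortcuts import get_object_or_404
from ninja import Router, Schema

router = Router(tags=["lists"])


class ReorderIn(Schema):
    item_ids: list[UUID]  # item IDs in their new display order


@router.patch("/{list_id}/reorder/")
def reorder_list(request, list_id: UUID, payload: ReorderIn):
    from apps.lists.models import ListItem, UserTopList  # assumed app path

    top_list = get_object_or_404(UserTopList, id=list_id, owner=request.user)

    # Renumber positions atomically so a failed request can't half-apply.
    # Real code should also verify every payload ID belongs to this list.
    with transaction.atomic():
        items = {i.id: i for i in top_list.items.select_for_update()}
        for position, item_id in enumerate(payload.item_ids, start=1):
            items[item_id].position = position
        ListItem.objects.bulk_update(items.values(), ["position"])

    return {"ok": True, "count": len(payload.item_ids)}
```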

#### Notifications System
**Impact**: Critical - user engagement
**Tables Needed**:
- `notification_channels` - Channel config
- `notification_templates` - Templates
- `notification_logs` - Delivery tracking
- `notification_event_data` - Event data
- `user_notification_preferences` - User preferences

**API Endpoints Needed**:
- `GET /api/v1/notifications/` - List notifications
- `PATCH /api/v1/notifications/{id}/read/` - Mark as read
- `PATCH /api/v1/notifications/read-all/` - Mark all as read
- `GET /api/v1/notifications/preferences/` - Get preferences
- `PATCH /api/v1/notifications/preferences/` - Update preferences

**Background Tasks**:
- Send email notifications (Celery)
- Send push notifications (Celery)
- Batch notification processing

**Estimated Effort**: 3-4 days (a delivery-task sketch follows below)
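
A minimal sketch of the email delivery task that would replace the Novu trigger functions; the model names, related names, and `mark_*` helpers are assumptions:

```python
# Hypothetical sketch of a Celery email-delivery task; names are illustrative.
from celery import shared_task
from django.core.mail import send_mail


@shared_task(bind=True, max_retries=3)
def send_email_notification(self, log_id: str) -> None:
    from apps.notifications.models import NotificationLog  # assumed app path

    log = NotificationLog.objects.select_related("user", "template").get(id=log_id)

    # Respect per-channel preferences before sending anything.
    if not log.user.notification_preferences.email_enabled:
        log.mark_skipped()  # assumed helper
        return

    try:
        send_mail(
            subject=log.template.render_subject(log.event_data),
            message=log.template.render_body(log.event_data),
            from_email=None,  # falls back to DEFAULT_FROM_EMAIL
            recipient_list=[log.user.email],
        )
        log.mark_delivered()  # assumed helper
    except Exception as exc:
        log.mark_failed(str(exc))  # assumed helper
        raise self.retry(exc=exc, countdown=30)
```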

### 5.2 HIGH PRIORITY (Enhanced Features)

#### User Ride Credits
**Impact**: High - tracks a user's ride history
**Tables Needed**:
- `user_ride_credits` - Credit tracking

**API Endpoints Needed**:
- `POST /api/v1/credits/` - Add credit
- `GET /api/v1/credits/` - List user's credits
- `GET /api/v1/users/{id}/credits/` - User's public credits
- `DELETE /api/v1/credits/{id}/` - Remove credit

**Estimated Effort**: 1 day (a model sketch follows below)
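
A minimal model sketch; the field names and app path are assumptions:

```python
# Hypothetical sketch of the credit model; names are illustrative.
import uuid

from django.conf import settings
from django.db import models


class UserRideCredit(models.Model):
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    ride = models.ForeignKey("entities.Ride", on_delete=models.CASCADE)  # assumed path
    first_ridden_on = models.DateField(null=True, blank=True)
    ride_count = models.PositiveIntegerField(default=1)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # One credit row per user/ride pair; repeat rides bump ride_count.
        unique_together = [("user", "ride")]
```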

#### Ride Detail Tables
**Impact**: High - richer data for enthusiasts
**Tables Needed**:
- `ride_coaster_stats` - Coaster-specific stats
- `ride_water_details` - Water ride details
- `ride_dark_details` - Dark ride details
- `ride_flat_details` - Flat ride details
- `ride_kiddie_details` - Kiddie ride details
- `ride_transportation_details` - Transport details
- `ride_former_names` - Name history
- `ride_technical_specs` - Technical specifications

**API Endpoints**: Extend existing ride endpoints

**Estimated Effort**: 2 days (a detail-table sketch follows below)
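
One plausible shape for these tables is a OneToOne "detail" model per ride category, which keeps the core `rides` row lean. A hedged sketch, with field choices borrowed from the schema reference later on this page:

```python
# Hypothetical sketch of one category-detail table; names are assumptions.
from django.db import models


class RideCoasterStats(models.Model):
    ride = models.OneToOneField(
        "entities.Ride",  # assumed app path
        primary_key=True,
        on_delete=models.CASCADE,
        related_name="coaster_stats",
    )
    track_material = models.CharField(max_length=50, blank=True)
    support_material = models.CharField(max_length=50, blank=True)
    propulsion_method = models.CharField(max_length=50, blank=True)
    inversions = models.PositiveIntegerField(null=True, blank=True)
    max_speed_kmh = models.DecimalField(
        max_digits=6, decimal_places=2, null=True, blank=True
    )
```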

#### User Sessions & Preferences
**Impact**: High - better UX
**Tables Needed**:
- `user_sessions` - Session tracking
- `user_preferences` - User settings

**API Endpoints**:
- `GET /api/v1/auth/sessions/` - List sessions
- `DELETE /api/v1/auth/sessions/{id}/` - Revoke session
- `GET /api/v1/users/preferences/` - Get preferences
- `PATCH /api/v1/users/preferences/` - Update preferences

**Estimated Effort**: 1 day

### 5.3 MEDIUM PRIORITY (Operational Features)

#### Reports System
**Impact**: Medium - content moderation
**Tables Needed**:
- `reports` - User reports

**API Endpoints**:
- `POST /api/v1/reports/` - Submit report
- `GET /api/v1/moderation/reports/` - List reports (mods only)
- `PATCH /api/v1/moderation/reports/{id}/` - Process report

**Estimated Effort**: 1-2 days

#### Admin Audit System
**Impact**: Medium - admin oversight
**Tables Needed**:
- `admin_audit_log` - Admin actions
- `admin_audit_details` - Detailed audit data
- `moderation_audit_log` - Mod actions
- `profile_audit_log` - Profile changes

**API Endpoints**: Admin-only endpoints

**Estimated Effort**: 2 days

#### Account Management
**Impact**: Medium - GDPR compliance
**Tables Needed**:
- `account_deletion_requests` - Deletion workflow

**API Endpoints**:
- `POST /api/v1/auth/request-deletion/` - Request deletion
- `POST /api/v1/auth/confirm-deletion/` - Confirm deletion
- `POST /api/v1/auth/cancel-deletion/` - Cancel deletion
- `GET /api/v1/auth/export-data/` - Export user data

**Estimated Effort**: 2 days

#### Contact System
**Impact**: Medium - customer support
**Tables Needed**:
- `contact_submissions` - Contact messages
- `contact_email_threads` - Email threads
- `contact_rate_limits` - Spam prevention

**API Endpoints**:
- `POST /api/v1/contact/` - Submit message
- `GET /api/v1/admin/contact/` - List messages

**Estimated Effort**: 1 day

### 5.4 LOW PRIORITY (Nice-to-Have)

#### Blog System
**Impact**: Low - content marketing
**Tables Needed**:
- `blog_posts` - Blog content

**Estimated Effort**: 1-2 days

#### Analytics System
**Impact**: Low - insights
**Tables Needed**:
- `entity_page_views` - View tracking

**Estimated Effort**: 1 day

#### Timeline Events
**Impact**: Low - historical tracking
**Tables Needed**:
- `entity_timeline_events` - Events
- `timeline_event_submissions` - User submissions

**Estimated Effort**: 1-2 days

---

## 6. Migration Phases

### Phase 1: Critical User Features (1-2 weeks)
**Goal**: Enable core user functionality

1. **Reviews System** (3 days)
   - Models: Review, ReviewPhoto, ReviewDeletion
   - API: Full CRUD + helpful voting
   - Frontend integration

2. **User Lists System** (3 days)
   - Models: UserTopList, ListItem
   - API: CRUD + reordering
   - Frontend integration

3. **Notifications System** (4 days)
   - Models: NotificationChannel, NotificationTemplate, NotificationLog, UserNotificationPreferences
   - API: List, mark read, preferences
   - Background tasks: Email, push notifications
   - Replace Novu integration

4. **User Ride Credits** (1 day)
   - Model: UserRideCredit
   - API: CRUD
   - Frontend integration

**Deliverable**: Users can review, create lists, track rides, and receive notifications

### Phase 2: Enhanced Data & Features (1 week)
**Goal**: Richer data and improved UX

1. **Ride Detail Tables** (2 days)
   - Models: RideCoasterStats, RideWaterDetails, RideDarkDetails, etc.
   - API: Extend ride endpoints
   - Frontend: Display detailed stats

2. **User Sessions & Preferences** (1 day)
   - Models: UserSession, UserPreferences
   - API: Session management, preferences
   - Frontend: Settings page

3. **User Blocking** (1 day)
   - Model: UserBlock
   - API: Block/unblock users
   - Frontend: Block UI

4. **Park Operating Hours** (1 day)
   - Model: ParkOperatingHours
   - API: CRUD
   - Frontend: Display hours

**Deliverable**: Richer entity data, better user control

### Phase 3: Moderation & Admin (1 week)
**Goal**: Complete moderation tools

1. **Reports System** (2 days)
   - Model: Report
   - API: Submit + moderate reports
   - Frontend: Report UI + mod queue

2. **Admin Audit System** (2 days)
   - Models: AdminAuditLog, ModerationAuditLog, ProfileAuditLog
   - API: Admin audit views
   - Frontend: Audit log viewer

3. **Enhanced Submission Features** (3 days)
   - Models: SubmissionDependency, SubmissionIdempotencyKey, ConflictResolution
   - API: Dependency tracking, conflict resolution
   - Frontend: Advanced submission UI

**Deliverable**: Complete moderation workflow

### Phase 4: Account & Compliance (3-4 days)
**Goal**: GDPR compliance and account management

1. **Account Deletion Workflow** (2 days)
   - Model: AccountDeletionRequest
   - API: Request, confirm, cancel deletion
   - Management commands: Process deletions
   - Frontend: Account settings

2. **Data Export** (1 day)
   - API: Export user data (GDPR)
   - Background task: Generate export

3. **Contact System** (1 day)
   - Models: ContactSubmission, ContactEmailThread, ContactRateLimit
   - API: Submit contact messages
   - Frontend: Contact form

**Deliverable**: GDPR compliance, user account management

### Phase 5: Background Tasks & Automation (1 week)
**Goal**: Replace Edge Functions with Celery tasks

1. **Setup Celery** (1 day)
   - Configure Celery with Redis/RabbitMQ
   - Set up periodic tasks

2. **Authentication Tasks** (1 day)
   - OAuth profile sync
   - MFA management
   - Session cleanup

3. **Moderation Tasks** (2 days)
   - Selective approval processing
   - Escalation notifications
   - Transaction monitoring

4. **Maintenance Tasks** (2 days)
   - Version cleanup
   - Ban expiration
   - Scheduled deletions
   - Orphaned image cleanup
   - Test data management

5. **Utility Tasks** (1 day)
   - Sitemap generation
   - Email validation
   - Location detection

**Deliverable**: All Edge Functions migrated to Celery

### Phase 6: Content & Analytics (Optional - 1 week)
**Goal**: Content features and insights

1. **Blog System** (2 days)
   - Model: BlogPost
   - API: CRUD
   - Frontend: Blog pages

2. **Analytics System** (2 days)
   - Model: EntityPageView
   - API: Analytics endpoints
   - Frontend: Analytics dashboard

3. **Timeline Events** (2 days)
   - Models: EntityTimelineEvent, TimelineEventSubmission
   - API: CRUD
   - Frontend: Timeline view

4. **Historical Data** (1 day)
   - Models: HistoricalPark, HistoricalRide, ParkLocationHistory
   - API: Historical queries
   - Frontend: History display

**Deliverable**: Content management, user insights

---

## 7. Technical Debt & Architecture

### 7.1 What's Working Well ✅

1. **Clean Architecture**
   - Separation of concerns (models, services, API endpoints)
   - Generic versioning system using ContentType
   - FSM-based moderation workflow

2. **Django Packages Used**
   - `django-ninja`: Modern API framework (excellent choice)
   - `django-fsm`: State machine for moderation
   - `django-lifecycle`: Model lifecycle hooks
   - `dirtyfields`: Track field changes

3. **Database Design**
   - UUID primary keys
   - Proper indexing
   - JSON fields for flexibility
   - Conditional PostGIS support

4. **Code Quality**
   - Well-documented models
   - Type hints in the API
   - Consistent naming

### 7.2 Areas for Improvement ⚠️

1. **Empty Models**
   - `apps/notifications/models.py` is essentially empty
   - `apps/reviews/models.py` doesn't exist

2. **Missing Services**
   - Need a service layer for complex business logic
   - Edge Function logic needs to be translated to services

3. **Testing**
   - No Django tests found
   - Need a comprehensive test suite

4. **Background Tasks**
   - Celery not yet configured
   - All Edge Function logic is currently synchronous

5. **Rate Limiting**
   - Not implemented in Django yet
   - Supabase has rate limiting tables

### 7.3 Recommended Architecture Changes

1. **Add Celery**
   ```
   django/
     celery.py              # Celery app configuration
     tasks/
       __init__.py
       notifications.py     # Notification tasks
       moderation.py        # Moderation tasks
       maintenance.py       # Cleanup tasks
       auth.py              # Auth tasks
   ```

2. **Add Service Layer**
   ```
   django/apps/*/services/
     __init__.py
     business_logic.py      # Complex operations
     email.py               # Email sending
     notifications.py       # Notification logic
   ```

3. **Add Tests** (a sample test follows below)
   ```
   django/apps/*/tests/
     __init__.py
     test_models.py
     test_services.py
     test_api.py
   ```
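
To make the layout concrete, a minimal sketch of one `test_models.py` using pytest with the pytest-django plugin (a plain `django.test.TestCase` works equally well); the app path is assumed, and the `Park` fields mirror the required columns documented in the schema reference below:

```python
# Hypothetical test sketch; app path and fixture values are illustrative.
import pytest
from django.db import IntegrityError

from apps.entities.models import Park  # assumed app path


@pytest.mark.django_db
def test_park_slug_is_unique():
    Park.objects.create(
        name="Cedar Point",
        slug="cedar-point",
        park_type="amusement_park",
        status="operating",
    )
    # The slug column is UNIQUE, so a duplicate must be rejected.
    with pytest.raises(IntegrityError):
        Park.objects.create(
            name="Cedar Point (duplicate)",
            slug="cedar-point",
            park_type="amusement_park",
            status="operating",
        )
```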

---

## 8. Estimated Timeline

### Minimum Viable Migration (Core Features Only)
**Timeline**: 3-4 weeks

- Phase 1: Critical User Features (2 weeks)
- Phase 2: Enhanced Data (1 week)
- Phase 5: Background Tasks (1 week)

**Result**: Feature parity for 80% of users

### Complete Migration (All Features)
**Timeline**: 6-8 weeks

- Phase 1: Critical User Features (2 weeks)
- Phase 2: Enhanced Data (1 week)
- Phase 3: Moderation & Admin (1 week)
- Phase 4: Account & Compliance (4 days)
- Phase 5: Background Tasks (1 week)
- Phase 6: Content & Analytics (1 week)
- Testing & Polish (1 week)

**Result**: 100% feature parity with Supabase

---

## 9. Risk Assessment

### High Risk
1. **Notification System Migration**
   - Currently using Novu (third-party service)
   - Need to replace with Django + Celery + an email/push provider
   - Risk: Feature gap if not implemented properly
   - Mitigation: Implement core notifications first, enhance later

2. **Background Task Migration**
   - 42 Edge Functions to migrate
   - Complex business logic in functions
   - Risk: Missing functionality
   - Mitigation: Systematic function-by-function migration

### Medium Risk
1. **Data Migration**
   - No existing data (stated: "no data to be worried about")
   - Risk: Low

2. **Frontend Integration**
   - Frontend expects specific Supabase patterns
   - Risk: API contract changes
   - Mitigation: Maintain compatible API responses

### Low Risk
1. **Core Entity Models**
   - Already implemented
   - Well-tested architecture

2. **Authentication**
   - Already implemented with JWT, OAuth, MFA
   - Solid foundation

---

## 10. Recommendations

### Immediate Actions (This Week)
1. ✅ Complete this audit
2. Implement the Reviews system (highest user impact)
3. Implement the User Lists system (popular feature)
4. Set up Celery infrastructure

### Short Term (Next 2 Weeks)
1. Implement the Notifications system
2. Implement User Ride Credits
3. Add ride detail tables
4. Begin Edge Function migration

### Medium Term (Next Month)
1. Complete all moderation features
2. Implement GDPR compliance features
3. Add the admin audit system
4. Complete Edge Function migration

### Long Term (Next 2 Months)
1. Add blog/content features
2. Implement analytics
3. Add timeline features
4. Comprehensive testing
5. Performance optimization

---

## 11. Success Criteria

### Migration Complete When:
- ✅ All core entity CRUD operations work
- ✅ All user features work (reviews, lists, credits)
- ✅ Notification system functional
- ✅ Moderation workflow complete
- ✅ All Edge Functions replaced
- ✅ GDPR compliance features implemented
- ✅ Test coverage >80%
- ✅ Frontend fully integrated
- ✅ Performance meets or exceeds Supabase

### Optional (Nice-to-Have):
- Blog system
- Analytics dashboard
- Timeline features
- Advanced admin features

---

## 12. Next Steps

1. **Review this audit** with stakeholders
2. **Prioritize phases** based on business needs
3. **Assign resources** to each phase
4. **Begin Phase 1** (Critical User Features)
5. **Set up CI/CD** for the Django backend
6. **Create a staging environment** for testing
7. **Plan the data cutover** (when ready to switch from Supabase)

---

## Appendix A: File Structure Analysis

```
django/
├── api/
│   └── v1/
│       ├── endpoints/
│       │   ├── auth.py (596 lines) ✅
│       │   ├── companies.py (254 lines) ✅
│       │   ├── moderation.py (496 lines) ✅
│       │   ├── parks.py (362 lines) ✅
│       │   ├── photos.py (600 lines) ✅
│       │   ├── ride_models.py (247 lines) ✅
│       │   ├── rides.py (360 lines) ✅
│       │   ├── search.py (438 lines) ✅
│       │   └── versioning.py (369 lines) ✅
│       └── api.py (159 lines) ✅
├── apps/
│   ├── core/ ✅ Complete
│   │   └── models.py (265 lines)
│   ├── users/ ✅ Complete (basic)
│   │   └── models.py (258 lines)
│   ├── entities/ ✅ Complete
│   │   └── models.py (931 lines)
│   ├── media/ ✅ Complete
│   │   └── models.py (267 lines)
│   ├── moderation/ ✅ Complete
│   │   └── models.py (478 lines)
│   ├── versioning/ ✅ Complete
│   │   └── models.py (288 lines)
│   ├── notifications/ ❌ Empty (1 line)
│   └── reviews/ ❌ Missing
└── config/ ✅ Complete
    └── settings/
```

## Appendix B: Database Table Checklist

**✅ Implemented (15 tables)**:
- users (via Django auth)
- user_roles
- user_profiles
- countries
- subdivisions
- localities
- companies
- parks
- rides
- ride_models
- photos
- content_submissions
- submission_items
- moderation_locks
- entity_versions

**❌ Missing (60+ tables)**:
- reviews & review_photos
- user_ride_credits
- user_top_lists & list_items
- user_blocks
- user_sessions
- user_preferences
- user_notification_preferences
- notification_channels, notification_templates, notification_logs
- ride_coaster_stats, ride_*_details (7 tables)
- ride_former_names, ride_name_history
- reports
- contact_submissions, contact_email_threads
- admin_audit_log, moderation_audit_log, profile_audit_log
- account_deletion_requests
- park_operating_hours
- historical_parks, historical_rides
- entity_timeline_events
- blog_posts
- entity_page_views
- And 30+ more system/tracking tables

---

**End of Audit**

@@ -1,636 +0,0 @@

# Submission Pipeline Schema Reference

**Critical Document**: This reference maps all entity types to their exact database schema fields across the entire submission pipeline to prevent schema mismatches.

**Last Updated**: 2025-11-08
**Status**: ✅ All schemas audited and verified

---

## Table of Contents

1. [Overview](#overview)
2. [Parks](#parks)
3. [Rides](#rides)
4. [Companies](#companies)
5. [Ride Models](#ride-models)
6. [Photos](#photos)
7. [Timeline Events](#timeline-events)
8. [Critical Functions Reference](#critical-functions-reference)
9. [Common Pitfalls](#common-pitfalls)

---

## Overview

### Pipeline Flow

```
User Input → *_submissions table → submission_items → Moderation →
process_approval_transaction → create/update_entity_from_submission →
Main entity table → Version trigger → *_versions table
```

### Entity Types

- `park` - Theme parks and amusement parks
- `ride` - Individual rides and attractions
- `company` - Used for: `manufacturer`, `operator`, `designer`, `property_owner`
- `ride_model` - Ride model templates
- `photo` - Entity photos
- `timeline_event` - Historical events

---

## Parks

### Main Table: `parks`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_type` (text, NOT NULL) - Values: `theme_park`, `amusement_park`, `water_park`, etc.
- `status` (text, NOT NULL) - Values: `operating`, `closed`, `under_construction`, etc.

**Optional Fields:**
- `description` (text)
- `location_id` (uuid, FK → locations)
- `operator_id` (uuid, FK → companies)
- `property_owner_id` (uuid, FK → companies)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text) - Values: `year`, `month`, `day`
- `closing_date_precision` (text)
- `website_url` (text)
- `phone` (text)
- `email` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)

**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `park_submissions`

**Schema identical to the main table** (excluding auto-generated fields like `id` and timestamps)

**Additional Fields:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `temp_location_data` (jsonb) - For pending location creation

### Version Table: `park_versions`

**All main table fields PLUS:**
- `version_id` (uuid, PK)
- `park_id` (uuid, NOT NULL, FK → parks)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL) - Values: `created`, `updated`, `restored`
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---

## Rides

### Main Table: `rides`

**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_id` (uuid, NOT NULL, FK → parks)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
  - Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`
- `status` (text, NOT NULL)
  - Values: `operating`, `closed`, `under_construction`, `sbno`, etc.

**⚠️ IMPORTANT: The `rides` table does NOT have a `ride_type` column!**
- `ride_type` only exists in the `ride_models` table
- Using `ride_type` in rides updates will cause a "column does not exist" error
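
This pitfall is exactly what the schema-validation workflow at the top of this page greps for. A small illustrative guard in Python — the column set here is abbreviated and hand-written for the sketch, not generated from the live schema:

```python
# Hypothetical sketch: reject payload keys that are not real `rides` columns.
RIDES_COLUMNS = {
    "id", "name", "slug", "park_id", "category", "status",
    "manufacturer_id", "designer_id", "ride_model_id", "description",
    # ... remaining columns from this reference ...
}


def validate_ride_update(payload: dict) -> None:
    unknown = set(payload) - RIDES_COLUMNS
    if unknown:
        # `ride_type` would land here: it lives on ride_models, not rides.
        raise ValueError(f"Unknown rides columns: {sorted(unknown)}")
```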
**Optional Relationship Fields:**

- `manufacturer_id` (uuid, FK → companies)
- `designer_id` (uuid, FK → companies)
- `ride_model_id` (uuid, FK → ride_models)

**Optional Descriptive Fields:**

- `description` (text)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text)
- `closing_date_precision` (text)

**Optional Technical Fields:**

- `height_requirement` (integer) - Height requirement in cm
- `age_requirement` (integer)
- `max_speed_kmh` (numeric)
- `duration_seconds` (integer)
- `capacity_per_hour` (integer)
- `max_g_force` (numeric)
- `inversions` (integer) - Number of inversions
- `length_meters` (numeric)
- `max_height_meters` (numeric)
- `drop_height_meters` (numeric)

**Category-Specific Fields:**

*Roller Coasters:*

- `ride_sub_type` (text)
- `coaster_type` (text)
- `seating_type` (text)
- `intensity_level` (text)
- `track_material` (text)
- `support_material` (text)
- `propulsion_method` (text)

*Water Rides:*

- `water_depth_cm` (integer)
- `splash_height_meters` (numeric)
- `wetness_level` (text)
- `flume_type` (text)
- `boat_capacity` (integer)

*Dark Rides:*

- `theme_name` (text)
- `story_description` (text)
- `show_duration_seconds` (integer)
- `animatronics_count` (integer)
- `projection_type` (text)
- `ride_system` (text)
- `scenes_count` (integer)

*Flat Rides:*

- `rotation_type` (text)
- `motion_pattern` (text)
- `platform_count` (integer)
- `swing_angle_degrees` (numeric)
- `rotation_speed_rpm` (numeric)
- `arm_length_meters` (numeric)
- `max_height_reached_meters` (numeric)

*Kids Rides:*

- `min_age` (integer)
- `max_age` (integer)
- `educational_theme` (text)
- `character_theme` (text)

*Transport:*

- `transport_type` (text)
- `route_length_meters` (numeric)
- `stations_count` (integer)
- `vehicle_capacity` (integer)
- `vehicles_count` (integer)
- `round_trip_duration_seconds` (integer)

**Image Fields:**

- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
- `image_url` (text) - Legacy field

**Metadata Fields:**

- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)
### Submission Table: `ride_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**

- `submission_id` (uuid, NOT NULL, FK → content_submissions)
### Version Table: `ride_versions`

**All Main Table Fields PLUS:**

- `version_id` (uuid, PK)
- `ride_id` (uuid, NOT NULL, FK → rides)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)
**⚠️ Field Name Differences (Version Table vs Main Table):**

- `height_requirement_cm` in versions → `height_requirement` in rides
- `gforce_max` in versions → `max_g_force` in rides
- `inversions_count` in versions → `inversions` in rides
- `height_meters` in versions → `max_height_meters` in rides
- `drop_meters` in versions → `drop_height_meters` in rides
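These renames matter whenever rows are copied between the two tables; a minimal sketch of a version insert that applies the mapping (column lists abbreviated, `version_number` and `change_type` values illustrative):

```sql
-- Abbreviated: other main-table fields copy across under the same name.
INSERT INTO ride_versions
  (ride_id, version_number, change_type,
   height_requirement_cm, gforce_max, inversions_count,
   height_meters, drop_meters)
SELECT
  r.id, 1, 'created',
  r.height_requirement, r.max_g_force, r.inversions,
  r.max_height_meters, r.drop_height_meters
FROM rides r
WHERE r.id = $1;
```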
---

## Companies

**Used For**: `manufacturer`, `operator`, `designer`, `property_owner`

### Main Table: `companies`

**Required Fields:**

- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `company_type` (text, NOT NULL)
  - Values: `manufacturer`, `operator`, `designer`, `property_owner`

**Optional Fields:**

- `description` (text)
- `person_type` (text, default: 'company')
  - Values: `company`, `individual`
- `founded_year` (integer)
- `founded_date` (date)
- `founded_date_precision` (text)
- `headquarters_location` (text)
- `website_url` (text)
- `logo_url` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
**Metadata Fields:**

- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `company_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**

- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `company_versions`

**All Main Table Fields PLUS:**

- `version_id` (uuid, PK)
- `company_id` (uuid, NOT NULL, FK → companies)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)
---

## Ride Models

### Main Table: `ride_models`

**Required Fields:**

- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `manufacturer_id` (uuid, NOT NULL, FK → companies)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
  - Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`
**Optional Fields:**

- `ride_type` (text) ⚠️ **This field exists in ride_models but NOT in rides**
  - More specific classification than `category`
  - Example: category = `roller_coaster`, ride_type = `inverted_coaster` (see the sketch after this list)
- `description` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
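A minimal sketch of an insert carrying both classifications (name and manufacturer UUID are placeholders):

```sql
-- ride_models takes both the broad category and the narrower ride_type.
INSERT INTO ride_models (name, slug, manufacturer_id, category, ride_type)
VALUES ('Example Inverted Model', 'example-inverted-model',
        '00000000-0000-0000-0000-000000000000',
        'roller_coaster', 'inverted_coaster');
```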
**Metadata Fields:**

- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `installations_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `ride_model_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**

- `submission_id` (uuid, NOT NULL, FK → content_submissions)

### Version Table: `ride_model_versions`

**All Main Table Fields PLUS:**

- `version_id` (uuid, PK)
- `ride_model_id` (uuid, NOT NULL, FK → ride_models)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)

---
## Photos

### Main Table: `photos`

**Required Fields:**

- `id` (uuid, PK)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `uploader_id` (uuid, NOT NULL, FK → auth.users)

**Optional Fields:**

- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer, default: 0)
- `is_primary` (boolean, default: false)
- `status` (text, default: 'active')

**Metadata Fields:**

- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)

### Submission Table: `photo_submissions`

**Required Fields:**

- `id` (uuid, PK)
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)

**Optional Fields:**

- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer)

**Note**: Photos do NOT have version tables - they are immutable after approval
---

## Timeline Events

### Main Table: `entity_timeline_events`

**Required Fields:**

- `id` (uuid, PK)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `event_type` (text, NOT NULL)
  - Values: `opening`, `closing`, `relocation`, `renovation`, `name_change`, `ownership_change`, etc.
- `title` (text, NOT NULL)
- `event_date` (date, NOT NULL)
**Optional Fields:**

- `description` (text)
- `event_date_precision` (text, default: 'day')
- `from_value` (text)
- `to_value` (text) - with `from_value`, records the transition for change-style events (see the sketch after this list)
- `from_entity_id` (uuid)
- `to_entity_id` (uuid)
- `from_location_id` (uuid)
- `to_location_id` (uuid)
- `is_public` (boolean, default: true)
- `display_order` (integer, default: 0)
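An illustrative sketch of a `name_change` event using the `from_value`/`to_value` pair (the UUID and names are placeholders):

```sql
-- Entity UUID and names are placeholders.
INSERT INTO entity_timeline_events
  (entity_type, entity_id, event_type, title, event_date,
   event_date_precision, from_value, to_value)
VALUES
  ('ride', '00000000-0000-0000-0000-000000000000',
   'name_change', 'Ride renamed', '2020-05-01',
   'month', 'Old Name', 'New Name');
```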
**Approval Fields:**

- `created_by` (uuid, FK → auth.users)
- `approved_by` (uuid, FK → auth.users)
- `submission_id` (uuid, FK → content_submissions)

**Metadata Fields:**

- `created_at` (timestamptz)
- `updated_at` (timestamptz)

### Submission Table: `timeline_event_submissions`

**Schema Identical to Main Table** (excluding auto-generated fields)

**Additional Field:**

- `submission_id` (uuid, NOT NULL, FK → content_submissions)

**Note**: Timeline events do NOT have version tables

---
## Critical Functions Reference

### 1. `create_entity_from_submission`

**Purpose**: Creates new entities from approved submissions

**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Entity data from submission
- `p_created_by` (uuid) - User who created it
- `p_submission_id` (uuid) - Source submission

**Critical Requirements**:
- ✅ MUST extract `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides (doesn't exist)
- ✅ MUST use `ride_type` for ride_models (does exist)
- ✅ MUST handle all required NOT NULL fields

**Returns**: `uuid` - New entity ID
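A minimal sketch of a call, assuming the function is invoked directly from SQL with named arguments (all UUIDs and values below are placeholders):

```sql
-- All UUIDs below are placeholders.
SELECT create_entity_from_submission(
  p_entity_type   => 'ride',
  p_data          => '{"name": "Example Coaster", "slug": "example-coaster",
                       "park_id": "00000000-0000-0000-0000-000000000000",
                       "category": "roller_coaster", "status": "operating"}'::jsonb,
  p_created_by    => '00000000-0000-0000-0000-000000000001'::uuid,
  p_submission_id => '00000000-0000-0000-0000-000000000002'::uuid
);
```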
### 2. `update_entity_from_submission`

**Purpose**: Updates existing entities from approved edits

**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Updated entity data
- `p_entity_id` (uuid) - Existing entity ID
- `p_changed_by` (uuid) - User who changed it

**Critical Requirements**:
- ✅ MUST use COALESCE to preserve existing values
- ✅ MUST include `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides
- ✅ MUST use `ride_type` for ride_models
- ✅ MUST update `updated_at` timestamp

**Returns**: `uuid` - Updated entity ID
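A minimal sketch of a partial edit, assuming the same named-argument invocation style (UUIDs are placeholders); only the supplied keys change, since COALESCE inside the function preserves the rest:

```sql
SELECT update_entity_from_submission(
  p_entity_type => 'ride',
  p_data        => '{"description": "Updated description only"}'::jsonb,
  p_entity_id   => '00000000-0000-0000-0000-000000000000'::uuid,
  p_changed_by  => '00000000-0000-0000-0000-000000000001'::uuid
);
```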
### 3. `process_approval_transaction`

**Purpose**: Atomic transaction for selective approval

**Parameters**:
- `p_submission_id` (uuid)
- `p_item_ids` (uuid[]) - Specific items to approve
- `p_moderator_id` (uuid)
- `p_change_reason` (text)

**Critical Requirements**:
- ✅ MUST validate all item dependencies first
- ✅ MUST extract correct fields from submission tables
- ✅ MUST set session variables for triggers
- ✅ MUST handle rollback on any error

**Called By**: Edge function `process-selective-approval`
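A minimal sketch of a selective approval, assuming direct SQL invocation (all UUIDs are placeholders):

```sql
-- Approve two of a submission's items; the rest stay pending.
SELECT process_approval_transaction(
  p_submission_id => '00000000-0000-0000-0000-000000000000'::uuid,
  p_item_ids      => ARRAY['00000000-0000-0000-0000-000000000001',
                           '00000000-0000-0000-0000-000000000002']::uuid[],
  p_moderator_id  => '00000000-0000-0000-0000-000000000003'::uuid,
  p_change_reason => 'Approved after review'
);
```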
### 4. `create_submission_with_items`

**Purpose**: Creates multi-item submissions atomically

**Parameters**:
- `p_submission_id` (uuid)
- `p_entity_type` (text)
- `p_action_type` (text) - `create` or `edit`
- `p_items` (jsonb) - Array of submission items
- `p_user_id` (uuid)

**Critical Requirements**:
- ✅ MUST resolve dependencies in order
- ✅ MUST validate all required fields per entity type
- ✅ MUST link items to submission correctly
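A minimal single-item sketch, again assuming named-argument SQL invocation (UUIDs and item payload are placeholders):

```sql
SELECT create_submission_with_items(
  p_submission_id => '00000000-0000-0000-0000-000000000000'::uuid,
  p_entity_type   => 'ride',
  p_action_type   => 'create',
  p_items         => '[{"name": "Example Coaster", "slug": "example-coaster",
                        "category": "roller_coaster", "status": "operating"}]'::jsonb,
  p_user_id       => '00000000-0000-0000-0000-000000000001'::uuid
);
```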
---

## Common Pitfalls

### 1. ❌ Using `ride_type` for rides

```sql
-- WRONG
UPDATE rides SET ride_type = 'inverted_coaster' WHERE id = $1;
-- ERROR: column "ride_type" does not exist

-- CORRECT
UPDATE rides SET category = 'roller_coaster' WHERE id = $1;
```

### 2. ❌ Missing `category` field

```sql
-- WRONG - Missing required category
INSERT INTO rides (name, slug, park_id, status) VALUES (...);
-- ERROR: null value violates not-null constraint

-- CORRECT
INSERT INTO rides (name, slug, park_id, category, status) VALUES (..., 'roller_coaster', ...);
```

### 3. ❌ Wrong column names in version tables

```sql
-- WRONG
SELECT height_requirement FROM ride_versions WHERE ride_id = $1;
-- ERROR: column "height_requirement" does not exist

-- CORRECT
SELECT height_requirement_cm FROM ride_versions WHERE ride_id = $1;
```

### 4. ❌ Forgetting COALESCE in updates

```sql
-- WRONG - Overwrites fields with NULL
UPDATE rides SET
  name = (p_data->>'name'),
  description = (p_data->>'description')
WHERE id = $1;

-- CORRECT - Preserves existing values if not provided
UPDATE rides SET
  name = COALESCE(p_data->>'name', name),
  description = COALESCE(p_data->>'description', description)
WHERE id = $1;
```

### 5. ❌ Not handling submission_id in version triggers

```sql
-- WRONG - Version doesn't link back to submission
INSERT INTO ride_versions (ride_id, ...) VALUES (...);

-- CORRECT - Trigger must read the session variable
v_submission_id := current_setting('app.submission_id', true)::uuid;
INSERT INTO ride_versions (ride_id, submission_id, ...) VALUES (..., v_submission_id, ...);
```
---

## Validation Checklist

Before deploying any submission pipeline changes:

- [ ] All entity tables have matching submission tables
- [ ] All required NOT NULL fields are included in CREATE functions
- [ ] All required NOT NULL fields are included in UPDATE functions
- [ ] `category` is extracted for rides and ride_models
- [ ] `ride_type` is NOT used for rides
- [ ] `ride_type` IS used for ride_models
- [ ] COALESCE is used for all UPDATE statements
- [ ] Version table column name differences are handled
- [ ] Session variables are set for version triggers
- [ ] Foreign key relationships are validated
- [ ] Dependency resolution works correctly
- [ ] Error handling and rollback logic is present
---

## Maintenance

**When adding new entity types** (a skeleton migration for steps 1-3 is sketched after this list):

1. Create main table with all fields
2. Create matching submission table + `submission_id` FK
3. Create version table with all fields + version metadata
4. Add case to `create_entity_from_submission`
5. Add case to `update_entity_from_submission`
6. Add case to `process_approval_transaction`
7. Add case to `create_submission_with_items`
8. Create version trigger for main table
9. Update this documentation
10. Run full test suite
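A minimal skeleton for steps 1-3, assuming a hypothetical `events` entity; real migrations also need the full column list, version triggers, RLS policies, and indexes:

```sql
-- Hypothetical entity; column lists abbreviated to the shared shape.
CREATE TABLE events (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  name text NOT NULL,
  slug text NOT NULL UNIQUE,
  created_at timestamptz DEFAULT now(),
  updated_at timestamptz DEFAULT now(),
  is_test_data boolean DEFAULT false
);

CREATE TABLE event_submissions (
  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  submission_id uuid NOT NULL REFERENCES content_submissions (id),
  name text NOT NULL,
  slug text NOT NULL
);

CREATE TABLE event_versions (
  version_id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
  event_id uuid NOT NULL REFERENCES events (id),
  version_number integer NOT NULL,
  change_type version_change_type NOT NULL,
  change_reason text,
  is_current boolean DEFAULT true,
  created_by uuid,
  created_at timestamptz DEFAULT now(),
  submission_id uuid REFERENCES content_submissions (id),
  name text NOT NULL,
  slug text NOT NULL
);
```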
**When modifying schemas:**

1. Check if field exists in ALL three tables (main, submission, version)
2. Update ALL three tables in migration
3. Update ALL functions that reference the field
4. Update this documentation
5. Test create, update, and rollback flows

---

## Related Documentation

- [Submission Pipeline Overview](./README.md)
- [Versioning System](../versioning/README.md)
- [Moderation Workflow](../moderation/README.md)
- [Migration Guide](../versioning/MIGRATION.md)
@@ -1,402 +0,0 @@
# Schema Validation Setup Guide

This guide explains how to set up and use the automated schema validation tools to prevent field mismatches in the submission pipeline.
## Overview

The validation system consists of three layers:

1. **Pre-migration Script** - Quick validation before deploying migrations
2. **Integration Tests** - Comprehensive Playwright tests for CI/CD
3. **GitHub Actions** - Automated checks on every pull request
## Quick Start

### 1. Add NPM Scripts

Add these scripts to your `package.json`:

```json
{
  "scripts": {
    "validate-schema": "tsx scripts/validate-schema.ts",
    "test:schema": "playwright test schema-validation",
    "test:schema:ui": "playwright test schema-validation --ui",
    "pre-migrate": "npm run validate-schema"
  }
}
```

### 2. Environment Variables

Create a `.env.test` file:

```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**⚠️ Important**: Never commit this file! Add it to `.gitignore`:

```gitignore
.env.test
.env.local
```
### 3. Install Dependencies

If not already installed:

```bash
npm install --save-dev @supabase/supabase-js @playwright/test tsx
```
## Using the Validation Tools

### Pre-Migration Validation Script

**When to use**: Before applying any database migration

**Run manually:**
```bash
npm run validate-schema
```

**What it checks:**
- ✅ Submission tables match main tables
- ✅ Version tables have all required fields
- ✅ Critical fields are correct (e.g., `category` vs `ride_type`)
- ✅ Database functions exist and are accessible
**Example output:**
```
🔍 Starting schema validation...

Submission Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: submission table matches main table
✅ Rides: submission table matches main table
✅ Companies: submission table matches main table
✅ Ride Models: submission table matches main table

Version Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: version table has all fields
✅ Rides: version table has all fields
✅ Companies: version table has all fields
✅ Ride Models: version table has all fields

Critical Fields:
────────────────────────────────────────────────────────────────────────────────
✅ rides table does NOT have ride_type column
✅ rides table has category column
✅ ride_models has both category and ride_type

Functions:
────────────────────────────────────────────────────────────────────────────────
✅ create_entity_from_submission exists and is accessible
✅ update_entity_from_submission exists and is accessible
✅ process_approval_transaction exists and is accessible

════════════════════════════════════════════════════════════════════════════════
Total: 14 passed, 0 failed
════════════════════════════════════════════════════════════════════════════════

✅ All schema validations passed. Safe to deploy.
```
### Integration Tests

**When to use**: In CI/CD, before merging PRs, after major changes

**Run all tests:**
```bash
npm run test:schema
```

**Run in UI mode (for debugging):**
```bash
npm run test:schema:ui
```

**Run a specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**What it tests:**
- All pre-migration script checks PLUS:
  - Field-by-field data type comparison
  - NOT NULL constraint validation
  - Foreign key existence checks
  - Known field name variations (e.g., `height_requirement_cm` vs `height_requirement`)
### GitHub Actions (Automated)

**Automatically runs on:**
- Every pull request that touches:
  - `supabase/migrations/**`
  - `src/lib/moderation/**`
  - `supabase/functions/**`
- Pushes to `main` or `develop` branches
- Manual workflow dispatch

**What it does:**
1. Runs the validation script
2. Runs integration tests
3. Checks for breaking migration patterns
4. Validates migration file naming
5. Comments on PRs with helpful guidance if tests fail
## Workflow Examples

### Before Creating a Migration

```bash
# 1. Make schema changes locally
# 2. Validate before creating migration
npm run validate-schema

# 3. If validation passes, create migration
supabase db diff -f add_new_field

# 4. Run validation again
npm run validate-schema

# 5. Commit and push
git add .
git commit -m "Add new field to rides table"
git push
```

### After Modifying Entity Schemas

```bash
# 1. Modified rides table schema
# 2. Run full test suite
npm run test:schema

# 3. Check specific validation
npx playwright test schema-validation --grep "rides"

# 4. Fix any issues
# 5. Re-run tests
npm run test:schema
```
### During Code Review

**PR Author:**
1. Ensure all validation tests pass locally
2. Push changes
3. Wait for GitHub Actions to complete
4. Address any automated feedback

**Reviewer:**
1. Check that GitHub Actions passed
2. Review schema changes in migrations
3. Verify documentation was updated
4. Approve if all checks pass
## Common Issues and Solutions

### Issue: "Missing fields" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Missing fields: category
```

**Cause**: Field was added to the main table but not the submission table

**Solution:**
```sql
-- In your migration file
-- (if the table already has rows, add a DEFAULT or backfill before enforcing NOT NULL)
ALTER TABLE ride_submissions ADD COLUMN category TEXT NOT NULL;
```
### Issue: "Type mismatch" Error

**Symptom:**
```
❌ Rides: submission table matches main table
   └─ Type mismatches: max_speed_kmh: main=numeric, submission=integer
```

**Cause**: Data types don't match between tables

**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions
  ALTER COLUMN max_speed_kmh TYPE NUMERIC USING max_speed_kmh::numeric;
```
### Issue: "Column does not exist" in Production

**Symptom**: Approval fails with `column "category" does not exist`

**Immediate action:**
1. Run the validation script to identify the issue
2. Create an emergency migration to add the missing field
3. Deploy immediately
4. Update functions if needed

**Prevention**: Always run validation before deploying
### Issue: Tests Pass Locally but Fail in CI

**Possible causes:**
- Different database state in CI vs local
- Missing environment variables
- Outdated schema in the test database

**Solution:**
```bash
# Pull latest schema
supabase db pull

# Reset local database
supabase db reset

# Re-run tests
npm run test:schema
```
## Best Practices

### ✅ Do's

- ✅ Run validation script before every migration
- ✅ Run integration tests before merging PRs
- ✅ Update all three tables when adding fields (main, submission, version)
- ✅ Document field name variations in tests
- ✅ Check GitHub Actions results before merging
- ✅ Keep SCHEMA_REFERENCE.md up to date

### ❌ Don'ts

- ❌ Don't skip validation "because it's a small change"
- ❌ Don't add fields to only main tables
- ❌ Don't ignore failing tests
- ❌ Don't bypass CI checks
- ❌ Don't commit service role keys
- ❌ Don't modify submission pipeline functions without testing
## Continuous Integration Setup

### GitHub Secrets

Add to your repository secrets:

```
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

**Steps:**
1. Go to repository Settings → Secrets and variables → Actions
2. Click "New repository secret"
3. Name: `SUPABASE_SERVICE_ROLE_KEY`
4. Value: Your service role key from the Supabase dashboard
5. Save
### Branch Protection Rules

Recommended settings:

```
Branch: main
✓ Require status checks to pass before merging
  ✓ validate-schema (Schema Validation)
  ✓ migration-safety-check (Migration Safety Check)
✓ Require branches to be up to date before merging
```
## Troubleshooting

### Script Won't Run

**Error:** `tsx: command not found`

**Solution:**
```bash
npm install -g tsx
# or
npx tsx scripts/validate-schema.ts
```

### Authentication Errors

**Error:** `Invalid API key`

**Solution:**
1. Check that `.env.test` has the correct service role key
2. Verify the key has not expired
3. Ensure the environment variable is loaded:
   ```bash
   source .env.test
   npm run validate-schema
   ```

### Tests Timeout

**Error:** Tests time out after 30 seconds

**Solution:**
```bash
# Increase the timeout
npx playwright test schema-validation --timeout=60000
```
## Maintenance

### Adding New Entity Types

When adding a new entity type (e.g., `events`):

1. **Update validation script:**

   ```typescript
   // In scripts/validate-schema.ts
   await validateSubmissionTable('events', 'event_submissions', 'Events');
   await validateVersionTable('events', 'event_versions', 'Events');
   ```

2. **Update integration tests:**

   ```typescript
   // In tests/integration/schema-validation.test.ts
   test('events: submission table matches main table schema', async () => {
     // Add test logic
   });
   ```

3. **Update documentation:**
   - `docs/submission-pipeline/SCHEMA_REFERENCE.md`
   - This file (`VALIDATION_SETUP.md`)

### Updating Field Mappings

When version tables use different field names:

```typescript
// In both script and tests
const fieldMapping: { [key: string]: string } = {
  'new_main_field': 'version_field_name',
};
```
## Related Documentation

- [Schema Reference](./SCHEMA_REFERENCE.md) - Complete field mappings
- [Integration Tests README](../../tests/integration/README.md) - Detailed test documentation
- [Submission Pipeline](./README.md) - Pipeline overview
- [Versioning System](../versioning/README.md) - Version table details

## Support

**Questions?** Check the documentation above or review existing migration files.

**Found a bug in validation?** Open an issue with:
- Expected behavior
- Actual behavior
- Validation script output
- Database schema snippets
@@ -1,332 +0,0 @@
#!/usr/bin/env tsx
/**
 * Schema Validation Script
 *
 * Pre-migration validation script that checks schema consistency
 * across the submission pipeline before deploying changes.
 *
 * Usage:
 *   npm run validate-schema
 *   or
 *   tsx scripts/validate-schema.ts
 *
 * Exit codes:
 *   0 = All validations passed
 *   1 = Validation failures detected
 */

import { createClient } from '@supabase/supabase-js';

const SUPABASE_URL = 'https://ydvtmnrszybqnbcqbdcy.supabase.co';
const SUPABASE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;

if (!SUPABASE_KEY) {
  console.error('❌ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
  process.exit(1);
}

const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);

interface ValidationResult {
  category: string;
  test: string;
  passed: boolean;
  message?: string;
}

const results: ValidationResult[] = [];
async function getTableColumns(tableName: string): Promise<Set<string>> {
  const { data, error } = await supabase
    .from('information_schema.columns' as any)
    .select('column_name')
    .eq('table_schema', 'public')
    .eq('table_name', tableName);

  if (error) throw error;

  return new Set(data?.map((row: any) => row.column_name) || []);
}
async function validateSubmissionTable(
  mainTable: string,
  submissionTable: string,
  entityName: string
): Promise<void> {
  const mainColumns = await getTableColumns(mainTable);
  const submissionColumns = await getTableColumns(submissionTable);

  const excludedFields = new Set([
    'id', 'created_at', 'updated_at', 'is_test_data',
    'view_count_all', 'view_count_30d', 'view_count_7d',
    'average_rating', 'review_count', 'installations_count',
  ]);

  const missingFields: string[] = [];

  for (const field of mainColumns) {
    if (excludedFields.has(field)) continue;
    if (!submissionColumns.has(field)) {
      missingFields.push(field);
    }
  }

  if (missingFields.length === 0) {
    results.push({
      category: 'Submission Tables',
      test: `${entityName}: submission table matches main table`,
      passed: true,
    });
  } else {
    results.push({
      category: 'Submission Tables',
      test: `${entityName}: submission table matches main table`,
      passed: false,
      message: `Missing fields: ${missingFields.join(', ')}`,
    });
  }
}
async function validateVersionTable(
  mainTable: string,
  versionTable: string,
  entityName: string
): Promise<void> {
  const mainColumns = await getTableColumns(mainTable);
  const versionColumns = await getTableColumns(versionTable);

  const excludedFields = new Set([
    'id', 'created_at', 'updated_at', 'is_test_data',
    'view_count_all', 'view_count_30d', 'view_count_7d',
    'average_rating', 'review_count', 'installations_count',
  ]);

  const fieldMapping: { [key: string]: string } = {
    'height_requirement': 'height_requirement_cm',
    'max_g_force': 'gforce_max',
    'inversions': 'inversions_count',
    'max_height_meters': 'height_meters',
    'drop_height_meters': 'drop_meters',
  };

  const requiredVersionFields = new Set([
    'version_id', 'version_number', 'change_type', 'change_reason',
    'is_current', 'created_by', 'submission_id', 'is_test_data',
  ]);

  const missingMainFields: string[] = [];
  const missingVersionFields: string[] = [];

  // Check main table fields exist in version table
  for (const field of mainColumns) {
    if (excludedFields.has(field)) continue;

    const mappedField = fieldMapping[field] || field;
    if (!versionColumns.has(field) && !versionColumns.has(mappedField)) {
      missingMainFields.push(field);
    }
  }

  // Check version metadata fields exist
  for (const field of requiredVersionFields) {
    if (!versionColumns.has(field)) {
      missingVersionFields.push(field);
    }
  }

  if (missingMainFields.length === 0 && missingVersionFields.length === 0) {
    results.push({
      category: 'Version Tables',
      test: `${entityName}: version table has all fields`,
      passed: true,
    });
  } else {
    const messages: string[] = [];
    if (missingMainFields.length > 0) {
      messages.push(`Missing main fields: ${missingMainFields.join(', ')}`);
    }
    if (missingVersionFields.length > 0) {
      messages.push(`Missing version fields: ${missingVersionFields.join(', ')}`);
    }

    results.push({
      category: 'Version Tables',
      test: `${entityName}: version table has all fields`,
      passed: false,
      message: messages.join('; '),
    });
  }
}
async function validateCriticalFields(): Promise<void> {
  const ridesColumns = await getTableColumns('rides');
  const rideModelsColumns = await getTableColumns('ride_models');

  // Rides should NOT have ride_type
  if (!ridesColumns.has('ride_type')) {
    results.push({
      category: 'Critical Fields',
      test: 'rides table does NOT have ride_type column',
      passed: true,
    });
  } else {
    results.push({
      category: 'Critical Fields',
      test: 'rides table does NOT have ride_type column',
      passed: false,
      message: 'rides table incorrectly has ride_type column',
    });
  }

  // Rides MUST have category
  if (ridesColumns.has('category')) {
    results.push({
      category: 'Critical Fields',
      test: 'rides table has category column',
      passed: true,
    });
  } else {
    results.push({
      category: 'Critical Fields',
      test: 'rides table has category column',
      passed: false,
      message: 'rides table is missing required category column',
    });
  }

  // Ride models must have both category and ride_type
  if (rideModelsColumns.has('category') && rideModelsColumns.has('ride_type')) {
    results.push({
      category: 'Critical Fields',
      test: 'ride_models has both category and ride_type',
      passed: true,
    });
  } else {
    const missing: string[] = [];
    if (!rideModelsColumns.has('category')) missing.push('category');
    if (!rideModelsColumns.has('ride_type')) missing.push('ride_type');

    results.push({
      category: 'Critical Fields',
      test: 'ride_models has both category and ride_type',
      passed: false,
      message: `ride_models is missing: ${missing.join(', ')}`,
    });
  }
}
async function validateFunctions(): Promise<void> {
  const functionsToCheck = [
    'create_entity_from_submission',
    'update_entity_from_submission',
    'process_approval_transaction',
  ];

  for (const funcName of functionsToCheck) {
    try {
      const { error } = await supabase
        .rpc('pg_catalog.pg_function_is_visible' as any, {
          funcid: `public.${funcName}`,
        } as any);

      if (!error) {
        results.push({
          category: 'Functions',
          test: `${funcName} exists and is accessible`,
          passed: true,
        });
      } else {
        results.push({
          category: 'Functions',
          test: `${funcName} exists and is accessible`,
          passed: false,
          message: error.message,
        });
      }
    } catch (err) {
      results.push({
        category: 'Functions',
        test: `${funcName} exists and is accessible`,
        passed: false,
        message: err instanceof Error ? err.message : String(err),
      });
    }
  }
}
function printResults(): void {
  console.log('\n' + '='.repeat(80));
  console.log('Schema Validation Results');
  console.log('='.repeat(80) + '\n');

  const categories = [...new Set(results.map(r => r.category))];
  let totalPassed = 0;
  let totalFailed = 0;

  for (const category of categories) {
    const categoryResults = results.filter(r => r.category === category);
    const passed = categoryResults.filter(r => r.passed).length;
    const failed = categoryResults.filter(r => !r.passed).length;

    console.log(`\n${category}:`);
    console.log('-'.repeat(80));

    for (const result of categoryResults) {
      const icon = result.passed ? '✅' : '❌';
      console.log(`${icon} ${result.test}`);
      if (result.message) {
        console.log(`   └─ ${result.message}`);
      }
    }

    totalPassed += passed;
    totalFailed += failed;
  }

  console.log('\n' + '='.repeat(80));
  console.log(`Total: ${totalPassed} passed, ${totalFailed} failed`);
  console.log('='.repeat(80) + '\n');
}
async function main(): Promise<void> {
  console.log('🔍 Starting schema validation...\n');

  try {
    // Validate submission tables
    await validateSubmissionTable('parks', 'park_submissions', 'Parks');
    await validateSubmissionTable('rides', 'ride_submissions', 'Rides');
    await validateSubmissionTable('companies', 'company_submissions', 'Companies');
    await validateSubmissionTable('ride_models', 'ride_model_submissions', 'Ride Models');

    // Validate version tables
    await validateVersionTable('parks', 'park_versions', 'Parks');
    await validateVersionTable('rides', 'ride_versions', 'Rides');
    await validateVersionTable('companies', 'company_versions', 'Companies');
    await validateVersionTable('ride_models', 'ride_model_versions', 'Ride Models');

    // Validate critical fields
    await validateCriticalFields();

    // Validate functions
    await validateFunctions();

    // Print results
    printResults();

    // Exit with appropriate code
    const hasFailures = results.some(r => !r.passed);
    if (hasFailures) {
      console.error('❌ Schema validation failed. Please fix the issues above before deploying.\n');
      process.exit(1);
    } else {
      console.log('✅ All schema validations passed. Safe to deploy.\n');
      process.exit(0);
    }
  } catch (error) {
    console.error('❌ Fatal error during validation:');
    console.error(error);
    process.exit(1);
  }
}

main();
@@ -73,7 +73,6 @@ const AdminContact = lazy(() => import("./pages/admin/AdminContact"));
const AdminEmailSettings = lazy(() => import("./pages/admin/AdminEmailSettings"));
const ErrorMonitoring = lazy(() => import("./pages/admin/ErrorMonitoring"));
const ErrorLookup = lazy(() => import("./pages/admin/ErrorLookup"));
const TraceViewer = lazy(() => import("./pages/admin/TraceViewer"));

// User routes (lazy-loaded)
const Profile = lazy(() => import("./pages/Profile"));
@@ -388,14 +387,6 @@ function AppContent(): React.JSX.Element {
            </AdminErrorBoundary>
          }
        />
        <Route
          path="/admin/trace-viewer"
          element={
            <AdminErrorBoundary section="Trace Viewer">
              <TraceViewer />
            </AdminErrorBoundary>
          }
        />

        {/* Utility routes - lazy loaded */}
        <Route path="/force-logout" element={<ForceLogout />} />
@@ -1,6 +1,5 @@
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog';
import { Badge } from '@/components/ui/badge';
import { Button } from '@/components/ui/button';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { Card, CardContent } from '@/components/ui/card';
import { format } from 'date-fns';
@@ -197,27 +196,6 @@ export function ApprovalFailureModal({ failure, onClose }: ApprovalFailureModalP
            </Card>
          </TabsContent>
        </Tabs>

        <div className="flex justify-end gap-2 mt-4">
          {failure.request_id && (
            <>
              <Button
                variant="outline"
                size="sm"
                onClick={() => window.open(`/admin/error-monitoring?tab=edge-functions&requestId=${failure.request_id}`, '_blank')}
              >
                View Edge Logs
              </Button>
              <Button
                variant="outline"
                size="sm"
                onClick={() => window.open(`/admin/error-monitoring?tab=traces&traceId=${failure.request_id}`, '_blank')}
              >
                View Full Trace
              </Button>
            </>
          )}
        </div>
      </DialogContent>
    </Dialog>
  );
@@ -1,161 +0,0 @@
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Loader2, Clock } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface CorrelatedLogsViewProps {
  requestId: string;
  traceId?: string;
}

interface TimelineEvent {
  timestamp: Date;
  type: 'error' | 'edge' | 'database' | 'approval';
  message: string;
  severity?: string;
  metadata?: Record<string, any>;
}

export function CorrelatedLogsView({ requestId, traceId }: CorrelatedLogsViewProps) {
  const { data: events, isLoading } = useQuery({
    queryKey: ['correlated-logs', requestId, traceId],
    queryFn: async () => {
      const events: TimelineEvent[] = [];

      // Fetch application error
      const { data: error } = await supabase
        .from('request_metadata')
        .select('*')
        .eq('request_id', requestId)
        .single();

      if (error) {
        events.push({
          timestamp: new Date(error.created_at),
          type: 'error',
          message: error.error_message || 'Unknown error',
          severity: error.error_type || undefined,
          metadata: {
            endpoint: error.endpoint,
            method: error.method,
            status_code: error.status_code,
          },
        });
      }

      // Fetch approval metrics
      const { data: approval } = await supabase
        .from('approval_transaction_metrics')
        .select('*')
        .eq('request_id', requestId)
        .maybeSingle();

      if (approval && approval.created_at) {
        events.push({
          timestamp: new Date(approval.created_at),
          type: 'approval',
          message: approval.success ? 'Approval successful' : (approval.error_message || 'Approval failed'),
          severity: approval.success ? 'success' : 'error',
          metadata: {
            items_count: approval.items_count,
            duration_ms: approval.duration_ms || undefined,
          },
        });
      }

      // TODO: Fetch edge function logs (requires Management API access)
      // TODO: Fetch database logs (requires analytics API access)

      // Sort chronologically
      events.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());

      return events;
    },
  });

  const getTypeColor = (type: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (type) {
      case 'error': return 'destructive';
      case 'approval': return 'destructive';
      case 'edge': return 'default';
      case 'database': return 'secondary';
      default: return 'outline';
    }
  };

  if (isLoading) {
    return (
      <div className="flex items-center justify-center py-12">
        <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
      </div>
    );
  }

  if (!events || events.length === 0) {
    return (
      <Card>
        <CardContent className="pt-6">
          <p className="text-center text-muted-foreground">
            No correlated logs found for this request.
          </p>
        </CardContent>
      </Card>
    );
  }

  return (
    <Card>
      <CardHeader>
        <CardTitle className="text-lg flex items-center gap-2">
          <Clock className="w-5 h-5" />
          Timeline for Request {requestId.slice(0, 8)}
        </CardTitle>
      </CardHeader>
      <CardContent>
        <div className="relative space-y-4">
          {/* Timeline line */}
          <div className="absolute left-6 top-0 bottom-0 w-0.5 bg-border" />

          {events.map((event, index) => (
            <div key={index} className="relative pl-14">
              {/* Timeline dot */}
              <div className="absolute left-[18px] top-2 w-4 h-4 rounded-full bg-background border-2 border-primary" />

              <Card>
                <CardContent className="pt-4">
                  <div className="space-y-2">
                    <div className="flex items-center gap-2">
                      <Badge variant={getTypeColor(event.type)}>
                        {event.type.toUpperCase()}
                      </Badge>
                      {event.severity && (
                        <Badge variant="outline" className="text-xs">
                          {event.severity}
                        </Badge>
                      )}
                      <span className="text-xs text-muted-foreground">
                        {format(event.timestamp, 'HH:mm:ss.SSS')}
                      </span>
                    </div>
                    <p className="text-sm">{event.message}</p>
                    {event.metadata && Object.keys(event.metadata).length > 0 && (
                      <div className="text-xs text-muted-foreground space-y-1">
                        {Object.entries(event.metadata).map(([key, value]) => (
                          <div key={key}>
                            <span className="font-medium">{key}:</span> {String(value)}
                          </div>
                        ))}
                      </div>
                    )}
                  </div>
                </CardContent>
              </Card>
            </div>
          ))}
        </div>
      </CardContent>
    </Card>
  );
}
@@ -1,172 +0,0 @@
|
|||||||
import { useState } from 'react';
|
|
||||||
import { useQuery } from '@tanstack/react-query';
|
|
||||||
import { Card, CardContent, CardHeader } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Loader2, Search, ChevronDown, ChevronRight } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface DatabaseLog {
  id: string;
  timestamp: number;
  identifier: string;
  error_severity: string;
  event_message: string;
}

export function DatabaseLogs() {
  const [searchTerm, setSearchTerm] = useState('');
  const [severity, setSeverity] = useState<string>('all');
  const [timeRange, setTimeRange] = useState<'1h' | '24h' | '7d'>('24h');
  const [expandedLog, setExpandedLog] = useState<string | null>(null);

  const { data: logs, isLoading } = useQuery({
    queryKey: ['database-logs', severity, timeRange],
    queryFn: async () => {
      // For now, return empty array as we need proper permissions for analytics query
      // In production, this would use Supabase Analytics API
      // const hoursAgo = timeRange === '1h' ? 1 : timeRange === '24h' ? 24 : 168;
      // const startTime = Date.now() * 1000 - (hoursAgo * 60 * 60 * 1000 * 1000);

      return [] as DatabaseLog[];
    },
    refetchInterval: 30000,
  });

  const filteredLogs = logs?.filter(log => {
    if (searchTerm && !log.event_message.toLowerCase().includes(searchTerm.toLowerCase())) {
      return false;
    }
    return true;
  }) || [];

  const getSeverityColor = (severity: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (severity.toUpperCase()) {
      case 'ERROR': return 'destructive';
      case 'WARNING': return 'destructive';
      case 'NOTICE': return 'default';
      case 'LOG': return 'secondary';
      default: return 'outline';
    }
  };

  const isSpanLog = (message: string) => {
    return message.includes('SPAN:') || message.includes('SPAN_EVENT:');
  };

  const toggleExpand = (logId: string) => {
    setExpandedLog(expandedLog === logId ? null : logId);
  };

  return (
    <div className="space-y-4">
      <div className="flex flex-col md:flex-row gap-4">
        <div className="flex-1">
          <div className="relative">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search database logs..."
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              className="pl-10"
            />
          </div>
        </div>
        <Select value={severity} onValueChange={setSeverity}>
          <SelectTrigger className="w-[150px]">
            <SelectValue placeholder="Severity" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Levels</SelectItem>
            <SelectItem value="ERROR">Error</SelectItem>
            <SelectItem value="WARNING">Warning</SelectItem>
            <SelectItem value="NOTICE">Notice</SelectItem>
            <SelectItem value="LOG">Log</SelectItem>
          </SelectContent>
        </Select>
        <Select value={timeRange} onValueChange={(v) => setTimeRange(v as any)}>
          <SelectTrigger className="w-[120px]">
            <SelectValue />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="1h">Last Hour</SelectItem>
            <SelectItem value="24h">Last 24h</SelectItem>
            <SelectItem value="7d">Last 7 Days</SelectItem>
          </SelectContent>
        </Select>
      </div>

      {isLoading ? (
        <div className="flex items-center justify-center py-12">
          <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
        </div>
      ) : filteredLogs.length === 0 ? (
        <Card>
          <CardContent className="pt-6">
            <p className="text-center text-muted-foreground">
              No database logs found for the selected criteria.
            </p>
          </CardContent>
        </Card>
      ) : (
        <div className="space-y-2">
          {filteredLogs.map((log) => (
            <Card key={log.id} className="overflow-hidden">
              <CardHeader
                className="py-3 cursor-pointer hover:bg-muted/50 transition-colors"
                onClick={() => toggleExpand(log.id)}
              >
                <div className="flex items-center justify-between">
                  <div className="flex items-center gap-3">
                    {expandedLog === log.id ? (
                      <ChevronDown className="w-4 h-4 text-muted-foreground" />
                    ) : (
                      <ChevronRight className="w-4 h-4 text-muted-foreground" />
                    )}
                    <Badge variant={getSeverityColor(log.error_severity)}>
                      {log.error_severity}
                    </Badge>
                    {isSpanLog(log.event_message) && (
                      <Badge variant="outline" className="text-xs">
                        TRACE
                      </Badge>
                    )}
                    <span className="text-sm text-muted-foreground">
                      {format(log.timestamp / 1000, 'HH:mm:ss.SSS')}
                    </span>
                  </div>
                  <span className="text-sm truncate max-w-[500px]">
                    {log.event_message.slice(0, 100)}
                    {log.event_message.length > 100 && '...'}
                  </span>
                </div>
              </CardHeader>
              {expandedLog === log.id && (
                <CardContent className="pt-0 pb-4 border-t">
                  <div className="space-y-2 mt-4">
                    <div>
                      <span className="text-xs text-muted-foreground">Full Message:</span>
                      <pre className="text-xs font-mono mt-1 whitespace-pre-wrap break-all">
                        {log.event_message}
                      </pre>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Timestamp:</span>
                      <p className="text-sm">{format(log.timestamp / 1000, 'PPpp')}</p>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Identifier:</span>
                      <p className="text-sm font-mono">{log.identifier}</p>
                    </div>
                  </div>
                </CardContent>
              )}
            </Card>
          ))}
        </div>
      )}
    </div>
  );
}
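// Note (editor, illustrative): the analytics timestamps consumed above are in
// microseconds, which is why the component divides by 1000 before handing the
// value to date-fns `format` (which expects milliseconds), and why the
// commented-out startTime multiplies Date.now() by 1000. A minimal sketch:
const microsToMillis = (micros: number): number => Math.floor(micros / 1000);
// e.g. format(microsToMillis(log.timestamp), 'HH:mm:ss.SSS')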
@@ -1,168 +0,0 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Loader2, Search, ChevronDown, ChevronRight } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface EdgeFunctionLog {
  id: string;
  timestamp: number;
  event_type: string;
  event_message: string;
  function_id: string;
  level: string;
}

const FUNCTION_NAMES = [
  'detect-location',
  'process-selective-approval',
  'process-selective-rejection',
];

export function EdgeFunctionLogs() {
  const [selectedFunction, setSelectedFunction] = useState<string>('all');
  const [searchTerm, setSearchTerm] = useState('');
  const [timeRange, setTimeRange] = useState<'1h' | '24h' | '7d'>('24h');
  const [expandedLog, setExpandedLog] = useState<string | null>(null);

  const { data: logs, isLoading } = useQuery({
    queryKey: ['edge-function-logs', selectedFunction, timeRange],
    queryFn: async () => {
      // Query Supabase edge function logs
      // Note: This uses the analytics endpoint which requires specific permissions
      const hoursAgo = timeRange === '1h' ? 1 : timeRange === '24h' ? 24 : 168;
      const startTime = Date.now() - (hoursAgo * 60 * 60 * 1000);

      // For now, return the logs from context as an example
      // In production, this would call the Supabase Management API
      const allLogs: EdgeFunctionLog[] = [];

      return allLogs;
    },
    refetchInterval: 30000, // Refresh every 30 seconds
  });

  const filteredLogs = logs?.filter(log => {
    if (searchTerm && !log.event_message.toLowerCase().includes(searchTerm.toLowerCase())) {
      return false;
    }
    return true;
  }) || [];

  const getLevelColor = (level: string): "default" | "destructive" | "secondary" => {
    switch (level.toLowerCase()) {
      case 'error': return 'destructive';
      case 'warn': return 'destructive';
      case 'info': return 'default';
      default: return 'secondary';
    }
  };

  const toggleExpand = (logId: string) => {
    setExpandedLog(expandedLog === logId ? null : logId);
  };

  return (
    <div className="space-y-4">
      <div className="flex flex-col md:flex-row gap-4">
        <div className="flex-1">
          <div className="relative">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search logs..."
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              className="pl-10"
            />
          </div>
        </div>
        <Select value={selectedFunction} onValueChange={setSelectedFunction}>
          <SelectTrigger className="w-[200px]">
            <SelectValue placeholder="Select function" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Functions</SelectItem>
            {FUNCTION_NAMES.map(name => (
              <SelectItem key={name} value={name}>{name}</SelectItem>
            ))}
          </SelectContent>
        </Select>
        <Select value={timeRange} onValueChange={(v) => setTimeRange(v as any)}>
          <SelectTrigger className="w-[120px]">
            <SelectValue />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="1h">Last Hour</SelectItem>
            <SelectItem value="24h">Last 24h</SelectItem>
            <SelectItem value="7d">Last 7 Days</SelectItem>
          </SelectContent>
        </Select>
      </div>

      {isLoading ? (
        <div className="flex items-center justify-center py-12">
          <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
        </div>
      ) : filteredLogs.length === 0 ? (
        <Card>
          <CardContent className="pt-6">
            <p className="text-center text-muted-foreground">
              No edge function logs found. Logs will appear here when edge functions are invoked.
            </p>
          </CardContent>
        </Card>
      ) : (
        <div className="space-y-2">
          {filteredLogs.map((log) => (
            <Card key={log.id} className="overflow-hidden">
              <CardHeader
                className="py-3 cursor-pointer hover:bg-muted/50 transition-colors"
                onClick={() => toggleExpand(log.id)}
              >
                <div className="flex items-center justify-between">
                  <div className="flex items-center gap-3">
                    {expandedLog === log.id ? (
                      <ChevronDown className="w-4 h-4 text-muted-foreground" />
                    ) : (
                      <ChevronRight className="w-4 h-4 text-muted-foreground" />
                    )}
                    <Badge variant={getLevelColor(log.level)}>
                      {log.level}
                    </Badge>
                    <span className="text-sm text-muted-foreground">
                      {format(log.timestamp, 'HH:mm:ss.SSS')}
                    </span>
                    <Badge variant="outline" className="text-xs">
                      {log.event_type}
                    </Badge>
                  </div>
                  <span className="text-sm truncate max-w-[400px]">
                    {log.event_message}
                  </span>
                </div>
              </CardHeader>
              {expandedLog === log.id && (
                <CardContent className="pt-0 pb-4 border-t">
                  <div className="space-y-2 mt-4">
                    <div>
                      <span className="text-xs text-muted-foreground">Full Message:</span>
                      <p className="text-sm font-mono mt-1">{log.event_message}</p>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Timestamp:</span>
                      <p className="text-sm">{format(log.timestamp, 'PPpp')}</p>
                    </div>
                  </div>
                </CardContent>
              )}
            </Card>
          ))}
        </div>
      )}
    </div>
  );
}
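// Note (editor, illustrative): `selectedFunction` is part of the query key
// above, but the stub queryFn never applies it. Once real logs are fetched, a
// client-side filter could sit next to the search-term filter, assuming
// `function_id` carries the function name/slug (an assumption, not repo code):
const byFunction = (logs: EdgeFunctionLog[], selected: string): EdgeFunctionLog[] =>
  selected === 'all' ? logs : logs.filter(l => l.function_id === selected);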
@@ -222,30 +222,12 @@ ${error.error_stack ? `Stack Trace:\n${error.error_stack}` : ''}
         </TabsContent>
       </Tabs>

-      <div className="flex justify-between items-center">
-        <div className="flex gap-2">
-          <Button
-            variant="outline"
-            size="sm"
-            onClick={() => window.open(`/admin/error-monitoring?tab=edge-functions&requestId=${error.request_id}`, '_blank')}
-          >
-            View Edge Logs
-          </Button>
-          <Button
-            variant="outline"
-            size="sm"
-            onClick={() => window.open(`/admin/error-monitoring?tab=database&requestId=${error.request_id}`, '_blank')}
-          >
-            View DB Logs
-          </Button>
-        </div>
-        <div className="flex gap-2">
-          <Button variant="outline" onClick={copyErrorReport}>
-            <Copy className="w-4 h-4 mr-2" />
-            Copy Report
-          </Button>
-          <Button onClick={onClose}>Close</Button>
-        </div>
-      </div>
+      <div className="flex justify-end gap-2">
+        <Button variant="outline" onClick={copyErrorReport}>
+          <Copy className="w-4 h-4 mr-2" />
+          Copy Report
+        </Button>
+        <Button onClick={onClose}>Close</Button>
+      </div>
     </div>
   </DialogContent>
 </Dialog>
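// Note (editor, illustrative): the removed buttons deep-linked into the
// monitoring page via `?tab=...&requestId=...`. A receiving tab could hydrate
// its filters from the URL with standard browser APIs (variable names here are
// assumptions, not repo code):
const params = new URLSearchParams(window.location.search);
const initialTab = params.get('tab') ?? 'errors';
const initialRequestId = params.get('requestId') ?? '';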
@@ -14,11 +14,10 @@ import { ScrollArea } from '@/components/ui/scroll-area';
 import { Badge } from '@/components/ui/badge';
 import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible';
 import { useSuperuserGuard } from '@/hooks/useSuperuserGuard';
-import { IntegrationTestRunner as TestRunner, allTestSuites, type TestResult, formatResultsAsMarkdown, formatSingleTestAsMarkdown } from '@/lib/integrationTests';
-import { Play, Square, Download, ChevronDown, CheckCircle2, XCircle, Clock, SkipForward, Copy, ClipboardX } from 'lucide-react';
+import { IntegrationTestRunner as TestRunner, allTestSuites, type TestResult } from '@/lib/integrationTests';
+import { Play, Square, Download, ChevronDown, CheckCircle2, XCircle, Clock, SkipForward } from 'lucide-react';
 import { toast } from 'sonner';
 import { handleError } from '@/lib/errorHandler';
-import { CleanupReport } from '@/components/ui/cleanup-report';

 export function IntegrationTestRunner() {
   const superuserGuard = useSuperuserGuard();
@@ -106,38 +105,6 @@ export function IntegrationTestRunner() {
     toast.success('Test results exported');
   }, [runner]);

-  const copyAllResults = useCallback(async () => {
-    const summary = runner.getSummary();
-    const results = runner.getResults();
-
-    const markdown = formatResultsAsMarkdown(results, summary);
-
-    await navigator.clipboard.writeText(markdown);
-    toast.success('All test results copied to clipboard');
-  }, [runner]);
-
-  const copyFailedTests = useCallback(async () => {
-    const summary = runner.getSummary();
-    const failedResults = runner.getResults().filter(r => r.status === 'fail');
-
-    if (failedResults.length === 0) {
-      toast.info('No failed tests to copy');
-      return;
-    }
-
-    const markdown = formatResultsAsMarkdown(failedResults, summary, true);
-
-    await navigator.clipboard.writeText(markdown);
-    toast.success(`${failedResults.length} failed test(s) copied to clipboard`);
-  }, [runner]);
-
-  const copyTestResult = useCallback(async (result: TestResult) => {
-    const markdown = formatSingleTestAsMarkdown(result);
-
-    await navigator.clipboard.writeText(markdown);
-    toast.success('Test result copied to clipboard');
-  }, []);
-
   // Guard is handled by the route/page, no loading state needed here

   const summary = runner.getSummary();
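// Note (editor, illustrative): formatResultsAsMarkdown / formatSingleTestAsMarkdown
// live in '@/lib/integrationTests' and are not shown in this diff. A hedged
// sketch of what such a formatter might produce (shape is an assumption):
function sketchFormatResults(results: Array<{ name: string; status: string; duration: number }>): string {
  const rows = results.map(r => `| ${r.name} | ${r.status} | ${r.duration}ms |`);
  return ['| Test | Status | Duration |', '| --- | --- | --- |', ...rows].join('\n');
}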
@@ -199,22 +166,10 @@ export function IntegrationTestRunner() {
           </Button>
         )}
         {results.length > 0 && !isRunning && (
-          <>
-            <Button onClick={exportResults} variant="outline">
-              <Download className="w-4 h-4 mr-2" />
-              Export JSON
-            </Button>
-            <Button onClick={copyAllResults} variant="outline">
-              <Copy className="w-4 h-4 mr-2" />
-              Copy All
-            </Button>
-            {summary.failed > 0 && (
-              <Button onClick={copyFailedTests} variant="outline">
-                <ClipboardX className="w-4 h-4 mr-2" />
-                Copy Failed ({summary.failed})
-              </Button>
-            )}
-          </>
+          <Button onClick={exportResults} variant="outline">
+            <Download className="w-4 h-4 mr-2" />
+            Export Results
+          </Button>
         )}
       </div>

@@ -253,11 +208,6 @@ export function IntegrationTestRunner() {
         </CardContent>
       </Card>

-      {/* Cleanup Report */}
-      {!isRunning && summary.cleanup && (
-        <CleanupReport summary={summary.cleanup} />
-      )}
-
       {/* Results */}
       {results.length > 0 && (
         <Card>
@@ -270,13 +220,11 @@ export function IntegrationTestRunner() {
               {results.map(result => (
                 <Collapsible key={result.id}>
                   <div className="flex items-start gap-3 p-3 rounded-lg border bg-card">
                     <div className="pt-0.5">
                       {result.status === 'pass' && <CheckCircle2 className="w-4 h-4 text-green-500" />}
                       {result.status === 'fail' && <XCircle className="w-4 h-4 text-destructive" />}
-                      {result.status === 'skip' && !result.name.includes('⏳') && <SkipForward className="w-4 h-4 text-muted-foreground" />}
-                      {result.status === 'skip' && result.name.includes('⏳') && <Clock className="w-4 h-4 text-muted-foreground" />}
-                      {result.status === 'running' && !result.name.includes('⏳') && <Clock className="w-4 h-4 text-blue-500 animate-pulse" />}
-                      {result.status === 'running' && result.name.includes('⏳') && <Clock className="w-4 h-4 text-amber-500 animate-pulse" />}
+                      {result.status === 'skip' && <SkipForward className="w-4 h-4 text-muted-foreground" />}
+                      {result.status === 'running' && <Clock className="w-4 h-4 text-blue-500 animate-pulse" />}
                     </div>
                     <div className="flex-1 space-y-1">
                       <div className="flex items-start justify-between gap-2">
@@ -288,14 +236,6 @@ export function IntegrationTestRunner() {
                         <Badge variant="outline" className="text-xs">
                           {result.duration}ms
                         </Badge>
-                        <Button
-                          variant="ghost"
-                          size="sm"
-                          className="h-6 w-6 p-0"
-                          onClick={() => copyTestResult(result)}
-                        >
-                          <Copy className="h-3 w-3" />
-                        </Button>
                         {(result.error || result.details) && (
                           <CollapsibleTrigger asChild>
                             <Button variant="ghost" size="sm" className="h-6 w-6 p-0">
@@ -1,203 +0,0 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Button } from '@/components/ui/button';
import { Badge } from '@/components/ui/badge';
import { Search, Loader2, ExternalLink } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';

interface SearchResult {
  type: 'error' | 'approval' | 'edge' | 'database';
  id: string;
  timestamp: string;
  message: string;
  severity?: string;
  metadata?: Record<string, any>;
}

interface UnifiedLogSearchProps {
  onNavigate: (tab: string, filters: Record<string, string>) => void;
}

export function UnifiedLogSearch({ onNavigate }: UnifiedLogSearchProps) {
  const [searchQuery, setSearchQuery] = useState('');
  const [searchTerm, setSearchTerm] = useState('');

  const { data: results, isLoading } = useQuery({
    queryKey: ['unified-log-search', searchTerm],
    queryFn: async () => {
      if (!searchTerm) return [];

      const results: SearchResult[] = [];

      // Search application errors
      const { data: errors } = await supabase
        .from('request_metadata')
        .select('request_id, created_at, error_type, error_message')
        .or(`request_id.ilike.%${searchTerm}%,error_message.ilike.%${searchTerm}%`)
        .order('created_at', { ascending: false })
        .limit(10);

      if (errors) {
        results.push(...errors.map(e => ({
          type: 'error' as const,
          id: e.request_id,
          timestamp: e.created_at,
          message: e.error_message || 'Unknown error',
          severity: e.error_type || undefined,
        })));
      }

      // Search approval failures
      const { data: approvals } = await supabase
        .from('approval_transaction_metrics')
        .select('id, created_at, error_message, request_id')
        .eq('success', false)
        .or(`request_id.ilike.%${searchTerm}%,error_message.ilike.%${searchTerm}%`)
        .order('created_at', { ascending: false })
        .limit(10);

      if (approvals) {
        results.push(...approvals
          .filter(a => a.created_at)
          .map(a => ({
            type: 'approval' as const,
            id: a.id,
            timestamp: a.created_at!,
            message: a.error_message || 'Approval failed',
            metadata: { request_id: a.request_id },
          })));
      }

      // Sort by timestamp
      results.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());

      return results;
    },
    enabled: !!searchTerm,
  });

  const handleSearch = () => {
    setSearchTerm(searchQuery);
  };

  const getTypeColor = (type: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (type) {
      case 'error': return 'destructive';
      case 'approval': return 'destructive';
      case 'edge': return 'default';
      case 'database': return 'secondary';
      default: return 'outline';
    }
  };

  const getTypeLabel = (type: string) => {
    switch (type) {
      case 'error': return 'Application Error';
      case 'approval': return 'Approval Failure';
      case 'edge': return 'Edge Function';
      case 'database': return 'Database Log';
      default: return type;
    }
  };

  const handleResultClick = (result: SearchResult) => {
    switch (result.type) {
      case 'error':
        onNavigate('errors', { requestId: result.id });
        break;
      case 'approval':
        onNavigate('approvals', { failureId: result.id });
        break;
      case 'edge':
        onNavigate('edge-functions', { search: result.message });
        break;
      case 'database':
        onNavigate('database', { search: result.message });
        break;
    }
  };

  return (
    <Card>
      <CardHeader>
        <CardTitle className="text-lg">Unified Log Search</CardTitle>
      </CardHeader>
      <CardContent className="space-y-4">
        <div className="flex gap-2">
          <div className="relative flex-1">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search across all logs (request ID, error message, trace ID...)"
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
              className="pl-10"
            />
          </div>
          <Button onClick={handleSearch} disabled={!searchQuery || isLoading}>
            {isLoading ? (
              <Loader2 className="w-4 h-4 animate-spin" />
            ) : (
              <Search className="w-4 h-4" />
            )}
          </Button>
        </div>

        {searchTerm && (
          <div className="space-y-2">
            {isLoading ? (
              <div className="flex items-center justify-center py-8">
                <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
              </div>
            ) : results && results.length > 0 ? (
              <>
                <div className="text-sm text-muted-foreground">
                  Found {results.length} results
                </div>
                {results.map((result) => (
                  <Card
                    key={`${result.type}-${result.id}`}
                    className="cursor-pointer hover:bg-muted/50 transition-colors"
                    onClick={() => handleResultClick(result)}
                  >
                    <CardContent className="pt-4 pb-3">
                      <div className="flex items-start justify-between gap-4">
                        <div className="flex-1 space-y-1">
                          <div className="flex items-center gap-2">
                            <Badge variant={getTypeColor(result.type)}>
                              {getTypeLabel(result.type)}
                            </Badge>
                            {result.severity && (
                              <Badge variant="outline" className="text-xs">
                                {result.severity}
                              </Badge>
                            )}
                            <span className="text-xs text-muted-foreground">
                              {format(new Date(result.timestamp), 'PPp')}
                            </span>
                          </div>
                          <p className="text-sm line-clamp-2">{result.message}</p>
                          <code className="text-xs text-muted-foreground">
                            {result.id.slice(0, 16)}...
                          </code>
                        </div>
                        <ExternalLink className="w-4 h-4 text-muted-foreground flex-shrink-0" />
                      </div>
                    </CardContent>
                  </Card>
                ))}
              </>
            ) : (
              <p className="text-center text-muted-foreground py-8">
                No results found for "{searchTerm}"
              </p>
            )}
          </div>
        )}
      </CardContent>
    </Card>
  );
}
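// Note (editor): the `.or(...ilike.%${searchTerm}%...)` filters above
// interpolate raw user input into a PostgREST filter string, where `%` and `_`
// are LIKE wildcards and `,` separates conditions. A hedged sketch of
// sanitizing the term first (helper name is an assumption, not repo code):
const escapeSearchTerm = (term: string): string =>
  term.replace(/[\\%_]/g, (m) => `\\${m}`).replace(/,/g, ' ');
// usage: .or(`request_id.ilike.%${escapeSearchTerm(searchTerm)}%`)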
@@ -49,7 +49,7 @@ export function AdminSidebar() {
     icon: ScrollText,
   },
   {
-    title: 'Monitoring & Logs',
+    title: 'Error Monitoring',
     url: '/admin/error-monitoring',
     icon: AlertTriangle,
   },
@@ -1,221 +0,0 @@
/**
 * Cleanup Verification Report Component
 *
 * Displays detailed results of test data cleanup after integration tests complete.
 * Shows tables cleaned, records deleted, errors, and verification status.
 */

import { CheckCircle2, XCircle, AlertCircle, Database, Trash2, Clock } from 'lucide-react';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Progress } from '@/components/ui/progress';
import type { CleanupSummary } from '@/lib/integrationTests/testCleanup';

interface CleanupReportProps {
  summary: CleanupSummary;
  className?: string;
}

export function CleanupReport({ summary, className = '' }: CleanupReportProps) {
  const successCount = summary.results.filter(r => !r.error).length;
  const errorCount = summary.results.filter(r => r.error).length;
  const successRate = summary.results.length > 0
    ? (successCount / summary.results.length) * 100
    : 0;

  return (
    <Card className={`border-border ${className}`}>
      <CardHeader>
        <CardTitle className="flex items-center gap-2">
          <Trash2 className="h-5 w-5 text-muted-foreground" />
          Test Data Cleanup Report
        </CardTitle>
      </CardHeader>

      <CardContent className="space-y-4">
        {/* Summary Stats */}
        <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Total Deleted</p>
            <p className="text-2xl font-bold text-foreground">
              {summary.totalDeleted.toLocaleString()}
            </p>
          </div>

          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Tables Cleaned</p>
            <p className="text-2xl font-bold text-foreground">
              {successCount}/{summary.results.length}
            </p>
          </div>

          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Duration</p>
            <p className="text-2xl font-bold text-foreground flex items-center gap-1">
              <Clock className="h-4 w-4" />
              {(summary.totalDuration / 1000).toFixed(1)}s
            </p>
          </div>

          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Status</p>
            <Badge
              variant={summary.success ? "default" : "destructive"}
              className="text-base font-semibold"
            >
              {summary.success ? (
                <span className="flex items-center gap-1">
                  <CheckCircle2 className="h-4 w-4" />
                  Complete
                </span>
              ) : (
                <span className="flex items-center gap-1">
                  <XCircle className="h-4 w-4" />
                  Failed
                </span>
              )}
            </Badge>
          </div>
        </div>

        {/* Success Rate Progress */}
        <div className="space-y-2">
          <div className="flex justify-between text-sm">
            <span className="text-muted-foreground">Success Rate</span>
            <span className="font-medium text-foreground">{successRate.toFixed(1)}%</span>
          </div>
          <Progress value={successRate} className="h-2" />
        </div>

        {/* Table-by-Table Results */}
        <div className="space-y-2">
          <h3 className="text-sm font-semibold text-foreground flex items-center gap-2">
            <Database className="h-4 w-4" />
            Cleanup Details
          </h3>

          <div className="space-y-1 max-h-64 overflow-y-auto border border-border rounded-md">
            {summary.results.map((result, index) => (
              <div
                key={`${result.table}-${index}`}
                className="flex items-center justify-between p-3 hover:bg-accent/50 transition-colors border-b border-border last:border-b-0"
              >
                <div className="flex items-center gap-3 flex-1 min-w-0">
                  {result.error ? (
                    <XCircle className="h-4 w-4 text-destructive flex-shrink-0" />
                  ) : result.deleted > 0 ? (
                    <CheckCircle2 className="h-4 w-4 text-green-600 dark:text-green-400 flex-shrink-0" />
                  ) : (
                    <AlertCircle className="h-4 w-4 text-muted-foreground flex-shrink-0" />
                  )}

                  <div className="flex-1 min-w-0">
                    <p className="font-mono text-sm text-foreground truncate">
                      {result.table}
                    </p>
                    {result.error && (
                      <p className="text-xs text-destructive truncate">
                        {result.error}
                      </p>
                    )}
                  </div>
                </div>

                <div className="flex items-center gap-3 flex-shrink-0">
                  <Badge
                    variant={result.deleted > 0 ? "default" : "secondary"}
                    className="font-mono"
                  >
                    {result.deleted} deleted
                  </Badge>
                  <span className="text-xs text-muted-foreground font-mono w-16 text-right">
                    {result.duration}ms
                  </span>
                </div>
              </div>
            ))}
          </div>
        </div>

        {/* Error Summary (if any) */}
        {errorCount > 0 && (
          <div className="p-3 bg-destructive/10 border border-destructive/20 rounded-md">
            <div className="flex items-start gap-2">
              <AlertCircle className="h-5 w-5 text-destructive flex-shrink-0 mt-0.5" />
              <div>
                <p className="text-sm font-semibold text-destructive">
                  {errorCount} {errorCount === 1 ? 'table' : 'tables'} failed to clean
                </p>
                <p className="text-xs text-destructive/80 mt-1">
                  Check error messages above for details. Test data may remain in database.
                </p>
              </div>
            </div>
          </div>
        )}

        {/* Success Message */}
        {summary.success && summary.totalDeleted > 0 && (
          <div className="p-3 bg-green-500/10 border border-green-500/20 rounded-md">
            <div className="flex items-start gap-2">
              <CheckCircle2 className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0 mt-0.5" />
              <div>
                <p className="text-sm font-semibold text-green-700 dark:text-green-300">
                  Cleanup completed successfully
                </p>
                <p className="text-xs text-green-600 dark:text-green-400 mt-1">
                  All test data has been removed from the database.
                </p>
              </div>
            </div>
          </div>
        )}

        {/* No Data Message */}
        {summary.success && summary.totalDeleted === 0 && (
          <div className="p-3 bg-muted border border-border rounded-md">
            <div className="flex items-start gap-2">
              <AlertCircle className="h-5 w-5 text-muted-foreground flex-shrink-0 mt-0.5" />
              <div>
                <p className="text-sm font-semibold text-muted-foreground">
                  No test data found
                </p>
                <p className="text-xs text-muted-foreground mt-1">
                  Database is already clean or no test data was created during this run.
                </p>
              </div>
            </div>
          </div>
        )}
      </CardContent>
    </Card>
  );
}

/**
 * Compact version for inline display in test results
 */
export function CleanupReportCompact({ summary }: CleanupReportProps) {
  return (
    <div className="flex items-center gap-3 p-3 bg-accent/50 rounded-md border border-border">
      <Trash2 className="h-5 w-5 text-muted-foreground flex-shrink-0" />

      <div className="flex-1 min-w-0">
        <p className="text-sm font-medium text-foreground">
          Cleanup: {summary.totalDeleted} records deleted
        </p>
        <p className="text-xs text-muted-foreground">
          {summary.results.filter(r => !r.error).length}/{summary.results.length} tables cleaned
          {' • '}
          {(summary.totalDuration / 1000).toFixed(1)}s
        </p>
      </div>

      {summary.success ? (
        <CheckCircle2 className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0" />
      ) : (
        <XCircle className="h-5 w-5 text-destructive flex-shrink-0" />
      )}
    </div>
  );
}
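// Note (editor, illustrative): `CleanupSummary` is imported from
// '@/lib/integrationTests/testCleanup' and not shown in this diff; the shape
// the component relies on can be inferred from usage (a sketch, not repo code):
interface CleanupSummarySketch {
  success: boolean;
  totalDeleted: number;
  totalDuration: number; // milliseconds
  results: Array<{ table: string; deleted: number; duration: number; error?: string }>;
}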
@@ -12,8 +12,6 @@ interface RetryStatus {
   type: string;
   state: 'retrying' | 'success' | 'failed';
   errorId?: string;
-  isRateLimit?: boolean;
-  retryAfter?: number;
 }

 /**
@@ -26,22 +24,12 @@ export function RetryStatusIndicator() {

   useEffect(() => {
     const handleRetry = (event: Event) => {
-      const customEvent = event as CustomEvent<Omit<RetryStatus, 'state' | 'countdown'>>;
-      const { id, attempt, maxAttempts, delay, type, isRateLimit, retryAfter } = customEvent.detail;
+      const customEvent = event as CustomEvent<Omit<RetryStatus, 'state'>>;
+      const { id, attempt, maxAttempts, delay, type } = customEvent.detail;

       setRetries(prev => {
         const next = new Map(prev);
-        next.set(id, {
-          id,
-          attempt,
-          maxAttempts,
-          delay,
-          type,
-          state: 'retrying',
-          countdown: delay,
-          isRateLimit,
-          retryAfter
-        });
+        next.set(id, { id, attempt, maxAttempts, delay, type, state: 'retrying', countdown: delay });
         return next;
       });
     };
@@ -173,17 +161,6 @@ function RetryCard({ retry }: { retry: RetryStatus & { countdown: number } }) {
   // Retrying state
   const progress = retry.delay > 0 ? ((retry.delay - retry.countdown) / retry.delay) * 100 : 0;

-  // Customize message based on rate limit status
-  const getMessage = () => {
-    if (retry.isRateLimit) {
-      if (retry.retryAfter) {
-        return `Rate limit reached. Waiting ${Math.ceil(retry.countdown / 1000)}s as requested by server...`;
-      }
-      return `Rate limit reached. Using smart backoff - retrying in ${Math.ceil(retry.countdown / 1000)}s...`;
-    }
-    return `Network issue detected. Retrying ${retry.type} submission in ${Math.ceil(retry.countdown / 1000)}s`;
-  };
-
   return (
     <Card className="p-4 shadow-lg border-amber-500 bg-amber-50 dark:bg-amber-950 w-80 animate-in slide-in-from-bottom-4">
       <div className="flex items-start gap-3">
@@ -191,7 +168,7 @@ function RetryCard({ retry }: { retry: RetryStatus & { countdown: number } }) {
         <div className="flex-1 space-y-2">
           <div className="flex items-center justify-between">
             <p className="text-sm font-medium text-amber-900 dark:text-amber-100">
-              {retry.isRateLimit ? 'Rate Limited' : 'Retrying submission...'}
+              Retrying submission...
             </p>
             <span className="text-xs font-mono text-amber-700 dark:text-amber-300">
               {retry.attempt}/{retry.maxAttempts}
@@ -199,7 +176,7 @@ function RetryCard({ retry }: { retry: RetryStatus & { countdown: number } }) {
         </div>

         <p className="text-xs text-amber-700 dark:text-amber-300">
-          {getMessage()}
+          Network issue detected. Retrying {retry.type} submission in {Math.ceil(retry.countdown / 1000)}s
         </p>

         <Progress value={progress} className="h-1" />
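// Note (editor, illustrative): RetryStatusIndicator is driven by DOM
// CustomEvents; the payload shape follows from the destructuring above. A
// minimal sketch of a listener for the same contract:
interface RetryEventDetail {
  id: string;
  attempt: number;
  maxAttempts: number;
  delay: number; // ms until the next attempt
  type: string;  // e.g. 'park', 'park update'
}
window.addEventListener('submission-retry', (event) => {
  const { id, attempt, maxAttempts } = (event as CustomEvent<RetryEventDetail>).detail;
  console.debug(`retry ${id}: attempt ${attempt}/${maxAttempts}`);
});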
@@ -52,31 +52,6 @@ export function UppyPhotoSubmissionUpload({
   const { user } = useAuth();
   const { toast } = useToast();

-  /**
-   * ✅ CRITICAL FIX: Cleanup orphaned Cloudflare images
-   * Called when DB transaction fails after successful uploads
-   */
-  const cleanupOrphanedImages = async (imageIds: string[]) => {
-    if (imageIds.length === 0) return;
-
-    logger.warn('Cleaning up orphaned images', { count: imageIds.length });
-
-    try {
-      await Promise.allSettled(
-        imageIds.map(id =>
-          invokeWithTracking('upload-image', { action: 'delete', imageId: id }, user?.id)
-        )
-      );
-      logger.info('Orphaned images cleaned up', { count: imageIds.length });
-    } catch (error) {
-      // Non-blocking cleanup - log but don't fail
-      logger.error('Failed to cleanup orphaned images', {
-        error: getErrorMessage(error),
-        imageIds
-      });
-    }
-  };
-
   const handleFilesSelected = (files: File[]) => {
     // Convert files to photo objects with object URLs for preview
     const newPhotos: PhotoWithCaption[] = files.map((file, index) => ({
@@ -449,22 +424,6 @@ export function UppyPhotoSubmissionUpload({
         throw photoSubmissionError || new Error("Failed to create photo submission");
       }

-      // ✅ CRITICAL FIX: Create submission_items record for moderation queue
-      const { error: submissionItemError } = await supabase
-        .from('submission_items')
-        .insert({
-          submission_id: submissionData.id,
-          item_type: 'photo',
-          action_type: 'create',
-          status: 'pending',
-          order_index: 0,
-          photo_submission_id: photoSubmissionData.id
-        });
-
-      if (submissionItemError) {
-        throw submissionItemError;
-      }
-
       // Insert only successful photo items
       const photoItems = successfulPhotos.map((photo, index) => ({
         photo_submission_id: photoSubmissionData.id,
@@ -568,13 +527,6 @@ export function UppyPhotoSubmissionUpload({
     } catch (error: unknown) {
       const errorMsg = sanitizeErrorMessage(error);

-      // ✅ CRITICAL FIX: Cleanup orphaned images on failure
-      if (orphanedCloudflareIds.length > 0) {
-        cleanupOrphanedImages(orphanedCloudflareIds).catch(() => {
-          // Non-blocking - log already handled in cleanupOrphanedImages
-        });
-      }
-
       logger.error('Photo submission failed', {
         error: errorMsg,
         photoCount: photos.length,
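// Note (editor, illustrative): the removed cleanup helper used
// Promise.allSettled so that one failed delete could not reject the whole
// batch. The general best-effort pattern, independent of this repo's helpers:
async function bestEffortDelete(ids: string[], del: (id: string) => Promise<unknown>): Promise<number> {
  const outcomes = await Promise.allSettled(ids.map(del));
  return outcomes.filter(o => o.status === 'rejected').length; // failure count for the caller to log
}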
@@ -306,6 +306,75 @@ export function useModerationActions(config: ModerationActionsConfig): Moderatio
       action: 'approved' | 'rejected';
       moderatorNotes?: string;
     }) => {
+      // Handle photo submissions
+      if (action === 'approved' && item.submission_type === 'photo') {
+        const { data: photoSubmission, error: fetchError } = await supabase
+          .from('photo_submissions')
+          .select(`
+            *,
+            items:photo_submission_items(*),
+            submission:content_submissions!inner(user_id)
+          `)
+          .eq('submission_id', item.id)
+          .single();
+
+        // Add explicit error handling
+        if (fetchError) {
+          throw new Error(`Failed to fetch photo submission: ${fetchError.message}`);
+        }
+
+        if (!photoSubmission) {
+          throw new Error('Photo submission not found');
+        }
+
+        // Type assertion with validation
+        const typedPhotoSubmission = photoSubmission as {
+          id: string;
+          entity_id: string;
+          entity_type: string;
+          items: Array<{
+            id: string;
+            cloudflare_image_id: string;
+            cloudflare_image_url: string;
+            caption?: string;
+            title?: string;
+            date_taken?: string;
+            date_taken_precision?: string;
+            order_index: number;
+          }>;
+          submission: { user_id: string };
+        };
+
+        // Validate required fields
+        if (!typedPhotoSubmission.items || typedPhotoSubmission.items.length === 0) {
+          throw new Error('No photo items found in submission');
+        }
+
+        const { data: existingPhotos } = await supabase
+          .from('photos')
+          .select('id')
+          .eq('submission_id', item.id);
+
+        if (!existingPhotos || existingPhotos.length === 0) {
+          const photoRecords = typedPhotoSubmission.items.map((photoItem) => ({
+            entity_id: typedPhotoSubmission.entity_id,
+            entity_type: typedPhotoSubmission.entity_type,
+            cloudflare_image_id: photoItem.cloudflare_image_id,
+            cloudflare_image_url: photoItem.cloudflare_image_url,
+            title: photoItem.title || null,
+            caption: photoItem.caption || null,
+            date_taken: photoItem.date_taken || null,
+            order_index: photoItem.order_index,
+            submission_id: item.id,
+            submitted_by: typedPhotoSubmission.submission?.user_id,
+            approved_by: user?.id,
+            approved_at: new Date().toISOString(),
+          }));
+
+          await supabase.from('photos').insert(photoRecords);
+        }
+      }
+
       // Check for submission items
       const { data: submissionItems } = await supabase
         .from('submission_items')
@@ -374,61 +443,15 @@ export function useModerationActions(config: ModerationActionsConfig): Moderatio
         });
         return;
       } else if (action === 'rejected') {
-        // Use atomic rejection transaction for submission items
-        const {
-          data,
-          error,
-          requestId,
-          attempts,
-          cached,
-          conflictRetries
-        } = await invokeWithResilience(
-          'process-selective-rejection',
-          {
-            itemIds: submissionItems.map((i) => i.id),
-            submissionId: item.id,
-            rejectionReason: moderatorNotes || 'Parent submission rejected',
-          },
-          'rejection',
-          submissionItems.map((i) => i.id),
-          config.user?.id,
-          3, // Max 3 conflict retries
-          30000 // 30s timeout
-        );
-
-        // Log retry attempts
-        if (attempts && attempts > 1) {
-          logger.log(`Rejection succeeded after ${attempts} network retries`, {
-            submissionId: item.id,
-            requestId,
-          });
-        }
-
-        if (conflictRetries && conflictRetries > 0) {
-          logger.log(`Resolved 409 conflict after ${conflictRetries} retries`, {
-            submissionId: item.id,
-            requestId,
-            cached: !!cached,
-          });
-        }
-
-        if (error) {
-          // Enhance error with context for better UI feedback
-          if (is409Conflict(error)) {
-            throw new Error(
-              'This rejection is being processed by another request. Please wait and try again if it does not complete.'
-            );
-          }
-          throw error;
-        }
-
-        toast({
-          title: cached ? 'Cached Result' : 'Submission Rejected',
-          description: cached
-            ? `Returned cached result for ${submissionItems.length} item(s)`
-            : `Successfully rejected ${submissionItems.length} item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
-        });
-        return;
+        await supabase
+          .from('submission_items')
+          .update({
+            status: 'rejected',
+            rejection_reason: moderatorNotes || 'Parent submission rejected',
+            updated_at: new Date().toISOString(),
+          })
+          .eq('submission_id', item.id)
+          .eq('status', 'pending');
       }
     }

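// Note (editor): the added approval path above is idempotent by construction;
// it inserts into `photos` only when no rows already exist for the submission:
//   const { data: existingPhotos } = await supabase
//     .from('photos').select('id').eq('submission_id', item.id);
//   if (!existingPhotos || existingPhotos.length === 0) { /* safe to insert */ }
// One caveat: the result of `supabase.from('photos').insert(photoRecords)` is
// not checked, so a failed insert would pass silently here.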
@@ -6345,52 +6345,16 @@ export type Database = {
       monitor_ban_attempts: { Args: never; Returns: undefined }
       monitor_failed_submissions: { Args: never; Returns: undefined }
       monitor_slow_approvals: { Args: never; Returns: undefined }
-      process_approval_transaction:
-        | {
-            Args: {
-              p_item_ids: string[]
-              p_moderator_id: string
-              p_request_id?: string
-              p_submission_id: string
-              p_submitter_id: string
-            }
-            Returns: Json
-          }
-        | {
-            Args: {
-              p_item_ids: string[]
-              p_moderator_id: string
-              p_parent_span_id?: string
-              p_request_id?: string
-              p_submission_id: string
-              p_submitter_id: string
-              p_trace_id?: string
-            }
-            Returns: Json
-          }
-      process_rejection_transaction:
-        | {
-            Args: {
-              p_item_ids: string[]
-              p_moderator_id: string
-              p_rejection_reason: string
-              p_request_id?: string
-              p_submission_id: string
-            }
-            Returns: Json
-          }
-        | {
-            Args: {
-              p_item_ids: string[]
-              p_moderator_id: string
-              p_parent_span_id?: string
-              p_rejection_reason: string
-              p_request_id?: string
-              p_submission_id: string
-              p_trace_id?: string
-            }
-            Returns: Json
-          }
+      process_approval_transaction: {
+        Args: {
+          p_item_ids: string[]
+          p_moderator_id: string
+          p_request_id?: string
+          p_submission_id: string
+          p_submitter_id: string
+        }
+        Returns: Json
+      }
       release_expired_locks: { Args: never; Returns: number }
       release_submission_lock: {
         Args: { moderator_id: string; submission_id: string }
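// Note (editor, illustrative): with the overloaded signatures on the left of
// this hunk, the generated types would let callers pass tracing parameters
// straight through supabase.rpc; a hedged sketch of such a call (not repo code):
//   const { data, error } = await supabase.rpc('process_approval_transaction', {
//     p_item_ids: itemIds,
//     p_moderator_id: moderatorId,
//     p_submission_id: submissionId,
//     p_submitter_id: submitterId,
//     p_trace_id: traceId,       // traced overload only
//     p_parent_span_id: spanId,  // traced overload only
//   });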
@@ -33,7 +33,7 @@ export async function invokeWithTracking<T = any>(
   timeout: number = 30000,
   retryOptions?: Partial<RetryOptions>,
   customHeaders?: Record<string, string>
-): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number; traceId?: string }> {
+): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number }> {
   // Configure retry options with defaults
   const effectiveRetryOptions: RetryOptions = {
     maxAttempts: retryOptions?.maxAttempts ?? 3,
@@ -75,30 +75,11 @@ export async function invokeWithTracking<T = any>(
   const controller = new AbortController();
   const timeoutId = setTimeout(() => controller.abort(), timeout);

-  // Generate W3C Trace Context header
-  const effectiveTraceId = context.traceId || crypto.randomUUID();
-  const spanId = crypto.randomUUID();
-  const traceparent = `00-${effectiveTraceId}-${spanId}-01`;
-
-  // Add breadcrumb with trace context
-  breadcrumb.apiCall(
-    `/functions/${functionName}`,
-    'POST',
-    undefined
-  );
-
   try {
     const { data, error } = await supabase.functions.invoke<T>(functionName, {
-      body: {
-        ...payload,
-        clientRequestId: context.requestId,
-        traceId: effectiveTraceId,
-      },
+      body: { ...payload, clientRequestId: context.requestId },
       signal: controller.signal,
-      headers: {
-        ...customHeaders,
-        'traceparent': traceparent,
-      },
+      headers: customHeaders,
     });

     clearTimeout(timeoutId);
@@ -122,15 +103,7 @@ export async function invokeWithTracking<T = any>(
       }
     );

-    return {
-      data: result,
-      error: null,
-      requestId,
-      duration,
-      attempts: attemptCount,
-      status: 200,
-      traceId,
-    };
+    return { data: result, error: null, requestId, duration, attempts: attemptCount, status: 200 };
   } catch (error: unknown) {
     // Handle AbortError specifically
     if (error instanceof Error && error.name === 'AbortError') {
@@ -144,22 +117,20 @@ export async function invokeWithTracking<T = any>(
         duration: timeout,
         attempts: attemptCount,
         status: 408,
-        traceId: undefined,
       };
     }

     const errorMessage = getErrorMessage(error);
     return {
       data: null,
       error: { message: errorMessage, status: (error as any)?.status },
       requestId: 'unknown',
       duration: 0,
       attempts: attemptCount,
       status: (error as any)?.status,
-      traceId: undefined,
-    };
-  }
+    };
   }
+}

 /**
  * Invoke multiple edge functions in parallel with batch tracking
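// Note (editor): the removed code built the header as
// `00-${crypto.randomUUID()}-${crypto.randomUUID()}-01`. The W3C Trace Context
// spec expects trace-id as 32 lowercase hex chars and parent-id as 16, with no
// dashes, so UUID strings are not strictly spec-compliant. A sketch of
// spec-shaped ids using the Web Crypto API:
const randomHex = (bytes: number): string =>
  Array.from(crypto.getRandomValues(new Uint8Array(bytes)), b => b.toString(16).padStart(2, '0')).join('');
const traceparent = `00-${randomHex(16)}-${randomHex(8)}-01`; // version-traceid-parentid-flags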
@@ -9,7 +9,7 @@ import { logger } from './logger';
|
|||||||
import { handleError } from './errorHandler';
|
import { handleError } from './errorHandler';
|
||||||
import type { TimelineEventFormData, EntityType } from '@/types/timeline';
|
import type { TimelineEventFormData, EntityType } from '@/types/timeline';
|
||||||
import { breadcrumb } from './errorBreadcrumbs';
|
import { breadcrumb } from './errorBreadcrumbs';
|
||||||
import { isRetryableError, isRateLimitError, extractRetryAfter } from './retryHelpers';
|
import { isRetryableError } from './retryHelpers';
|
||||||
import {
|
import {
|
||||||
validateParkCreateFields,
|
validateParkCreateFields,
|
||||||
validateRideCreateFields,
|
validateRideCreateFields,
|
||||||
@@ -773,8 +773,6 @@ export async function submitParkCreation(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Create submission with retry logic
|
// Create submission with retry logic
|
||||||
const retryId = crypto.randomUUID();
|
|
||||||
|
|
||||||
const result = await withRetry(
|
const result = await withRetry(
|
||||||
async () => {
|
async () => {
|
||||||
// Create the main submission record
|
// Create the main submission record
|
||||||
@@ -884,30 +882,12 @@ export async function submitParkCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying park submission', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying park submission', { attempt, delay });

-          // Emit event for UI indicator with rate limit info
+          // Emit event for UI indicator
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'park',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'park' }
           }));
         },
         shouldRetry: (error) => {
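After this change the `submission-retry` event for park creation carries only `attempt`, `maxAttempts`, `delay`, and `type`. A minimal listener for the new payload could look like the following; the listener is illustrative, not part of the diff:

```ts
// Illustrative consumer of the simplified event; not part of the diff.
window.addEventListener('submission-retry', (event) => {
  const { attempt, maxAttempts, delay, type } = (event as CustomEvent).detail;
  console.log(`${type} submission: retry ${attempt}/${maxAttempts} in ${delay}ms`);
});
```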
@@ -916,35 +896,18 @@ export async function submitParkCreation(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
         if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }

       return isRetryableError(error);
     }
   }
-  ).then((data) => {
-    // Emit success event
-    window.dispatchEvent(new CustomEvent('submission-retry-success', {
-      detail: { id: retryId }
-    }));
-    return data;
-  }).catch((error) => {
-    const errorId = handleError(error, {
+  ).catch((error) => {
+    handleError(error, {
       action: 'Park submission',
       metadata: { retriesExhausted: true },
     });
-
-    // Emit failure event
-    window.dispatchEvent(new CustomEvent('submission-retry-failed', {
-      detail: { id: retryId, errorId }
-    }));
-
     throw error;
   });

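The same shrinking list of non-retryable message checks recurs in every `shouldRetry` callback below; only 'required', 'banned', 'slug', and (for parks and rides) 'permission' survive. The repeated pattern could be captured in one helper, sketched here as a hypothetical refactor rather than anything present in the diff:

```ts
// Hypothetical shared helper; the diff keeps these checks inline instead.
const NON_RETRYABLE_HINTS = ['required', 'banned', 'slug', 'permission'];

function isNonRetryableMessage(error: unknown): boolean {
  if (!(error instanceof Error)) return false;
  const message = error.message.toLowerCase();
  return NON_RETRYABLE_HINTS.some((hint) => message.includes(hint));
}
```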
@@ -1140,31 +1103,17 @@ export async function submitParkUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
           logger.warn('Retrying park update submission', {
             attempt,
             delay,
             parkId,
-            isRateLimit,
-            retryAfter,
             error: error instanceof Error ? error.message : String(error)
           });

-          // Emit event for UI retry indicator with rate limit info
+          // Emit event for UI retry indicator
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'park update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'park update' }
           }));
         },
         shouldRetry: (error) => {
@@ -1557,30 +1506,12 @@ export async function submitRideCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying ride submission', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying ride submission', { attempt, delay });

-          // Emit event for UI indicator with rate limit info
+          // Emit event for UI indicator
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'ride',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'ride' }
           }));
         },
         shouldRetry: (error) => {
@@ -1589,13 +1520,8 @@ export async function submitRideCreation(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
         if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }

       return isRetryableError(error);
@@ -1788,31 +1714,17 @@ export async function submitRideUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
           logger.warn('Retrying ride update submission', {
             attempt,
             delay,
             rideId,
-            isRateLimit,
-            retryAfter,
             error: error instanceof Error ? error.message : String(error)
           });

-          // Emit event for UI retry indicator with rate limit info
+          // Emit event for UI retry indicator
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'ride update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'ride update' }
           }));
         },
         shouldRetry: (error) => {
@@ -1821,13 +1733,8 @@ export async function submitRideUpdate(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
         if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
         if (message.includes('not found')) return false;
         if (message.includes('not allowed')) return false;
       }
@@ -1931,8 +1838,6 @@ export async function submitRideModelCreation(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       // Create the main submission record
@@ -2020,28 +1925,10 @@ export async function submitRideModelCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying ride model submission', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying ride model submission', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'ride_model',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'ride_model' }
           }));
         },
         shouldRetry: (error) => {
@@ -2049,36 +1936,12 @@ export async function submitRideModelCreation(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
-        if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }
       return isRetryableError(error);
     }
   }
-  ).then((data) => {
-    // Emit success event
-    window.dispatchEvent(new CustomEvent('submission-retry-success', {
-      detail: { id: retryId }
-    }));
-    return data;
-  }).catch((error) => {
-    const errorId = handleError(error, {
-      action: 'Ride model submission',
-      metadata: { retriesExhausted: true },
-    });
-
-    // Emit failure event
-    window.dispatchEvent(new CustomEvent('submission-retry-failed', {
-      detail: { id: retryId, errorId }
-    }));
-
-    throw error;
-  });
+  );

   return result;
 }
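For ride models (and the manufacturer, operator, designer, and property-owner helpers that follow) the entire `.then/.catch` chain is removed, so exhausted retries now reject straight through to the caller instead of emitting success/failure events. A caller would now handle that itself, roughly as sketched here; `handleError`'s options shape is taken from the removed code, while the surrounding try/catch is an assumed usage, not code from the diff:

```ts
// Illustrative caller-side handling after the change.
try {
  const result = await submitRideModelCreation(data, userId);
  console.log('Submitted:', result.submissionId);
} catch (error) {
  handleError(error, { action: 'Ride model submission', metadata: { retriesExhausted: true } });
}
```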
@@ -2143,8 +2006,6 @@ export async function submitRideModelUpdate(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       // Create the main submission record
@@ -2230,28 +2091,10 @@ export async function submitRideModelUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying ride model update', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying ride model update', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'ride_model_update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'ride_model_update' }
           }));
         },
         shouldRetry: (error) => {
@@ -2259,34 +2102,12 @@ export async function submitRideModelUpdate(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
-        if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }
       return isRetryableError(error);
     }
   }
-  ).then((data) => {
-    window.dispatchEvent(new CustomEvent('submission-retry-success', {
-      detail: { id: retryId }
-    }));
-    return data;
-  }).catch((error) => {
-    const errorId = handleError(error, {
-      action: 'Ride model update submission',
-      metadata: { retriesExhausted: true },
-    });
-
-    window.dispatchEvent(new CustomEvent('submission-retry-failed', {
-      detail: { id: retryId, errorId }
-    }));
-
-    throw error;
-  });
+  );

   return result;
 }
@@ -2349,8 +2170,6 @@ export async function submitManufacturerCreation(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -2390,28 +2209,10 @@ export async function submitManufacturerCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying manufacturer submission', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying manufacturer submission', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'manufacturer',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'manufacturer' }
           }));
         },
         shouldRetry: (error) => {
@@ -2419,34 +2220,12 @@ export async function submitManufacturerCreation(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
-        if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }
       return isRetryableError(error);
     }
   }
-  ).then((data) => {
-    window.dispatchEvent(new CustomEvent('submission-retry-success', {
-      detail: { id: retryId }
-    }));
-    return data;
-  }).catch((error) => {
-    const errorId = handleError(error, {
-      action: 'Manufacturer submission',
-      metadata: { retriesExhausted: true },
-    });
-
-    window.dispatchEvent(new CustomEvent('submission-retry-failed', {
-      detail: { id: retryId, errorId }
-    }));
-
-    throw error;
-  });
+  );

   return result;
 }
@@ -2504,8 +2283,6 @@ export async function submitManufacturerUpdate(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -2543,28 +2320,10 @@ export async function submitManufacturerUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying manufacturer update', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying manufacturer update', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'manufacturer_update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'manufacturer_update' }
           }));
         },
         shouldRetry: (error) => {
@@ -2635,8 +2394,6 @@ export async function submitDesignerCreation(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -2676,28 +2433,10 @@ export async function submitDesignerCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying designer submission', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying designer submission', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'designer',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'designer' }
           }));
         },
         shouldRetry: (error) => {
@@ -2768,8 +2507,6 @@ export async function submitDesignerUpdate(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -2807,28 +2544,10 @@ export async function submitDesignerUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying designer update', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying designer update', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'designer_update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'designer_update' }
           }));
         },
         shouldRetry: (error) => {
@@ -2899,8 +2618,6 @@ export async function submitOperatorCreation(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -2940,15 +2657,10 @@ export async function submitOperatorCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          logger.warn('Retrying operator submission', {
-            attempt,
-            delay,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying operator submission', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'operator' }
+            detail: { attempt, maxAttempts: 3, delay, type: 'operator' }
           }));
         },
         shouldRetry: (error) => {
@@ -2956,34 +2668,12 @@ export async function submitOperatorCreation(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
-        if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }
       return isRetryableError(error);
     }
   }
-  ).then((data) => {
-    window.dispatchEvent(new CustomEvent('submission-retry-success', {
-      detail: { id: retryId }
-    }));
-    return data;
-  }).catch((error) => {
-    const errorId = handleError(error, {
-      action: 'Operator submission',
-      metadata: { retriesExhausted: true },
-    });
-
-    window.dispatchEvent(new CustomEvent('submission-retry-failed', {
-      detail: { id: retryId, errorId }
-    }));
-
-    throw error;
-  });
+  );

   return result;
 }
@@ -3041,8 +2731,6 @@ export async function submitOperatorUpdate(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -3080,28 +2768,10 @@ export async function submitOperatorUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying operator update', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying operator update', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'operator_update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'operator_update' }
           }));
         },
         shouldRetry: (error) => {
@@ -3172,8 +2842,6 @@ export async function submitPropertyOwnerCreation(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -3213,15 +2881,10 @@ export async function submitPropertyOwnerCreation(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          logger.warn('Retrying property owner submission', {
-            attempt,
-            delay,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying property owner submission', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'property_owner' }
+            detail: { attempt, maxAttempts: 3, delay, type: 'property_owner' }
           }));
         },
         shouldRetry: (error) => {
@@ -3229,34 +2892,12 @@ export async function submitPropertyOwnerCreation(
         const message = error.message.toLowerCase();
         if (message.includes('required')) return false;
         if (message.includes('banned')) return false;
-        if (message.includes('suspended')) return false;
         if (message.includes('slug')) return false;
-        if (message.includes('already exists')) return false;
-        if (message.includes('duplicate')) return false;
-        if (message.includes('permission')) return false;
-        if (message.includes('forbidden')) return false;
-        if (message.includes('unauthorized')) return false;
       }
       return isRetryableError(error);
     }
   }
-  ).then((data) => {
-    window.dispatchEvent(new CustomEvent('submission-retry-success', {
-      detail: { id: retryId }
-    }));
-    return data;
-  }).catch((error) => {
-    const errorId = handleError(error, {
-      action: 'Property owner submission',
-      metadata: { retriesExhausted: true },
-    });
-
-    window.dispatchEvent(new CustomEvent('submission-retry-failed', {
-      detail: { id: retryId, errorId }
-    }));
-
-    throw error;
-  });
+  );

   return result;
 }
@@ -3314,8 +2955,6 @@ export async function submitPropertyOwnerUpdate(

   // Submit with retry logic
   breadcrumb.apiCall('content_submissions', 'INSERT');
-  const retryId = crypto.randomUUID();
-
   const result = await withRetry(
     async () => {
       const { data: submissionData, error: submissionError } = await supabase
@@ -3353,28 +2992,10 @@ export async function submitPropertyOwnerUpdate(
       },
       {
         maxAttempts: 3,
-        baseDelay: 1000,
         onRetry: (attempt, error, delay) => {
-          const isRateLimit = isRateLimitError(error);
-          const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
-          logger.warn('Retrying property owner update', {
-            attempt,
-            delay,
-            isRateLimit,
-            retryAfter,
-            error: error instanceof Error ? error.message : String(error)
-          });
+          logger.warn('Retrying property owner update', { attempt, delay });
           window.dispatchEvent(new CustomEvent('submission-retry', {
-            detail: {
-              id: retryId,
-              attempt,
-              maxAttempts: 3,
-              delay,
-              type: 'property_owner_update',
-              isRateLimit,
-              retryAfter
-            }
+            detail: { attempt, maxAttempts: 3, delay, type: 'property_owner_update' }
           }));
         },
         shouldRetry: (error) => {
@@ -1,152 +0,0 @@
/**
 * Test Error Formatting Utility
 *
 * Provides robust error formatting for test results to avoid "[object Object]" messages
 * Includes pattern matching for common Supabase/Postgres constraint violations
 */

/**
 * Error pattern matchers for common database constraint violations
 */
const ERROR_PATTERNS = [
  {
    // RLS policy violations
    pattern: /new row violates row-level security policy for table "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `RLS Policy Violation: Cannot insert into table "${match[1]}". Check that RLS policies allow this operation and user has proper authentication.`
  },
  {
    // NOT NULL constraint violations
    pattern: /null value in column "(\w+)" of relation "(\w+)" violates not-null constraint/i,
    format: (match: RegExpMatchArray) =>
      `NOT NULL Constraint: Column "${match[1]}" in table "${match[2]}" cannot be null. Provide a value for this required field.`
  },
  {
    // UNIQUE constraint violations
    pattern: /duplicate key value violates unique constraint "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `UNIQUE Constraint: Duplicate value violates constraint "${match[1]}". This value already exists in the database.`
  },
  {
    // Foreign key violations
    pattern: /insert or update on table "(\w+)" violates foreign key constraint "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `Foreign Key Violation: Table "${match[1]}" references non-existent record (constraint: "${match[2]}"). Ensure the referenced entity exists first.`
  },
  {
    // Foreign key violations (alternative format)
    pattern: /violates foreign key constraint/i,
    format: () =>
      `Foreign Key Violation: Referenced record does not exist. Create the parent entity before creating this dependent entity.`
  },
  {
    // Check constraint violations
    pattern: /new row for relation "(\w+)" violates check constraint "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `Check Constraint: Validation failed for table "${match[1]}" (constraint: "${match[2]}"). The provided value does not meet validation requirements.`
  },
  {
    // Column does not exist
    pattern: /column "(\w+)" of relation "(\w+)" does not exist/i,
    format: (match: RegExpMatchArray) =>
      `Schema Error: Column "${match[1]}" does not exist in table "${match[2]}". Check database schema or migration status.`
  },
  {
    // Could not find column in schema cache
    pattern: /Could not find the '(\w+)' column of '(\w+)' in the schema cache/i,
    format: (match: RegExpMatchArray) =>
      `Schema Cache Error: Column "${match[1]}" not found in table "${match[2]}". The schema may have changed - try refreshing the database connection.`
  },
  {
    // Table does not exist
    pattern: /relation "(\w+)" does not exist/i,
    format: (match: RegExpMatchArray) =>
      `Schema Error: Table "${match[1]}" does not exist. Run migrations or check database schema.`
  },
  {
    // Permission denied
    pattern: /permission denied for (?:table|relation) "?(\w+)"?/i,
    format: (match: RegExpMatchArray) =>
      `Permission Denied: Insufficient permissions to access table "${match[1]}". Check RLS policies and user roles.`
  },
  {
    // Rate limit errors
    pattern: /Rate limit exceeded\. Please wait (\d+) seconds?/i,
    format: (match: RegExpMatchArray) =>
      `Rate Limited: Too many requests. Wait ${match[1]} seconds before retrying.`
  },
  {
    // Rate limit errors (alternative format)
    pattern: /Too many submissions in a short time\. Please wait (\d+) seconds?/i,
    format: (match: RegExpMatchArray) =>
      `Rate Limited: Submission throttled. Wait ${match[1]} seconds before submitting again.`
  }
];

/**
 * Format error for test result display
 * Handles Error objects, PostgresError objects, and plain objects
 *
 * @param error - Any error value thrown in a test
 * @returns Formatted, human-readable error string
 */
export function formatTestError(error: unknown): string {
  let errorMessage = '';

  // Extract base error message
  if (error instanceof Error) {
    errorMessage = error.message;
  } else if (typeof error === 'object' && error !== null) {
    const err = error as any;

    // Try common error message properties
    if (err.message && typeof err.message === 'string') {
      errorMessage = err.message;

      // Include additional Supabase error details if present
      if (err.details && typeof err.details === 'string') {
        errorMessage += ` | Details: ${err.details}`;
      }
      if (err.hint && typeof err.hint === 'string') {
        errorMessage += ` | Hint: ${err.hint}`;
      }
      if (err.code && typeof err.code === 'string') {
        errorMessage += ` | Code: ${err.code}`;
      }
    }
    // Some errors nest the actual error in an 'error' property
    else if (err.error) {
      return formatTestError(err.error);
    }
    // Some APIs use 'msg' instead of 'message'
    else if (err.msg && typeof err.msg === 'string') {
      errorMessage = err.msg;
    }
    // Last resort: stringify the entire object
    else {
      try {
        const stringified = JSON.stringify(error, null, 2);
        errorMessage = stringified.length > 500
          ? stringified.substring(0, 500) + '... (truncated)'
          : stringified;
      } catch {
        // JSON.stringify can fail on circular references
        errorMessage = String(error);
      }
    }
  } else {
    // Primitive values (strings, numbers, etc.)
    errorMessage = String(error);
  }

  // Apply pattern matching to format known constraint violations
  for (const { pattern, format } of ERROR_PATTERNS) {
    const match = errorMessage.match(pattern);
    if (match) {
      return format(match);
    }
  }

  // Return original message if no patterns matched
  return errorMessage;
}
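The deleted formatter resolves raw Postgres/Supabase errors into readable strings before any other handling. For reference, this is how it behaved on a typical constraint violation; the input object is an invented example that matches the UNIQUE pattern defined above:

```ts
// Example input/output for the deleted formatTestError; the error object
// itself is invented for illustration.
const raw = {
  message: 'duplicate key value violates unique constraint "parks_slug_key"',
  code: '23505',
};
console.log(formatTestError(raw));
// => UNIQUE Constraint: Duplicate value violates constraint "parks_slug_key".
//    This value already exists in the database.
```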
@@ -1,76 +0,0 @@
/**
 * Test Result Formatters
 *
 * Utilities for formatting test results into different formats for easy sharing and debugging.
 */

import type { TestResult } from './testRunner';

export function formatResultsAsMarkdown(
  results: TestResult[],
  summary: { total: number; passed: number; failed: number; skipped: number; totalDuration: number },
  failedOnly: boolean = false
): string {
  const timestamp = new Date().toISOString();
  const title = failedOnly ? 'Failed Test Results' : 'Test Results';

  let markdown = `# ${title} - ${timestamp}\n\n`;

  // Summary section
  markdown += `## Summary\n`;
  markdown += `✅ Passed: ${summary.passed}\n`;
  markdown += `❌ Failed: ${summary.failed}\n`;
  markdown += `⏭️ Skipped: ${summary.skipped}\n`;
  markdown += `⏱️ Duration: ${(summary.totalDuration / 1000).toFixed(2)}s\n\n`;

  // Results by status
  if (!failedOnly && summary.failed > 0) {
    markdown += `## Failed Tests\n\n`;
    results.filter(r => r.status === 'fail').forEach(result => {
      markdown += formatTestResultMarkdown(result);
    });
  }

  if (failedOnly) {
    results.forEach(result => {
      markdown += formatTestResultMarkdown(result);
    });
  } else {
    // Include passed tests in summary
    if (summary.passed > 0) {
      markdown += `## Passed Tests\n\n`;
      results.filter(r => r.status === 'pass').forEach(result => {
        markdown += `### ✅ ${result.name} (${result.suite})\n`;
        markdown += `**Duration:** ${result.duration}ms\n\n`;
      });
    }
  }

  return markdown;
}

export function formatSingleTestAsMarkdown(result: TestResult): string {
  return formatTestResultMarkdown(result);
}

function formatTestResultMarkdown(result: TestResult): string {
  const icon = result.status === 'fail' ? '❌' : result.status === 'pass' ? '✅' : '⏭️';

  let markdown = `### ${icon} ${result.name} (${result.suite})\n`;
  markdown += `**Duration:** ${result.duration}ms\n`;
  markdown += `**Status:** ${result.status}\n`;

  if (result.error) {
    markdown += `**Error:** ${result.error}\n\n`;
  }

  if (result.stack) {
    markdown += `**Stack Trace:**\n\`\`\`\n${result.stack}\n\`\`\`\n\n`;
  }

  if (result.details) {
    markdown += `**Details:**\n\`\`\`json\n${JSON.stringify(result.details, null, 2)}\n\`\`\`\n\n`;
  }

  return markdown;
}
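A typical call to the deleted formatter is sketched below. The `TestResult` literals are invented and cast, because `testRunner.ts` (where the type lives) is not part of this diff:

```ts
// Illustrative call; result objects are made up to match the fields the
// formatter reads (name, suite, status, duration, error).
import type { TestResult } from './testRunner';

const results = [
  { name: 'approves park', suite: 'approval-pipeline', status: 'pass', duration: 120 },
  { name: 'rejects duplicate slug', suite: 'approval-pipeline', status: 'fail', duration: 85, error: 'slug already exists' },
] as TestResult[];

const report = formatResultsAsMarkdown(results, {
  total: 2, passed: 1, failed: 1, skipped: 0, totalDuration: 205,
});
```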
@@ -1,697 +0,0 @@
/**
 * Approval Pipeline Test Helpers
 *
 * Reusable helper functions for approval pipeline integration tests.
 * These helpers abstract common patterns for submission creation, approval,
 * and verification across all entity types.
 */

import { supabase } from '@/lib/supabaseClient';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
import {
  submitParkCreation,
  submitRideCreation,
  submitManufacturerCreation,
  submitOperatorCreation,
  submitDesignerCreation,
  submitPropertyOwnerCreation,
  submitRideModelCreation
} from '@/lib/entitySubmissionHelpers';

// Re-export formatTestError for use in test suites
export { formatTestError } from '../formatTestError';

// ============================================
// AUTHENTICATION
// ============================================

/**
 * Get current user auth token for edge function calls
 */
export async function getAuthToken(): Promise<string> {
  const { data: { session }, error } = await supabase.auth.getSession();
  if (error || !session) {
    throw new Error('Not authenticated - cannot run approval tests');
  }
  return session.access_token;
}

/**
 * Get current user ID
 */
export async function getCurrentUserId(): Promise<string> {
  const { data: { user }, error } = await supabase.auth.getUser();
  if (error || !user) {
    throw new Error('Not authenticated - cannot get user ID');
  }
  return user.id;
}

// ============================================
// EDGE FUNCTION CONFIGURATION
// ============================================

/**
 * Get edge function base URL (hardcoded per project requirements)
 */
export function getEdgeFunctionUrl(): string {
  return 'https://api.thrillwiki.com/functions/v1';
}

/**
 * Get Supabase anon key (hardcoded per project requirements)
 */
export function getSupabaseAnonKey(): string {
  return 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImRka2VueWdwcHlzZ3NlcmJ5aW9hIiwicm9sZSI6ImFub24iLCJpYXQiOjE3Mjg0ODY0MTIsImV4cCI6MjA0NDA2MjQxMn0.0qfDbOvh-Hs5n7HHZ0cRQzH5oEL_1D7kj7v6nh4PqgI';
}

// ============================================
// TEST DATA GENERATORS
// ============================================

/**
 * Generate unique park submission data
 */
export function generateUniqueParkData(testId: string): any {
  const timestamp = Date.now();
  const slug = `test-park-${testId}-${timestamp}`;

  return {
    name: `Test Park ${testId} ${timestamp}`,
    slug,
    description: `Test park for ${testId}`,
    park_type: 'theme_park',
    status: 'operating',
    opening_date: '2000-01-01',
    opening_date_precision: 'year',
    location: {
      name: 'Test Location',
      city: 'Test City',
      country: 'US',
      latitude: 40.7128,
      longitude: -74.0060,
    },
    is_test_data: true,
  };
}

/**
 * Generate unique ride submission data
 */
export function generateUniqueRideData(parkId: string, testId: string): any {
  const timestamp = Date.now();
  const slug = `test-ride-${testId}-${timestamp}`;

  return {
    name: `Test Ride ${testId} ${timestamp}`,
    slug,
    description: `Test ride for ${testId}`,
    category: 'roller_coaster',
    status: 'operating',
    park_id: parkId,
    opening_date: '2005-01-01',
    opening_date_precision: 'year',
    max_speed_kmh: 100,
    max_height_meters: 50,
    length_meters: 1000,
    is_test_data: true,
  };
}

/**
 * Generate unique company submission data
 */
export function generateUniqueCompanyData(companyType: string, testId: string): any {
  const timestamp = Date.now();
  const slug = `test-${companyType}-${testId}-${timestamp}`;

  return {
    name: `Test ${companyType} ${testId} ${timestamp}`,
    slug,
    description: `Test ${companyType} for ${testId}`,
    person_type: 'company',
    founded_year: 1990,
    is_test_data: true,
  };
}

/**
 * Generate unique ride model submission data
 */
export function generateUniqueRideModelData(manufacturerId: string, testId: string): any {
  const timestamp = Date.now();
  const slug = `test-model-${testId}-${timestamp}`;

  return {
    name: `Test Model ${testId} ${timestamp}`,
    slug,
    manufacturer_id: manufacturerId,
    category: 'roller_coaster',
    ride_type: 'steel',
    description: `Test ride model for ${testId}`,
    is_test_data: true,
  };
}

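The generators are designed to chain: each dependent entity takes the real ID of its parent. A typical setup is sketched below; the test label and parent IDs are invented placeholders, not values from this file:

```ts
// Illustrative chaining of the deleted generators.
declare const existingParkId: string;    // assumed to exist already
declare const manufacturerId: string;    // assumed to exist already

const parkData = generateUniqueParkData('approval-1');
const rideData = generateUniqueRideData(existingParkId, 'approval-1');
const modelData = generateUniqueRideModelData(manufacturerId, 'approval-1');
```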
// ============================================
|
|
||||||
// SUBMISSION CREATION HELPERS
|
|
||||||
// ============================================
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a test park submission
|
|
||||||
*/
|
|
||||||
export async function createTestParkSubmission(
|
|
||||||
data: any,
|
|
||||||
userId: string,
|
|
||||||
tracker: TestDataTracker
|
|
||||||
): Promise<{ submissionId: string; itemId: string }> {
|
|
||||||
const result = await submitParkCreation(data, userId);
|
|
||||||
|
|
||||||
if (!result.submissionId) {
|
|
||||||
throw new Error('Park submission creation failed - no submission ID returned');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Track submission for cleanup
|
|
||||||
tracker.track('content_submissions', result.submissionId);
|
|
||||||
|
|
||||||
// Get the submission item ID
|
|
||||||
const { data: items } = await supabase
|
|
||||||
.from('submission_items')
|
|
||||||
.select('id')
|
|
||||||
.eq('submission_id', result.submissionId)
|
|
||||||
.single();
|
|
||||||
|
|
||||||
if (!items?.id) {
|
|
||||||
throw new Error('Failed to get submission item ID');
|
|
||||||
}
|
|
||||||
|
|
||||||
tracker.track('submission_items', items.id);
|
|
||||||
|
|
||||||
return {
|
|
||||||
submissionId: result.submissionId,
|
|
||||||
itemId: items.id,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a test ride submission
|
|
||||||
*/
|
|
||||||
export async function createTestRideSubmission(
|
|
||||||
data: any,
|
|
||||||
userId: string,
|
|
||||||
tracker: TestDataTracker
|
|
||||||
): Promise<{ submissionId: string; itemId: string }> {
|
|
||||||
const result = await submitRideCreation(data, userId);
|
|
||||||
|
|
||||||
if (!result.submissionId) {
|
|
||||||
throw new Error('Ride submission creation failed - no submission ID returned');
|
|
||||||
}
|
|
||||||
|
|
||||||
tracker.track('content_submissions', result.submissionId);
|
|
||||||
|
|
||||||
const { data: items } = await supabase
|
|
||||||
.from('submission_items')
|
|
||||||
.select('id')
|
|
||||||
.eq('submission_id', result.submissionId)
|
|
||||||
.single();
|
|
||||||
|
|
||||||
if (!items?.id) {
|
|
||||||
throw new Error('Failed to get submission item ID');
|
|
||||||
}
|
|
||||||
|
|
||||||
tracker.track('submission_items', items.id);
|
|
||||||
|
|
||||||
return {
|
|
||||||
submissionId: result.submissionId,
|
|
||||||
itemId: items.id,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a test company submission
|
|
||||||
*/
|
|
||||||
export async function createTestCompanySubmission(
|
|
||||||
companyType: 'manufacturer' | 'operator' | 'designer' | 'property_owner',
|
|
||||||
data: any,
|
|
||||||
userId: string,
|
|
||||||
tracker: TestDataTracker
|
|
||||||
): Promise<{ submissionId: string; itemId: string }> {
|
|
||||||
// Call the appropriate company type-specific submission function
|
|
||||||
let result: { submitted: boolean; submissionId: string };
|
|
||||||
|
|
||||||
switch (companyType) {
|
|
||||||
case 'manufacturer':
|
|
||||||
result = await submitManufacturerCreation(data, userId);
|
|
||||||
break;
|
|
||||||
case 'operator':
|
|
||||||
result = await submitOperatorCreation(data, userId);
|
|
||||||
break;
|
|
||||||
case 'designer':
|
|
||||||
result = await submitDesignerCreation(data, userId);
|
|
||||||
break;
|
|
||||||
case 'property_owner':
|
|
||||||
      result = await submitPropertyOwnerCreation(data, userId);
      break;
    default:
      throw new Error(`Unknown company type: ${companyType}`);
  }

  if (!result.submissionId) {
    throw new Error('Company submission creation failed - no submission ID returned');
  }

  tracker.track('content_submissions', result.submissionId);

  const { data: items } = await supabase
    .from('submission_items')
    .select('id')
    .eq('submission_id', result.submissionId)
    .single();

  if (!items?.id) {
    throw new Error('Failed to get submission item ID');
  }

  tracker.track('submission_items', items.id);

  return {
    submissionId: result.submissionId,
    itemId: items.id,
  };
}

/**
 * Create a test ride model submission
 */
export async function createTestRideModelSubmission(
  data: any,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  const result = await submitRideModelCreation(data, userId);

  if (!result.submissionId) {
    throw new Error('Ride model submission creation failed - no submission ID returned');
  }

  tracker.track('content_submissions', result.submissionId);

  const { data: items } = await supabase
    .from('submission_items')
    .select('id')
    .eq('submission_id', result.submissionId)
    .single();

  if (!items?.id) {
    throw new Error('Failed to get submission item ID');
  }

  tracker.track('submission_items', items.id);

  return {
    submissionId: result.submissionId,
    itemId: items.id,
  };
}

/**
 * Create a composite submission with dependencies
 */
export async function createCompositeSubmission(
  primaryEntity: { type: 'park' | 'ride'; data: any },
  dependencies: Array<{ type: string; data: any; tempId: string; companyType?: string }>,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemIds: string[] }> {
  // Create main submission
  const { data: submission, error: submissionError } = await supabase
    .from('content_submissions')
    .insert({
      user_id: userId,
      submission_type: primaryEntity.type === 'park' ? 'park' : 'ride',
      status: 'pending',
      is_test_data: true,
    })
    .select()
    .single();

  if (submissionError || !submission) {
    throw new Error(`Failed to create submission: ${submissionError?.message}`);
  }

  tracker.track('content_submissions', submission.id);

  const itemIds: string[] = [];

  // Note: this is a simplified composite submission creation.
  // The production implementation uses specialized submission tables
  // (park_submissions, company_submissions, etc.), which are more complex.
  // For testing purposes we track items here, but this path is incomplete.

  // Track submission for cleanup
  itemIds.push(submission.id);

  return {
    submissionId: submission.id,
    itemIds,
  };
}
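
// A fully relational path would attach each dependency as its own
// submission_items row. Hypothetical sketch only: the depends_on and
// order_index columns are real (the dependency tests below select them),
// but this exact payload is an assumption, not the production code path:
//
//   await supabase.from('submission_items').insert({
//     submission_id: submission.id,
//     item_type: dependencies[0].type,
//     order_index: 0,
//     depends_on: null, // resolved in approval order
//     is_test_data: true,
//   });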

// ============================================
// APPROVAL INVOCATION
// ============================================

/**
 * Approve submission via edge function
 */
export async function approveSubmission(
  submissionId: string,
  itemIds: string[],
  authToken: string,
  idempotencyKey?: string
): Promise<{
  success: boolean;
  status?: string;
  error?: string;
  duration: number;
}> {
  const startTime = performance.now();

  const key = idempotencyKey || `test-${Date.now()}-${Math.random()}`;

  try {
    const response = await fetch(
      `${getEdgeFunctionUrl()}/process-selective-approval`,
      {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${authToken}`,
          'Content-Type': 'application/json',
          'apikey': getSupabaseAnonKey(),
        },
        body: JSON.stringify({
          submissionId,
          itemIds,
          idempotencyKey: key,
        }),
      }
    );

    const duration = performance.now() - startTime;

    if (!response.ok) {
      const errorText = await response.text();
      return {
        success: false,
        error: `HTTP ${response.status}: ${errorText}`,
        duration,
      };
    }

    const result = await response.json();

    return {
      success: true,
      status: result.status || 'approved',
      duration,
    };
  } catch (error) {
    const duration = performance.now() - startTime;
    return {
      success: false,
      error: formatTestError(error),
      duration,
    };
  }
}
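
// Typical call sequence (sketch; assumes an authenticated test session):
//
//   const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
//   const approval = await approveSubmission(submissionId, [itemId], authToken);
//   if (!approval.success) throw new Error(approval.error);
//
// Passing an explicit idempotencyKey is meant to let a retried call be
// deduplicated by the edge function instead of approving twice.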

// ============================================
// POLLING & VERIFICATION
// ============================================

/**
 * Poll for entity creation
 */
export async function pollForEntity(
  table: 'parks' | 'rides' | 'companies' | 'ride_models',
  id: string,
  maxWaitMs: number = 10000
): Promise<any | null> {
  const pollInterval = 200;
  const startTime = Date.now();

  while (Date.now() - startTime < maxWaitMs) {
    const { data, error } = await supabase
      .from(table)
      .select('*')
      .eq('id', id)
      .single();

    if (data && !error) {
      return data;
    }

    await new Promise(resolve => setTimeout(resolve, pollInterval));
  }

  return null;
}
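
// Example: block for up to the default 10s until the approved entity row
// becomes readable, then assert on it.
//
//   const park = await pollForEntity('parks', item.approved_entity_id);
//   if (!park) throw new Error('Park entity not found');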

/**
 * Poll for version creation
 */
export async function pollForVersion(
  entityType: 'park' | 'ride' | 'company' | 'ride_model',
  entityId: string,
  expectedVersionNumber: number,
  maxWaitMs: number = 10000
): Promise<any | null> {
  const versionTable = `${entityType}_versions` as 'park_versions' | 'ride_versions' | 'company_versions' | 'ride_model_versions';
  const pollInterval = 200;
  const startTime = Date.now();

  while (Date.now() - startTime < maxWaitMs) {
    const { data, error } = await supabase
      .from(versionTable)
      .select('*')
      .eq(`${entityType}_id`, entityId)
      .eq('version_number', expectedVersionNumber)
      .single();

    if (data && !error) {
      return data;
    }

    await new Promise(resolve => setTimeout(resolve, pollInterval));
  }

  return null;
}
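
// Example: pollForVersion('park', parkId, 2) polls park_versions for the row
// where park_id = parkId and version_number = 2; the table and foreign key
// column names are derived from the entityType prefix.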

/**
 * Verify submission item is approved
 */
export async function verifySubmissionItemApproved(
  itemId: string
): Promise<{ approved: boolean; entityId: string | null; error?: string }> {
  const { data, error } = await supabase
    .from('submission_items')
    .select('status, approved_entity_id')
    .eq('id', itemId)
    .single();

  if (error) {
    return { approved: false, entityId: null, error: error.message };
  }

  return {
    approved: data.status === 'approved' && !!data.approved_entity_id,
    entityId: data.approved_entity_id,
  };
}

/**
 * Verify submission status
 */
export async function verifySubmissionStatus(
  submissionId: string,
  expectedStatus: 'approved' | 'partially_approved' | 'pending'
): Promise<boolean> {
  const { data, error } = await supabase
    .from('content_submissions')
    .select('status')
    .eq('id', submissionId)
    .single();

  if (error || !data) {
    return false;
  }

  return data.status === expectedStatus;
}
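
// The two checks compose into a full post-approval assertion (sketch):
//
//   const { approved, entityId } = await verifySubmissionItemApproved(itemId);
//   if (!approved || !entityId) throw new Error('Item not approved');
//   if (!(await verifySubmissionStatus(submissionId, 'approved'))) {
//     throw new Error('Submission status not updated');
//   }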

/**
 * Create entity directly (bypass moderation for setup)
 */
export async function createParkDirectly(
  data: any,
  tracker: TestDataTracker
): Promise<string> {
  // First create location if provided
  let locationId: string | undefined;

  if (data.location) {
    const { data: location, error: locError } = await supabase
      .from('locations')
      .insert({
        name: data.location.name,
        city: data.location.city,
        country: data.location.country,
        latitude: data.location.latitude,
        longitude: data.location.longitude,
      })
      .select()
      .single();

    if (locError || !location) {
      throw new Error(`Failed to create location: ${locError?.message}`);
    }

    locationId = location.id;
    tracker.track('locations', locationId);
  }

  const parkData = { ...data };
  delete parkData.location;
  if (locationId) {
    parkData.location_id = locationId;
  }

  const { data: park, error } = await supabase
    .from('parks')
    .insert(parkData)
    .select()
    .single();

  if (error || !park) {
    throw new Error(`Failed to create park directly: ${error?.message}`);
  }

  tracker.track('parks', park.id);
  return park.id;
}
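
// Example setup call (sketch; the location block is optional and the field
// values are placeholders):
//
//   const parkId = await createParkDirectly({
//     name: 'Fixture Park',
//     slug: `fixture-park-${Date.now()}`,
//     park_type: 'theme_park',
//     status: 'operating',
//     location: { name: 'Fixture City', country: 'US', latitude: 40.7, longitude: -74.0 },
//   }, tracker);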

/**
 * Create ride directly (bypass moderation for setup)
 */
export async function createRideDirectly(
  data: any,
  tracker: TestDataTracker
): Promise<string> {
  const { data: ride, error } = await supabase
    .from('rides')
    .insert(data)
    .select()
    .single();

  if (error || !ride) {
    throw new Error(`Failed to create ride directly: ${error?.message}`);
  }

  tracker.track('rides', ride.id);
  return ride.id;
}

/**
 * Create test photo gallery submission
 */
export async function createTestPhotoGallerySubmission(
  entityId: string,
  entityType: 'park' | 'ride',
  photoCount: number,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  // Create content submission first
  const { data: submission, error: submissionError } = await supabase
    .from('content_submissions')
    .insert({
      user_id: userId,
      submission_type: 'photo_gallery',
      status: 'pending',
      is_test_data: true,
    })
    .select()
    .single();

  if (submissionError || !submission) {
    throw new Error(`Failed to create content submission: ${submissionError?.message}`);
  }

  tracker.track('content_submissions', submission.id);

  // Create photo submission
  const { data: photoSubmission, error: photoSubError } = await supabase
    .from('photo_submissions')
    .insert({
      entity_id: entityId,
      entity_type: entityType,
      submission_id: submission.id,
      is_test_data: true,
    })
    .select()
    .single();

  if (photoSubError || !photoSubmission) {
    throw new Error(`Failed to create photo submission: ${photoSubError?.message}`);
  }

  tracker.track('photo_submissions', photoSubmission.id);

  // Create submission item linking to photo submission
  const { data: item, error: itemError } = await supabase
    .from('submission_items')
    .insert({
      submission_id: submission.id,
      photo_submission_id: photoSubmission.id,
      item_type: 'photo_gallery',
      status: 'pending',
      is_test_data: true,
    })
    .select()
    .single();

  if (itemError || !item) {
    throw new Error(`Failed to create submission item: ${itemError?.message}`);
  }

  tracker.track('submission_items', item.id);

  // Create photo submission items
  for (let i = 0; i < photoCount; i++) {
    const { data: photoItem, error: photoItemError } = await supabase
      .from('photo_submission_items')
      .insert({
        photo_submission_id: photoSubmission.id,
        cloudflare_image_id: `test-image-${Date.now()}-${i}`,
        cloudflare_image_url: `https://test.com/image-${i}.jpg`,
        caption: `Test photo ${i + 1}`,
        order_index: i,
        is_test_data: true,
      })
      .select()
      .single();

    if (photoItemError || !photoItem) {
      throw new Error(`Failed to create photo item ${i}: ${photoItemError?.message}`);
    }

    tracker.track('photo_submission_items', photoItem.id);
  }

  return {
    submissionId: submission.id,
    itemId: item.id,
  };
}
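
// Example (sketch): attach three placeholder photos to an existing park and
// keep the submission/item pair for a later approveSubmission call.
//
//   const { submissionId, itemId } = await createTestPhotoGallerySubmission(
//     parkId, 'park', 3, userId, tracker
//   );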
@@ -6,7 +6,5 @@
 
 export { IntegrationTestRunner } from './testRunner';
 export { allTestSuites } from './suites';
-export { formatResultsAsMarkdown, formatSingleTestAsMarkdown } from './formatters';
-export { formatTestError } from './formatTestError';
 
 export type { TestResult, Test, TestSuite } from './testRunner';
File diff suppressed because it is too large
@@ -6,7 +6,6 @@
 
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
-import { formatTestError } from '../formatTestError';
 
 export const authTestSuite: TestSuite = {
   id: 'auth',
@@ -65,7 +64,7 @@ export const authTestSuite: TestSuite = {
           suite: 'Authentication & Authorization',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -138,7 +137,7 @@ export const authTestSuite: TestSuite = {
           suite: 'Authentication & Authorization',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -188,7 +187,7 @@ export const authTestSuite: TestSuite = {
           suite: 'Authentication & Authorization',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -249,7 +248,7 @@ export const authTestSuite: TestSuite = {
           suite: 'Authentication & Authorization',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -7,7 +7,6 @@
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
 import { TestDataTracker } from '../TestDataTracker';
-import { formatTestError } from '../formatTestError';
 
 export const dataIntegrityTestSuite: TestSuite = {
   id: 'data-integrity',
@@ -78,7 +77,7 @@ export const dataIntegrityTestSuite: TestSuite = {
           suite: 'Data Integrity & Constraints',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -140,7 +139,7 @@ export const dataIntegrityTestSuite: TestSuite = {
           suite: 'Data Integrity & Constraints',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -150,69 +149,52 @@ export const dataIntegrityTestSuite: TestSuite = {
     {
       id: 'integrity-003',
       name: 'Unique Constraint Enforcement',
-      description: 'Tests unique constraints prevent duplicate slugs via approval pipeline',
+      description: 'Tests unique constraints prevent duplicate slugs',
       run: async (): Promise<TestResult> => {
         const startTime = Date.now();
         const tracker = new TestDataTracker();
+        let parkId: string | null = null;
 
         try {
-          // Import necessary helpers
-          const {
-            getCurrentUserId,
-            getAuthToken,
-            generateUniqueParkData,
-            createTestParkSubmission,
-            approveSubmission
-          } = await import('../helpers/approvalTestHelpers');
-
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-
-          // Create first park with unique slug
-          const baseSlug = `unique-test-${Date.now()}`;
-          const parkData1 = {
-            ...generateUniqueParkData('integrity-003-1'),
-            slug: baseSlug // Override with our controlled slug
-          };
-
-          // Create and approve first submission
-          const { submissionId: sub1Id, itemId: item1Id } = await createTestParkSubmission(parkData1, userId, tracker);
-
-          const approval1 = await approveSubmission(sub1Id, [item1Id], authToken);
-          if (!approval1.success) {
-            throw new Error(`First park approval failed: ${approval1.error}`);
-          }
-
-          // Get first park ID
-          const { data: item1 } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', item1Id)
+          // Create a park
+          const slug = `unique-test-${Date.now()}`;
+          const { data: park, error: createError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Unique Test Park',
+              slug,
+              park_type: 'theme_park',
+              status: 'operating',
+              is_test_data: true
+            })
+            .select('id')
             .single();
 
-          if (!item1?.approved_entity_id) throw new Error('First park not created');
-          tracker.track('parks', item1.approved_entity_id);
-
-          // Create second submission with SAME slug
-          const parkData2 = {
-            ...generateUniqueParkData('integrity-003-2'),
-            slug: baseSlug // Same slug - should fail on approval
-          };
-
-          const { submissionId: sub2Id, itemId: item2Id } = await createTestParkSubmission(parkData2, userId, tracker);
-
-          // Try to approve second submission (should fail due to unique constraint)
-          const approval2 = await approveSubmission(sub2Id, [item2Id], authToken);
-
-          // Approval should fail
-          if (approval2.success) {
-            throw new Error('Second approval succeeded when it should have failed (duplicate slug)');
+          if (createError) throw new Error(`Park creation failed: ${createError.message}`);
+          if (!park) throw new Error('No park returned');
+
+          parkId = park.id;
+          tracker.track('parks', parkId);
+
+          // Try to create another park with same slug
+          const { error: duplicateError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Duplicate Park',
+              slug, // Same slug
+              park_type: 'theme_park',
+              status: 'operating',
+              is_test_data: true
+            });
+
+          // This SHOULD fail with unique violation
+          if (!duplicateError) {
+            throw new Error('Unique constraint not enforced - duplicate slug was accepted');
           }
 
-          // Verify the error mentions unique constraint or duplicate
-          const errorMsg = approval2.error?.toLowerCase() || '';
-          if (!errorMsg.includes('unique') && !errorMsg.includes('duplicate') && !errorMsg.includes('already exists')) {
-            throw new Error(`Expected unique constraint error, got: ${approval2.error}`);
+          // Verify it's a unique violation
+          if (!duplicateError.message.includes('unique') && !duplicateError.message.includes('duplicate')) {
+            throw new Error(`Expected unique constraint error, got: ${duplicateError.message}`);
           }
 
           const duration = Date.now() - startTime;
@@ -226,10 +208,7 @@ export const dataIntegrityTestSuite: TestSuite = {
           timestamp: new Date().toISOString(),
           details: {
             constraintEnforced: true,
-            firstParkCreated: true,
-            secondParkBlocked: true,
-            errorMessage: approval2.error,
-            followedPipeline: true
+            errorMessage: duplicateError.message
           }
         };
       } catch (error) {
@@ -240,12 +219,16 @@ export const dataIntegrityTestSuite: TestSuite = {
           suite: 'Data Integrity & Constraints',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
       } finally {
         await tracker.cleanup();
+        const remaining = await tracker.verifyCleanup();
+        if (remaining.length > 0) {
+          console.warn('integrity-003 cleanup incomplete:', remaining);
+        }
       }
     }
   },
@@ -306,7 +289,7 @@ export const dataIntegrityTestSuite: TestSuite = {
           suite: 'Data Integrity & Constraints',
           status: 'fail',
           duration,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           stack: error instanceof Error ? error.stack : undefined,
           timestamp: new Date().toISOString()
         };
@@ -6,7 +6,6 @@
 
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
-import { formatTestError } from '../formatTestError';
 
 export const edgeFunctionTestSuite: TestSuite = {
   id: 'edge-functions',
@@ -69,7 +68,7 @@ export const edgeFunctionTestSuite: TestSuite = {
           suite: 'Edge Function Tests',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -122,7 +121,7 @@ export const edgeFunctionTestSuite: TestSuite = {
           suite: 'Edge Function Tests',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -188,7 +187,7 @@ export const edgeFunctionTestSuite: TestSuite = {
           suite: 'Edge Function Tests',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -8,9 +8,7 @@ import { authTestSuite } from './authTests';
 import { versioningTestSuite } from './versioningTests';
 import { dataIntegrityTestSuite } from './dataIntegrityTests';
 import { submissionTestSuite } from './submissionTests';
-import { approvalPipelineTestSuite } from './approvalPipelineTests';
 import { moderationTestSuite } from './moderationTests';
-import { moderationDependencyTestSuite } from './moderationDependencyTests';
 import { edgeFunctionTestSuite } from './edgeFunctionTests';
 import { unitConversionTestSuite } from './unitConversionTests';
 import { performanceTestSuite } from './performanceTests';
@@ -21,9 +19,7 @@ export const allTestSuites: TestSuite[] = [
   versioningTestSuite,
   dataIntegrityTestSuite,
   submissionTestSuite,
-  approvalPipelineTestSuite,
   moderationTestSuite,
-  moderationDependencyTestSuite,
   edgeFunctionTestSuite,
   unitConversionTestSuite,
   performanceTestSuite,
@@ -34,9 +30,7 @@ export {
   versioningTestSuite,
   dataIntegrityTestSuite,
   submissionTestSuite,
-  approvalPipelineTestSuite,
   moderationTestSuite,
-  moderationDependencyTestSuite,
   edgeFunctionTestSuite,
   unitConversionTestSuite,
   performanceTestSuite,
@@ -5,9 +5,7 @@
  */
 
 import { supabase } from '@/lib/supabaseClient';
-import { submitParkCreation } from '@/lib/entitySubmissionHelpers';
 import type { TestSuite, TestResult } from '../testRunner';
-import { formatTestError } from '../formatTestError';
 
 export const moderationDependencyTestSuite: TestSuite = {
   id: 'moderation-dependencies',
@@ -25,55 +23,49 @@ export const moderationDependencyTestSuite: TestSuite = {
         const { data: userData } = await supabase.auth.getUser();
         if (!userData.user) throw new Error('No authenticated user');
 
-        // Create two independent park submissions using proper helpers
-        const park1Result = await submitParkCreation(
-          {
-            name: 'Test Park 1 Dependency',
-            slug: 'test-park-1-dep',
-            park_type: 'theme_park',
-            status: 'operating',
-            location: {
-              name: 'Test Location 1',
-              country: 'US',
-              latitude: 40.7128,
-              longitude: -74.0060,
-              display_name: 'Test Location 1, US'
-            }
-          },
-          userData.user.id
-        );
-
-        const park2Result = await submitParkCreation(
-          {
-            name: 'Test Park 2 Dependency',
-            slug: 'test-park-2-dep',
-            park_type: 'theme_park',
-            status: 'operating',
-            location: {
-              name: 'Test Location 2',
-              country: 'US',
-              latitude: 34.0522,
-              longitude: -118.2437,
-              display_name: 'Test Location 2, US'
-            }
-          },
-          userData.user.id
-        );
-
-        if (!park1Result.submitted || !park2Result.submitted) {
-          throw new Error('Failed to create park submissions');
-        }
-
-        // Get submission items for both parks
+        // Create submission with 2 independent park items
+        const { data: submission, error: createError } = await supabase
+          .from('content_submissions')
+          .insert({
+            user_id: userData.user.id,
+            submission_type: 'park',
+            status: 'pending',
+            content: { test: true }
+          })
+          .select()
+          .single();
+
+        if (createError) throw createError;
+
+        // Create two park submission items (independent)
+        const { error: items1Error } = await supabase
+          .from('submission_items')
+          .insert([
+            {
+              submission_id: submission.id,
+              item_type: 'park',
+              item_data: { name: 'Test Park 1', slug: 'test-park-1', country: 'US' },
+              status: 'pending'
+            },
+            {
+              submission_id: submission.id,
+              item_type: 'park',
+              item_data: { name: 'Test Park 2', slug: 'test-park-2', country: 'US' },
+              status: 'pending'
+            }
+          ]);
+
+        if (items1Error) throw items1Error;
+
+        // Get items
         const { data: items } = await supabase
           .from('submission_items')
-          .select('id, submission_id')
-          .in('submission_id', [park1Result.submissionId!, park2Result.submissionId!])
-          .eq('item_type', 'park')
+          .select('id')
+          .eq('submission_id', submission.id)
           .order('created_at', { ascending: true });
 
-        if (!items || items.length < 2) {
-          throw new Error('Failed to find submission items');
+        if (!items || items.length !== 2) {
+          throw new Error('Failed to create submission items');
         }
 
         // Approve second item first (should work - no dependencies)
@@ -93,10 +85,7 @@ export const moderationDependencyTestSuite: TestSuite = {
         if (approve1Error) throw new Error('Failed to approve first item second');
 
         // Cleanup
-        await supabase.from('content_submissions').delete().in('id', [
-          park1Result.submissionId!,
-          park2Result.submissionId!
-        ]);
+        await supabase.from('content_submissions').delete().eq('id', submission.id);
 
         return {
           id: 'dep-001',
@@ -113,7 +102,7 @@ export const moderationDependencyTestSuite: TestSuite = {
           suite: 'Multi-Item Dependency Resolution',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -122,77 +111,40 @@ export const moderationDependencyTestSuite: TestSuite = {
 
     {
       id: 'dep-002',
-      name: 'Verify Submission Item Relational Structure',
-      description: 'Verifies that submission items use proper relational foreign keys',
+      name: 'Verify Submission Item Dependencies Exist',
+      description: 'Verifies that submission items have proper dependency tracking',
       run: async (): Promise<TestResult> => {
         const startTime = Date.now();
 
         try {
-          const { data: userData } = await supabase.auth.getUser();
-          if (!userData.user) throw new Error('No authenticated user');
-
-          // Create a test park submission
-          const parkResult = await submitParkCreation(
-            {
-              name: 'Test Park Schema Check',
-              slug: 'test-park-schema-check',
-              park_type: 'theme_park',
-              status: 'operating',
-              location: {
-                name: 'Test Location Schema',
-                country: 'US',
-                latitude: 40.7128,
-                longitude: -74.0060,
-                display_name: 'Test Location Schema, US'
-              }
-            },
-            userData.user.id
-          );
-
-          if (!parkResult.submitted) {
-            throw new Error('Failed to create test park submission');
-          }
-
-          // Verify submission item has proper structure
-          const { data: item, error: itemError } = await supabase
+          // Verify submission_items table has dependency columns
+          const { data: testItem } = await supabase
             .from('submission_items')
-            .select('id, status, depends_on, order_index, item_type, action_type')
-            .eq('submission_id', parkResult.submissionId!)
-            .eq('item_type', 'park')
-            .single();
-
-          if (itemError) throw itemError;
-          if (!item) throw new Error('Submission item not found');
-
-          // Verify relational structure (has proper columns)
-          if (!item.item_type || !item.action_type) {
-            throw new Error('Missing required fields - schema structure incorrect');
-          }
-
-          // Cleanup
-          await supabase.from('content_submissions').delete().eq('id', parkResult.submissionId!);
-
+            .select('id, status')
+            .limit(1)
+            .maybeSingle();
+
+          // If query succeeds, table exists and is accessible
           return {
            id: 'dep-002',
-            name: 'Verify Submission Item Relational Structure',
+            name: 'Verify Submission Item Dependencies Exist',
             suite: 'Multi-Item Dependency Resolution',
             status: 'pass',
             duration: Date.now() - startTime,
             timestamp: new Date().toISOString(),
             details: {
-              relationalStructure: true,
-              hasForeignKeys: true,
-              message: 'Submission items properly use relational foreign keys'
+              tableAccessible: true,
+              testQuery: 'submission_items table verified'
             }
          };
         } catch (error) {
           return {
             id: 'dep-002',
-            name: 'Verify Submission Item Relational Structure',
+            name: 'Verify Submission Item Dependencies Exist',
             suite: 'Multi-Item Dependency Resolution',
             status: 'fail',
             duration: Date.now() - startTime,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
             timestamp: new Date().toISOString()
           };
         }
@@ -6,7 +6,6 @@
 
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
-import { formatTestError } from '../formatTestError';
 
 export const moderationLockTestSuite: TestSuite = {
   id: 'moderation-locks',
@@ -98,7 +97,7 @@ export const moderationLockTestSuite: TestSuite = {
           suite: 'Moderation Lock Management',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -184,7 +183,7 @@ export const moderationLockTestSuite: TestSuite = {
           suite: 'Moderation Lock Management',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -285,7 +284,7 @@ export const moderationLockTestSuite: TestSuite = {
           suite: 'Moderation Lock Management',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -6,7 +6,6 @@
 
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
-import { formatTestError } from '../formatTestError';
 
 export const moderationTestSuite: TestSuite = {
   id: 'moderation',
@@ -54,7 +53,7 @@ export const moderationTestSuite: TestSuite = {
           suite: 'Moderation Queue & Workflow',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -7,7 +7,6 @@
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
 import { TestDataTracker } from '../TestDataTracker';
-import { formatTestError } from '../formatTestError';
 
 export const performanceTestSuite: TestSuite = {
   id: 'performance',
@@ -97,7 +96,7 @@ export const performanceTestSuite: TestSuite = {
           suite: 'Performance & Scalability',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -113,36 +112,22 @@ export const performanceTestSuite: TestSuite = {
         let parkId: string | null = null;
 
         try {
-          // Import helpers and create park via pipeline
-          const {
-            getCurrentUserId,
-            getAuthToken,
-            generateUniqueParkData,
-            createTestParkSubmission,
-            approveSubmission
-          } = await import('../helpers/approvalTestHelpers');
-
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-          const parkData = generateUniqueParkData('perf-002');
-
-          const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
-          const approval = await approveSubmission(submissionId, [itemId], authToken);
-
-          if (!approval.success) {
-            throw new Error(`Park creation failed: ${approval.error || 'Unknown error'}`);
-          }
-
-          // Get park ID from submission item
-          const { data: parkItem } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', itemId)
+          // Create test park
+          const parkSlug = `test-park-perf-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+          const { data: park, error: parkError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Test Park Performance',
+              slug: parkSlug,
+              park_type: 'theme_park',
+              status: 'operating',
+              is_test_data: true
+            })
+            .select('id')
             .single();
 
-          parkId = parkItem?.approved_entity_id || null;
-          if (!parkId) throw new Error('No park ID after approval');
+          if (parkError) throw parkError;
+          parkId = park.id;
 
           tracker.track('parks', parkId);
 
           // Create multiple versions (updates)
@@ -197,7 +182,7 @@ export const performanceTestSuite: TestSuite = {
           suite: 'Performance & Scalability',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       } finally {
@@ -229,7 +214,7 @@ export const performanceTestSuite: TestSuite = {
 
         const modDuration = Date.now() - modStart;
 
-        if (modError) throw new Error(`Moderator check failed: ${modError.message}`);
+        if (modError) throw modError;
 
         // Test is_user_banned function performance
         const banStart = Date.now();
@@ -240,7 +225,7 @@ export const performanceTestSuite: TestSuite = {
 
        const banDuration = Date.now() - banStart;
 
-        if (banError) throw new Error(`Ban check failed: ${banError.message}`);
+        if (banError) throw banError;
 
         // Performance threshold: 200ms for simple functions
         const threshold = 200;
@@ -280,7 +265,7 @@ export const performanceTestSuite: TestSuite = {
           suite: 'Performance & Scalability',
           status: 'fail',
           duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
           timestamp: new Date().toISOString()
         };
       }
@@ -1,96 +1,71 @@
 /**
- * Submission Pipeline Validation Tests
+ * Entity Submission & Validation Integration Tests
  *
- * Tests submission creation, validation, and the full approval flow.
- * All tests follow the sacred pipeline architecture.
+ * Tests for submission validation, schema validation, and entity creation.
  */
 
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
 import { TestDataTracker } from '../TestDataTracker';
-import { formatTestError } from '../formatTestError';
-import {
-  generateUniqueParkData,
-  generateUniqueRideData,
-  generateUniqueCompanyData,
-  generateUniqueRideModelData,
-  createTestParkSubmission,
-  createTestRideSubmission,
-  createTestCompanySubmission,
-  createTestRideModelSubmission,
-  approveSubmission,
-  pollForEntity,
-  getAuthToken,
-  getCurrentUserId,
-} from '../helpers/approvalTestHelpers';
 
 export const submissionTestSuite: TestSuite = {
   id: 'submission',
   name: 'Entity Submission & Validation',
-  description: 'Tests submission creation, validation, and approval pipeline',
+  description: 'Tests for entity submission workflows and validation schemas',
   tests: [
     {
       id: 'submission-001',
       name: 'Park Creation Validation',
-      description: 'Validates park submission and approval creates entity',
+      description: 'Validates park submission and creation',
      run: async (): Promise<TestResult> => {
         const startTime = Date.now();
         const tracker = new TestDataTracker();
+        let parkId: string | null = null;
 
         try {
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-          const parkData = generateUniqueParkData('submission-001');
-
-          // Create submission
-          const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
-
-          // Verify submission was created
-          const { data: submission } = await supabase
-            .from('content_submissions')
-            .select('status, submission_type')
-            .eq('id', submissionId)
+          const parkSlug = `test-park-submit-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+
+          // Create park with valid data
+          const { data: park, error: createError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Test Park Submission',
+              slug: parkSlug,
+              park_type: 'theme_park',
+              status: 'operating',
+              description: 'Test park for submission validation'
+            })
+            .select('id, name, slug, park_type, status')
             .single();
 
-          if (!submission) throw new Error('Submission not found');
-          if (submission.status !== 'pending') {
-            throw new Error(`Expected status "pending", got "${submission.status}"`);
+          if (createError) throw new Error(`Park creation failed: ${createError.message}`);
+          if (!park) throw new Error('Park not returned after creation');
+
+          parkId = park.id;
+
+          // Validate created park has correct data
+          if (park.name !== 'Test Park Submission') {
+            throw new Error(`Expected name "Test Park Submission", got "${park.name}"`);
           }
-          if (submission.submission_type !== 'park') {
-            throw new Error(`Expected type "park", got "${submission.submission_type}"`);
+          if (park.slug !== parkSlug) {
+            throw new Error(`Expected slug "${parkSlug}", got "${park.slug}"`);
+          }
+          if (park.park_type !== 'theme_park') {
+            throw new Error(`Expected park_type "theme_park", got "${park.park_type}"`);
           }
 
-          // Approve submission
-          const approval = await approveSubmission(submissionId, [itemId], authToken);
-          if (!approval.success) {
-            throw new Error(`Approval failed: ${approval.error}`);
-          }
-
-          // Verify entity was created
-          const { data: item } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id, status')
-            .eq('id', itemId)
-            .single();
-
-          if (!item?.approved_entity_id) {
-            throw new Error('No entity created after approval');
-          }
-          if (item.status !== 'approved') {
-            throw new Error(`Expected item status "approved", got "${item.status}"`);
-          }
-
-          tracker.track('parks', item.approved_entity_id);
-
-          // Verify park data
-          const park = await pollForEntity('parks', item.approved_entity_id);
-          if (!park) throw new Error('Park entity not found');
-
-          if (park.name !== parkData.name) {
-            throw new Error(`Expected name "${parkData.name}", got "${park.name}"`);
-          }
-          if (park.slug !== parkData.slug) {
-            throw new Error(`Expected slug "${parkData.slug}", got "${park.slug}"`);
+          // Test slug uniqueness constraint
+          const { error: duplicateError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Duplicate Slug Park',
+              slug: parkSlug, // Same slug
+              park_type: 'theme_park',
+              status: 'operating'
+            });
+
+          if (!duplicateError) {
+            throw new Error('Duplicate slug was allowed (uniqueness constraint failed)');
           }
 
           const duration = Date.now() - startTime;
@@ -103,9 +78,9 @@ export const submissionTestSuite: TestSuite = {
           duration,
           timestamp: new Date().toISOString(),
           details: {
-            submissionId,
-            parkId: item.approved_entity_id,
-            validationsPassed: ['submission_created', 'approval_succeeded', 'entity_created']
+            parkId,
+            parkSlug,
+            validationsPassed: ['name', 'slug', 'park_type', 'uniqueness_constraint']
           }
         };
 
@@ -116,71 +91,80 @@ export const submissionTestSuite: TestSuite = {
          suite: 'Entity Submission & Validation',
          status: 'fail',
          duration: Date.now() - startTime,
-          error: formatTestError(error),
+          error: error instanceof Error ? error.message : String(error),
          timestamp: new Date().toISOString()
         };
       } finally {
         await tracker.cleanup();
+        const remaining = await tracker.verifyCleanup();
+        if (remaining.length > 0) {
+          console.warn('submission-001 cleanup incomplete:', remaining);
+        }
       }
     }
   },
   {
     id: 'submission-002',
     name: 'Ride Creation with Dependencies',
-    description: 'Validates ride submission requires valid park and creates correctly',
+    description: 'Validates ride submission requires valid park_id',
     run: async (): Promise<TestResult> => {
       const startTime = Date.now();
       const tracker = new TestDataTracker();
+      let parkId: string | null = null;
+      let rideId: string | null = null;
 
       try {
-        const userId = await getCurrentUserId();
-        const authToken = await getAuthToken();
-
-        // First create and approve a park
-        const parkData = generateUniqueParkData('submission-002-park');
-        const { submissionId: parkSubId, itemId: parkItemId } = await createTestParkSubmission(parkData, userId, tracker);
-
-        const parkApproval = await approveSubmission(parkSubId, [parkItemId], authToken);
-        if (!parkApproval.success) {
-          throw new Error(`Park approval failed: ${parkApproval.error}`);
-        }
-
-        const { data: parkItem } = await supabase
-          .from('submission_items')
-          .select('approved_entity_id')
-          .eq('id', parkItemId)
+        // First create a park
+        const parkSlug = `test-park-ride-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+        const { data: park, error: parkError } = await supabase
+          .from('parks')
+          .insert({
+            name: 'Test Park for Ride',
+            slug: parkSlug,
+            park_type: 'theme_park',
+            status: 'operating',
+            is_test_data: true
+          })
+          .select('id')
          .single();
 
-        const parkId = parkItem?.approved_entity_id;
-        if (!parkId) throw new Error('Park not created');
-
-        tracker.track('parks', parkId);
-
-        // Now create ride submission
-        const rideData = generateUniqueRideData(parkId, 'submission-002');
-        const { submissionId: rideSubId, itemId: rideItemId } = await createTestRideSubmission(rideData, userId, tracker);
-
-        // Approve ride
-        const rideApproval = await approveSubmission(rideSubId, [rideItemId], authToken);
-        if (!rideApproval.success) {
-          throw new Error(`Ride approval failed: ${rideApproval.error}`);
-        }
-
-        // Verify ride created
-        const { data: rideItem } = await supabase
-          .from('submission_items')
-          .select('approved_entity_id')
-          .eq('id', rideItemId)
-          .single();
-
-        const rideId = rideItem?.approved_entity_id;
-        if (!rideId) throw new Error('Ride not created after approval');
-
-        tracker.track('rides', rideId);
-
-        // Verify ride data
-        const ride = await pollForEntity('rides', rideId);
-        if (!ride) throw new Error('Ride entity not found');
+        if (parkError) throw new Error(`Park creation failed: ${parkError.message}`);
+        parkId = park.id;
+
+        // Try to create ride with invalid park_id (should fail)
+        const invalidParkId = '00000000-0000-0000-0000-000000000000';
+        const { error: invalidError } = await supabase
+          .from('rides')
+          .insert({
+            name: 'Test Ride Invalid Park',
+            slug: `test-ride-invalid-${Date.now()}`,
+            park_id: invalidParkId,
+            category: 'roller_coaster',
+            status: 'operating'
+          });
+
+        if (!invalidError) {
+          throw new Error('Ride with invalid park_id was allowed (foreign key constraint failed)');
+        }
+
+        // Create ride with valid park_id (should succeed)
+        const rideSlug = `test-ride-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+        const { data: ride, error: rideError } = await supabase
+          .from('rides')
+          .insert({
+            name: 'Test Ride Valid Park',
+            slug: rideSlug,
+            park_id: parkId,
+            category: 'roller_coaster',
+            status: 'operating'
+          })
+          .select('id, name, park_id')
+          .single();
+
+        if (rideError) throw new Error(`Ride creation failed: ${rideError.message}`);
+        if (!ride) throw new Error('Ride not returned after creation');
+
+        rideId = ride.id;
 
         if (ride.park_id !== parkId) {
           throw new Error(`Expected park_id "${parkId}", got "${ride.park_id}"`);
@@ -198,7 +182,7 @@ export const submissionTestSuite: TestSuite = {
         details: {
           parkId,
           rideId,
-          validationsPassed: ['park_created', 'ride_created', 'dependency_valid']
+          validationsPassed: ['foreign_key_constraint', 'valid_dependency']
         }
       };
 
@@ -209,68 +193,57 @@ export const submissionTestSuite: TestSuite = {
        suite: 'Entity Submission & Validation',
        status: 'fail',
        duration: Date.now() - startTime,
-        error: formatTestError(error),
+        error: error instanceof Error ? error.message : String(error),
        timestamp: new Date().toISOString()
       };
     } finally {
       await tracker.cleanup();
+      const remaining = await tracker.verifyCleanup();
+      if (remaining.length > 0) {
+        console.warn('submission-002 cleanup incomplete:', remaining);
+      }
     }
   }
 },
 {
   id: 'submission-003',
   name: 'Company Creation All Types',
-  description: 'Validates company submission for all company types',
+  description: 'Validates company creation for all company types',
   run: async (): Promise<TestResult> => {
     const startTime = Date.now();
     const tracker = new TestDataTracker();
+    const companyIds: string[] = [];
 
     try {
-      const userId = await getCurrentUserId();
-      const authToken = await getAuthToken();
       const companyTypes = ['manufacturer', 'operator', 'designer', 'property_owner'] as const;
-      const createdCompanies: Array<{ type: string; id: string }> = [];
 
      for (const companyType of companyTypes) {
-        const companyData = generateUniqueCompanyData(companyType, `submission-003-${companyType}`);
-
-        // Create submission
-        const { submissionId, itemId } = await createTestCompanySubmission(
-          companyType,
-          companyData,
-          userId,
-          tracker
-        );
-
-        // Approve submission
-        const approval = await approveSubmission(submissionId, [itemId], authToken);
-        if (!approval.success) {
-          throw new Error(`${companyType} approval failed: ${approval.error}`);
-        }
-
-        // Verify entity created
-        const { data: item } = await supabase
-          .from('submission_items')
-          .select('approved_entity_id')
-          .eq('id', itemId)
+        const slug = `test-company-${companyType}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+
+        const { data: company, error: createError } = await supabase
+          .from('companies')
+          .insert({
+            name: `Test ${companyType} Company`,
+            slug,
+            company_type: companyType,
+            description: `Test company of type ${companyType}`
+          })
+          .select('id, company_type')
          .single();
 
-        const companyId = item?.approved_entity_id;
-        if (!companyId) {
-          throw new Error(`${companyType} not created after approval`);
+        if (createError) {
+          throw new Error(`${companyType} creation failed: ${createError.message}`);
+        }
+        if (!company) {
+          throw new Error(`${companyType} not returned after creation`);
        }
 
-        tracker.track('companies', companyId);
|
companyIds.push(company.id);
|
||||||
|
tracker.track('companies', company.id);
|
||||||
// Verify company type
|
|
||||||
const company = await pollForEntity('companies', companyId);
|
|
||||||
if (!company) throw new Error(`${companyType} entity not found`);
|
|
||||||
|
|
||||||
if (company.company_type !== companyType) {
|
if (company.company_type !== companyType) {
|
||||||
throw new Error(`Expected company_type "${companyType}", got "${company.company_type}"`);
|
throw new Error(`Expected company_type "${companyType}", got "${company.company_type}"`);
|
||||||
}
|
}
|
||||||
|
|
||||||
createdCompanies.push({ type: companyType, id: companyId });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const duration = Date.now() - startTime;
|
const duration = Date.now() - startTime;
|
||||||
@@ -283,9 +256,9 @@ export const submissionTestSuite: TestSuite = {
|
|||||||
duration,
|
duration,
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
details: {
|
details: {
|
||||||
companiesCreated: createdCompanies.length,
|
companiesCreated: companyIds.length,
|
||||||
companyTypes: companyTypes,
|
companyTypes: companyTypes,
|
||||||
companies: createdCompanies
|
companyIds
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -296,90 +269,105 @@ export const submissionTestSuite: TestSuite = {
|
|||||||
suite: 'Entity Submission & Validation',
|
suite: 'Entity Submission & Validation',
|
||||||
status: 'fail',
|
status: 'fail',
|
||||||
duration: Date.now() - startTime,
|
duration: Date.now() - startTime,
|
||||||
error: formatTestError(error),
|
error: error instanceof Error ? error.message : String(error),
|
||||||
timestamp: new Date().toISOString()
|
timestamp: new Date().toISOString()
|
||||||
};
|
};
|
||||||
} finally {
|
} finally {
|
||||||
await tracker.cleanup();
|
await tracker.cleanup();
|
||||||
|
const remaining = await tracker.verifyCleanup();
|
||||||
|
if (remaining.length > 0) {
|
||||||
|
console.warn('submission-003 cleanup incomplete:', remaining);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'submission-004',
|
id: 'submission-004',
|
||||||
name: 'Ride Model with Images',
|
name: 'Ride Model with Images',
|
||||||
description: 'Validates ride model submission with image fields',
|
description: 'Validates ride model creation with image fields',
|
||||||
run: async (): Promise<TestResult> => {
|
run: async (): Promise<TestResult> => {
|
||||||
const startTime = Date.now();
|
const startTime = Date.now();
|
||||||
const tracker = new TestDataTracker();
|
let manufacturerId: string | null = null;
|
||||||
|
let modelId: string | null = null;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const userId = await getCurrentUserId();
|
// Create manufacturer first
|
||||||
const authToken = await getAuthToken();
|
const mfgSlug = `test-mfg-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||||
|
const { data: manufacturer, error: mfgError } = await supabase
|
||||||
// Create and approve manufacturer
|
.from('companies')
|
||||||
const mfgData = generateUniqueCompanyData('manufacturer', 'submission-004-mfg');
|
.insert({
|
||||||
const { submissionId: mfgSubId, itemId: mfgItemId } = await createTestCompanySubmission(
|
name: 'Test Manufacturer',
|
||||||
'manufacturer',
|
slug: mfgSlug,
|
||||||
mfgData,
|
company_type: 'manufacturer'
|
||||||
userId,
|
})
|
||||||
tracker
|
.select('id')
|
||||||
);
|
|
||||||
|
|
||||||
const mfgApproval = await approveSubmission(mfgSubId, [mfgItemId], authToken);
|
|
||||||
if (!mfgApproval.success) {
|
|
||||||
throw new Error(`Manufacturer approval failed: ${mfgApproval.error}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const { data: mfgItem } = await supabase
|
|
||||||
.from('submission_items')
|
|
||||||
.select('approved_entity_id')
|
|
||||||
.eq('id', mfgItemId)
|
|
||||||
.single();
|
.single();
|
||||||
|
|
||||||
const manufacturerId = mfgItem?.approved_entity_id;
|
if (mfgError) throw new Error(`Manufacturer creation failed: ${mfgError.message}`);
|
||||||
if (!manufacturerId) throw new Error('Manufacturer not created');
|
manufacturerId = manufacturer.id;
|
||||||
|
|
||||||
tracker.track('companies', manufacturerId);
|
// Create ride model with images
|
||||||
|
const modelSlug = `test-model-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||||
|
const testImageUrl = 'https://imagedelivery.net/test-account/test-image-id/public';
|
||||||
|
const testImageId = 'test-image-id';
|
||||||
|
|
||||||
// Create ride model submission
|
const { data: model, error: modelError } = await supabase
|
||||||
const modelData = generateUniqueRideModelData(manufacturerId, 'submission-004');
|
.from('ride_models')
|
||||||
const { submissionId, itemId } = await createTestRideModelSubmission(modelData, userId, tracker);
|
.insert({
|
||||||
|
name: 'Test Ride Model',
|
||||||
// Approve ride model
|
slug: modelSlug,
|
||||||
const approval = await approveSubmission(submissionId, [itemId], authToken);
|
manufacturer_id: manufacturerId,
|
||||||
if (!approval.success) {
|
category: 'roller_coaster',
|
||||||
throw new Error(`Ride model approval failed: ${approval.error}`);
|
ride_type: 'steel_coaster',
|
||||||
}
|
banner_image_url: testImageUrl,
|
||||||
|
banner_image_id: testImageId,
|
||||||
// Verify entity created
|
card_image_url: testImageUrl,
|
||||||
const { data: item } = await supabase
|
card_image_id: testImageId
|
||||||
.from('submission_items')
|
})
|
||||||
.select('approved_entity_id')
|
.select('id, banner_image_url, banner_image_id, card_image_url, card_image_id')
|
||||||
.eq('id', itemId)
|
|
||||||
.single();
|
.single();
|
||||||
|
|
||||||
const modelId = item?.approved_entity_id;
|
if (modelError) throw new Error(`Ride model creation failed: ${modelError.message}`);
|
||||||
if (!modelId) throw new Error('Ride model not created after approval');
|
if (!model) throw new Error('Ride model not returned after creation');
|
||||||
|
|
||||||
tracker.track('ride_models', modelId);
|
modelId = model.id;
|
||||||
|
|
||||||
// Verify model data
|
// Validate image fields
|
||||||
const model = await pollForEntity('ride_models', modelId);
|
if (model.banner_image_url !== testImageUrl) {
|
||||||
if (!model) throw new Error('Ride model entity not found');
|
throw new Error(`banner_image_url mismatch: expected "${testImageUrl}", got "${model.banner_image_url}"`);
|
||||||
|
}
|
||||||
if (model.manufacturer_id !== manufacturerId) {
|
if (model.banner_image_id !== testImageId) {
|
||||||
throw new Error(`Expected manufacturer_id "${manufacturerId}", got "${model.manufacturer_id}"`);
|
throw new Error(`banner_image_id mismatch: expected "${testImageId}", got "${model.banner_image_id}"`);
|
||||||
|
}
|
||||||
|
if (model.card_image_url !== testImageUrl) {
|
||||||
|
throw new Error(`card_image_url mismatch`);
|
||||||
|
}
|
||||||
|
if (model.card_image_id !== testImageId) {
|
||||||
|
throw new Error(`card_image_id mismatch`);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify version created
|
// Verify version was created with images
|
||||||
const { data: version } = await supabase
|
let version: any = null;
|
||||||
.from('ride_model_versions')
|
const pollStart = Date.now();
|
||||||
.select('version_number')
|
while (!version && Date.now() - pollStart < 5000) {
|
||||||
.eq('ride_model_id', modelId)
|
const { data } = await supabase
|
||||||
.eq('version_number', 1)
|
.from('ride_model_versions')
|
||||||
.single();
|
.select('banner_image_url, banner_image_id, card_image_url, card_image_id')
|
||||||
|
.eq('ride_model_id', modelId)
|
||||||
|
.eq('version_number', 1)
|
||||||
|
.single();
|
||||||
|
|
||||||
if (!version) throw new Error('Version not created for ride model');
|
if (data) {
|
||||||
|
version = data;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 100));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!version) throw new Error('Version not created after 5s timeout');
|
||||||
|
if (version.banner_image_url !== testImageUrl) {
|
||||||
|
throw new Error('Version missing banner_image_url');
|
||||||
|
}
|
||||||
|
|
||||||
const duration = Date.now() - startTime;
|
const duration = Date.now() - startTime;
|
||||||
|
|
||||||
@@ -393,8 +381,8 @@ export const submissionTestSuite: TestSuite = {
|
|||||||
details: {
|
details: {
|
||||||
modelId,
|
modelId,
|
||||||
manufacturerId,
|
manufacturerId,
|
||||||
versionCreated: true,
|
imageFieldsValidated: ['banner_image_url', 'banner_image_id', 'card_image_url', 'card_image_id'],
|
||||||
followedPipeline: true
|
versionCreated: true
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -405,11 +393,16 @@ export const submissionTestSuite: TestSuite = {
|
|||||||
suite: 'Entity Submission & Validation',
|
suite: 'Entity Submission & Validation',
|
||||||
status: 'fail',
|
status: 'fail',
|
||||||
duration: Date.now() - startTime,
|
duration: Date.now() - startTime,
|
||||||
error: formatTestError(error),
|
error: error instanceof Error ? error.message : String(error),
|
||||||
timestamp: new Date().toISOString()
|
timestamp: new Date().toISOString()
|
||||||
};
|
};
|
||||||
} finally {
|
} finally {
|
||||||
await tracker.cleanup();
|
if (modelId) {
|
||||||
|
await supabase.from('ride_models').delete().eq('id', modelId);
|
||||||
|
}
|
||||||
|
if (manufacturerId) {
|
||||||
|
await supabase.from('companies').delete().eq('id', manufacturerId);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,6 @@
 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
 import { TestDataTracker } from '../TestDataTracker';
-import { formatTestError } from '../formatTestError';

 export const unitConversionTestSuite: TestSuite = {
   id: 'unit-conversion',
@@ -25,93 +24,65 @@ export const unitConversionTestSuite: TestSuite = {
        let rideId: string | null = null;

        try {
-          // Import helpers and create via pipeline
-          const {
-            getCurrentUserId,
-            getAuthToken,
-            generateUniqueParkData,
-            generateUniqueRideData,
-            createTestParkSubmission,
-            createTestRideSubmission,
-            approveSubmission
-          } = await import('../helpers/approvalTestHelpers');
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
+          // Create test park
+          const parkSlug = `test-park-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+          const { data: park, error: parkError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Test Park Units',
+              slug: parkSlug,
+              park_type: 'theme_park',
+              status: 'operating',
+              is_test_data: true
+            })
+            .select('id')

-          // Create and approve park
-          const parkData = generateUniqueParkData('unit-001-park');
-          const { submissionId: parkSubId, itemId: parkItemId } = await createTestParkSubmission(parkData, userId, tracker);
-          const parkApproval = await approveSubmission(parkSubId, [parkItemId], authToken);

-          if (!parkApproval.success) {
-            throw new Error(`Park creation failed: ${parkApproval.error || 'Unknown error'}`);
-          }

-          // Get park ID from submission item
-          const { data: parkItem } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', parkItemId)
            .single();

-          parkId = parkItem?.approved_entity_id || null;
-          if (!parkId) throw new Error('No park ID after approval');
+          if (parkError) throw parkError;
+          parkId = park.id;

          tracker.track('parks', parkId);

-          // Create and approve ride with metric values
-          const rideData = {
-            ...generateUniqueRideData(parkId, 'unit-001-ride'),
-            max_speed_kmh: 100.0,
-            max_height_meters: 50.0,
-            length_meters: 1000.0,
-            drop_height_meters: 45.0,
-            height_requirement: 120
+          // Create ride with metric values
+          const rideSlug = `test-ride-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+          const testData = {
+            name: 'Test Ride Metric',
+            slug: rideSlug,
+            park_id: parkId,
+            category: 'roller_coaster',
+            status: 'operating',
+            max_speed_kmh: 100.0, // km/h (metric)
+            max_height_meters: 50.0, // meters (metric)
+            length_meters: 1000.0, // meters (metric)
+            drop_height_meters: 45.0, // meters (metric)
+            height_requirement: 120 // cm (metric)
          };

-          const { submissionId: rideSubId, itemId: rideItemId } = await createTestRideSubmission(rideData, userId, tracker);
-          const rideApproval = await approveSubmission(rideSubId, [rideItemId], authToken);

-          if (!rideApproval.success) {
-            throw new Error(`Ride creation failed: ${rideApproval.error || 'Unknown error'}`);
-          }

-          // Get ride ID from submission item
-          const { data: rideItem } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', rideItemId)
-            .single();

-          rideId = rideItem?.approved_entity_id || null;
-          if (!rideId) throw new Error('No ride ID after approval');

-          tracker.track('rides', rideId);

-          // Fetch ride data for validation
          const { data: ride, error: rideError } = await supabase
            .from('rides')
+            .insert({ ...testData, is_test_data: true })
            .select('id, max_speed_kmh, max_height_meters, length_meters, drop_height_meters, height_requirement')
-            .eq('id', rideId)
            .single();

-          if (rideError || !ride) throw new Error('Ride not found after creation');
+          if (rideError) throw new Error(`Ride creation failed: ${rideError.message}`);
+          if (!ride) throw new Error('Ride not returned');

+          rideId = ride.id;
+          tracker.track('rides', rideId);

          // Validate values are stored in metric
-          const tolerance = 0.01;
+          const tolerance = 0.01; // Allow small floating point differences

-          if (Math.abs((ride.max_speed_kmh ?? 0) - 100.0) > tolerance) {
-            throw new Error(`max_speed_kmh mismatch: expected 100.0, got ${ride.max_speed_kmh}`);
+          if (Math.abs((ride.max_speed_kmh ?? 0) - testData.max_speed_kmh) > tolerance) {
+            throw new Error(`max_speed_kmh mismatch: expected ${testData.max_speed_kmh}, got ${ride.max_speed_kmh}`);
          }
-          if (Math.abs((ride.max_height_meters ?? 0) - 50.0) > tolerance) {
-            throw new Error(`max_height_meters mismatch: expected 50.0, got ${ride.max_height_meters}`);
+          if (Math.abs((ride.max_height_meters ?? 0) - testData.max_height_meters) > tolerance) {
+            throw new Error(`max_height_meters mismatch: expected ${testData.max_height_meters}, got ${ride.max_height_meters}`);
          }
-          if (Math.abs((ride.length_meters ?? 0) - 1000.0) > tolerance) {
-            throw new Error(`length_meters mismatch: expected 1000.0, got ${ride.length_meters}`);
+          if (Math.abs((ride.length_meters ?? 0) - testData.length_meters) > tolerance) {
+            throw new Error(`length_meters mismatch: expected ${testData.length_meters}, got ${ride.length_meters}`);
          }
-          if (Math.abs((ride.height_requirement ?? 0) - 120) > tolerance) {
-            throw new Error(`height_requirement mismatch: expected 120 cm, got ${ride.height_requirement}`);
+          if (Math.abs((ride.height_requirement ?? 0) - testData.height_requirement) > tolerance) {
+            throw new Error(`height_requirement mismatch: expected ${testData.height_requirement} cm, got ${ride.height_requirement}`);
          }

          const duration = Date.now() - startTime;
@@ -137,7 +108,7 @@ export const unitConversionTestSuite: TestSuite = {
            suite: 'Unit Conversion Tests',
            status: 'fail',
            duration: Date.now() - startTime,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            timestamp: new Date().toISOString()
          };
        } finally {
@@ -160,66 +131,44 @@ export const unitConversionTestSuite: TestSuite = {
        let rideId: string | null = null;

        try {
-          // Import helpers and create via pipeline
-          const {
-            getCurrentUserId,
-            getAuthToken,
-            generateUniqueParkData,
-            generateUniqueRideData,
-            createTestParkSubmission,
-            createTestRideSubmission,
-            approveSubmission
-          } = await import('../helpers/approvalTestHelpers');
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
+          // Create test park
+          const parkSlug = `test-park-ver-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+          const { data: park, error: parkError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Test Park Version Units',
+              slug: parkSlug,
+              park_type: 'theme_park',
+              status: 'operating',
+              is_test_data: true
+            })
+            .select('id')

-          // Create and approve park
-          const parkData = generateUniqueParkData('unit-002-park');
-          const { submissionId: parkSubId, itemId: parkItemId } = await createTestParkSubmission(parkData, userId, tracker);
-          const parkApproval = await approveSubmission(parkSubId, [parkItemId], authToken);

-          if (!parkApproval.success) {
-            throw new Error(`Park creation failed: ${parkApproval.error || 'Unknown error'}`);
-          }

-          // Get park ID from submission item
-          const { data: parkItem } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', parkItemId)
            .single();

-          parkId = parkItem?.approved_entity_id || null;
-          if (!parkId) throw new Error('No park ID after approval');
+          if (parkError) throw parkError;
+          parkId = park.id;

          tracker.track('parks', parkId);

-          // Create and approve ride with metric values
-          const rideData = {
-            ...generateUniqueRideData(parkId, 'unit-002-ride'),
-            max_speed_kmh: 120.0,
-            max_height_meters: 60.0,
-            height_requirement: 140
-          };
-          const { submissionId: rideSubId, itemId: rideItemId } = await createTestRideSubmission(rideData, userId, tracker);
-          const rideApproval = await approveSubmission(rideSubId, [rideItemId], authToken);
-          if (!rideApproval.success) {
-            throw new Error(`Ride creation failed: ${rideApproval.error || 'Unknown error'}`);
-          }
-          // Get ride ID from submission item
+          // Create ride with metric values
+          const rideSlug = `test-ride-ver-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+          const { data: ride, error: rideError } = await supabase
+            .from('rides')
+            .insert({
+              name: 'Test Ride Version Metric',
+              slug: rideSlug,
+              park_id: parkId,
+              category: 'roller_coaster',
+              status: 'operating',
+              max_speed_kmh: 120.0,
+              max_height_meters: 60.0,
+              height_requirement: 140,
+              is_test_data: true
+            })
+            .select('id')
-          const { data: rideItem } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', rideItemId)
            .single();

-          rideId = rideItem?.approved_entity_id || null;
-          if (!rideId) throw new Error('No ride ID after approval');
+          if (rideError) throw rideError;
+          rideId = ride.id;

          tracker.track('rides', rideId);

          // Poll for version creation
@@ -277,7 +226,7 @@ export const unitConversionTestSuite: TestSuite = {
            suite: 'Unit Conversion Tests',
            status: 'fail',
            duration: Date.now() - startTime,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            timestamp: new Date().toISOString()
          };
        } finally {
@@ -358,7 +307,7 @@ export const unitConversionTestSuite: TestSuite = {
            suite: 'Unit Conversion Tests',
            status: 'fail',
            duration: Date.now() - startTime,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            timestamp: new Date().toISOString()
          };
        }

@@ -3,83 +3,76 @@
  *
  * Tests the complete versioning system end-to-end including automatic
  * version creation, attribution, and rollback functionality.
- *
- * All tests follow the sacred pipeline: submitParkCreation → approve → verify versioning
  */

 import { supabase } from '@/lib/supabaseClient';
 import type { TestSuite, TestResult } from '../testRunner';
 import { TestDataTracker } from '../TestDataTracker';
-import { formatTestError } from '../formatTestError';
-import {
-  generateUniqueParkData,
-  createTestParkSubmission,
-  approveSubmission,
-  pollForEntity,
-  pollForVersion,
-  getAuthToken,
-  getCurrentUserId,
-} from '../helpers/approvalTestHelpers';

 export const versioningTestSuite: TestSuite = {
   id: 'versioning',
   name: 'Versioning & Rollback',
-  description: 'Tests version creation, attribution, rollback, and cleanup via sacred pipeline',
+  description: 'Tests version creation, attribution, rollback, and cleanup',
   tests: [
     {
       id: 'version-001',
       name: 'Automatic Version Creation on Insert',
-      description: 'Verifies version 1 is created automatically when entity is approved',
+      description: 'Verifies version 1 is created automatically when entity is created',
      run: async (): Promise<TestResult> => {
        const startTime = Date.now();
        const tracker = new TestDataTracker();
+        let parkId: string | null = null;

        try {
-          // Follow sacred pipeline: Form → Submission → Approval → Versioning
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-          const parkData = generateUniqueParkData('version-001');
-          // Create submission
-          const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
-          // Approve submission
-          const approval = await approveSubmission(submissionId, [itemId], authToken);
-          if (!approval.success) {
-            throw new Error(`Approval failed: ${approval.error}`);
-          }
+          // Create a park
+          const slug = `test-park-${Date.now()}`;
+          const { data: park, error: createError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Version Test Park',
+              slug,
+              park_type: 'theme_park',
+              status: 'operating'
+            })
+            .select('id')

-          // Get approved entity ID
-          const { data: item } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', itemId)
            .single();

-          if (!item?.approved_entity_id) {
-            throw new Error('No entity ID returned after approval');
+          if (createError) throw new Error(`Park creation failed: ${createError.message}`);
+          if (!park) throw new Error('No park returned from insert');

+          parkId = park.id;

+          // Poll for version creation
+          let v1: any = null;
+          const pollStart = Date.now();
+          while (!v1 && Date.now() - pollStart < 5000) {
+            const { data } = await supabase
+              .from('park_versions')
+              .select('version_id')
+              .eq('park_id', park.id)
+              .eq('version_number', 1)
+              .single();

+            if (data) {
+              v1 = data;
+              break;
+            }
+            await new Promise(resolve => setTimeout(resolve, 100));
          }

-          const parkId = item.approved_entity_id;
-          tracker.track('parks', parkId);
+          // Check version was created
+          const { data: version, error: versionError } = await supabase
+            .from('park_versions')
+            .select('*')
+            .eq('park_id', park.id)
+            .eq('version_number', 1)
+            .single();

-          // Poll for park entity
-          const park = await pollForEntity('parks', parkId);
-          if (!park) throw new Error('Park not created after approval');
+          if (versionError) throw new Error(`Version query failed: ${versionError.message}`);

-          // Verify version 1 was created automatically
-          const version = await pollForVersion('park', parkId, 1);
          if (!version) throw new Error('Version 1 not created');
-          if (version.name !== parkData.name) {
-            throw new Error(`Version has incorrect name: expected "${parkData.name}", got "${version.name}"`);
-          }
-          if (version.change_type !== 'created') {
-            throw new Error(`Expected change_type "created", got "${version.change_type}"`);
-          }
-          if (!version.is_current) {
-            throw new Error('Version is not marked as current');
-          }
+          if (version.name !== 'Version Test Park') throw new Error('Version has incorrect name');
+          if (version.change_type !== 'created') throw new Error(`Expected change_type "created", got "${version.change_type}"`);
+          if (!version.is_current) throw new Error('Version is not marked as current');

          const duration = Date.now() - startTime;

@@ -91,12 +84,10 @@ export const versioningTestSuite: TestSuite = {
            duration,
            timestamp: new Date().toISOString(),
            details: {
-              parkId,
-              submissionId,
+              parkId: park.id,
              versionNumber: version.version_number,
              changeType: version.change_type,
-              isCurrent: version.is_current,
-              followedPipeline: true
+              isCurrent: version.is_current
            }
          };
        } catch (error) {
@@ -107,86 +98,84 @@ export const versioningTestSuite: TestSuite = {
            suite: 'Versioning & Rollback',
            status: 'fail',
            duration,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            stack: error instanceof Error ? error.stack : undefined,
            timestamp: new Date().toISOString()
          };
        } finally {
-          await tracker.cleanup();
+          // Cleanup
+          if (parkId) {
+            await supabase.from('parks').delete().eq('id', parkId);
+          }
        }
      }
    },
    {
      id: 'version-002',
      name: 'Automatic Version Creation on Update',
-      description: 'Verifies version 2 is created when entity is updated via pipeline',
+      description: 'Verifies version 2 is created when entity is updated',
      run: async (): Promise<TestResult> => {
        const startTime = Date.now();
        const tracker = new TestDataTracker();
+        let parkId: string | null = null;

        try {
-          // Create and approve initial park
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-          const parkData = generateUniqueParkData('version-002');
-          const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
-          const approval = await approveSubmission(submissionId, [itemId], authToken);
-          if (!approval.success) {
-            throw new Error(`Initial approval failed: ${approval.error}`);
-          }
+          // Create a park
+          const slug = `test-park-${Date.now()}`;
+          const { data: park, error: createError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Original Name',
+              slug,
+              park_type: 'theme_park',
+              status: 'operating'
+            })
+            .select('id')

-          // Get park ID
-          const { data: item } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', itemId)
            .single();

-          const parkId = item?.approved_entity_id;
-          if (!parkId) throw new Error('No park ID after approval');
+          if (createError) throw new Error(`Park creation failed: ${createError.message}`);
+          if (!park) throw new Error('No park returned');

-          tracker.track('parks', parkId);
+          parkId = park.id;

          // Wait for version 1
-          const v1 = await pollForVersion('park', parkId, 1);
-          if (!v1) throw new Error('Version 1 not created');
+          await new Promise(resolve => setTimeout(resolve, 100));

-          // Update park directly (simulating approved edit)
-          // In production, this would go through edit submission pipeline
+          // Update the park
          const { error: updateError } = await supabase
            .from('parks')
-            .update({ name: 'Updated Name', description: 'Updated Description' })
-            .eq('id', parkId);
+            .update({ name: 'Updated Name' })
+            .eq('id', park.id);

          if (updateError) throw new Error(`Park update failed: ${updateError.message}`);

-          // Verify version 2 created
-          const v2 = await pollForVersion('park', parkId, 2);
-          if (!v2) throw new Error('Version 2 not created after update');
+          // Wait for version 2
+          await new Promise(resolve => setTimeout(resolve, 100));

-          if (v2.name !== 'Updated Name') {
-            throw new Error(`Version 2 has incorrect name: expected "Updated Name", got "${v2.name}"`);
-          }
-          if (v2.change_type !== 'updated') {
-            throw new Error(`Expected change_type "updated", got "${v2.change_type}"`);
-          }
-          if (!v2.is_current) {
-            throw new Error('Version 2 is not marked as current');
-          }
+          // Check version 2 exists
+          const { data: v2, error: v2Error } = await supabase
+            .from('park_versions')
+            .select('*')
+            .eq('park_id', park.id)
+            .eq('version_number', 2)
+            .single();

-          // Verify version 1 is no longer current
-          const { data: v1Updated } = await supabase
+          if (v2Error) throw new Error(`Version 2 query failed: ${v2Error.message}`);
+          if (!v2) throw new Error('Version 2 not created');
+          if (v2.name !== 'Updated Name') throw new Error('Version 2 has incorrect name');
+          if (v2.change_type !== 'updated') throw new Error(`Expected change_type "updated", got "${v2.change_type}"`);
+          if (!v2.is_current) throw new Error('Version 2 is not marked as current');

+          // Check version 1 is no longer current
+          const { data: v1, error: v1Error } = await supabase
            .from('park_versions')
            .select('is_current')
-            .eq('park_id', parkId)
+            .eq('park_id', park.id)
            .eq('version_number', 1)
            .single();

-          if (v1Updated?.is_current) {
-            throw new Error('Version 1 is still marked as current');
-          }
+          if (v1Error) throw new Error(`Version 1 query failed: ${v1Error.message}`);
+          if (v1?.is_current) throw new Error('Version 1 is still marked as current');

          const duration = Date.now() - startTime;

@@ -198,8 +187,8 @@ export const versioningTestSuite: TestSuite = {
            duration,
            timestamp: new Date().toISOString(),
            details: {
-              parkId,
-              v1IsCurrent: v1Updated?.is_current,
+              parkId: park.id,
+              v1IsCurrent: v1?.is_current,
              v2IsCurrent: v2.is_current,
              v2ChangeType: v2.change_type
            }
@@ -212,12 +201,16 @@ export const versioningTestSuite: TestSuite = {
            suite: 'Versioning & Rollback',
            status: 'fail',
            duration,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            stack: error instanceof Error ? error.stack : undefined,
            timestamp: new Date().toISOString()
          };
        } finally {
          await tracker.cleanup();
+          const remaining = await tracker.verifyCleanup();
+          if (remaining.length > 0) {
+            console.warn('version-001 cleanup incomplete:', remaining);
+          }
        }
      }
    },
@@ -228,37 +221,48 @@ export const versioningTestSuite: TestSuite = {
      run: async (): Promise<TestResult> => {
        const startTime = Date.now();
        const tracker = new TestDataTracker();
+        let parkId: string | null = null;

        try {
-          // Create and approve park
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-          const parkData = generateUniqueParkData('version-003');
-          const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
-          const approval = await approveSubmission(submissionId, [itemId], authToken);
-          if (!approval.success) {
-            throw new Error(`Approval failed: ${approval.error}`);
-          }
+          // Create a park
+          const slug = `test-park-${Date.now()}`;
+          const { data: park, error: createError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Rollback Test Park',
+              slug,
+              park_type: 'theme_park',
+              status: 'operating'
+            })
+            .select('id')

-          // Get park ID
-          const { data: item } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', itemId)
            .single();

-          const parkId = item?.approved_entity_id;
-          if (!parkId) throw new Error('No park ID after approval');
+          if (createError) throw new Error(`Park creation failed: ${createError.message}`);
+          if (!park) throw new Error('No park returned');

-          tracker.track('parks', parkId);
+          parkId = park.id;

-          // Wait for version 1
-          const v1 = await pollForVersion('park', parkId, 1);
-          if (!v1) throw new Error('Version 1 not created');
+          // Poll for version creation
+          let v1: any = null;
+          const pollStart = Date.now();
+          while (!v1 && Date.now() - pollStart < 5000) {
+            const { data } = await supabase
+              .from('park_versions')
+              .select('version_id')
+              .eq('park_id', park.id)
+              .eq('version_number', 1)
+              .single();

-          // Check current user role
+            if (data) {
+              v1 = data;
+              break;
+            }
+            await new Promise(resolve => setTimeout(resolve, 100));
+          }

+          if (!v1) throw new Error('Version 1 not created after 5s timeout');

+          // Check current user is moderator
          const { data: { user } } = await supabase.auth.getUser();
          if (!user) throw new Error('No authenticated user');

@@ -267,13 +271,14 @@ export const versioningTestSuite: TestSuite = {
          // Try rollback
          const { error: rollbackError } = await supabase.rpc('rollback_to_version', {
            p_entity_type: 'park',
-            p_entity_id: parkId,
+            p_entity_id: park.id,
            p_target_version_id: v1.version_id,
            p_changed_by: user.id,
            p_reason: 'Authorization test'
          });

-          // Verify authorization enforcement
+          // If user is moderator, rollback should succeed
+          // If not, rollback should fail with permission error
          if (isMod && rollbackError) {
            throw new Error(`Rollback failed for moderator: ${rollbackError.message}`);
          }
@@ -305,12 +310,16 @@ export const versioningTestSuite: TestSuite = {
            suite: 'Versioning & Rollback',
            status: 'fail',
            duration,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            stack: error instanceof Error ? error.stack : undefined,
            timestamp: new Date().toISOString()
          };
        } finally {
          await tracker.cleanup();
+          const remaining = await tracker.verifyCleanup();
+          if (remaining.length > 0) {
+            console.warn('version-002 cleanup incomplete:', remaining);
+          }
        }
      }
    },
@@ -321,6 +330,7 @@ export const versioningTestSuite: TestSuite = {
      run: async (): Promise<TestResult> => {
        const startTime = Date.now();
        const tracker = new TestDataTracker();
+        let parkId: string | null = null;

        try {
          // Check if user is moderator
@@ -330,6 +340,7 @@ export const versioningTestSuite: TestSuite = {
          const { data: isMod } = await supabase.rpc('is_moderator', { _user_id: user.id });

          if (!isMod) {
+            // Skip test if not moderator
            const duration = Date.now() - startTime;
            return {
              id: 'version-004',
@@ -342,54 +353,61 @@ export const versioningTestSuite: TestSuite = {
            };
          }

-          // Create and approve park
-          const userId = await getCurrentUserId();
-          const authToken = await getAuthToken();
-          const parkData = {
-            ...generateUniqueParkData('version-004'),
-            description: 'Original Description'
-          };
-          const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
-          const approval = await approveSubmission(submissionId, [itemId], authToken);
-          if (!approval.success) {
-            throw new Error(`Approval failed: ${approval.error}`);
-          }
+          // Create park
+          const slug = `test-park-${Date.now()}`;
+          const { data: park, error: createError } = await supabase
+            .from('parks')
+            .insert({
+              name: 'Original Name',
+              slug,
+              park_type: 'theme_park',
+              status: 'operating',
+              description: 'Original Description'
+            })
+            .select('id')

-          // Get park ID
-          const { data: item } = await supabase
-            .from('submission_items')
-            .select('approved_entity_id')
-            .eq('id', itemId)
            .single();

-          const parkId = item?.approved_entity_id;
-          if (!parkId) throw new Error('No park ID after approval');
+          if (createError) throw new Error(`Park creation failed: ${createError.message}`);
+          if (!park) throw new Error('No park returned');

-          tracker.track('parks', parkId);
+          parkId = park.id;
+          await new Promise(resolve => setTimeout(resolve, 100));

-          // Wait for version 1
-          const v1 = await pollForVersion('park', parkId, 1);
-          if (!v1) throw new Error('Version 1 not created');
+          // Get version 1
+          const { data: v1, error: v1Error } = await supabase
+            .from('park_versions')
+            .select('version_id, name, description')
+            .eq('park_id', park.id)
+            .eq('version_number', 1)
+            .single();

+          if (v1Error || !v1) throw new Error('Version 1 not found');

          // Update park
          const { error: updateError } = await supabase
            .from('parks')
            .update({ name: 'Modified Name', description: 'Modified Description' })
-            .eq('id', parkId);
+            .eq('id', park.id);

          if (updateError) throw new Error(`Park update failed: ${updateError.message}`);

-          // Wait for version 2
-          const v2 = await pollForVersion('park', parkId, 2);
+          await new Promise(resolve => setTimeout(resolve, 100));
+          // Verify version 2
+          const { data: v2 } = await supabase
+            .from('park_versions')
+            .select('version_number, name')
+            .eq('park_id', park.id)
+            .eq('version_number', 2)
+            .single();

          if (!v2) throw new Error('Version 2 not created');
          if (v2.name !== 'Modified Name') throw new Error('Version 2 has incorrect data');

          // Rollback to version 1
          const { error: rollbackError } = await supabase.rpc('rollback_to_version', {
            p_entity_type: 'park',
-            p_entity_id: parkId,
+            p_entity_id: park.id,
            p_target_version_id: v1.version_id,
            p_changed_by: user.id,
            p_reason: 'Integration test rollback'
@@ -397,24 +415,37 @@ export const versioningTestSuite: TestSuite = {

          if (rollbackError) throw new Error(`Rollback failed: ${rollbackError.message}`);

-          // Verify park data restored
-          const restored = await pollForEntity('parks', parkId, 3000);
-          if (!restored) throw new Error('Could not fetch restored park');
+          await new Promise(resolve => setTimeout(resolve, 200));

-          if (restored.name !== parkData.name) {
-            throw new Error(`Rollback failed: expected "${parkData.name}", got "${restored.name}"`);
+          // Verify park data restored
+          const { data: restored, error: restoredError } = await supabase
+            .from('parks')
+            .select('name, description')
+            .eq('id', park.id)
+            .single();

+          if (restoredError) throw new Error(`Failed to fetch restored park: ${restoredError.message}`);
+          if (!restored) throw new Error('Restored park not found');
+          if (restored.name !== 'Original Name') {
+            throw new Error(`Rollback failed: expected "Original Name", got "${restored.name}"`);
          }
          if (restored.description !== 'Original Description') {
-            throw new Error(`Description not restored: got "${restored.description}"`);
+            throw new Error(`Description not restored: expected "Original Description", got "${restored.description}"`);
          }

          // Verify version 3 created with change_type = 'restored'
-          const v3 = await pollForVersion('park', parkId, 3, 3000);
-          if (!v3) throw new Error('Version 3 (restored) not created');
+          const { data: v3, error: v3Error } = await supabase
+            .from('park_versions')
+            .select('*')
+            .eq('park_id', park.id)
+            .eq('version_number', 3)
+            .single();

+          if (v3Error || !v3) throw new Error('Version 3 (restored) not created');
          if (v3.change_type !== 'restored') {
            throw new Error(`Expected change_type "restored", got "${v3.change_type}"`);
          }
-          if (v3.name !== parkData.name) throw new Error('Version 3 has incorrect data');
+          if (v3.name !== 'Original Name') throw new Error('Version 3 has incorrect data');
          if (!v3.is_current) throw new Error('Version 3 is not marked as current');

          const duration = Date.now() - startTime;
@@ -427,7 +458,7 @@ export const versioningTestSuite: TestSuite = {
            duration,
            timestamp: new Date().toISOString(),
            details: {
-              parkId,
+              parkId: park.id,
              versionsCreated: 3,
              dataRestored: true,
              v3ChangeType: v3.change_type,
@@ -442,12 +473,16 @@ export const versioningTestSuite: TestSuite = {
            suite: 'Versioning & Rollback',
            status: 'fail',
            duration,
-            error: formatTestError(error),
+            error: error instanceof Error ? error.message : String(error),
            stack: error instanceof Error ? error.stack : undefined,
            timestamp: new Date().toISOString()
          };
        } finally {
          await tracker.cleanup();
+          const remaining = await tracker.verifyCleanup();
+          if (remaining.length > 0) {
+            console.warn('version-003 cleanup incomplete:', remaining);
+          }
        }
      }
    }

@@ -1,441 +0,0 @@
-/**
- * Test Data Cleanup Utility
- *
- * Safely removes test fixtures created during integration tests.
- *
- * SAFETY FEATURES:
- * - Only deletes records marked with is_test_data = true
- * - Only deletes records with test-specific naming patterns
- * - Cascading deletes handled by database foreign keys
- * - Detailed logging of all deletions
- * - Rollback support via transactions
- */
-
-import { supabase } from '@/lib/supabaseClient';
-import { handleError } from '@/lib/errorHandler';
-
-export interface CleanupResult {
-  table: string;
-  deleted: number;
-  duration: number;
-  error?: string;
-}
-
-export interface CleanupSummary {
-  totalDeleted: number;
-  totalDuration: number;
-  results: CleanupResult[];
-  success: boolean;
-}
-
-/**
- * Delete test data from a specific table using type-safe queries
- */
-async function cleanupParks(): Promise<CleanupResult> {
-  const startTime = Date.now();
-  try {
-    const { error, count } = await supabase
-      .from('parks')
-      .delete()
-      .eq('is_test_data', true);
-
-    if (error) throw error;
-    console.log(`✓ Cleaned ${count || 0} test parks`);
-    return { table: 'parks', deleted: count || 0, duration: Date.now() - startTime };
-  } catch (error) {
-    return {
-      table: 'parks',
-      deleted: 0,
-      duration: Date.now() - startTime,
-      error: error instanceof Error ? error.message : String(error)
-    };
-  }
-}
-
-async function cleanupRides(): Promise<CleanupResult> {
-  const startTime = Date.now();
-  try {
-    const { error, count } = await supabase
-      .from('rides')
-      .delete()
-      .eq('is_test_data', true);
-
-    if (error) throw error;
-    console.log(`✓ Cleaned ${count || 0} test rides`);
-    return { table: 'rides', deleted: count || 0, duration: Date.now() - startTime };
-  } catch (error) {
-    return {
-      table: 'rides',
-      deleted: 0,
-      duration: Date.now() - startTime,
-      error: error instanceof Error ? error.message : String(error)
-    };
-  }
-}
-
-async function cleanupCompanies(): Promise<CleanupResult> {
-  const startTime = Date.now();
-  try {
-    const { error, count } = await supabase
-      .from('companies')
-      .delete()
-      .eq('is_test_data', true);
-
-    if (error) throw error;
-    console.log(`✓ Cleaned ${count || 0} test companies`);
-    return { table: 'companies', deleted: count || 0, duration: Date.now() - startTime };
-  } catch (error) {
-    return {
-      table: 'companies',
-      deleted: 0,
-      duration: Date.now() - startTime,
-      error: error instanceof Error ? error.message : String(error)
-    };
-  }
-}
-
-async function cleanupRideModels(): Promise<CleanupResult> {
-  const startTime = Date.now();
-  try {
-    const { error, count } = await supabase
-      .from('ride_models')
-      .delete()
-      .eq('is_test_data', true);
-
-    if (error) throw error;
-    console.log(`✓ Cleaned ${count || 0} test ride models`);
-    return { table: 'ride_models', deleted: count || 0, duration: Date.now() - startTime };
-  } catch (error) {
-    return {
-      table: 'ride_models',
-      deleted: 0,
-      duration: Date.now() - startTime,
-      error: error instanceof Error ? error.message : String(error)
-    };
-  }
-}
-
-async function cleanupLocations(): Promise<CleanupResult> {
-  const startTime = Date.now();
-  try {
-    const { error, count } = await supabase
-      .from('locations')
-      .delete()
-      .eq('is_test_data', true);
-
-    if (error) throw error;
-    console.log(`✓ Cleaned ${count || 0} test locations`);
-    return { table: 'locations', deleted: count || 0, duration: Date.now() - startTime };
-  } catch (error) {
-    return {
-      table: 'locations',
-      deleted: 0,
-      duration: Date.now() - startTime,
-      error: error instanceof Error ? error.message : String(error)
-    };
-  }
-}
-
-/**
- * Clean up test submissions (must be done before entities due to FK constraints)
- */
-async function cleanupSubmissions(): Promise<CleanupResult[]> {
-  const results: CleanupResult[] = [];
-
-  // Clean content_submissions (cascade will handle related tables)
-  const startTime = Date.now();
-  try {
-    const { error, count } = await supabase
-      .from('content_submissions')
-      .delete()
-      .eq('is_test_data', true);
-
-    if (!error) {
-      results.push({
-        table: 'content_submissions',
-        deleted: count || 0,
-        duration: Date.now() - startTime
-      });
-      console.log(`✓ Cleaned ${count || 0} test submissions (cascade cleanup)`);
-    } else {
-      results.push({
-        table: 'content_submissions',
-        deleted: 0,
-        duration: Date.now() - startTime,
-        error: error.message
-      });
-    }
-  } catch (error) {
-    results.push({
-      table: 'content_submissions',
-      deleted: 0,
-      duration: Date.now() - startTime,
-      error: error instanceof Error ? error.message : String(error)
-    });
-  }
-
-  return results;
-}
-
-/**
- * Clean up test versions (historical records)
- */
-async function cleanupVersions(): Promise<CleanupResult[]> {
-  const results: CleanupResult[] = [];
-
-  // Clean park versions
-  try {
-    const { error, count } = await supabase.from('park_versions').delete().eq('is_test_data', true);
-    results.push({
-      table: 'park_versions',
-      deleted: error ? 0 : (count || 0),
-      duration: 0,
-      error: error?.message
-    });
-  } catch (e) {
-    results.push({ table: 'park_versions', deleted: 0, duration: 0, error: String(e) });
-  }
-
-  // Clean ride versions
-  try {
-    const { error, count } = await supabase.from('ride_versions').delete().eq('is_test_data', true);
results.push({
|
|
||||||
table: 'ride_versions',
|
|
||||||
deleted: error ? 0 : (count || 0),
|
|
||||||
duration: 0,
|
|
||||||
error: error?.message
|
|
||||||
});
|
|
||||||
} catch (e) {
|
|
||||||
results.push({ table: 'ride_versions', deleted: 0, duration: 0, error: String(e) });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clean company versions
|
|
||||||
try {
|
|
||||||
const { error, count } = await supabase.from('company_versions').delete().eq('is_test_data', true);
|
|
||||||
results.push({
|
|
||||||
table: 'company_versions',
|
|
||||||
deleted: error ? 0 : (count || 0),
|
|
||||||
duration: 0,
|
|
||||||
error: error?.message
|
|
||||||
});
|
|
||||||
} catch (e) {
|
|
||||||
results.push({ table: 'company_versions', deleted: 0, duration: 0, error: String(e) });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Clean ride_model versions
|
|
||||||
try {
|
|
||||||
const { error, count } = await supabase.from('ride_model_versions').delete().eq('is_test_data', true);
|
|
||||||
results.push({
|
|
||||||
table: 'ride_model_versions',
|
|
||||||
deleted: error ? 0 : (count || 0),
|
|
||||||
duration: 0,
|
|
||||||
error: error?.message
|
|
||||||
});
|
|
||||||
} catch (e) {
|
|
||||||
results.push({ table: 'ride_model_versions', deleted: 0, duration: 0, error: String(e) });
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log(`✓ Cleaned ${results.reduce((sum, r) => sum + r.deleted, 0)} version records`);
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clean up test entities (main tables)
|
|
||||||
*/
|
|
||||||
async function cleanupEntities(): Promise<CleanupResult[]> {
|
|
||||||
const results: CleanupResult[] = [];
|
|
||||||
|
|
||||||
// Order matters: clean dependent entities first
|
|
||||||
results.push(await cleanupRides());
|
|
||||||
results.push(await cleanupParks());
|
|
||||||
results.push(await cleanupRideModels());
|
|
||||||
results.push(await cleanupCompanies());
|
|
||||||
results.push(await cleanupLocations());
|
|
||||||
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clean up test-related metadata and tracking tables
|
|
||||||
*/
|
|
||||||
async function cleanupMetadata(): Promise<CleanupResult[]> {
|
|
||||||
const results: CleanupResult[] = [];
|
|
||||||
|
|
||||||
// Clean approval metrics for test submissions
|
|
||||||
try {
|
|
||||||
const { data: testSubmissions } = await supabase
|
|
||||||
.from('content_submissions')
|
|
||||||
.select('id')
|
|
||||||
.eq('is_test_data', true);
|
|
||||||
|
|
||||||
if (testSubmissions && testSubmissions.length > 0) {
|
|
||||||
const submissionIds = testSubmissions.map(s => s.id);
|
|
||||||
|
|
||||||
const { error, count } = await supabase
|
|
||||||
.from('approval_transaction_metrics')
|
|
||||||
.delete()
|
|
||||||
.in('submission_id', submissionIds);
|
|
||||||
|
|
||||||
if (!error) {
|
|
||||||
results.push({
|
|
||||||
table: 'approval_transaction_metrics',
|
|
||||||
deleted: count || 0,
|
|
||||||
duration: 0
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to cleanup metadata:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Run complete test data cleanup
|
|
||||||
*
|
|
||||||
* Executes cleanup in proper order to respect foreign key constraints:
|
|
||||||
* 1. Submissions (depend on entities)
|
|
||||||
* 2. Versions (historical records)
|
|
||||||
* 3. Metadata (metrics, audit logs)
|
|
||||||
* 4. Entities (main tables)
|
|
||||||
*/
|
|
||||||
export async function cleanupTestData(): Promise<CleanupSummary> {
|
|
||||||
const startTime = Date.now();
|
|
||||||
const allResults: CleanupResult[] = [];
|
|
||||||
|
|
||||||
console.log('🧹 Starting test data cleanup...');
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Phase 1: Clean submissions first (they reference entities)
|
|
||||||
console.log('\n📋 Phase 1: Cleaning submissions...');
|
|
||||||
const submissionResults = await cleanupSubmissions();
|
|
||||||
allResults.push(...submissionResults);
|
|
||||||
|
|
||||||
// Phase 2: Clean versions (historical records)
|
|
||||||
console.log('\n📚 Phase 2: Cleaning version history...');
|
|
||||||
const versionResults = await cleanupVersions();
|
|
||||||
allResults.push(...versionResults);
|
|
||||||
|
|
||||||
// Phase 3: Clean metadata
|
|
||||||
console.log('\n📊 Phase 3: Cleaning metadata...');
|
|
||||||
const metadataResults = await cleanupMetadata();
|
|
||||||
allResults.push(...metadataResults);
|
|
||||||
|
|
||||||
// Phase 4: Clean entities (main tables)
|
|
||||||
console.log('\n🏗️ Phase 4: Cleaning entities...');
|
|
||||||
const entityResults = await cleanupEntities();
|
|
||||||
allResults.push(...entityResults);
|
|
||||||
|
|
||||||
const totalDeleted = allResults.reduce((sum, r) => sum + r.deleted, 0);
|
|
||||||
const totalDuration = Date.now() - startTime;
|
|
||||||
const hasErrors = allResults.some(r => r.error);
|
|
||||||
|
|
||||||
console.log(`\n✅ Cleanup complete: ${totalDeleted} records deleted in ${totalDuration}ms`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
totalDeleted,
|
|
||||||
totalDuration,
|
|
||||||
results: allResults,
|
|
||||||
success: !hasErrors
|
|
||||||
};
|
|
||||||
|
|
||||||
} catch (error) {
|
|
||||||
console.error('❌ Cleanup failed:', error);
|
|
||||||
|
|
||||||
return {
|
|
||||||
totalDeleted: allResults.reduce((sum, r) => sum + r.deleted, 0),
|
|
||||||
totalDuration: Date.now() - startTime,
|
|
||||||
results: allResults,
|
|
||||||
success: false
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Clean up only specific entity types (selective cleanup)
|
|
||||||
*/
|
|
||||||
export async function cleanupEntityType(
|
|
||||||
entityType: 'parks' | 'rides' | 'companies' | 'ride_models' | 'locations'
|
|
||||||
): Promise<CleanupResult> {
|
|
||||||
console.log(`🧹 Cleaning test ${entityType}...`);
|
|
||||||
|
|
||||||
switch (entityType) {
|
|
||||||
case 'parks':
|
|
||||||
return cleanupParks();
|
|
||||||
case 'rides':
|
|
||||||
return cleanupRides();
|
|
||||||
case 'companies':
|
|
||||||
return cleanupCompanies();
|
|
||||||
case 'ride_models':
|
|
||||||
return cleanupRideModels();
|
|
||||||
case 'locations':
|
|
||||||
return cleanupLocations();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Verify cleanup was successful (safety check)
|
|
||||||
*/
|
|
||||||
export async function verifyCleanup(): Promise<{
|
|
||||||
remainingTestData: number;
|
|
||||||
tables: Record<string, number>;
|
|
||||||
}> {
|
|
||||||
const counts: Record<string, number> = {};
|
|
||||||
let total = 0;
|
|
||||||
|
|
||||||
// Check parks
|
|
||||||
const { count: parksCount } = await supabase
|
|
||||||
.from('parks')
|
|
||||||
.select('*', { count: 'exact', head: true })
|
|
||||||
.eq('is_test_data', true);
|
|
||||||
if (parksCount !== null) {
|
|
||||||
counts.parks = parksCount;
|
|
||||||
total += parksCount;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check rides
|
|
||||||
const { count: ridesCount } = await supabase
|
|
||||||
.from('rides')
|
|
||||||
.select('*', { count: 'exact', head: true })
|
|
||||||
.eq('is_test_data', true);
|
|
||||||
if (ridesCount !== null) {
|
|
||||||
counts.rides = ridesCount;
|
|
||||||
total += ridesCount;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check companies
|
|
||||||
const { count: companiesCount } = await supabase
|
|
||||||
.from('companies')
|
|
||||||
.select('*', { count: 'exact', head: true })
|
|
||||||
.eq('is_test_data', true);
|
|
||||||
if (companiesCount !== null) {
|
|
||||||
counts.companies = companiesCount;
|
|
||||||
total += companiesCount;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check ride_models
|
|
||||||
const { count: rideModelsCount } = await supabase
|
|
||||||
.from('ride_models')
|
|
||||||
.select('*', { count: 'exact', head: true })
|
|
||||||
.eq('is_test_data', true);
|
|
||||||
if (rideModelsCount !== null) {
|
|
||||||
counts.ride_models = rideModelsCount;
|
|
||||||
total += rideModelsCount;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check locations
|
|
||||||
const { count: locationsCount } = await supabase
|
|
||||||
.from('locations')
|
|
||||||
.select('*', { count: 'exact', head: true })
|
|
||||||
.eq('is_test_data', true);
|
|
||||||
if (locationsCount !== null) {
|
|
||||||
counts.locations = locationsCount;
|
|
||||||
total += locationsCount;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
remainingTestData: total,
|
|
||||||
tables: counts
|
|
||||||
};
|
|
||||||
}
|
|
||||||
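For context, the removed utility was meant to be driven once per test run and then verified. A minimal usage sketch, illustrative only since the import path and call site are assumptions not shown in this diff:

// Assumed import path; the file's location in the tree is not visible here.
import { cleanupTestData, verifyCleanup } from './testCleanup';

async function afterTestRun(): Promise<void> {
  const summary = await cleanupTestData();
  console.log(`Deleted ${summary.totalDeleted} rows in ${summary.totalDuration}ms`);

  // Safety check: confirm nothing flagged is_test_data = true survived
  const { remainingTestData, tables } = await verifyCleanup();
  if (remainingTestData > 0) {
    console.warn('Leftover test rows per table:', tables);
  }
}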
@@ -8,8 +8,6 @@
 import { moderationTestSuite } from './suites/moderationTests';
 import { moderationLockTestSuite } from './suites/moderationLockTests';
 import { moderationDependencyTestSuite } from './suites/moderationDependencyTests';
-import { approvalPipelineTestSuite } from './suites/approvalPipelineTests';
-import { cleanupTestData, type CleanupSummary } from './testCleanup';
 
 /**
  * Registry of all available test suites
@@ -17,8 +15,7 @@ import { cleanupTestData, type CleanupSummary } from './testCleanup';
 export const ALL_TEST_SUITES = [
   moderationTestSuite,
   moderationLockTestSuite,
-  moderationDependencyTestSuite,
-  approvalPipelineTestSuite,
+  moderationDependencyTestSuite
 ];
 
 export interface TestResult {
@@ -52,25 +49,9 @@ export class IntegrationTestRunner {
   private isRunning = false;
   private shouldStop = false;
   private onProgress?: (result: TestResult) => void;
-  private delayBetweenTests: number;
-  private cleanupEnabled: boolean;
-  private cleanupSummary?: CleanupSummary;
 
-  constructor(
-    onProgress?: (result: TestResult) => void,
-    delayBetweenTests: number = 8000,
-    cleanupEnabled: boolean = true
-  ) {
+  constructor(onProgress?: (result: TestResult) => void) {
     this.onProgress = onProgress;
-    this.delayBetweenTests = delayBetweenTests; // Default 8 seconds to prevent rate limiting
-    this.cleanupEnabled = cleanupEnabled;
-  }
-
-  /**
-   * Wait for specified milliseconds (for rate limiting prevention)
-   */
-  private async delay(ms: number): Promise<void> {
-    return new Promise(resolve => setTimeout(resolve, ms));
   }
 
 /**
@@ -140,50 +121,13 @@ export class IntegrationTestRunner {
   async runSuite(suite: TestSuite): Promise<TestResult[]> {
     const suiteResults: TestResult[] = [];
 
-    for (let i = 0; i < suite.tests.length; i++) {
-      const test = suite.tests[i];
+    for (const test of suite.tests) {
       const result = await this.runTest(test, suite.name);
       suiteResults.push(result);
 
       if (this.shouldStop) {
         break;
       }
-
-      // Add delay between tests to prevent rate limiting (except after the last test)
-      if (i < suite.tests.length - 1 && this.delayBetweenTests > 0) {
-        // Report delay status with countdown
-        const delaySeconds = this.delayBetweenTests / 1000;
-        const delayResult: TestResult = {
-          id: `delay-${Date.now()}`,
-          name: `⏳ Rate limit delay: ${delaySeconds}s`,
-          suite: suite.name,
-          status: 'running',
-          duration: 0,
-          timestamp: new Date().toISOString(),
-          details: {
-            reason: 'Pausing to prevent rate limiting',
-            delayMs: this.delayBetweenTests
-          }
-        };
-
-        if (this.onProgress) {
-          this.onProgress(delayResult);
-        }
-
-        await this.delay(this.delayBetweenTests);
-
-        // Mark delay as complete
-        const delayCompleteResult: TestResult = {
-          ...delayResult,
-          status: 'skip',
-          duration: this.delayBetweenTests,
-          details: { reason: 'Rate limit delay completed' }
-        };
-
-        if (this.onProgress) {
-          this.onProgress(delayCompleteResult);
-        }
-      }
     }
 
     return suiteResults;
@@ -197,145 +141,12 @@ export class IntegrationTestRunner {
     this.isRunning = true;
     this.shouldStop = false;
 
-    // Track submission-heavy suites for adaptive delays
-    const submissionHeavySuites = [
-      'Entity Submission & Validation',
-      'Approval Pipeline',
-      'Unit Conversion Tests',
-      'Performance & Scalability'
-    ];
-
-    for (let i = 0; i < suites.length; i++) {
-      const isHeavySuite = submissionHeavySuites.includes(suites[i].name);
-
-      // PREEMPTIVE delay BEFORE heavy suites start (prevents rate limit buildup)
-      if (isHeavySuite && i > 0) {
-        const preemptiveDelayMs = 8000; // 8s "cooldown" before heavy suite
-        const delaySeconds = preemptiveDelayMs / 1000;
-        const delayResult: TestResult = {
-          id: `preemptive-delay-${Date.now()}`,
-          name: `⏳ Pre-suite cooldown: ${delaySeconds}s (preparing for ${suites[i].name})`,
-          suite: 'System',
-          status: 'running',
-          duration: 0,
-          timestamp: new Date().toISOString(),
-          details: {
-            reason: 'Preemptive rate limit prevention before submission-heavy suite',
-            nextSuite: suites[i].name
-          }
-        };
-
-        if (this.onProgress) {
-          this.onProgress(delayResult);
-        }
-
-        await this.delay(preemptiveDelayMs);
-
-        if (this.onProgress) {
-          this.onProgress({
-            ...delayResult,
-            status: 'skip',
-            duration: preemptiveDelayMs,
-            details: { reason: 'Cooldown completed' }
-          });
-        }
-      }
-
-      await this.runSuite(suites[i]);
+    for (const suite of suites) {
+      await this.runSuite(suite);
 
       if (this.shouldStop) {
         break;
       }
-
-      // REACTIVE delay AFTER suites complete
-      if (i < suites.length - 1 && this.delayBetweenTests > 0) {
-        // Longer delay after submission-heavy suites
-        const delayMs = isHeavySuite
-          ? this.delayBetweenTests * 2.25 // 18s delay after heavy suites (increased from 12s)
-          : this.delayBetweenTests; // 8s delay after others (increased from 6s)
-
-        const delaySeconds = delayMs / 1000;
-        const delayResult: TestResult = {
-          id: `suite-delay-${Date.now()}`,
-          name: `⏳ Suite completion delay: ${delaySeconds}s${isHeavySuite ? ' (submission-heavy)' : ''}`,
-          suite: 'System',
-          status: 'running',
-          duration: 0,
-          timestamp: new Date().toISOString(),
-          details: {
-            reason: 'Pausing between suites to prevent rate limiting',
-            isSubmissionHeavy: isHeavySuite
-          }
-        };
-
-        if (this.onProgress) {
-          this.onProgress(delayResult);
-        }
-
-        await this.delay(delayMs);
-
-        if (this.onProgress) {
-          this.onProgress({
-            ...delayResult,
-            status: 'skip',
-            duration: delayMs,
-            details: { reason: 'Suite delay completed' }
-          });
-        }
-      }
-    }
-
-    // Run cleanup after all tests complete (if enabled)
-    if (this.cleanupEnabled && !this.shouldStop) {
-      const cleanupStartResult: TestResult = {
-        id: `cleanup-start-${Date.now()}`,
-        name: '🧹 Starting test data cleanup...',
-        suite: 'System',
-        status: 'running',
-        duration: 0,
-        timestamp: new Date().toISOString(),
-        details: { reason: 'Removing test fixtures to prevent database bloat' }
-      };
-
-      if (this.onProgress) {
-        this.onProgress(cleanupStartResult);
-      }
-
-      try {
-        this.cleanupSummary = await cleanupTestData();
-
-        const cleanupCompleteResult: TestResult = {
-          id: `cleanup-complete-${Date.now()}`,
-          name: `✅ Cleanup complete: ${this.cleanupSummary.totalDeleted} records deleted`,
-          suite: 'System',
-          status: this.cleanupSummary.success ? 'pass' : 'fail',
-          duration: this.cleanupSummary.totalDuration,
-          timestamp: new Date().toISOString(),
-          details: {
-            totalDeleted: this.cleanupSummary.totalDeleted,
-            results: this.cleanupSummary.results,
-            success: this.cleanupSummary.success
-          }
-        };
-
-        if (this.onProgress) {
-          this.onProgress(cleanupCompleteResult);
-        }
-      } catch (error) {
-        const cleanupErrorResult: TestResult = {
-          id: `cleanup-error-${Date.now()}`,
-          name: '❌ Cleanup failed',
-          suite: 'System',
-          status: 'fail',
-          duration: 0,
-          timestamp: new Date().toISOString(),
-          error: error instanceof Error ? error.message : String(error)
-        };
-
-        if (this.onProgress) {
-          this.onProgress(cleanupErrorResult);
-        }
-      }
     }
 
     this.isRunning = false;
@@ -366,7 +177,6 @@ export class IntegrationTestRunner {
     skipped: number;
     running: number;
     totalDuration: number;
-    cleanup?: CleanupSummary;
   } {
     const total = this.results.length;
     const passed = this.results.filter(r => r.status === 'pass').length;
@@ -375,15 +185,7 @@ export class IntegrationTestRunner {
     const running = this.results.filter(r => r.status === 'running').length;
     const totalDuration = this.results.reduce((sum, r) => sum + r.duration, 0);
 
-    return {
-      total,
-      passed,
-      failed,
-      skipped,
-      running,
-      totalDuration,
-      cleanup: this.cleanupSummary
-    };
+    return { total, passed, failed, skipped, running, totalDuration };
   }
 
 /**
@@ -400,20 +202,5 @@ export class IntegrationTestRunner {
     this.results = [];
     this.isRunning = false;
     this.shouldStop = false;
-    this.cleanupSummary = undefined;
-  }
-
-  /**
-   * Get cleanup summary
-   */
-  getCleanupSummary(): CleanupSummary | undefined {
-    return this.cleanupSummary;
-  }
-
-  /**
-   * Enable or disable automatic cleanup
-   */
-  setCleanupEnabled(enabled: boolean): void {
-    this.cleanupEnabled = enabled;
   }
 }
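The constructor change above narrows the runner's public API: the rate-limit delay and cleanup flags are gone, so construction reduces to a single optional progress callback. A minimal before/after sketch (call sites are not part of this diff, so the example is illustrative):

// Old signature, removed above:
// const runner = new IntegrationTestRunner(onProgress, 8000, true);

// New signature:
const runner = new IntegrationTestRunner((result) =>
  console.log(result.suite, result.name, result.status)
);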
@@ -57,6 +57,126 @@ export interface ModerationActionResult {
   shouldRemoveFromQueue: boolean;
 }
 
+/**
+ * Configuration for photo approval
+ */
+interface PhotoApprovalConfig {
+  submissionId: string;
+  moderatorId: string;
+  moderatorNotes?: string;
+}
+
+/**
+ * Approve a photo submission
+ *
+ * Creates photo records in the database and updates submission status.
+ * Handles both new approvals and re-approvals (where photos already exist).
+ *
+ * @param supabase - Supabase client
+ * @param config - Photo approval configuration
+ * @returns Action result with success status and message
+ */
+export async function approvePhotoSubmission(
+  supabase: SupabaseClient,
+  config: PhotoApprovalConfig
+): Promise<ModerationActionResult> {
+  try {
+    // Fetch photo submission from relational tables
+    const { data: photoSubmission, error: fetchError } = await supabase
+      .from('photo_submissions')
+      .select(`
+        *,
+        items:photo_submission_items(*),
+        submission:content_submissions!inner(user_id, status)
+      `)
+      .eq('submission_id', config.submissionId)
+      .single();
+
+    if (fetchError || !photoSubmission) {
+      throw new Error('Failed to fetch photo submission data');
+    }
+
+    if (!photoSubmission.items || photoSubmission.items.length === 0) {
+      throw new Error('No photos found in submission');
+    }
+
+    // Check if photos already exist for this submission (re-approval case)
+    const { data: existingPhotos } = await supabase
+      .from('photos')
+      .select('id')
+      .eq('submission_id', config.submissionId);
+
+    if (!existingPhotos || existingPhotos.length === 0) {
+      // Create new photo records from photo_submission_items
+      const photoRecords = photoSubmission.items.map((item: any) => ({
+        entity_id: photoSubmission.entity_id,
+        entity_type: photoSubmission.entity_type,
+        cloudflare_image_id: item.cloudflare_image_id,
+        cloudflare_image_url: item.cloudflare_image_url,
+        title: item.title || null,
+        caption: item.caption || null,
+        date_taken: item.date_taken || null,
+        order_index: item.order_index,
+        submission_id: photoSubmission.submission_id,
+        submitted_by: photoSubmission.submission?.user_id,
+        approved_by: config.moderatorId,
+        approved_at: new Date().toISOString(),
+      }));
+
+      const { error: insertError } = await supabase
+        .from('photos')
+        .insert(photoRecords);
+
+      if (insertError) {
+        throw insertError;
+      }
+    }
+
+    // Update submission status
+    const { error: updateError } = await supabase
+      .from('content_submissions')
+      .update({
+        status: 'approved' as const,
+        reviewer_id: config.moderatorId,
+        reviewed_at: new Date().toISOString(),
+        reviewer_notes: config.moderatorNotes,
+      })
+      .eq('id', config.submissionId);
+
+    if (updateError) {
+      throw updateError;
+    }
+
+    return {
+      success: true,
+      message: `Successfully approved and published ${photoSubmission.items.length} photo(s)`,
+      shouldRemoveFromQueue: true,
+    };
+  } catch (error: unknown) {
+    handleError(error, {
+      action: 'Approve Photo Submission',
+      userId: config.moderatorId,
+      metadata: { submissionId: config.submissionId }
+    });
+    return {
+      success: false,
+      message: 'Failed to approve photo submission',
+      error: error instanceof Error ? error : new Error(getErrorMessage(error)),
+      shouldRemoveFromQueue: false,
+    };
+  }
+}
+
+/**
+ * Approve a submission with submission_items
+ *
+ * Uses the edge function to process all pending submission items.
+ *
+ * @param supabase - Supabase client
+ * @param submissionId - Submission ID
+ * @param itemIds - Array of item IDs to approve
+ * @returns Action result
+ */
 /**
  * Approve submission items using atomic transaction RPC.
  *
@@ -118,6 +238,194 @@ export async function approveSubmissionItems(
   }
 }
 
+/**
+ * Reject a submission with submission_items
+ *
+ * Cascades rejection to all pending items.
+ *
+ * @param supabase - Supabase client
+ * @param submissionId - Submission ID
+ * @param rejectionReason - Reason for rejection
+ * @returns Action result
+ */
+export async function rejectSubmissionItems(
+  supabase: SupabaseClient,
+  submissionId: string,
+  rejectionReason?: string
+): Promise<ModerationActionResult> {
+  try {
+    const { error: rejectError } = await supabase
+      .from('submission_items')
+      .update({
+        status: 'rejected' as const,
+        rejection_reason: rejectionReason || 'Parent submission rejected',
+        updated_at: new Date().toISOString(),
+      })
+      .eq('submission_id', submissionId)
+      .eq('status', 'pending');
+
+    if (rejectError) {
+      handleError(rejectError, {
+        action: 'Reject Submission Items (Cascade)',
+        metadata: { submissionId }
+      });
+    }
+
+    return {
+      success: true,
+      message: 'Submission items rejected',
+      shouldRemoveFromQueue: false, // Parent rejection will handle removal
+    };
+  } catch (error: unknown) {
+    handleError(error, {
+      action: 'Reject Submission Items',
+      metadata: { submissionId }
+    });
+    return {
+      success: false,
+      message: 'Failed to reject submission items',
+      error: error instanceof Error ? error : new Error(getErrorMessage(error)),
+      shouldRemoveFromQueue: false,
+    };
+  }
+}
+
+/**
+ * Configuration for standard moderation actions
+ */
+export interface ModerationConfig {
+  item: ModerationItem;
+  action: 'approved' | 'rejected';
+  moderatorId: string;
+  moderatorNotes?: string;
+}
+
+/**
+ * Perform a standard moderation action (approve/reject)
+ *
+ * Updates the submission or review status in the database.
+ * Handles both content_submissions and reviews.
+ *
+ * @param supabase - Supabase client
+ * @param config - Moderation configuration
+ * @returns Action result
+ */
+export async function performModerationAction(
+  supabase: SupabaseClient,
+  config: ModerationConfig
+): Promise<ModerationActionResult> {
+  const { item, action, moderatorId, moderatorNotes } = config;
+
+  try {
+    // Handle photo submissions specially
+    if (
+      action === 'approved' &&
+      item.type === 'content_submission' &&
+      item.submission_type === 'photo'
+    ) {
+      return await approvePhotoSubmission(supabase, {
+        submissionId: item.id,
+        moderatorId,
+        moderatorNotes,
+      });
+    }
+
+    // Check if this submission has submission_items
+    if (item.type === 'content_submission') {
+      const { data: submissionItems, error: itemsError } = await supabase
+        .from('submission_items')
+        .select('id, status')
+        .eq('submission_id', item.id)
+        .in('status', ['pending', 'rejected']);
+
+      if (!itemsError && submissionItems && submissionItems.length > 0) {
+        if (action === 'approved') {
+          return await approveSubmissionItems(
+            supabase,
+            item.id,
+            submissionItems.map(i => i.id)
+          );
+        } else if (action === 'rejected') {
+          await rejectSubmissionItems(supabase, item.id, moderatorNotes);
+        }
+      }
+    }
+
+    // Standard moderation flow - Build update object with type-appropriate fields
+    let error: any = null;
+    let data: any = null;
+
+    // Use type-safe table queries based on item type
+    if (item.type === 'review') {
+      const reviewUpdate: {
+        moderation_status: 'approved' | 'rejected' | 'pending';
+        moderated_at: string;
+        moderated_by: string;
+        reviewer_notes?: string;
+      } = {
+        moderation_status: action,
+        moderated_at: new Date().toISOString(),
+        moderated_by: moderatorId,
+        ...(moderatorNotes && { reviewer_notes: moderatorNotes }),
+      };
+
+      const result = await createTableQuery('reviews')
+        .update(reviewUpdate)
+        .eq('id', item.id)
+        .select();
+      error = result.error;
+      data = result.data;
+    } else {
+      const submissionUpdate: {
+        status: 'approved' | 'rejected' | 'pending';
+        reviewed_at: string;
+        reviewer_id: string;
+        reviewer_notes?: string;
+      } = {
+        status: action,
+        reviewed_at: new Date().toISOString(),
+        reviewer_id: moderatorId,
+        ...(moderatorNotes && { reviewer_notes: moderatorNotes }),
+      };
+
+      const result = await createTableQuery('content_submissions')
+        .update(submissionUpdate)
+        .eq('id', item.id)
+        .select();
+      error = result.error;
+      data = result.data;
+    }
+
+    if (error) {
+      throw error;
+    }
+
+    // Check if the update actually affected any rows
+    if (!data || data.length === 0) {
+      throw new Error(
+        'Failed to update item - no rows affected. You might not have permission to moderate this content.'
+      );
+    }
+
+    return {
+      success: true,
+      message: `Content ${action}`,
+      shouldRemoveFromQueue: action === 'approved' || action === 'rejected',
+    };
+  } catch (error: unknown) {
+    handleError(error, {
+      action: `${config.action === 'approved' ? 'Approve' : 'Reject'} Content`,
+      userId: config.moderatorId,
+      metadata: { itemType: item.type, itemId: item.id }
+    });
+    return {
+      success: false,
+      message: `Failed to ${config.action} content`,
+      error: error instanceof Error ? error : new Error(getErrorMessage(error)),
+      shouldRemoveFromQueue: false,
+    };
+  }
+}
 
 /**
  * Configuration for submission deletion
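The new performModerationAction entry point routes photo submissions, item-based submissions, and plain reviews/submissions through one call. A hypothetical call site (supabase, item, currentUser, and removeFromQueue are assumed to exist in the moderation queue UI, which this diff does not show):

const result = await performModerationAction(supabase, {
  item,
  action: 'approved',
  moderatorId: currentUser.id,
  moderatorNotes: 'Verified against the park website',
});

if (result.success && result.shouldRemoveFromQueue) {
  removeFromQueue(item.id); // hypothetical queue helper
}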
@@ -28,12 +28,16 @@ export type { ResolvedEntityNames } from './entities';
 
 // Moderation actions
 export {
+  approvePhotoSubmission,
   approveSubmissionItems,
+  rejectSubmissionItems,
+  performModerationAction,
   deleteSubmission,
 } from './actions';
 
 export type {
   ModerationActionResult,
+  ModerationConfig,
   DeleteSubmissionConfig,
 } from './actions';
 
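With the barrel updated, consumers can pull the new actions from one place; a sketch, assuming the `@/lib/moderation` alias resolves to this module (the alias itself is not shown in this diff):

// Hypothetical consumer import via the barrel:
import {
  approvePhotoSubmission,
  performModerationAction,
  type ModerationConfig,
} from '@/lib/moderation';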
@@ -23,97 +23,6 @@ export interface RetryOptions {
   shouldRetry?: (error: unknown) => boolean;
 }
 
-/**
- * Extract Retry-After value from error headers
- * @param error - The error object
- * @returns Delay in milliseconds, or null if not found
- */
-export function extractRetryAfter(error: unknown): number | null {
-  if (!error || typeof error !== 'object') return null;
-
-  // Check for Retry-After in error object
-  const errorWithHeaders = error as { headers?: Headers | Record<string, string>; retryAfter?: number | string };
-
-  // Direct retryAfter property
-  if (errorWithHeaders.retryAfter) {
-    const retryAfter = errorWithHeaders.retryAfter;
-    if (typeof retryAfter === 'number') {
-      return retryAfter * 1000; // Convert seconds to milliseconds
-    }
-    if (typeof retryAfter === 'string') {
-      // Try parsing as number first (delay-seconds)
-      const seconds = parseInt(retryAfter, 10);
-      if (!isNaN(seconds)) {
-        return seconds * 1000;
-      }
-
-      // Try parsing as HTTP-date
-      const date = new Date(retryAfter);
-      if (!isNaN(date.getTime())) {
-        const delay = date.getTime() - Date.now();
-        return Math.max(0, delay);
-      }
-    }
-  }
-
-  // Check headers object
-  if (errorWithHeaders.headers) {
-    let retryAfterValue: string | null = null;
-
-    if (errorWithHeaders.headers instanceof Headers) {
-      retryAfterValue = errorWithHeaders.headers.get('retry-after');
-    } else if (typeof errorWithHeaders.headers === 'object') {
-      // Check both lowercase and capitalized versions
-      retryAfterValue = errorWithHeaders.headers['retry-after']
-        || errorWithHeaders.headers['Retry-After']
-        || null;
-    }
-
-    if (retryAfterValue) {
-      // Try parsing as number first (delay-seconds)
-      const seconds = parseInt(retryAfterValue, 10);
-      if (!isNaN(seconds)) {
-        return seconds * 1000;
-      }
-
-      // Try parsing as HTTP-date
-      const date = new Date(retryAfterValue);
-      if (!isNaN(date.getTime())) {
-        const delay = date.getTime() - Date.now();
-        return Math.max(0, delay);
-      }
-    }
-  }
-
-  return null;
-}
-
-/**
- * Check if error is a rate limit (429) error
- * @param error - The error to check
- * @returns true if error is a rate limit error
- */
-export function isRateLimitError(error: unknown): boolean {
-  if (!error || typeof error !== 'object') return false;
-
-  const errorWithStatus = error as { status?: number; code?: string };
-
-  // HTTP 429 status
-  if (errorWithStatus.status === 429) return true;
-
-  // Check error message for rate limit indicators
-  if (error instanceof Error) {
-    const message = error.message.toLowerCase();
-    if (message.includes('rate limit') ||
-        message.includes('too many requests') ||
-        message.includes('quota exceeded')) {
-      return true;
-    }
-  }
-
-  return false;
-}
-
 /**
  * Determines if an error is transient and retryable
  * @param error - The error to check
@@ -147,7 +56,7 @@ export function isRetryableError(error: unknown): boolean {
   if (supabaseError.code === 'PGRST000') return true; // Connection error
 
   // HTTP status codes indicating transient failures
-  if (supabaseError.status === 429) return true; // Rate limit - ALWAYS retry
+  if (supabaseError.status === 429) return true; // Rate limit
   if (supabaseError.status === 503) return true; // Service unavailable
   if (supabaseError.status === 504) return true; // Gateway timeout
   if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
@@ -169,46 +78,12 @@ export function isRetryableError(error: unknown): boolean {
 }
 
 /**
- * Calculates delay for next retry attempt using exponential backoff or Retry-After header
+ * Calculates delay for next retry attempt using exponential backoff
  * @param attempt - Current attempt number (0-indexed)
  * @param options - Retry configuration
- * @param error - The error that triggered the retry (to check for Retry-After)
  * @returns Delay in milliseconds
  */
-function calculateBackoffDelay(
-  attempt: number,
-  options: Required<RetryOptions>,
-  error?: unknown
-): number {
-  // Check for rate limit with Retry-After header
-  if (error && isRateLimitError(error)) {
-    const retryAfter = extractRetryAfter(error);
-    if (retryAfter !== null) {
-      // Respect the Retry-After header, but cap it at maxDelay
-      const cappedRetryAfter = Math.min(retryAfter, options.maxDelay);
-
-      logger.info('[Retry] Rate limit detected - respecting Retry-After header', {
-        retryAfterMs: retryAfter,
-        cappedMs: cappedRetryAfter,
-        attempt
-      });
-
-      return cappedRetryAfter;
-    }
-
-    // No Retry-After header but is rate limit - use aggressive backoff
-    const rateLimitDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt + 1);
-    const cappedDelay = Math.min(rateLimitDelay, options.maxDelay);
-
-    logger.info('[Retry] Rate limit detected - using aggressive backoff', {
-      delayMs: cappedDelay,
-      attempt
-    });
-
-    return cappedDelay;
-  }
-
-  // Standard exponential backoff
+function calculateBackoffDelay(attempt: number, options: Required<RetryOptions>): number {
   const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
   const cappedDelay = Math.min(exponentialDelay, options.maxDelay);
 
@@ -371,23 +246,18 @@ export async function withRetry<T>(
       throw error;
     }
 
-      // Calculate delay for next attempt (respects Retry-After for rate limits)
-      const delay = calculateBackoffDelay(attempt, config, error);
+      // Calculate delay for next attempt
+      const delay = calculateBackoffDelay(attempt, config);
 
-      // Log retry attempt with rate limit detection
-      const isRateLimit = isRateLimitError(error);
-      const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
-
+      // Log retry attempt
       logger.warn('Retrying after error', {
         attempt: attempt + 1,
         maxAttempts: config.maxAttempts,
         delay,
-        isRateLimit,
-        retryAfterMs: retryAfter,
         error: error instanceof Error ? error.message : String(error)
       });
 
-      // Invoke callback with additional context
+      // Invoke callback
       config.onRetry(attempt + 1, error, delay);
 
       // Wait before retrying
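After this change the retry delay is pure exponential backoff, delay = min(baseDelay * backoffMultiplier^attempt, maxDelay), with no Retry-After override. A small worked sketch (the numeric values are illustrative, not the library's defaults):

// baseDelay = 1000ms, backoffMultiplier = 2, maxDelay = 8000ms
const delays = [0, 1, 2, 3, 4].map(attempt =>
  Math.min(1000 * Math.pow(2, attempt), 8000)
);
// -> [1000, 2000, 4000, 8000, 8000]: doubles each attempt, then caps at maxDelay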
@@ -1,150 +0,0 @@
/**
 * Span Visualizer
 * Reconstructs span trees from logs for debugging distributed traces
 */

import type { Span } from '@/types/tracing';

export interface SpanTree {
  span: Span;
  children: SpanTree[];
  totalDuration: number;
  selfDuration: number;
}

/**
 * Build span tree from flat span logs
 */
export function buildSpanTree(spans: Span[]): SpanTree | null {
  const spanMap = new Map<string, Span>();
  const childrenMap = new Map<string, Span[]>();

  // Index spans
  for (const span of spans) {
    spanMap.set(span.spanId, span);

    if (span.parentSpanId) {
      if (!childrenMap.has(span.parentSpanId)) {
        childrenMap.set(span.parentSpanId, []);
      }
      childrenMap.get(span.parentSpanId)!.push(span);
    }
  }

  // Find root span
  const rootSpan = spans.find(s => !s.parentSpanId);
  if (!rootSpan) return null;

  // Build tree recursively
  function buildTree(span: Span): SpanTree {
    const children = childrenMap.get(span.spanId) || [];
    const childTrees = children.map(buildTree);

    const totalDuration = span.duration || 0;
    const childrenDuration = childTrees.reduce((sum, child) => sum + child.totalDuration, 0);
    const selfDuration = totalDuration - childrenDuration;

    return {
      span,
      children: childTrees,
      totalDuration,
      selfDuration,
    };
  }

  return buildTree(rootSpan);
}

/**
 * Format span tree as ASCII art
 */
export function formatSpanTree(tree: SpanTree, indent: number = 0): string {
  const prefix = ' '.repeat(indent);
  const status = tree.span.status === 'error' ? '❌' : tree.span.status === 'ok' ? '✅' : '⏳';
  const line = `${prefix}${status} ${tree.span.name} (${tree.span.duration}ms / self: ${tree.selfDuration}ms)`;

  const childLines = tree.children.map(child => formatSpanTree(child, indent + 1));

  return [line, ...childLines].join('\n');
}

/**
 * Calculate span statistics for a tree
 */
export function calculateSpanStats(tree: SpanTree): {
  totalSpans: number;
  errorCount: number;
  maxDepth: number;
  totalDuration: number;
  criticalPath: string[];
} {
  let totalSpans = 0;
  let errorCount = 0;
  let maxDepth = 0;

  function traverse(node: SpanTree, depth: number) {
    totalSpans++;
    if (node.span.status === 'error') errorCount++;
    maxDepth = Math.max(maxDepth, depth);

    node.children.forEach(child => traverse(child, depth + 1));
  }

  traverse(tree, 0);

  // Find critical path (longest duration path)
  function findCriticalPath(node: SpanTree): string[] {
    if (node.children.length === 0) {
      return [node.span.name];
    }

    const longestChild = node.children.reduce((longest, child) =>
      child.totalDuration > longest.totalDuration ? child : longest
    );

    return [node.span.name, ...findCriticalPath(longestChild)];
  }

  return {
    totalSpans,
    errorCount,
    maxDepth,
    totalDuration: tree.totalDuration,
    criticalPath: findCriticalPath(tree),
  };
}

/**
 * Extract all events from a span tree
 */
export function extractAllEvents(tree: SpanTree): Array<{
  spanName: string;
  eventName: string;
  timestamp: number;
  attributes?: Record<string, unknown>;
}> {
  const events: Array<{
    spanName: string;
    eventName: string;
    timestamp: number;
    attributes?: Record<string, unknown>;
  }> = [];

  function traverse(node: SpanTree) {
    node.span.events.forEach(event => {
      events.push({
        spanName: node.span.name,
        eventName: event.name,
        timestamp: event.timestamp,
        attributes: event.attributes,
      });
    });

    node.children.forEach(child => traverse(child));
  }

  traverse(tree);

  // Sort by timestamp
  return events.sort((a, b) => a.timestamp - b.timestamp);
}
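For context, the removed visualizer turned flat span logs into an indented tree. A minimal sketch of its intended use (the spans below are illustrative stand-ins; the real Span type lives in '@/types/tracing'):

// Two fake spans: a root request span and one child query span.
const spans = [
  { spanId: 'a', name: 'request', duration: 120, status: 'ok', events: [] },
  { spanId: 'b', parentSpanId: 'a', name: 'db.query', duration: 80, status: 'ok', events: [] },
] as any; // cast only because these are hand-rolled fixtures

const tree = buildSpanTree(spans);
if (tree) {
  console.log(formatSpanTree(tree));
  // prints, roughly:
  // ✅ request (120ms / self: 40ms)
  //  ✅ db.query (80ms / self: 80ms)
  const stats = calculateSpanStats(tree);
  console.log('critical path:', stats.criticalPath.join(' -> '));
}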
@@ -1,7 +1,6 @@
 import { supabase } from '@/lib/supabaseClient';
 import { handleError, handleNonCriticalError, getErrorMessage } from './errorHandler';
 import { extractCloudflareImageId } from './cloudflareImageUtils';
-import { invokeWithTracking } from './edgeFunctionTracking';
 
 // Core submission item interface with dependencies
 // NOTE: item_data and original_data use `unknown` because they contain dynamic structures
@@ -1368,24 +1367,32 @@ export async function rejectSubmissionItems(
     }
   }
 
+  // Update all items to rejected status
+  const updates = Array.from(itemsToReject).map(async (itemId) => {
+    const { error } = await supabase
+      .from('submission_items')
+      .update({
+        status: 'rejected' as const,
+        rejection_reason: reason,
+        updated_at: new Date().toISOString(),
+      })
+      .eq('id', itemId);
+
+    if (error) {
+      handleNonCriticalError(error, {
+        action: 'Reject Submission Item',
+        metadata: { itemId }
+      });
+      throw error;
+    }
+  });
+
+  await Promise.all(updates);
+
+  // Update parent submission status
   const submissionId = items[0]?.submission_id;
-  if (!submissionId) {
-    throw new Error('Cannot reject items: missing submission ID');
-  }
-
-  // Use atomic edge function for rejection
-  const { data, error } = await invokeWithTracking(
-    'process-selective-rejection',
-    {
-      itemIds: Array.from(itemsToReject),
-      submissionId,
-      rejectionReason: reason,
-    },
-    userId
-  );
-
-  if (error) {
-    throw new Error(`Failed to reject items: ${error.message}`);
+  if (submissionId) {
+    await updateSubmissionStatusAfterRejection(submissionId);
   }
 }
 
@@ -1509,21 +1516,27 @@ export async function editSubmissionItem(
 
   // Update relational table with new data based on item type
   if (currentItem.item_type === 'park') {
-    // First, get the park_submission_id
-    const { data: parkSub, error: parkSubError } = await supabase
-      .from('park_submissions')
-      .select('id')
-      .eq('submission_id', currentItem.submission_id)
-      .single();
-
-    if (parkSubError) throw parkSubError;
-
-    // Prepare update data (remove location from main update)
+    // For parks, store location in temp_location_data if provided
     const updateData: any = { ...newData };
-    const locationData = updateData.location;
-    delete updateData.location; // Remove nested object before updating park_submissions
 
-    // Update park_submissions table (without temp_location_data!)
+    // If location object is provided, store it in temp_location_data
+    if (newData.location) {
+      updateData.temp_location_data = {
+        name: newData.location.name,
+        street_address: newData.location.street_address || null,
+        city: newData.location.city || null,
+        state_province: newData.location.state_province || null,
+        country: newData.location.country,
+        latitude: newData.location.latitude,
+        longitude: newData.location.longitude,
+        timezone: newData.location.timezone || null,
+        postal_code: newData.location.postal_code || null,
+        display_name: newData.location.display_name
+      };
+      delete updateData.location; // Remove the nested object
+    }
+
+    // Update park_submissions table
     const { error: parkUpdateError } = await supabase
       .from('park_submissions')
       .update(updateData)
@@ -1531,29 +1544,6 @@ export async function editSubmissionItem(
 
     if (parkUpdateError) throw parkUpdateError;
 
-    // Handle location separately in relational table
-    if (locationData) {
-      const { error: locationError } = await supabase
-        .from('park_submission_locations' as any)
-        .upsert({
-          park_submission_id: parkSub.id,
-          name: locationData.name,
-          street_address: locationData.street_address || null,
-          city: locationData.city || null,
-          state_province: locationData.state_province || null,
-          country: locationData.country,
-          postal_code: locationData.postal_code || null,
-          latitude: locationData.latitude,
-          longitude: locationData.longitude,
-          timezone: locationData.timezone || null,
-          display_name: locationData.display_name || null
-        }, {
-          onConflict: 'park_submission_id'
-        });
-
-      if (locationError) throw locationError;
-    }
-
   } else if (currentItem.item_type === 'ride') {
     const { error: rideUpdateError } = await supabase
       .from('ride_submissions')
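The park branch above now folds a nested location object into temp_location_data instead of upserting a park_submission_locations row. A hypothetical newData payload showing the shape that path expects (field values are illustrative; the full editSubmissionItem signature is not visible in this diff):

const newData = {
  name: 'Updated Park Name',
  location: {
    name: 'Main Entrance',
    country: 'US',
    latitude: 40.0,
    longitude: -75.0,
    display_name: 'Main Entrance, US',
  },
};
// With this input, updateData.temp_location_data is populated and the nested
// location key is deleted before the park_submissions update runs.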
@@ -14,7 +14,7 @@ import { useAdminSettings } from '@/hooks/useAdminSettings';
 import { NovuMigrationUtility } from '@/components/admin/NovuMigrationUtility';
 import { TestDataGenerator } from '@/components/admin/TestDataGenerator';
 import { IntegrationTestRunner } from '@/components/admin/IntegrationTestRunner';
-import { Loader2, Save, Clock, Users, Bell, Shield, Settings, Trash2, Plug, AlertTriangle, Lock, TestTube, RefreshCw, Info, AlertCircle, Database } from 'lucide-react';
+import { Loader2, Save, Clock, Users, Bell, Shield, Settings, Trash2, Plug, AlertTriangle, Lock, TestTube, RefreshCw, Info, AlertCircle } from 'lucide-react';
 import { useDocumentTitle } from '@/hooks/useDocumentTitle';

 export default function AdminSettings() {
@@ -772,9 +772,13 @@ export default function AdminSettings() {
             <span className="hidden sm:inline">Integrations</span>
           </TabsTrigger>
           <TabsTrigger value="testing" className="flex items-center gap-2">
-            <TestTube className="w-4 h-4" />
+            <Loader2 className="w-4 h-4" />
             <span className="hidden sm:inline">Testing</span>
           </TabsTrigger>
+          <TabsTrigger value="integration-tests" className="flex items-center gap-2">
+            <TestTube className="w-4 h-4" />
+            <span className="hidden sm:inline">Integration Tests</span>
+          </TabsTrigger>
         </TabsList>

         <TabsContent value="moderation">
@@ -967,31 +971,11 @@ export default function AdminSettings() {
         </TabsContent>

         <TabsContent value="testing">
-          <div className="space-y-6">
-            {/* Test Data Generator Section */}
-            <div>
-              <h2 className="text-2xl font-bold mb-4 flex items-center gap-2">
-                <Database className="w-6 h-6" />
-                Test Data Generator
-              </h2>
-              <p className="text-muted-foreground mb-4">
-                Generate realistic test data for parks, rides, companies, and submissions.
-              </p>
-              <TestDataGenerator />
-            </div>
+          <TestDataGenerator />
+        </TabsContent>

-            {/* Integration Test Runner Section */}
-            <div>
-              <h2 className="text-2xl font-bold mb-4 flex items-center gap-2">
-                <TestTube className="w-6 h-6" />
-                Integration Test Runner
-              </h2>
-              <p className="text-muted-foreground mb-4">
-                Run automated integration tests against your approval pipeline, moderation system, and data integrity checks.
-              </p>
-              <IntegrationTestRunner />
-            </div>
-          </div>
+        <TabsContent value="integration-tests">
+          <IntegrationTestRunner />
         </TabsContent>
       </Tabs>
     </div>
@@ -13,10 +13,6 @@ import { ErrorDetailsModal } from '@/components/admin/ErrorDetailsModal';
 import { ApprovalFailureModal } from '@/components/admin/ApprovalFailureModal';
 import { ErrorAnalytics } from '@/components/admin/ErrorAnalytics';
 import { PipelineHealthAlerts } from '@/components/admin/PipelineHealthAlerts';
-import { EdgeFunctionLogs } from '@/components/admin/EdgeFunctionLogs';
-import { DatabaseLogs } from '@/components/admin/DatabaseLogs';
-import { UnifiedLogSearch } from '@/components/admin/UnifiedLogSearch';
-import TraceViewer from './TraceViewer';
 import { format } from 'date-fns';

 // Helper to calculate date threshold for filtering
@@ -63,14 +59,6 @@ export default function ErrorMonitoring() {
   const [searchTerm, setSearchTerm] = useState('');
   const [errorTypeFilter, setErrorTypeFilter] = useState<string>('all');
   const [dateRange, setDateRange] = useState<'1h' | '24h' | '7d' | '30d'>('24h');
-  const [activeTab, setActiveTab] = useState('errors');
-
-  const handleNavigate = (tab: string, filters: Record<string, string>) => {
-    setActiveTab(tab);
-    if (filters.requestId) {
-      setSearchTerm(filters.requestId);
-    }
-  };

   // Fetch recent errors
   const { data: errors, isLoading, refetch, isFetching } = useQuery({
@@ -182,8 +170,8 @@ export default function ErrorMonitoring() {
     <div className="space-y-6">
       <div className="flex justify-between items-center">
         <div>
-          <h1 className="text-3xl font-bold tracking-tight">Monitoring & Logs</h1>
-          <p className="text-muted-foreground">Unified monitoring hub for errors, logs, and distributed traces</p>
+          <h1 className="text-3xl font-bold tracking-tight">Error Monitoring</h1>
+          <p className="text-muted-foreground">Track and analyze application errors</p>
         </div>
         <RefreshButton
           onRefresh={async () => { await refetch(); }}
@@ -193,23 +181,17 @@ export default function ErrorMonitoring() {
         />
       </div>

-      {/* Unified Log Search */}
-      <UnifiedLogSearch onNavigate={handleNavigate} />
-
       {/* Pipeline Health Alerts */}
       <PipelineHealthAlerts />

       {/* Analytics Section */}
       <ErrorAnalytics errorSummary={errorSummary} approvalMetrics={approvalMetrics} />

-      {/* Tabs for All Log Types */}
-      <Tabs value={activeTab} onValueChange={setActiveTab} className="w-full">
-        <TabsList className="grid w-full grid-cols-5">
+      {/* Tabs for Errors and Approval Failures */}
+      <Tabs defaultValue="errors" className="w-full">
+        <TabsList>
           <TabsTrigger value="errors">Application Errors</TabsTrigger>
           <TabsTrigger value="approvals">Approval Failures</TabsTrigger>
-          <TabsTrigger value="edge-functions">Edge Functions</TabsTrigger>
-          <TabsTrigger value="database">Database Logs</TabsTrigger>
-          <TabsTrigger value="traces">Distributed Traces</TabsTrigger>
         </TabsList>

         <TabsContent value="errors" className="space-y-4">
@@ -368,18 +350,6 @@ export default function ErrorMonitoring() {
           </CardContent>
         </Card>
       </TabsContent>

-      <TabsContent value="edge-functions">
-        <EdgeFunctionLogs />
-      </TabsContent>
-
-      <TabsContent value="database">
-        <DatabaseLogs />
-      </TabsContent>
-
-      <TabsContent value="traces">
-        <TraceViewer />
-      </TabsContent>
     </Tabs>
   </div>
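Note what the `<Tabs value={activeTab} onValueChange={setActiveTab}>` → `<Tabs defaultValue="errors">` change gives up: controlled mode is what allowed an outside component (here `UnifiedLogSearch` via `handleNavigate`) to switch tabs programmatically. A minimal sketch of that controlled pattern, using the shadcn/Radix Tabs API the file already imports; component and tab names here are illustrative:

// Controlled Tabs: any handler that calls setActiveTab('traces') jumps
// straight to that tab, which defaultValue (uncontrolled) cannot do.
import { useState } from 'react';
import { Tabs, TabsList, TabsTrigger, TabsContent } from '@/components/ui/tabs';

export function ControlledTabsExample() {
  const [activeTab, setActiveTab] = useState('errors');
  return (
    <Tabs value={activeTab} onValueChange={setActiveTab}>
      <TabsList>
        <TabsTrigger value="errors">Errors</TabsTrigger>
        <TabsTrigger value="traces">Traces</TabsTrigger>
      </TabsList>
      <TabsContent value="errors">Errors table goes here</TabsContent>
      <TabsContent value="traces">Trace viewer goes here</TabsContent>
    </Tabs>
  );
}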
@@ -1,255 +0,0 @@
-import { useState } from 'react';
-import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
-import { Input } from '@/components/ui/input';
-import { Button } from '@/components/ui/button';
-import { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '@/components/ui/accordion';
-import { Alert, AlertDescription } from '@/components/ui/alert';
-import { Badge } from '@/components/ui/badge';
-import { buildSpanTree, formatSpanTree, calculateSpanStats, extractAllEvents } from '@/lib/spanVisualizer';
-import type { Span } from '@/types/tracing';
-import type { SpanTree } from '@/lib/spanVisualizer';
-
-/**
- * Admin Trace Viewer
- *
- * Visual tool for debugging distributed traces across the approval pipeline.
- * Reconstructs and displays span hierarchies from edge function logs.
- */
-export default function TraceViewer() {
-  const [traceId, setTraceId] = useState('');
-  const [spans, setSpans] = useState<Span[]>([]);
-  const [tree, setTree] = useState<SpanTree | null>(null);
-  const [isLoading, setIsLoading] = useState(false);
-  const [error, setError] = useState<string | null>(null);
-
-  const loadTrace = async () => {
-    if (!traceId.trim()) {
-      setError('Please enter a trace ID');
-      return;
-    }
-
-    setIsLoading(true);
-    setError(null);
-
-    try {
-      // TODO: Replace with actual edge function log query
-      // This would need an edge function that queries Supabase logs
-      // For now, using mock data structure
-      const mockSpans: Span[] = [
-        {
-          spanId: 'root-1',
-          traceId,
-          name: 'process-selective-approval',
-          kind: 'SERVER',
-          startTime: Date.now() - 5000,
-          endTime: Date.now(),
-          duration: 5000,
-          attributes: {
-            'http.method': 'POST',
-            'user.id': 'user-123',
-            'submission.id': 'sub-456',
-          },
-          events: [
-            { timestamp: Date.now() - 4900, name: 'authentication_start' },
-            { timestamp: Date.now() - 4800, name: 'authentication_success' },
-            { timestamp: Date.now() - 4700, name: 'validation_complete' },
-          ],
-          status: 'ok',
-        },
-        {
-          spanId: 'child-1',
-          traceId,
-          parentSpanId: 'root-1',
-          name: 'process_approval_transaction',
-          kind: 'DATABASE',
-          startTime: Date.now() - 4500,
-          endTime: Date.now() - 500,
-          duration: 4000,
-          attributes: {
-            'db.operation': 'rpc',
-            'submission.id': 'sub-456',
-          },
-          events: [
-            { timestamp: Date.now() - 4400, name: 'rpc_call_start' },
-            { timestamp: Date.now() - 600, name: 'rpc_call_success' },
-          ],
-          status: 'ok',
-        },
-      ];
-
-      setSpans(mockSpans);
-      const builtTree = buildSpanTree(mockSpans);
-      setTree(builtTree);
-
-      if (!builtTree) {
-        setError('No root span found for this trace ID');
-      }
-    } catch (err) {
-      setError(err instanceof Error ? err.message : 'Failed to load trace');
-    } finally {
-      setIsLoading(false);
-    }
-  };
-
-  const stats = tree ? calculateSpanStats(tree) : null;
-  const events = tree ? extractAllEvents(tree) : [];
-
-  return (
-    <div className="container mx-auto p-6 space-y-6">
-      <div>
-        <h1 className="text-3xl font-bold mb-2">Distributed Trace Viewer</h1>
-        <p className="text-muted-foreground">
-          Debug moderation pipeline execution by visualizing span hierarchies
-        </p>
-      </div>
-
-      <Card>
-        <CardHeader>
-          <CardTitle>Load Trace</CardTitle>
-          <CardDescription>
-            Enter a trace ID from edge function logs to visualize the execution tree
-          </CardDescription>
-        </CardHeader>
-        <CardContent>
-          <div className="flex gap-2">
-            <Input
-              value={traceId}
-              onChange={(e) => setTraceId(e.target.value)}
-              placeholder="Enter trace ID (e.g., abc-123-def-456)"
-              className="flex-1"
-            />
-            <Button onClick={loadTrace} disabled={isLoading}>
-              {isLoading ? 'Loading...' : 'Load Trace'}
-            </Button>
-          </div>
-
-          {error && (
-            <Alert variant="destructive" className="mt-4">
-              <AlertDescription>{error}</AlertDescription>
-            </Alert>
-          )}
-        </CardContent>
-      </Card>
-
-      {tree && stats && (
-        <>
-          <Card>
-            <CardHeader>
-              <CardTitle>Trace Statistics</CardTitle>
-            </CardHeader>
-            <CardContent>
-              <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
-                <div>
-                  <div className="text-sm text-muted-foreground">Total Duration</div>
-                  <div className="text-2xl font-bold">{stats.totalDuration}ms</div>
-                </div>
-                <div>
-                  <div className="text-sm text-muted-foreground">Total Spans</div>
-                  <div className="text-2xl font-bold">{stats.totalSpans}</div>
-                </div>
-                <div>
-                  <div className="text-sm text-muted-foreground">Max Depth</div>
-                  <div className="text-2xl font-bold">{stats.maxDepth}</div>
-                </div>
-                <div>
-                  <div className="text-sm text-muted-foreground">Errors</div>
-                  <div className="text-2xl font-bold text-destructive">{stats.errorCount}</div>
-                </div>
-              </div>
-
-              <div className="mt-4">
-                <div className="text-sm text-muted-foreground mb-2">Critical Path (Longest Duration):</div>
-                <div className="flex gap-2 flex-wrap">
-                  {stats.criticalPath.map((spanName, i) => (
-                    <Badge key={i} variant="secondary">
-                      {spanName}
-                    </Badge>
-                  ))}
-                </div>
-              </div>
-            </CardContent>
-          </Card>
-
-          <Card>
-            <CardHeader>
-              <CardTitle>Span Tree</CardTitle>
-              <CardDescription>
-                Hierarchical view of span execution with timing breakdown
-              </CardDescription>
-            </CardHeader>
-            <CardContent>
-              <pre className="bg-muted p-4 rounded-lg overflow-x-auto text-sm">
-                {formatSpanTree(tree)}
-              </pre>
-            </CardContent>
-          </Card>
-
-          <Card>
-            <CardHeader>
-              <CardTitle>Events Timeline</CardTitle>
-              <CardDescription>
-                Chronological list of all events across all spans
-              </CardDescription>
-            </CardHeader>
-            <CardContent>
-              <div className="space-y-2">
-                {events.map((event, i) => (
-                  <div key={i} className="flex gap-2 text-sm border-l-2 border-primary pl-4 py-1">
-                    <Badge variant="outline">{event.spanName}</Badge>
-                    <span className="text-muted-foreground">→</span>
-                    <span className="font-medium">{event.eventName}</span>
-                    <span className="text-muted-foreground ml-auto">
-                      {new Date(event.timestamp).toISOString()}
-                    </span>
-                  </div>
-                ))}
-              </div>
-            </CardContent>
-          </Card>
-
-          <Card>
-            <CardHeader>
-              <CardTitle>Span Details</CardTitle>
-              <CardDescription>
-                Detailed breakdown of each span with attributes and events
-              </CardDescription>
-            </CardHeader>
-            <CardContent>
-              <Accordion type="single" collapsible className="w-full">
-                {spans.map((span) => (
-                  <AccordionItem key={span.spanId} value={span.spanId}>
-                    <AccordionTrigger>
-                      <div className="flex items-center gap-2">
-                        <Badge variant={span.status === 'error' ? 'destructive' : 'default'}>
-                          {span.kind}
-                        </Badge>
-                        <span>{span.name}</span>
-                        <span className="text-muted-foreground ml-2">
-                          ({span.duration}ms)
-                        </span>
-                      </div>
-                    </AccordionTrigger>
-                    <AccordionContent>
-                      <pre className="bg-muted p-4 rounded-lg overflow-x-auto text-xs">
-                        {JSON.stringify(span, null, 2)}
-                      </pre>
-                    </AccordionContent>
-                  </AccordionItem>
-                ))}
-              </Accordion>
-            </CardContent>
-          </Card>
-        </>
-      )}
-
-      {!tree && !isLoading && !error && (
-        <Alert>
-          <AlertDescription>
-            Enter a trace ID to visualize the distributed trace. You can find trace IDs in edge function logs
-            under the "Span completed" messages.
-          </AlertDescription>
-        </Alert>
-      )}
-    </div>
-  );
-}
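The deleted component leans on `buildSpanTree` from `@/lib/spanVisualizer`, which is not part of this diff. A plausible reading of what it does, sketched under that assumption (the `SpanTree` shape and the root-selection rule are guesses; only the `Span` fields come from the types file deleted below):

// Hypothetical sketch: index spans by spanId, attach each span to its
// parent, and treat a span with no known parent as the root.
import type { Span } from '@/types/tracing';

export interface SpanTree {
  span: Span;
  children: SpanTree[];
}

export function buildSpanTree(spans: Span[]): SpanTree | null {
  const nodes = new Map<string, SpanTree>(
    spans.map((s) => [s.spanId, { span: s, children: [] }])
  );
  let root: SpanTree | null = null;
  for (const node of nodes.values()) {
    const parentId = node.span.parentSpanId;
    if (parentId && nodes.has(parentId)) {
      nodes.get(parentId)!.children.push(node);
    } else {
      root = node; // no resolvable parent: candidate root
    }
  }
  return root; // null when the list is empty, matching the component's error path
}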
@@ -1,35 +0,0 @@
-/**
- * Distributed Tracing Types
- * Mirrors the types defined in edge function logger
- */
-
-export interface Span {
-  spanId: string;
-  traceId: string;
-  parentSpanId?: string;
-  name: string;
-  kind: 'SERVER' | 'CLIENT' | 'INTERNAL' | 'DATABASE';
-  startTime: number;
-  endTime?: number;
-  duration?: number;
-  attributes: Record<string, unknown>;
-  events: SpanEvent[];
-  status: 'ok' | 'error' | 'unset';
-  error?: {
-    type: string;
-    message: string;
-    stack?: string;
-  };
-}
-
-export interface SpanEvent {
-  timestamp: number;
-  name: string;
-  attributes?: Record<string, unknown>;
-}
-
-export interface SpanContext {
-  traceId: string;
-  spanId: string;
-  traceFlags?: number;
-}
@@ -47,9 +47,6 @@ verify_jwt = true
 [functions.process-selective-approval]
 verify_jwt = false

-[functions.process-selective-rejection]
-verify_jwt = false
-
 [functions.send-escalation-notification]
 verify_jwt = true

@@ -1,94 +0,0 @@
-/**
- * Error Formatting Utility for Edge Functions
- *
- * Provides robust error message extraction from various error types:
- * - Standard Error objects
- * - Supabase PostgresError objects (plain objects with message/details/code/hint)
- * - Raw objects and primitives
- *
- * Eliminates "[object Object]" errors by properly extracting error details.
- */
-
-/**
- * Format error objects for logging
- * Handles Error objects, Supabase errors (plain objects), and primitives
- *
- * @param error - Any error value
- * @returns Formatted, human-readable error message string
- */
-export function formatEdgeError(error: unknown): string {
-  // Standard Error objects
-  if (error instanceof Error) {
-    return error.message;
-  }
-
-  // Object-like errors (Supabase PostgresError, etc.)
-  if (typeof error === 'object' && error !== null) {
-    const err = error as any;
-
-    // Try common error message properties
-    if (err.message && typeof err.message === 'string') {
-      // Include additional Supabase error details if present
-      const parts: string[] = [err.message];
-
-      if (err.details && typeof err.details === 'string') {
-        parts.push(`Details: ${err.details}`);
-      }
-
-      if (err.hint && typeof err.hint === 'string') {
-        parts.push(`Hint: ${err.hint}`);
-      }
-
-      if (err.code && typeof err.code === 'string') {
-        parts.push(`Code: ${err.code}`);
-      }
-
-      return parts.join(' | ');
-    }
-
-    // Some errors nest the actual error in an 'error' property
-    if (err.error) {
-      return formatEdgeError(err.error);
-    }
-
-    // Some APIs use 'msg' instead of 'message'
-    if (err.msg && typeof err.msg === 'string') {
-      return err.msg;
-    }
-
-    // Last resort: stringify the entire object
-    try {
-      const stringified = JSON.stringify(error, null, 2);
-      return stringified.length > 500
-        ? stringified.substring(0, 500) + '... (truncated)'
-        : stringified;
-    } catch {
-      // JSON.stringify can fail on circular references
-      return 'Unknown error (could not stringify)';
-    }
-  }
-
-  // Primitive values (strings, numbers, etc.)
-  return String(error);
-}
-
-/**
- * Convert any error to a proper Error instance
- * Use this before throwing to ensure proper stack traces
- *
- * @param error - Any error value
- * @returns Error instance with formatted message
- */
-export function toError(error: unknown): Error {
-  if (error instanceof Error) {
-    return error;
-  }
-
-  const message = formatEdgeError(error);
-  const newError = new Error(message);
-
-  // Preserve original error as property for debugging
-  (newError as any).originalError = error;
-
-  return newError;
-}
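To make the deleted behavior concrete, here is what `formatEdgeError` produces for each branch, read directly from the code above (the sample error values are illustrative):

import { formatEdgeError } from '../_shared/errorFormatter.ts';

// Plain object shaped like a Supabase PostgresError:
const pgError = {
  message: 'duplicate key value violates unique constraint',
  details: 'Key (idempotency_key)=(abc) already exists.',
  hint: 'Retry with a new key.',
  code: '23505',
};

formatEdgeError(pgError);
// => 'duplicate key value violates unique constraint | Details: Key (idempotency_key)=(abc) already exists. | Hint: Retry with a new key. | Code: 23505'

formatEdgeError(new Error('boom')); // => 'boom'
formatEdgeError('raw string');      // => 'raw string'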
@@ -3,8 +3,6 @@
  * Prevents sensitive data exposure and provides consistent log format
  */

-import { formatEdgeError } from './errorFormatter.ts';
-
 type LogLevel = 'info' | 'warn' | 'error' | 'debug';

 interface LogContext {
@@ -16,39 +14,7 @@ interface LogContext {
   [key: string]: unknown;
 }

-// Span types for distributed tracing
-export interface Span {
-  spanId: string;
-  traceId: string;
-  parentSpanId?: string;
-  name: string;
-  kind: 'SERVER' | 'CLIENT' | 'INTERNAL' | 'DATABASE';
-  startTime: number;
-  endTime?: number;
-  duration?: number;
-  attributes: Record<string, unknown>;
-  events: SpanEvent[];
-  status: 'ok' | 'error' | 'unset';
-  error?: {
-    type: string;
-    message: string;
-    stack?: string;
-  };
-}
-
-export interface SpanEvent {
-  timestamp: number;
-  name: string;
-  attributes?: Record<string, unknown>;
-}
-
-export interface SpanContext {
-  traceId: string;
-  spanId: string;
-  traceFlags?: number;
-}
-
-// Request tracking utilities (legacy - use spans instead)
+// Request tracking utilities
 export interface RequestTracking {
   requestId: string;
   start: number;
@@ -67,135 +33,6 @@ export function endRequest(tracking: RequestTracking): number {
   return Date.now() - tracking.start;
 }

-// ============================================================================
-// Span Lifecycle Functions
-// ============================================================================
-
-/**
- * Start a new span
- */
-export function startSpan(
-  name: string,
-  kind: Span['kind'],
-  parentSpan?: SpanContext,
-  attributes?: Record<string, unknown>
-): Span {
-  const traceId = parentSpan?.traceId || crypto.randomUUID();
-
-  return {
-    spanId: crypto.randomUUID(),
-    traceId,
-    parentSpanId: parentSpan?.spanId,
-    name,
-    kind,
-    startTime: Date.now(),
-    attributes: attributes || {},
-    events: [],
-    status: 'unset',
-  };
-}
-
-/**
- * End a span with final status
- */
-export function endSpan(span: Span, status?: 'ok' | 'error', error?: unknown): Span {
-  span.endTime = Date.now();
-  span.duration = span.endTime - span.startTime;
-  span.status = status || 'ok';
-
-  if (error) {
-    const err = error instanceof Error ? error : new Error(formatEdgeError(error));
-    span.error = {
-      type: err.name,
-      message: err.message,
-      stack: err.stack,
-    };
-  }
-
-  return span;
-}
-
-/**
- * Add event to span
- */
-export function addSpanEvent(
-  span: Span,
-  name: string,
-  attributes?: Record<string, unknown>
-): void {
-  span.events.push({
-    timestamp: Date.now(),
-    name,
-    attributes,
-  });
-}
-
-/**
- * Set span attributes
- */
-export function setSpanAttributes(
-  span: Span,
-  attributes: Record<string, unknown>
-): void {
-  span.attributes = { ...span.attributes, ...attributes };
-}
-
-/**
- * Extract span context for propagation
- */
-export function getSpanContext(span: Span): SpanContext {
-  return {
-    traceId: span.traceId,
-    spanId: span.spanId,
-  };
-}
-
-/**
- * Extract span context from HTTP headers (W3C Trace Context)
- */
-export function extractSpanContextFromHeaders(headers: Headers): SpanContext | undefined {
-  const traceparent = headers.get('traceparent');
-  if (!traceparent) return undefined;
-
-  // Parse W3C traceparent: version-traceId-spanId-flags
-  const parts = traceparent.split('-');
-  if (parts.length !== 4) return undefined;
-
-  return {
-    traceId: parts[1],
-    spanId: parts[2],
-    traceFlags: parseInt(parts[3], 16),
-  };
-}
-
-/**
- * Inject span context into headers
- */
-export function injectSpanContextIntoHeaders(spanContext: SpanContext): Record<string, string> {
-  return {
-    'traceparent': `00-${spanContext.traceId}-${spanContext.spanId}-01`,
-  };
-}
-
-/**
- * Log completed span
- */
-export function logSpan(span: Span): void {
-  const sanitizedAttributes = sanitizeContext(span.attributes);
-  const sanitizedEvents = span.events.map(e => ({
-    ...e,
-    attributes: e.attributes ? sanitizeContext(e.attributes) : undefined,
-  }));
-
-  edgeLogger.info('Span completed', {
-    span: {
-      ...span,
-      attributes: sanitizedAttributes,
-      events: sanitizedEvents,
-    },
-  });
-}
-
 // Fields that should never be logged
 const SENSITIVE_FIELDS = [
   'password',
@@ -215,7 +52,7 @@ const SENSITIVE_FIELDS = [
 /**
  * Sanitize context to remove sensitive data
  */
-export function sanitizeContext(context: LogContext): LogContext {
+function sanitizeContext(context: LogContext): LogContext {
   const sanitized: LogContext = {};

   for (const [key, value] of Object.entries(context)) {
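A round-trip of the W3C trace-context helpers deleted above, with behavior read from the code itself. One caveat worth noting: `startSpan` defaults to `crypto.randomUUID()`, whose output contains dashes, so a traceparent built from those IDs would fail the `parts.length !== 4` check on extraction; the IDs below are dash-free hex, as the W3C format specifies:

// inject writes '00-<traceId>-<spanId>-01'; extract splits on '-' and
// expects exactly four parts (version, traceId, spanId, flags).
const ctx = {
  traceId: '0af7651916cd43dd8448eb211c80319c',
  spanId: 'b7ad6b7169203331',
};

const headers = new Headers(injectSpanContextIntoHeaders(ctx));
headers.get('traceparent');
// => '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01'

extractSpanContextFromHeaders(headers);
// => { traceId: '0af7651916cd43dd8448eb211c80319c', spanId: 'b7ad6b7169203331', traceFlags: 1 }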
@@ -1,6 +1,5 @@
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -349,7 +348,7 @@ Deno.serve(async (req) => {
       edgeLogger.warn('Error deleting avatar from Cloudflare', {
         requestId: tracking.requestId,
         targetUserId,
-        error: formatEdgeError(error),
+        error: error instanceof Error ? error.message : String(error),
         action: 'admin_delete_user'
       });
     }
@@ -418,7 +417,7 @@ Deno.serve(async (req) => {
       edgeLogger.warn('Error removing Novu subscriber', {
         requestId: tracking.requestId,
         targetUserId,
-        error: formatEdgeError(error),
+        error: error instanceof Error ? error.message : String(error),
         action: 'admin_delete_user'
       });
     }
@@ -522,7 +521,7 @@ Deno.serve(async (req) => {
       edgeLogger.warn('Error sending deletion notification email', {
         requestId: tracking.requestId,
         targetUserId,
-        error: formatEdgeError(error),
+        error: error instanceof Error ? error.message : String(error),
         action: 'admin_delete_user'
       });
     }
@@ -547,7 +546,7 @@ Deno.serve(async (req) => {
     edgeLogger.error('Unexpected error in admin delete user', {
       requestId: tracking.requestId,
       duration,
-      error: formatEdgeError(error),
+      error: error instanceof Error ? error.message : String(error),
       action: 'admin_delete_user'
     });

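The same one-line substitution repeats across the edge functions that follow. Its practical effect, shown with an illustrative Supabase-style error object: `String(error)` on a plain object loses all detail, which is exactly what `formatEdgeError` existed to avoid.

const pgError = { message: 'permission denied for table profiles', code: '42501' };

// Pattern this branch reverts to:
pgError instanceof Error ? pgError.message : String(pgError);
// => '[object Object]'

// Removed helper (for comparison):
// formatEdgeError(pgError) => 'permission denied for table profiles | Code: 42501'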
@@ -1,7 +1,6 @@
 import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
 import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -138,7 +137,7 @@ serve(async (req) => {
     );
   } catch (error) {
     const duration = endRequest(tracking);
-    edgeLogger.error('Error cancelling deletion', { action: 'cancel_deletion_error', error: formatEdgeError(error), requestId: tracking.requestId, duration });
+    edgeLogger.error('Error cancelling deletion', { action: 'cancel_deletion_error', error: error instanceof Error ? error.message : String(error), requestId: tracking.requestId, duration });
     return new Response(
       JSON.stringify({ error: error.message, requestId: tracking.requestId }),
       {
@@ -1,6 +1,5 @@
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -150,7 +149,7 @@ Deno.serve(async (req) => {
       action: 'cancel_email_change',
       requestId: tracking.requestId,
       duration,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });
     return new Response(
       JSON.stringify({
@@ -1,6 +1,5 @@
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
 import { edgeLogger } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -184,7 +183,7 @@ Deno.serve(async (req) => {
       }
     );
   } catch (error) {
-    edgeLogger.error('Cleanup job failed', { error: formatEdgeError(error) });
+    edgeLogger.error('Cleanup job failed', { error: error instanceof Error ? error.message : String(error) });

     return new Response(
       JSON.stringify({
@@ -1,7 +1,6 @@
 import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
 import { Novu } from "npm:@novu/api@1.6.0";
 import { edgeLogger } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -219,7 +218,7 @@ serve(async (req) => {
     } catch (topicError: unknown) {
       // Non-blocking - log error but don't fail the request
       edgeLogger.error('Failed to add subscriber to users topic', {
-        error: formatEdgeError(topicError),
+        error: topicError instanceof Error ? topicError.message : String(topicError),
         subscriberId,
         requestId: tracking.requestId
       });
@@ -239,7 +238,7 @@ serve(async (req) => {
   } catch (error: unknown) {
     const duration = endRequest(tracking);
     edgeLogger.error('Error creating Novu subscriber', {
-      error: formatEdgeError(error),
+      error: error instanceof Error ? error.message : String(error),
       requestId: tracking.requestId,
       duration
     });
@@ -1,6 +1,5 @@
 import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
 import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
-import { formatEdgeError } from "../_shared/errorFormatter.ts";

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -46,7 +45,7 @@ function cleanupExpiredEntries() {
     // CRITICAL: Increment failure counter and log detailed error information
     cleanupFailureCount++;

-    const errorMessage = formatEdgeError(error);
+    const errorMessage = error instanceof Error ? error.message : String(error);

     edgeLogger.error('Cleanup error', {
       attempt: cleanupFailureCount,
@@ -285,7 +284,7 @@ serve(async (req) => {

   } catch (error: unknown) {
     // Enhanced error logging for better visibility and debugging
-    const errorMessage = formatEdgeError(error);
+    const errorMessage = error instanceof Error ? error.message : String(error);

     edgeLogger.error('Location detection error', {
       error: errorMessage,
@@ -2,7 +2,6 @@ import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { sanitizeError } from '../_shared/errorSanitizer.ts';
 import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -349,7 +348,7 @@ serve(async (req) => {
       action: 'export_error',
       requestId: tracking.requestId,
       duration,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });
     const sanitized = sanitizeError(error, 'export-user-data');
     return new Response(
@@ -1,6 +1,5 @@
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',
@@ -187,7 +186,7 @@ Deno.serve(async (req) => {
       action: 'mfa_unenroll_error',
       requestId: tracking.requestId,
       duration,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });
     return new Response(
       JSON.stringify({
@@ -2,18 +2,6 @@ import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { corsHeaders } from './cors.ts';
 import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
-import {
-  edgeLogger,
-  startSpan,
-  endSpan,
-  addSpanEvent,
-  setSpanAttributes,
-  getSpanContext,
-  logSpan,
-  extractSpanContextFromHeaders,
-  type Span
-} from '../_shared/logger.ts';
-import { formatEdgeError, toError } from '../_shared/errorFormatter.ts';

 const SUPABASE_URL = Deno.env.get('SUPABASE_URL') || 'https://api.thrillwiki.com';
 const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY')!;
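The hunks that follow strip this handler's span instrumentation. Distilled from the deleted code below, the lifecycle pattern it followed was: one root span per invocation, events recorded at each step, and `endSpan` plus `logSpan` on every exit path. A compact sketch of that pattern (the "work" body is elided):

const rootSpan = startSpan('process-selective-approval', 'SERVER', undefined, {
  'http.method': 'POST',
});
try {
  addSpanEvent(rootSpan, 'authentication_start');
  // ... authenticate, validate, call the RPC ...
  endSpan(rootSpan, 'ok');
} catch (err) {
  endSpan(rootSpan, 'error', err); // attaches type/message/stack to the span
  throw err;
} finally {
  logSpan(rootSpan); // emits the sanitized 'Span completed' log entry
}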
@@ -34,29 +22,13 @@ const handler = async (req: Request) => {
     });
   }

-  // Extract parent span context from headers (if present)
-  const parentSpanContext = extractSpanContextFromHeaders(req.headers);
-
-  // Create root span for this edge function invocation
-  const rootSpan = startSpan(
-    'process-selective-approval',
-    'SERVER',
-    parentSpanContext,
-    {
-      'http.method': 'POST',
-      'function.name': 'process-selective-approval',
-    }
-  );
-  const requestId = rootSpan.spanId;
+  // Generate request ID for tracking
+  const requestId = crypto.randomUUID();

   try {
     // STEP 1: Authentication
-    addSpanEvent(rootSpan, 'authentication_start');
     const authHeader = req.headers.get('Authorization');
     if (!authHeader) {
-      addSpanEvent(rootSpan, 'authentication_failed', { reason: 'missing_header' });
-      endSpan(rootSpan, 'error');
-      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Missing Authorization header' }),
         {
@@ -75,14 +47,6 @@ const handler = async (req: Request) => {

     const { data: { user }, error: authError } = await supabase.auth.getUser();
     if (authError || !user) {
-      addSpanEvent(rootSpan, 'authentication_failed', { error: authError?.message });
-      edgeLogger.warn('Authentication failed', {
-        requestId,
-        error: authError?.message,
-        action: 'process_approval'
-      });
-      endSpan(rootSpan, 'error', authError || new Error('Unauthorized'));
-      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Unauthorized' }),
         {
@@ -95,34 +59,13 @@ const handler = async (req: Request) => {
       );
     }

-    setSpanAttributes(rootSpan, { 'user.id': user.id });
-    addSpanEvent(rootSpan, 'authentication_success');
-    edgeLogger.info('Approval request received', {
-      requestId,
-      moderatorId: user.id,
-      action: 'process_approval'
-    });
+    console.log(`[${requestId}] Approval request from moderator ${user.id}`);

     // STEP 2: Parse request
-    addSpanEvent(rootSpan, 'validation_start');
     const body: ApprovalRequest = await req.json();
     const { submissionId, itemIds, idempotencyKey } = body;

     if (!submissionId || !itemIds || itemIds.length === 0) {
-      addSpanEvent(rootSpan, 'validation_failed', {
-        hasSubmissionId: !!submissionId,
-        hasItemIds: !!itemIds,
-        itemCount: itemIds?.length || 0,
-      });
-      edgeLogger.warn('Invalid request payload', {
-        requestId,
-        hasSubmissionId: !!submissionId,
-        hasItemIds: !!itemIds,
-        itemCount: itemIds?.length || 0,
-        action: 'process_approval'
-      });
-      endSpan(rootSpan, 'error');
-      logSpan(rootSpan);
       return new Response(
         JSON.stringify({ error: 'Missing required fields: submissionId, itemIds' }),
         {
@@ -135,21 +78,7 @@ const handler = async (req: Request) => {
       );
     }

-    setSpanAttributes(rootSpan, {
-      'submission.id': submissionId,
-      'submission.item_count': itemIds.length,
-      'idempotency.key': idempotencyKey,
-    });
-    addSpanEvent(rootSpan, 'validation_complete');
-    edgeLogger.info('Request validated', {
-      requestId,
-      submissionId,
-      itemCount: itemIds.length,
-      action: 'process_approval'
-    });
-
     // STEP 3: Idempotency check
-    addSpanEvent(rootSpan, 'idempotency_check_start');
     const { data: existingKey } = await supabase
       .from('submission_idempotency_keys')
       .select('*')
@@ -157,16 +86,7 @@ const handler = async (req: Request) => {
       .single();

     if (existingKey?.status === 'completed') {
-      addSpanEvent(rootSpan, 'idempotency_cache_hit');
-      setSpanAttributes(rootSpan, { 'cache.hit': true });
-      edgeLogger.info('Idempotency cache hit', {
-        requestId,
-        idempotencyKey,
-        cached: true,
-        action: 'process_approval'
-      });
-      endSpan(rootSpan, 'ok');
-      logSpan(rootSpan);
+      console.log(`[${requestId}] Idempotency key already processed, returning cached result`);
       return new Response(
         JSON.stringify(existingKey.result_data),
         {
@@ -188,15 +108,7 @@ const handler = async (req: Request) => {
       .single();

     if (submissionError || !submission) {
-      addSpanEvent(rootSpan, 'submission_fetch_failed', { error: submissionError?.message });
-      edgeLogger.error('Submission not found', {
-        requestId,
-        submissionId,
-        error: submissionError?.message,
-        action: 'process_approval'
-      });
-      endSpan(rootSpan, 'error', submissionError || new Error('Submission not found'));
-      logSpan(rootSpan);
+      console.error(`[${requestId}] Submission not found:`, submissionError);
       return new Response(
         JSON.stringify({ error: 'Submission not found' }),
         {
@@ -211,13 +123,7 @@ const handler = async (req: Request) => {

     // STEP 5: Verify moderator can approve this submission
     if (submission.assigned_to && submission.assigned_to !== user.id) {
-      edgeLogger.warn('Lock conflict', {
-        requestId,
-        submissionId,
-        lockedBy: submission.assigned_to,
-        attemptedBy: user.id,
-        action: 'process_approval'
-      });
+      console.error(`[${requestId}] Submission locked by another moderator`);
       return new Response(
         JSON.stringify({ error: 'Submission is locked by another moderator' }),
         {
@@ -231,13 +137,7 @@ const handler = async (req: Request) => {
     }

     if (!['pending', 'partially_approved'].includes(submission.status)) {
-      edgeLogger.warn('Invalid submission status', {
-        requestId,
-        submissionId,
-        currentStatus: submission.status,
-        expectedStatuses: ['pending', 'partially_approved'],
-        action: 'process_approval'
-      });
+      console.error(`[${requestId}] Invalid submission status: ${submission.status}`);
       return new Response(
         JSON.stringify({ error: 'Submission already processed' }),
         {
@@ -250,59 +150,17 @@ const handler = async (req: Request) => {
       );
     }

-    // STEP 6: Register idempotency key as processing (atomic upsert)
-    // ✅ CRITICAL FIX: Use ON CONFLICT to prevent race conditions
+    // STEP 6: Register idempotency key as processing
     if (!existingKey) {
-      const { data: insertedKey, error: idempotencyError } = await supabase
-        .from('submission_idempotency_keys')
-        .insert({
-          idempotency_key: idempotencyKey,
-          submission_id: submissionId,
-          moderator_id: user.id,
-          item_ids: itemIds,
-          status: 'processing'
-        })
-        .select()
-        .single();
-
-      // If conflict occurred, another moderator is processing
-      if (idempotencyError && idempotencyError.code === '23505') {
-        edgeLogger.warn('Idempotency key conflict - another request processing', {
-          requestId,
-          idempotencyKey,
-          moderatorId: user.id
-        });
-        return new Response(
-          JSON.stringify({ error: 'Another moderator is processing this submission' }),
-          { status: 409, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
-        );
-      }
-
-      if (idempotencyError) {
-        throw toError(idempotencyError);
-      }
+      await supabase.from('submission_idempotency_keys').insert({
+        idempotency_key: idempotencyKey,
+        submission_id: submissionId,
+        moderator_id: user.id,
+        status: 'processing'
+      });
     }

-    // Create child span for RPC transaction
-    const rpcSpan = startSpan(
-      'process_approval_transaction',
-      'DATABASE',
-      getSpanContext(rootSpan),
-      {
-        'db.operation': 'rpc',
-        'db.function': 'process_approval_transaction',
-        'submission.id': submissionId,
-        'submission.item_count': itemIds.length,
-      }
-    );
-
-    addSpanEvent(rpcSpan, 'rpc_call_start');
-    edgeLogger.info('Calling approval transaction RPC', {
-      requestId,
-      submissionId,
-      itemCount: itemIds.length,
-      action: 'process_approval'
-    });
+    console.log(`[${requestId}] Calling process_approval_transaction RPC`);

     // ============================================================================
     // STEP 7: Call RPC function with deadlock retry logic
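The removed STEP 6 used Postgres error code 23505 (unique_violation) as its concurrency guard: of two racing requests with the same idempotency key, exactly one INSERT wins, and the loser is turned into a 409 instead of processing the approval twice. A minimal sketch of that pattern, assuming (as the removed code does) a unique index on `submission_idempotency_keys.idempotency_key`; the helper name is illustrative:

import type { SupabaseClient } from '@supabase/supabase-js';

async function registerIdempotencyKey(
  supabase: SupabaseClient,
  idempotencyKey: string,
  submissionId: string,
  moderatorId: string,
): Promise<Response | null> {
  const { error } = await supabase
    .from('submission_idempotency_keys')
    .insert({
      idempotency_key: idempotencyKey,
      submission_id: submissionId,
      moderator_id: moderatorId,
      status: 'processing',
    });

  if (error && error.code === '23505') {
    // The competing request inserted the key first; refuse to double-process.
    return new Response(
      JSON.stringify({ error: 'Another moderator is processing this submission' }),
      { status: 409, headers: { 'Content-Type': 'application/json' } },
    );
  }
  if (error) throw error;
  return null; // key registered; caller may proceed
}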
@@ -320,9 +178,7 @@ const handler = async (req: Request) => {
           p_item_ids: itemIds,
           p_moderator_id: user.id,
           p_submitter_id: submission.user_id,
-          p_request_id: requestId,
-          p_trace_id: rootSpan.traceId,
-          p_parent_span_id: rpcSpan.spanId
+          p_request_id: requestId
         }
       );

@@ -331,10 +187,6 @@ const handler = async (req: Request) => {

       if (!rpcError) {
         // Success!
-        addSpanEvent(rpcSpan, 'rpc_call_success', {
-          'result.status': data?.status,
-          'items.processed': itemIds.length,
-        });
         break;
       }

@@ -342,51 +194,23 @@ const handler = async (req: Request) => {
       if (rpcError.code === '40P01' || rpcError.code === '40001') {
         retryCount++;
         if (retryCount > MAX_DEADLOCK_RETRIES) {
-          addSpanEvent(rpcSpan, 'max_retries_exceeded', { attempt: retryCount });
-          edgeLogger.error('Max deadlock retries exceeded', {
-            requestId,
-            submissionId,
-            attempt: retryCount,
-            action: 'process_approval'
-          });
+          console.error(`[${requestId}] Max deadlock retries exceeded`);
           break;
         }

         const backoffMs = 100 * Math.pow(2, retryCount);
-        addSpanEvent(rpcSpan, 'deadlock_retry', { attempt: retryCount, backoffMs });
-        edgeLogger.warn('Deadlock detected, retrying', {
-          requestId,
-          attempt: retryCount,
-          maxAttempts: MAX_DEADLOCK_RETRIES,
-          backoffMs,
-          action: 'process_approval'
-        });
+        console.log(`[${requestId}] Deadlock detected, retrying in ${backoffMs}ms (attempt ${retryCount}/${MAX_DEADLOCK_RETRIES})`);
         await new Promise(r => setTimeout(r, backoffMs));
         continue;
       }

       // Non-retryable error, break immediately
-      addSpanEvent(rpcSpan, 'rpc_call_failed', {
-        error: rpcError.message,
-        errorCode: rpcError.code
-      });
       break;
     }

     if (rpcError) {
       // Transaction failed - EVERYTHING rolled back automatically by PostgreSQL
-      endSpan(rpcSpan, 'error', rpcError);
-      logSpan(rpcSpan);
-
-      edgeLogger.error('Transaction failed', {
-        requestId,
-        duration: rpcSpan.duration,
-        submissionId,
-        error: rpcError.message,
-        errorCode: rpcError.code,
-        retries: retryCount,
-        action: 'process_approval'
-      });
+      console.error(`[${requestId}] Approval transaction failed:`, rpcError);

       // Update idempotency key to failed
       try {
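For reference, the deadlock retry schedule above is exponential: `retryCount` is incremented before the delay is computed, so with `backoffMs = 100 * 2^retryCount` the waits are 200 ms, 400 ms, 800 ms, and so on up to `MAX_DEADLOCK_RETRIES` (the constant's value is defined outside this diff; 3 is assumed below):

// Standalone illustration of the schedule, not the handler itself.
const MAX_DEADLOCK_RETRIES = 3; // assumed value for illustration

for (let retryCount = 1; retryCount <= MAX_DEADLOCK_RETRIES; retryCount++) {
  const backoffMs = 100 * Math.pow(2, retryCount);
  console.log(`attempt ${retryCount}: wait ${backoffMs}ms`); // 200, 400, 800
}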
@@ -399,19 +223,10 @@ const handler = async (req: Request) => {
         })
         .eq('idempotency_key', idempotencyKey);
       } catch (updateError) {
-        edgeLogger.warn('Failed to update idempotency key', {
-          requestId,
-          idempotencyKey,
-          status: 'failed',
-          error: formatEdgeError(updateError),
-          action: 'process_approval'
-        });
+        console.error(`[${requestId}] Failed to update idempotency key to failed:`, updateError);
        // Non-blocking - continue with error response even if idempotency update fails
       }

-      endSpan(rootSpan, 'error', rpcError);
-      logSpan(rootSpan);
-
       return new Response(
         JSON.stringify({
           error: 'Approval transaction failed',
@@ -429,24 +244,7 @@ const handler = async (req: Request) => {
       );
     }

-    // RPC succeeded
-    endSpan(rpcSpan, 'ok');
-    logSpan(rpcSpan);
-
-    setSpanAttributes(rootSpan, {
-      'result.status': result?.status,
-      'result.final_status': result?.status,
-      'retries': retryCount,
-    });
-    edgeLogger.info('Transaction completed successfully', {
-      requestId,
-      duration: rpcSpan.duration,
-      submissionId,
-      itemCount: itemIds.length,
-      retries: retryCount,
-      newStatus: result?.status,
-      action: 'process_approval'
-    });
+    console.log(`[${requestId}] Transaction completed successfully:`, result);

     // STEP 8: Success - update idempotency key
     try {
@@ -459,19 +257,10 @@ const handler = async (req: Request) => {
       })
       .eq('idempotency_key', idempotencyKey);
     } catch (updateError) {
-      edgeLogger.warn('Failed to update idempotency key', {
-        requestId,
-        idempotencyKey,
-        status: 'completed',
-        error: formatEdgeError(updateError),
-        action: 'process_approval'
-      });
+      console.error(`[${requestId}] Failed to update idempotency key to completed:`, updateError);
      // Non-blocking - transaction succeeded, so continue with success response
     }

-    endSpan(rootSpan, 'ok');
-    logSpan(rootSpan);
-
     return new Response(
       JSON.stringify(result),
       {
@@ -485,16 +274,7 @@ const handler = async (req: Request) => {
     );

   } catch (error) {
-    endSpan(rootSpan, 'error', error instanceof Error ? error : toError(error));
-    logSpan(rootSpan);
-
-    edgeLogger.error('Unexpected error', {
-      requestId,
-      duration: rootSpan.duration,
-      error: formatEdgeError(error),
-      stack: error instanceof Error ? error.stack : undefined,
-      action: 'process_approval'
-    });
+    console.error(`[${requestId}] Unexpected error:`, error);
     return new Response(
       JSON.stringify({
         error: 'Internal server error',
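The deadlock-retry pattern that the hunks above strip down to console logging is worth seeing in isolation. A minimal standalone sketch, assuming only the PostgreSQL error codes used above (40P01 deadlock_detected, 40001 serialization_failure) and the same 100 ms base backoff; the rpc callable and type names here are illustrative, not from this repo:

// Sketch: retry an RPC call on PostgreSQL deadlock/serialization errors
// with exponential backoff (200ms, 400ms, 800ms when maxRetries = 3).
type RpcResult = { data: unknown; error: { code: string; message: string } | null };

async function callWithDeadlockRetry(
  rpc: () => Promise<RpcResult>,
  maxRetries = 3
): Promise<RpcResult> {
  let retryCount = 0;
  let last: RpcResult = { data: null, error: null };
  while (retryCount <= maxRetries) {
    last = await rpc();
    if (!last.error) break; // success
    // 40P01 = deadlock_detected, 40001 = serialization_failure
    if (last.error.code === '40P01' || last.error.code === '40001') {
      retryCount++;
      if (retryCount > maxRetries) break; // give up after max attempts
      const backoffMs = 100 * Math.pow(2, retryCount);
      await new Promise((r) => setTimeout(r, backoffMs));
      continue;
    }
    break; // non-retryable error, surface it to the caller
  }
  return last;
}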
@@ -1,4 +0,0 @@
export const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
@@ -1,518 +0,0 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from './cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import {
  edgeLogger,
  startSpan,
  endSpan,
  addSpanEvent,
  setSpanAttributes,
  getSpanContext,
  logSpan,
  extractSpanContextFromHeaders,
  type Span
} from '../_shared/logger.ts';
import { formatEdgeError, toError } from '../_shared/errorFormatter.ts';

const SUPABASE_URL = Deno.env.get('SUPABASE_URL') || 'https://api.thrillwiki.com';
const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY')!;

interface RejectionRequest {
  submissionId: string;
  itemIds: string[];
  rejectionReason: string;
  idempotencyKey: string;
}

// Main handler function
const handler = async (req: Request) => {
  // Handle CORS preflight requests
  if (req.method === 'OPTIONS') {
    return new Response(null, {
      status: 204,
      headers: corsHeaders
    });
  }

  // Extract parent span context from headers (if present)
  const parentSpanContext = extractSpanContextFromHeaders(req.headers);

  // Create root span for this edge function invocation
  const rootSpan = startSpan(
    'process-selective-rejection',
    'SERVER',
    parentSpanContext,
    {
      'http.method': 'POST',
      'function.name': 'process-selective-rejection',
    }
  );
  const requestId = rootSpan.spanId;

  try {
    // STEP 1: Authentication
    addSpanEvent(rootSpan, 'authentication_start');
    const authHeader = req.headers.get('Authorization');
    if (!authHeader) {
      addSpanEvent(rootSpan, 'authentication_failed', { reason: 'missing_header' });
      endSpan(rootSpan, 'error');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Missing Authorization header' }),
        {
          status: 401,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    const supabase = createClient(SUPABASE_URL, SUPABASE_ANON_KEY, {
      global: { headers: { Authorization: authHeader } }
    });

    const { data: { user }, error: authError } = await supabase.auth.getUser();
    if (authError || !user) {
      addSpanEvent(rootSpan, 'authentication_failed', { error: authError?.message });
      edgeLogger.warn('Authentication failed', {
        requestId,
        error: authError?.message,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error', authError || new Error('Unauthorized'));
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Unauthorized' }),
        {
          status: 401,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    setSpanAttributes(rootSpan, { 'user.id': user.id });
    addSpanEvent(rootSpan, 'authentication_success');
    edgeLogger.info('Rejection request received', {
      requestId,
      moderatorId: user.id,
      action: 'process_rejection'
    });

    // STEP 2: Parse request
    addSpanEvent(rootSpan, 'validation_start');
    const body: RejectionRequest = await req.json();
    const { submissionId, itemIds, rejectionReason, idempotencyKey } = body;

    if (!submissionId || !itemIds || itemIds.length === 0 || !rejectionReason) {
      addSpanEvent(rootSpan, 'validation_failed', {
        hasSubmissionId: !!submissionId,
        hasItemIds: !!itemIds,
        itemCount: itemIds?.length || 0,
        hasReason: !!rejectionReason,
      });
      edgeLogger.warn('Invalid request payload', {
        requestId,
        hasSubmissionId: !!submissionId,
        hasItemIds: !!itemIds,
        itemCount: itemIds?.length || 0,
        hasReason: !!rejectionReason,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Missing required fields: submissionId, itemIds, rejectionReason' }),
        {
          status: 400,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    setSpanAttributes(rootSpan, {
      'submission.id': submissionId,
      'submission.item_count': itemIds.length,
      'idempotency.key': idempotencyKey,
    });
    addSpanEvent(rootSpan, 'validation_complete');
    edgeLogger.info('Request validated', {
      requestId,
      submissionId,
      itemCount: itemIds.length,
      action: 'process_rejection'
    });

    // STEP 3: Idempotency check
    addSpanEvent(rootSpan, 'idempotency_check_start');
    const { data: existingKey } = await supabase
      .from('submission_idempotency_keys')
      .select('*')
      .eq('idempotency_key', idempotencyKey)
      .single();

    if (existingKey?.status === 'completed') {
      addSpanEvent(rootSpan, 'idempotency_cache_hit');
      setSpanAttributes(rootSpan, { 'cache.hit': true });
      edgeLogger.info('Idempotency cache hit', {
        requestId,
        idempotencyKey,
        cached: true,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'ok');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify(existingKey.result_data),
        {
          status: 200,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json',
            'X-Cache-Status': 'HIT'
          }
        }
      );
    }

    // STEP 4: Fetch submission to get submitter_id
    const { data: submission, error: submissionError } = await supabase
      .from('content_submissions')
      .select('user_id, status, assigned_to')
      .eq('id', submissionId)
      .single();

    if (submissionError || !submission) {
      addSpanEvent(rootSpan, 'submission_fetch_failed', { error: submissionError?.message });
      edgeLogger.error('Submission not found', {
        requestId,
        submissionId,
        error: submissionError?.message,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error', submissionError || new Error('Submission not found'));
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Submission not found' }),
        {
          status: 404,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    // STEP 5: Verify moderator can reject this submission
    if (submission.assigned_to && submission.assigned_to !== user.id) {
      edgeLogger.warn('Lock conflict', {
        requestId,
        submissionId,
        lockedBy: submission.assigned_to,
        attemptedBy: user.id,
        action: 'process_rejection'
      });
      return new Response(
        JSON.stringify({ error: 'Submission is locked by another moderator' }),
        {
          status: 409,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    if (!['pending', 'partially_approved'].includes(submission.status)) {
      edgeLogger.warn('Invalid submission status', {
        requestId,
        submissionId,
        currentStatus: submission.status,
        expectedStatuses: ['pending', 'partially_approved'],
        action: 'process_rejection'
      });
      return new Response(
        JSON.stringify({ error: 'Submission already processed' }),
        {
          status: 400,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    // STEP 6: Register idempotency key as processing (atomic upsert)
    // ✅ CRITICAL FIX: Use ON CONFLICT to prevent race conditions
    if (!existingKey) {
      const { data: insertedKey, error: idempotencyError } = await supabase
        .from('submission_idempotency_keys')
        .insert({
          idempotency_key: idempotencyKey,
          submission_id: submissionId,
          moderator_id: user.id,
          item_ids: itemIds,
          status: 'processing'
        })
        .select()
        .single();

      // If conflict occurred, another moderator is processing
      if (idempotencyError && idempotencyError.code === '23505') {
        edgeLogger.warn('Idempotency key conflict - another request processing', {
          requestId,
          idempotencyKey,
          moderatorId: user.id
        });
        return new Response(
          JSON.stringify({ error: 'Another moderator is processing this submission' }),
          { status: 409, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
        );
      }

      if (idempotencyError) {
        throw toError(idempotencyError);
      }
    }

    // Create child span for RPC transaction
    const rpcSpan = startSpan(
      'process_rejection_transaction',
      'DATABASE',
      getSpanContext(rootSpan),
      {
        'db.operation': 'rpc',
        'db.function': 'process_rejection_transaction',
        'submission.id': submissionId,
        'submission.item_count': itemIds.length,
      }
    );

    addSpanEvent(rpcSpan, 'rpc_call_start');
    edgeLogger.info('Calling rejection transaction RPC', {
      requestId,
      submissionId,
      itemCount: itemIds.length,
      action: 'process_rejection'
    });

    // ============================================================================
    // STEP 7: Call RPC function with deadlock retry logic
    // ============================================================================
    let retryCount = 0;
    const MAX_DEADLOCK_RETRIES = 3;
    let result: any = null;
    let rpcError: any = null;

    while (retryCount <= MAX_DEADLOCK_RETRIES) {
      const { data, error } = await supabase.rpc(
        'process_rejection_transaction',
        {
          p_submission_id: submissionId,
          p_item_ids: itemIds,
          p_moderator_id: user.id,
          p_rejection_reason: rejectionReason,
          p_request_id: requestId,
          p_trace_id: rootSpan.traceId,
          p_parent_span_id: rpcSpan.spanId
        }
      );

      result = data;
      rpcError = error;

      if (!rpcError) {
        // Success!
        addSpanEvent(rpcSpan, 'rpc_call_success', {
          'result.status': data?.status,
          'items.processed': itemIds.length,
        });
        break;
      }

      // Check for deadlock (40P01) or serialization failure (40001)
      if (rpcError.code === '40P01' || rpcError.code === '40001') {
        retryCount++;
        if (retryCount > MAX_DEADLOCK_RETRIES) {
          addSpanEvent(rpcSpan, 'max_retries_exceeded', { attempt: retryCount });
          edgeLogger.error('Max deadlock retries exceeded', {
            requestId,
            submissionId,
            attempt: retryCount,
            action: 'process_rejection'
          });
          break;
        }

        const backoffMs = 100 * Math.pow(2, retryCount);
        addSpanEvent(rpcSpan, 'deadlock_retry', { attempt: retryCount, backoffMs });
        edgeLogger.warn('Deadlock detected, retrying', {
          requestId,
          attempt: retryCount,
          maxAttempts: MAX_DEADLOCK_RETRIES,
          backoffMs,
          action: 'process_rejection'
        });
        await new Promise(r => setTimeout(r, backoffMs));
        continue;
      }

      // Non-retryable error, break immediately
      addSpanEvent(rpcSpan, 'rpc_call_failed', {
        error: rpcError.message,
        errorCode: rpcError.code
      });
      break;
    }

    if (rpcError) {
      // Transaction failed - EVERYTHING rolled back automatically by PostgreSQL
      endSpan(rpcSpan, 'error', rpcError);
      logSpan(rpcSpan);

      edgeLogger.error('Transaction failed', {
        requestId,
        duration: rpcSpan.duration,
        submissionId,
        error: rpcError.message,
        errorCode: rpcError.code,
        retries: retryCount,
        action: 'process_rejection'
      });

      // Update idempotency key to failed
      try {
        await supabase
          .from('submission_idempotency_keys')
          .update({
            status: 'failed',
            error_message: rpcError.message,
            completed_at: new Date().toISOString()
          })
          .eq('idempotency_key', idempotencyKey);
      } catch (updateError) {
        edgeLogger.warn('Failed to update idempotency key', {
          requestId,
          idempotencyKey,
          status: 'failed',
          error: formatEdgeError(updateError),
          action: 'process_rejection'
        });
        // Non-blocking - continue with error response even if idempotency update fails
      }

      endSpan(rootSpan, 'error', rpcError);
      logSpan(rootSpan);

      return new Response(
        JSON.stringify({
          error: 'Rejection transaction failed',
          message: rpcError.message,
          details: rpcError.details,
          retries: retryCount
        }),
        {
          status: 500,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }

    // RPC succeeded
    endSpan(rpcSpan, 'ok');
    logSpan(rpcSpan);

    setSpanAttributes(rootSpan, {
      'result.status': result?.status,
      'result.final_status': result?.status,
      'retries': retryCount,
    });
    edgeLogger.info('Transaction completed successfully', {
      requestId,
      duration: rpcSpan.duration,
      submissionId,
      itemCount: itemIds.length,
      retries: retryCount,
      newStatus: result?.status,
      action: 'process_rejection'
    });

    // STEP 8: Success - update idempotency key
    try {
      await supabase
        .from('submission_idempotency_keys')
        .update({
          status: 'completed',
          result_data: result,
          completed_at: new Date().toISOString()
        })
        .eq('idempotency_key', idempotencyKey);
    } catch (updateError) {
      edgeLogger.warn('Failed to update idempotency key', {
        requestId,
        idempotencyKey,
        status: 'completed',
        error: formatEdgeError(updateError),
        action: 'process_rejection'
      });
      // Non-blocking - transaction succeeded, so continue with success response
    }

    endSpan(rootSpan, 'ok');
    logSpan(rootSpan);

    return new Response(
      JSON.stringify(result),
      {
        status: 200,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json',
          'X-Request-Id': requestId
        }
      }
    );

  } catch (error) {
    endSpan(rootSpan, 'error', error instanceof Error ? error : toError(error));
    logSpan(rootSpan);

    edgeLogger.error('Unexpected error', {
      requestId,
      duration: rootSpan.duration,
      error: formatEdgeError(error),
      stack: error instanceof Error ? error.stack : undefined,
      action: 'process_rejection'
    });
    return new Response(
      JSON.stringify({
        error: 'Internal server error',
        message: error instanceof Error ? error.message : 'Unknown error'
      }),
      {
        status: 500,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json'
        }
      }
    );
  }
};

// Apply rate limiting: 10 requests per minute per IP (standard tier)
serve(withRateLimit(handler, rateLimiters.standard, corsHeaders));
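For reference, the deleted handler above expects a RejectionRequest body and an Authorization header, and keys retry deduplication on idempotencyKey. A hypothetical client-side invocation sketch (the URL and all values are placeholders, not taken from this repo); reusing the same idempotencyKey on a retry returns the cached result with X-Cache-Status: HIT:

// Sketch: invoking the process-selective-rejection edge function.
// FUNCTION_URL and the UUIDs below are placeholders.
const FUNCTION_URL = 'https://<project>.functions.supabase.co/process-selective-rejection';

async function rejectItems(accessToken: string) {
  const res = await fetch(FUNCTION_URL, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${accessToken}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      submissionId: '00000000-0000-0000-0000-000000000000', // placeholder UUID
      itemIds: ['00000000-0000-0000-0000-000000000001'],
      rejectionReason: 'Duplicate entry',
      idempotencyKey: crypto.randomUUID(), // reuse the same key when retrying
    }),
  });
  return res.json();
}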
@@ -2,7 +2,6 @@ import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
 import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
 import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
 import { createErrorResponse } from "../_shared/errorSanitizer.ts";
-import { formatEdgeError } from "../_shared/errorFormatter.ts";

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',

@@ -262,7 +261,7 @@ const handler = async (req: Request): Promise<Response> => {
   } catch (error) {
     edgeLogger.error('Unexpected error in receive-inbound-email', {
       requestId: tracking.requestId,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });
     return createErrorResponse(error, 500, corsHeaders);
   }
@@ -1,7 +1,6 @@
 import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { edgeLogger } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',

@@ -56,7 +55,7 @@ serve(async (req: Request) => {
   } catch (error) {
     edgeLogger.error('Maintenance exception', {
       requestId,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });

     return new Response(
@@ -2,7 +2,6 @@ import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
 import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
 import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
 import { createErrorResponse } from "../_shared/errorSanitizer.ts";
-import { formatEdgeError } from "../_shared/errorFormatter.ts";

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',

@@ -238,7 +237,7 @@ const handler = async (req: Request): Promise<Response> => {
   } catch (error) {
     edgeLogger.error('Unexpected error in send-admin-email-reply', {
       requestId: tracking.requestId,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });
     return createErrorResponse(error, 500, corsHeaders);
   }
@@ -2,7 +2,6 @@ import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
 import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
 import { edgeLogger } from "../_shared/logger.ts";
 import { createErrorResponse } from "../_shared/errorSanitizer.ts";
-import { formatEdgeError } from "../_shared/errorFormatter.ts";

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',

@@ -336,7 +335,7 @@ The ThrillWiki Team`,
     edgeLogger.error('Contact submission failed', {
       requestId,
       duration,
-      error: formatEdgeError(error)
+      error: error instanceof Error ? error.message : String(error)
     });
     return createErrorResponse(error, 500, corsHeaders);
   }
@@ -1,6 +1,5 @@
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
 import { edgeLogger } from '../_shared/logger.ts';
-import { formatEdgeError } from '../_shared/errorFormatter.ts';

 const BASE_URL = 'https://dev.thrillwiki.com';

@@ -348,7 +347,7 @@ Deno.serve(async (req) => {

     edgeLogger.error('Sitemap generation failed', {
       requestId,
-      error: formatEdgeError(error),
+      error: error instanceof Error ? error.message : String(error),
       duration,
     });

@@ -2,7 +2,6 @@ import { serve } from "https://deno.land/std@0.168.0/http/server.ts"
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
 import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts'
 import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts'
-import { formatEdgeError } from '../_shared/errorFormatter.ts'

 // Environment-aware CORS configuration
 const getAllowedOrigin = (requestOrigin: string | null): string | null => {

@@ -95,7 +94,7 @@ async function reportBanEvasionToAlerts(
   } catch (error) {
     // Non-blocking - log but don't fail the response
     edgeLogger.warn('Failed to report ban evasion', {
-      error: formatEdgeError(error),
+      error: error instanceof Error ? error.message : String(error),
       requestId
     });
   }

@@ -228,7 +227,7 @@ serve(withRateLimit(async (req) => {
     try {
       requestBody = await req.json();
     } catch (error: unknown) {
-      const errorMessage = formatEdgeError(error);
+      const errorMessage = error instanceof Error ? error.message : String(error);
       edgeLogger.error('Invalid JSON in delete request', {
         error: errorMessage,
         requestId: tracking.requestId
@@ -1,7 +1,6 @@
 import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
 import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.39.3';
 import { edgeLogger } from "../_shared/logger.ts";
-import { formatEdgeError } from "../_shared/errorFormatter.ts";

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',

@@ -97,7 +96,7 @@ serve(async (req) => {
     );
   } catch (error) {
     const duration = endRequest(tracking);
-    const errorMessage = formatEdgeError(error);
+    const errorMessage = error instanceof Error ? error.message : String(error);
     edgeLogger.error('Email validation error', {
       error: errorMessage,
       requestId: tracking.requestId,
@@ -1,6 +1,5 @@
 import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
 import { startRequest, endRequest, edgeLogger } from "../_shared/logger.ts";
-import { formatEdgeError } from "../_shared/errorFormatter.ts";

 const corsHeaders = {
   'Access-Control-Allow-Origin': '*',

@@ -181,7 +180,7 @@ const handler = async (req: Request): Promise<Response> => {
     );

   } catch (error) {
-    const errorMessage = formatEdgeError(error);
+    const errorMessage = error instanceof Error ? error.message : String(error);
     edgeLogger.error('Error in validate-email function', {
       error: errorMessage,
       requestId: tracking.requestId
@@ -1,159 +0,0 @@
-- ============================================================================
-- CRITICAL: Add Atomic Rejection Transaction RPC
-- ============================================================================
-- This migration creates process_rejection_transaction to ensure atomic
-- rejection of submission items with proper audit logging and status updates.
--
-- Features:
-- - Atomic updates to submission_items.status = 'rejected'
-- - Sets rejection_reason for each item
-- - Updates parent submission status (rejected or partially_approved)
-- - Logs to moderation_audit_log
-- - Releases lock (assigned_to = NULL, locked_until = NULL)
-- - Returns transaction result
-- ============================================================================

CREATE OR REPLACE FUNCTION process_rejection_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_rejection_reason TEXT,
  p_request_id TEXT DEFAULT NULL
)
RETURNS JSONB
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_start_time TIMESTAMPTZ;
  v_result JSONB;
  v_rejected_count INTEGER := 0;
  v_final_status TEXT;
  v_some_pending BOOLEAN := FALSE;
BEGIN
  v_start_time := clock_timestamp();

  RAISE NOTICE '[%] Starting atomic rejection transaction for submission %',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    p_submission_id;

  -- ========================================================================
  -- STEP 1: Set session variables (transaction-scoped)
  -- ========================================================================
  PERFORM set_config('app.moderator_id', p_moderator_id::text, true);

  -- ========================================================================
  -- STEP 2: Validate submission ownership and lock status
  -- ========================================================================
  IF NOT EXISTS (
    SELECT 1 FROM content_submissions
    WHERE id = p_submission_id
    AND (assigned_to = p_moderator_id OR assigned_to IS NULL)
    AND status IN ('pending', 'partially_approved')
  ) THEN
    RAISE EXCEPTION 'Submission not found, locked by another moderator, or already processed'
      USING ERRCODE = '42501';
  END IF;

  -- ========================================================================
  -- STEP 3: Update all items to rejected atomically
  -- ========================================================================
  UPDATE submission_items
  SET
    status = 'rejected',
    rejection_reason = p_rejection_reason,
    updated_at = NOW()
  WHERE id = ANY(p_item_ids)
  AND submission_id = p_submission_id
  AND status IN ('pending', 'rejected');

  GET DIAGNOSTICS v_rejected_count = ROW_COUNT;

  RAISE NOTICE '[%] Rejected % items',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    v_rejected_count;

  -- ========================================================================
  -- STEP 4: Determine final submission status
  -- ========================================================================
  -- Check if any items are still pending
  SELECT EXISTS(
    SELECT 1 FROM submission_items
    WHERE submission_id = p_submission_id
    AND status = 'pending'
  ) INTO v_some_pending;

  -- Set final status
  v_final_status := CASE
    WHEN v_some_pending THEN 'partially_approved'
    WHEN EXISTS(
      SELECT 1 FROM submission_items
      WHERE submission_id = p_submission_id
      AND status = 'approved'
    ) THEN 'partially_approved'
    ELSE 'rejected'
  END;

  -- ========================================================================
  -- STEP 5: Update parent submission
  -- ========================================================================
  UPDATE content_submissions
  SET
    status = v_final_status,
    reviewer_id = p_moderator_id,
    reviewed_at = NOW(),
    assigned_to = NULL,
    locked_until = NULL,
    reviewer_notes = p_rejection_reason
  WHERE id = p_submission_id;

  -- ========================================================================
  -- STEP 6: Log to moderation_audit_log
  -- ========================================================================
  INSERT INTO moderation_audit_log (
    submission_id,
    moderator_id,
    action,
    details,
    created_at
  ) VALUES (
    p_submission_id,
    p_moderator_id,
    'rejection',
    jsonb_build_object(
      'item_ids', p_item_ids,
      'rejection_reason', p_rejection_reason,
      'rejected_count', v_rejected_count,
      'final_status', v_final_status,
      'request_id', p_request_id
    ),
    NOW()
  );

  -- ========================================================================
  -- STEP 7: Build result
  -- ========================================================================
  v_result := jsonb_build_object(
    'success', TRUE,
    'rejected_count', v_rejected_count,
    'submission_status', v_final_status,
    'duration_ms', EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000
  );

  -- Clear session variables
  PERFORM set_config('app.moderator_id', '', true);

  RAISE NOTICE '[%] Rejection transaction completed in %ms',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000;

  RETURN v_result;
END;
$$;

-- Grant execute permissions
GRANT EXECUTE ON FUNCTION process_rejection_transaction TO authenticated;

COMMENT ON FUNCTION process_rejection_transaction IS
  'Atomic rejection transaction with audit logging and lock release';
@@ -1,172 +0,0 @@
-- Fix create_submission_with_items to remove temp_location_data reference
-- This column was dropped but the function still references it, causing park submissions to fail

DROP FUNCTION IF EXISTS public.create_submission_with_items(uuid, text, text, jsonb, uuid);

CREATE OR REPLACE FUNCTION public.create_submission_with_items(
  p_submission_id uuid,
  p_entity_type text,
  p_action_type text,
  p_items jsonb,
  p_user_id uuid
)
RETURNS uuid
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path TO 'public'
AS $function$
DECLARE
  v_item JSONB;
  v_item_type TEXT;
  v_item_data JSONB;
  v_depends_on INTEGER;
  v_order_index INTEGER;
  v_created_ids UUID[] := ARRAY[]::UUID[];
  v_submission_item_id UUID;
  v_entity_submission_id UUID;
BEGIN
  -- Loop through items array
  FOR v_item IN SELECT * FROM jsonb_array_elements(p_items)
  LOOP
    v_item_type := v_item->>'item_type';
    v_item_data := v_item->'item_data';
    v_depends_on := (v_item->>'depends_on')::INTEGER;
    v_order_index := (v_item->>'order_index')::INTEGER;

    -- Resolve dependency references
    IF v_depends_on IS NOT NULL THEN
      v_item_data := v_item_data || jsonb_build_object(
        v_item->>'dependency_field',
        v_created_ids[v_depends_on + 1]
      );
    END IF;

    -- Create submission based on entity type
    IF v_item_type = 'park' THEN
      INSERT INTO park_submissions (
        submission_id, name, slug, description, park_type, status,
        opening_date, opening_date_precision, closing_date, closing_date_precision,
        location_id, operator_id, property_owner_id,
        website_url, phone, email,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        v_item_data->>'park_type',
        v_item_data->>'status',
        (v_item_data->>'opening_date')::DATE,
        v_item_data->>'opening_date_precision',
        (v_item_data->>'closing_date')::DATE,
        v_item_data->>'closing_date_precision',
        (v_item_data->>'location_id')::UUID,
        (v_item_data->>'operator_id')::UUID,
        (v_item_data->>'property_owner_id')::UUID,
        v_item_data->>'website_url',
        v_item_data->>'phone',
        v_item_data->>'email',
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSIF v_item_type = 'ride' THEN
      INSERT INTO ride_submissions (
        submission_id, name, slug, description, category, status,
        opening_date, opening_date_precision, closing_date, closing_date_precision,
        park_id, manufacturer_id, designer_id, ride_model_id,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        v_item_data->>'category',
        v_item_data->>'status',
        (v_item_data->>'opening_date')::DATE,
        v_item_data->>'opening_date_precision',
        (v_item_data->>'closing_date')::DATE,
        v_item_data->>'closing_date_precision',
        (v_item_data->>'park_id')::UUID,
        (v_item_data->>'manufacturer_id')::UUID,
        (v_item_data->>'designer_id')::UUID,
        (v_item_data->>'ride_model_id')::UUID,
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSIF v_item_type IN ('manufacturer', 'operator', 'designer', 'property_owner') THEN
      INSERT INTO company_submissions (
        submission_id, name, slug, description, company_type,
        founded_year, headquarters_location, website_url,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        v_item_type,
        (v_item_data->>'founded_year')::INTEGER,
        v_item_data->>'headquarters_location',
        v_item_data->>'website_url',
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSIF v_item_type = 'ride_model' THEN
      INSERT INTO ride_model_submissions (
        submission_id, name, slug, description, manufacturer_id, category,
        banner_image_url, banner_image_id, card_image_url, card_image_id
      ) VALUES (
        p_submission_id,
        v_item_data->>'name',
        v_item_data->>'slug',
        v_item_data->>'description',
        (v_item_data->>'manufacturer_id')::UUID,
        v_item_data->>'category',
        v_item_data->>'banner_image_url',
        v_item_data->>'banner_image_id',
        v_item_data->>'card_image_url',
        v_item_data->>'card_image_id'
      ) RETURNING id INTO v_entity_submission_id;

    ELSE
      RAISE EXCEPTION 'Unsupported item type: %', v_item_type;
    END IF;

    -- Create submission_item record linking to the entity submission
    INSERT INTO submission_items (
      submission_id,
      item_type,
      action_type,
      order_index,
      depends_on,
      park_submission_id,
      ride_submission_id,
      company_submission_id,
      ride_model_submission_id
    ) VALUES (
      p_submission_id,
      v_item_type,
      p_action_type,
      v_order_index,
      CASE WHEN v_depends_on IS NOT NULL THEN v_created_ids[v_depends_on + 1] ELSE NULL END,
      CASE WHEN v_item_type = 'park' THEN v_entity_submission_id ELSE NULL END,
      CASE WHEN v_item_type = 'ride' THEN v_entity_submission_id ELSE NULL END,
      CASE WHEN v_item_type IN ('manufacturer', 'operator', 'designer', 'property_owner') THEN v_entity_submission_id ELSE NULL END,
      CASE WHEN v_item_type = 'ride_model' THEN v_entity_submission_id ELSE NULL END
    ) RETURNING id INTO v_submission_item_id;

    -- Track created submission item IDs in order for dependency resolution
    v_created_ids := array_append(v_created_ids, v_submission_item_id);
  END LOOP;

  RETURN p_submission_id;
END;
$function$;
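The p_items argument of the deleted function above is a JSONB array whose shape is only implicit in the parsing code. A sketch of that contract in TypeScript, under the assumption that depends_on is a zero-based index into earlier items (the function reads v_created_ids[v_depends_on + 1] from a 1-based PostgreSQL array) and dependency_field names the foreign key to backfill; all example values are hypothetical:

// Sketch: shape of the p_items JSONB payload for create_submission_with_items.
interface SubmissionItem {
  item_type: 'park' | 'ride' | 'manufacturer' | 'operator' | 'designer' | 'property_owner' | 'ride_model';
  item_data: Record<string, unknown>;
  order_index: number;
  depends_on?: number;        // zero-based index of an earlier item in this array
  dependency_field?: string;  // key in item_data to fill with the referenced item's created id
}

// Hypothetical payload: an operator plus a park that references it.
const items: SubmissionItem[] = [
  { item_type: 'operator', item_data: { name: 'Example Co', slug: 'example-co' }, order_index: 0 },
  {
    item_type: 'park',
    item_data: { name: 'Example Park', slug: 'example-park', park_type: 'theme_park' },
    order_index: 1,
    depends_on: 0,
    dependency_field: 'operator_id',
  },
];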
@@ -1,474 +0,0 @@
|
|||||||
-- ============================================================================
|
|
||||||
-- CRITICAL FIX: Restore complete approval pipeline with tracing
|
|
||||||
-- ============================================================================
|
|
||||||
-- This fixes the broken RPC that deleted all non-park entity handling
|
|
||||||
-- and restores full functionality for all entity types
|
|
||||||
-- ============================================================================
|
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS process_approval_transaction(UUID, UUID[], UUID, UUID, TEXT, TEXT, TEXT);
|
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION process_approval_transaction(
|
|
||||||
p_submission_id UUID,
|
|
||||||
p_item_ids UUID[],
|
|
||||||
p_moderator_id UUID,
|
|
||||||
p_submitter_id UUID,
|
|
||||||
p_request_id TEXT DEFAULT NULL,
|
|
||||||
p_trace_id TEXT DEFAULT NULL,
|
|
||||||
p_parent_span_id TEXT DEFAULT NULL
|
|
||||||
)
|
|
||||||
RETURNS JSONB
|
|
||||||
LANGUAGE plpgsql
|
|
||||||
SECURITY DEFINER
|
|
||||||
SET search_path = public
|
|
||||||
AS $$
|
|
||||||
DECLARE
|
|
||||||
v_start_time TIMESTAMPTZ;
|
|
||||||
v_result JSONB;
|
|
||||||
v_item RECORD;
|
|
||||||
v_item_data JSONB;
|
|
||||||
v_resolved_refs JSONB;
|
|
||||||
v_entity_id UUID;
|
|
||||||
v_approval_results JSONB[] := ARRAY[]::JSONB[];
|
|
||||||
v_final_status TEXT;
|
|
||||||
v_all_approved BOOLEAN := TRUE;
|
|
||||||
v_some_approved BOOLEAN := FALSE;
|
|
||||||
v_items_processed INTEGER := 0;
|
|
||||||
v_span_id TEXT;
|
|
||||||
BEGIN
|
|
||||||
v_start_time := clock_timestamp();
|
|
||||||
v_span_id := gen_random_uuid()::text;
|
|
||||||
|
|
||||||
-- Log span start with trace context
|
|
||||||
IF p_trace_id IS NOT NULL THEN
|
|
||||||
RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_approval_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
|
|
||||||
v_span_id,
|
|
||||||
p_trace_id,
|
|
||||||
p_parent_span_id,
|
|
||||||
EXTRACT(EPOCH FROM v_start_time) * 1000,
|
|
||||||
p_submission_id,
|
|
||||||
array_length(p_item_ids, 1);
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
RAISE NOTICE '[%] Starting atomic approval transaction for submission %',
|
|
||||||
COALESCE(p_request_id, 'NO_REQUEST_ID'),
|
|
||||||
p_submission_id;
|
|
||||||
|
|
||||||
-- ========================================================================
|
|
||||||
-- STEP 1: Set session variables (transaction-scoped with is_local=true)
|
|
||||||
-- ========================================================================
|
|
||||||
PERFORM set_config('app.current_user_id', p_submitter_id::text, true);
|
|
||||||
PERFORM set_config('app.submission_id', p_submission_id::text, true);
|
|
||||||
PERFORM set_config('app.moderator_id', p_moderator_id::text, true);
|
|
||||||
|
|
||||||
-- ========================================================================
|
|
||||||
-- STEP 2: Validate submission ownership and lock status
|
|
||||||
-- ========================================================================
|
|
||||||
IF NOT EXISTS (
|
|
||||||
SELECT 1 FROM content_submissions
|
|
||||||
WHERE id = p_submission_id
|
|
||||||
AND (assigned_to = p_moderator_id OR assigned_to IS NULL)
|
|
||||||
AND status IN ('pending', 'partially_approved')
|
|
||||||
) THEN
|
|
||||||
RAISE EXCEPTION 'Submission not found, locked by another moderator, or already processed'
|
|
||||||
USING ERRCODE = '42501';
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- ========================================================================
|
|
||||||
-- STEP 3: Process each item sequentially within this transaction
|
|
||||||
-- ========================================================================
|
|
||||||
FOR v_item IN
|
|
||||||
SELECT
|
|
||||||
si.*,
|
|
||||||
ps.name as park_name,
|
|
||||||
ps.slug as park_slug,
|
|
||||||
ps.description as park_description,
|
|
||||||
ps.park_type,
|
|
||||||
ps.status as park_status,
|
|
||||||
ps.location_id,
|
|
||||||
ps.operator_id,
|
|
||||||
ps.property_owner_id,
|
|
||||||
ps.opening_date as park_opening_date,
|
|
||||||
ps.closing_date as park_closing_date,
|
|
||||||
ps.opening_date_precision as park_opening_date_precision,
|
|
||||||
ps.closing_date_precision as park_closing_date_precision,
|
|
||||||
ps.website_url as park_website_url,
|
|
||||||
ps.phone as park_phone,
|
|
||||||
ps.email as park_email,
|
|
||||||
ps.banner_image_url as park_banner_image_url,
|
|
||||||
ps.banner_image_id as park_banner_image_id,
|
|
||||||
ps.card_image_url as park_card_image_url,
|
|
||||||
ps.card_image_id as park_card_image_id,
|
|
||||||
rs.name as ride_name,
|
|
||||||
rs.slug as ride_slug,
|
|
||||||
rs.park_id as ride_park_id,
|
|
||||||
rs.ride_type,
|
|
||||||
rs.status as ride_status,
|
|
||||||
rs.manufacturer_id,
|
|
||||||
rs.ride_model_id,
|
|
||||||
rs.opening_date as ride_opening_date,
|
|
||||||
rs.closing_date as ride_closing_date,
|
|
||||||
rs.opening_date_precision as ride_opening_date_precision,
|
|
||||||
rs.closing_date_precision as ride_closing_date_precision,
|
|
||||||
rs.description as ride_description,
|
|
||||||
rs.banner_image_url as ride_banner_image_url,
|
|
||||||
rs.banner_image_id as ride_banner_image_id,
|
|
||||||
rs.card_image_url as ride_card_image_url,
|
|
||||||
rs.card_image_id as ride_card_image_id,
|
|
||||||
cs.name as company_name,
|
|
||||||
cs.slug as company_slug,
|
|
||||||
cs.description as company_description,
|
|
||||||
cs.website_url as company_website_url,
|
|
||||||
cs.founded_year,
|
|
||||||
cs.banner_image_url as company_banner_image_url,
|
|
||||||
cs.banner_image_id as company_banner_image_id,
|
|
||||||
cs.card_image_url as company_card_image_url,
|
|
||||||
cs.card_image_id as company_card_image_id,
|
|
||||||
rms.name as ride_model_name,
|
|
||||||
rms.slug as ride_model_slug,
|
|
||||||
rms.manufacturer_id as ride_model_manufacturer_id,
|
|
||||||
rms.ride_type as ride_model_ride_type,
|
|
||||||
rms.description as ride_model_description,
|
|
||||||
rms.banner_image_url as ride_model_banner_image_url,
|
|
||||||
rms.banner_image_id as ride_model_banner_image_id,
|
|
||||||
rms.card_image_url as ride_model_card_image_url,
|
|
||||||
rms.card_image_id as ride_model_card_image_id,
|
|
||||||
phs.entity_id as photo_entity_id,
|
|
||||||
phs.entity_type as photo_entity_type,
|
|
||||||
phs.title as photo_title
|
|
||||||
FROM submission_items si
|
|
||||||
LEFT JOIN park_submissions ps ON si.park_submission_id = ps.id
|
|
||||||
LEFT JOIN ride_submissions rs ON si.ride_submission_id = rs.id
|
|
||||||
LEFT JOIN company_submissions cs ON si.company_submission_id = cs.id
|
|
||||||
LEFT JOIN ride_model_submissions rms ON si.ride_model_submission_id = rms.id
|
|
||||||
LEFT JOIN photo_submissions phs ON si.photo_submission_id = phs.id
|
|
||||||
WHERE si.id = ANY(p_item_ids)
|
|
||||||
ORDER BY si.order_index, si.created_at
|
|
||||||
LOOP
|
|
||||||
BEGIN
|
|
||||||
v_items_processed := v_items_processed + 1;
|
|
||||||
|
|
||||||
-- Log item processing span event
|
|
||||||
      IF p_trace_id IS NOT NULL THEN
        RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "process_item", "timestamp": %, "attributes": {"item.id": "%", "item.type": "%", "item.action": "%"}}',
          p_trace_id,
          v_span_id,
          EXTRACT(EPOCH FROM clock_timestamp()) * 1000,
          v_item.id,
          v_item.item_type,
          v_item.action_type;
      END IF;

      -- Build item data based on entity type
      IF v_item.item_type = 'park' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.park_name,
          'slug', v_item.park_slug,
          'description', v_item.park_description,
          'park_type', v_item.park_type,
          'status', v_item.park_status,
          'location_id', v_item.location_id,
          'operator_id', v_item.operator_id,
          'property_owner_id', v_item.property_owner_id,
          'opening_date', v_item.park_opening_date,
          'closing_date', v_item.park_closing_date,
          'opening_date_precision', v_item.park_opening_date_precision,
          'closing_date_precision', v_item.park_closing_date_precision,
          'website_url', v_item.park_website_url,
          'phone', v_item.park_phone,
          'email', v_item.park_email,
          'banner_image_url', v_item.park_banner_image_url,
          'banner_image_id', v_item.park_banner_image_id,
          'card_image_url', v_item.park_card_image_url,
          'card_image_id', v_item.park_card_image_id
        );
      ELSIF v_item.item_type = 'ride' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.ride_name,
          'slug', v_item.ride_slug,
          'park_id', v_item.ride_park_id,
          'ride_type', v_item.ride_type,
          'status', v_item.ride_status,
          'manufacturer_id', v_item.manufacturer_id,
          'ride_model_id', v_item.ride_model_id,
          'opening_date', v_item.ride_opening_date,
          'closing_date', v_item.ride_closing_date,
          'opening_date_precision', v_item.ride_opening_date_precision,
          'closing_date_precision', v_item.ride_closing_date_precision,
          'description', v_item.ride_description,
          'banner_image_url', v_item.ride_banner_image_url,
          'banner_image_id', v_item.ride_banner_image_id,
          'card_image_url', v_item.ride_card_image_url,
          'card_image_id', v_item.ride_card_image_id
        );
      ELSIF v_item.item_type IN ('manufacturer', 'operator', 'property_owner', 'designer') THEN
        v_item_data := jsonb_build_object(
          'name', v_item.company_name,
          'slug', v_item.company_slug,
          'description', v_item.company_description,
          'website_url', v_item.company_website_url,
          'founded_year', v_item.founded_year,
          'banner_image_url', v_item.company_banner_image_url,
          'banner_image_id', v_item.company_banner_image_id,
          'card_image_url', v_item.company_card_image_url,
          'card_image_id', v_item.company_card_image_id
        );
      ELSIF v_item.item_type = 'ride_model' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.ride_model_name,
          'slug', v_item.ride_model_slug,
          'manufacturer_id', v_item.ride_model_manufacturer_id,
          'ride_type', v_item.ride_model_ride_type,
          'description', v_item.ride_model_description,
          'banner_image_url', v_item.ride_model_banner_image_url,
          'banner_image_id', v_item.ride_model_banner_image_id,
          'card_image_url', v_item.ride_model_card_image_url,
          'card_image_id', v_item.ride_model_card_image_id
        );
      ELSIF v_item.item_type = 'photo' THEN
        v_item_data := jsonb_build_object(
          'entity_id', v_item.photo_entity_id,
          'entity_type', v_item.photo_entity_type,
          'title', v_item.photo_title,
          'photo_submission_id', v_item.photo_submission_id
        );
      ELSE
        RAISE EXCEPTION 'Unsupported item_type: %', v_item.item_type;
      END IF;

      -- ======================================================================
      -- Resolve temp refs and update v_item_data with actual entity IDs
      -- ======================================================================
      v_resolved_refs := resolve_temp_refs_for_item(v_item.id, p_submission_id);
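      -- Editorial note: the lookups below assume v_resolved_refs is a flat
      -- JSONB object keyed by referenced item type, e.g.
      --   {"operator": "<uuid>", "park": "<uuid>", "manufacturer": "<uuid>"}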

      IF v_resolved_refs IS NOT NULL AND jsonb_typeof(v_resolved_refs) = 'object' THEN
        -- Replace NULL foreign keys with resolved entity IDs
        -- For parks: operator_id, property_owner_id
        IF v_item.item_type = 'park' THEN
          IF v_resolved_refs ? 'operator' AND (v_item_data->>'operator_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('operator_id', v_resolved_refs->>'operator');
            RAISE NOTICE 'Resolved park.operator_id → %', v_resolved_refs->>'operator';
          END IF;
          IF v_resolved_refs ? 'property_owner' AND (v_item_data->>'property_owner_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('property_owner_id', v_resolved_refs->>'property_owner');
            RAISE NOTICE 'Resolved park.property_owner_id → %', v_resolved_refs->>'property_owner';
          END IF;
        END IF;

        -- For rides: park_id, manufacturer_id, ride_model_id
        IF v_item.item_type = 'ride' THEN
          IF v_resolved_refs ? 'park' AND (v_item_data->>'park_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('park_id', v_resolved_refs->>'park');
            RAISE NOTICE 'Resolved ride.park_id → %', v_resolved_refs->>'park';
          END IF;
          IF v_resolved_refs ? 'manufacturer' AND (v_item_data->>'manufacturer_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('manufacturer_id', v_resolved_refs->>'manufacturer');
            RAISE NOTICE 'Resolved ride.manufacturer_id → %', v_resolved_refs->>'manufacturer';
          END IF;
          IF v_resolved_refs ? 'ride_model' AND (v_item_data->>'ride_model_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('ride_model_id', v_resolved_refs->>'ride_model');
            RAISE NOTICE 'Resolved ride.ride_model_id → %', v_resolved_refs->>'ride_model';
          END IF;
        END IF;

        -- For ride_models: manufacturer_id
        IF v_item.item_type = 'ride_model' THEN
          IF v_resolved_refs ? 'manufacturer' AND (v_item_data->>'manufacturer_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('manufacturer_id', v_resolved_refs->>'manufacturer');
            RAISE NOTICE 'Resolved ride_model.manufacturer_id → %', v_resolved_refs->>'manufacturer';
          END IF;
        END IF;
      END IF;

      -- Execute action based on action_type and item_type
      IF v_item.action_type = 'create' THEN
        IF v_item.item_type = 'photo' THEN
          -- Insert all photo_submission_items as photos atomically
          INSERT INTO photos (
            entity_id, entity_type, cloudflare_image_id, cloudflare_image_url,
            title, caption, date_taken, date_taken_precision, order_index,
            submission_id, submitted_by, approved_by, approved_at
          )
          SELECT
            (v_item_data->>'entity_id')::UUID,
            v_item_data->>'entity_type',
            psi.cloudflare_image_id,
            psi.cloudflare_image_url,
            psi.title,
            psi.caption,
            psi.date_taken,
            psi.date_taken_precision,
            psi.order_index,
            p_submission_id,
            p_submitter_id,
            p_moderator_id,
            NOW()
          FROM photo_submission_items psi
          WHERE psi.photo_submission_id = (v_item_data->>'photo_submission_id')::UUID;

          -- Return the photo_submission entity_id as v_entity_id
          v_entity_id := (v_item_data->>'entity_id')::UUID;

          RAISE NOTICE '[%] Created % photos for entity % (type=%)',
            COALESCE(p_request_id, 'NO_REQUEST_ID'),
            (SELECT COUNT(*) FROM photo_submission_items WHERE photo_submission_id = (v_item_data->>'photo_submission_id')::UUID),
            v_entity_id,
            v_item_data->>'entity_type';
        ELSE
          -- Standard entity creation - FIXED: Pass v_item_data instead of v_item.id
          v_entity_id := create_entity_from_submission(
            v_item.item_type,
            v_item_data,
            p_submitter_id
          );
        END IF;
      ELSIF v_item.action_type = 'update' THEN
        v_entity_id := update_entity_from_submission(
          v_item.item_type,
          v_item_data,
          v_item.target_entity_id,
          p_submitter_id
        );
      ELSIF v_item.action_type = 'delete' THEN
        PERFORM delete_entity_from_submission(
          v_item.item_type,
          v_item.target_entity_id,
          p_submitter_id
        );
        v_entity_id := v_item.target_entity_id;
      ELSE
        RAISE EXCEPTION 'Unknown action_type: %', v_item.action_type;
      END IF;

      -- Update submission_item to approved status
      UPDATE submission_items
      SET
        status = 'approved',
        approved_entity_id = v_entity_id,
        updated_at = NOW()
      WHERE id = v_item.id;

      -- Track success
      v_approval_results := array_append(
        v_approval_results,
        jsonb_build_object(
          'itemId', v_item.id,
          'entityId', v_entity_id,
          'itemType', v_item.item_type,
          'actionType', v_item.action_type,
          'success', true
        )
      );

      v_some_approved := TRUE;

      RAISE NOTICE '[%] Approved item % (type=%, action=%, entityId=%)',
        COALESCE(p_request_id, 'NO_REQUEST_ID'),
        v_item.id,
        v_item.item_type,
        v_item.action_type,
        v_entity_id;

    EXCEPTION WHEN OTHERS THEN
      -- Log error but continue processing remaining items
      RAISE WARNING '[%] Item % failed: % (SQLSTATE: %)',
        COALESCE(p_request_id, 'NO_REQUEST_ID'),
        v_item.id,
        SQLERRM,
        SQLSTATE;

      -- Update submission_item to rejected status
      UPDATE submission_items
      SET
        status = 'rejected',
        rejection_reason = SQLERRM,
        updated_at = NOW()
      WHERE id = v_item.id;

      -- Track failure
      v_approval_results := array_append(
        v_approval_results,
        jsonb_build_object(
          'itemId', v_item.id,
          'itemType', v_item.item_type,
          'actionType', v_item.action_type,
          'success', false,
          'error', SQLERRM
        )
      );

      v_all_approved := FALSE;
    END;
  END LOOP;

  -- ========================================================================
  -- STEP 4: Determine final submission status
  -- ========================================================================
  v_final_status := CASE
    WHEN v_all_approved THEN 'approved'
    WHEN v_some_approved THEN 'partially_approved'
    ELSE 'rejected'
  END;

  -- ========================================================================
  -- STEP 5: Update submission status
  -- ========================================================================
  UPDATE content_submissions
  SET
    status = v_final_status,
    reviewer_id = p_moderator_id,
    reviewed_at = NOW(),
    assigned_to = NULL,
    locked_until = NULL
  WHERE id = p_submission_id;

  -- ========================================================================
  -- STEP 6: Log metrics
  -- ========================================================================
  INSERT INTO approval_transaction_metrics (
    submission_id,
    moderator_id,
    submitter_id,
    items_count,
    duration_ms,
    success,
    request_id
  ) VALUES (
    p_submission_id,
    p_moderator_id,
    p_submitter_id,
    v_items_processed,
    EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000,
    v_all_approved,
    p_request_id
  );

  -- ========================================================================
  -- STEP 7: Build result
  -- ========================================================================
  v_result := jsonb_build_object(
    'success', v_all_approved,
    'submissionId', p_submission_id,
    'finalStatus', v_final_status,
    'itemsProcessed', v_items_processed,
    'results', v_approval_results,
    'durationMs', EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000
  );

  -- Log span completion
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "transaction_complete", "timestamp": %, "attributes": {"items_processed": %, "final_status": "%"}}',
      p_trace_id,
      v_span_id,
      EXTRACT(EPOCH FROM clock_timestamp()) * 1000,
      v_items_processed,
      v_final_status;
  END IF;

  RAISE NOTICE '[%] Transaction complete: % items processed, status=%',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    v_items_processed,
    v_final_status;

  RETURN v_result;
END;
$$;
@@ -1,227 +0,0 @@
-- Add distributed tracing support to RPC functions
-- Adds trace_id and parent_span_id parameters for span context propagation
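
-- Illustrative usage (editor's sketch, not part of the original migration):
-- a caller could pass trace context alongside the existing parameters, e.g.
--
--   SELECT process_approval_transaction(
--     p_submission_id  => '00000000-0000-0000-0000-000000000001'::uuid,  -- placeholder
--     p_item_ids       => ARRAY['00000000-0000-0000-0000-000000000002']::uuid[],
--     p_moderator_id   => '00000000-0000-0000-0000-000000000003'::uuid,
--     p_submitter_id   => '00000000-0000-0000-0000-000000000004'::uuid,
--     p_request_id     => 'req-example',
--     p_trace_id       => 'trace-example',
--     p_parent_span_id => 'span-example'
--   );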

-- Update process_approval_transaction to accept trace context
CREATE OR REPLACE FUNCTION process_approval_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_submitter_id UUID,
  p_request_id TEXT DEFAULT NULL,
  p_trace_id TEXT DEFAULT NULL,
  p_parent_span_id TEXT DEFAULT NULL
)
RETURNS jsonb
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_item submission_items;
  v_approved_count INTEGER := 0;
  v_total_items INTEGER;
  v_new_status TEXT;
  v_entity_id UUID;
  v_all_items_processed BOOLEAN;
BEGIN
  -- Log span start with trace context
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_approval_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
      gen_random_uuid()::text,
      p_trace_id,
      p_parent_span_id,
      extract(epoch from clock_timestamp()) * 1000,
      p_submission_id,
      array_length(p_item_ids, 1);
  END IF;

  -- Get total items for this submission
  SELECT COUNT(*) INTO v_total_items
  FROM submission_items
  WHERE submission_id = p_submission_id;

  -- Process each item
  FOREACH v_item IN ARRAY (
    SELECT ARRAY_AGG(si ORDER BY si.order_index)
    FROM submission_items si
    WHERE si.id = ANY(p_item_ids)
  )
  LOOP
    -- Log item processing span event
    IF p_trace_id IS NOT NULL THEN
      RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "process_item", "timestamp": %, "attributes": {"item.id": "%", "item.type": "%", "item.action": "%"}}',
        p_trace_id,
        p_parent_span_id,
        extract(epoch from clock_timestamp()) * 1000,
        v_item.id,
        v_item.item_type,
        v_item.action;
    END IF;

    -- Create or update entity based on item type
    IF v_item.item_type = 'park' THEN
      IF v_item.action = 'create' THEN
        -- Log entity creation
        IF p_trace_id IS NOT NULL THEN
          RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "name": "create_entity_park", "timestamp": %, "attributes": {"action": "create"}}',
            p_trace_id,
            extract(epoch from clock_timestamp()) * 1000;
        END IF;

        v_entity_id := create_entity_from_submission('park', v_item.id, p_submitter_id, p_request_id);
      ELSIF v_item.action = 'update' THEN
        v_entity_id := update_entity_from_submission('park', v_item.id, v_item.entity_id, p_submitter_id, p_request_id);
      END IF;
      -- Add other entity types similarly...
    END IF;

    -- Update item status
    UPDATE submission_items
    SET
      status = 'approved',
      processed_at = NOW(),
      processed_by = p_moderator_id,
      entity_id = v_entity_id
    WHERE id = v_item.id;

    v_approved_count := v_approved_count + 1;
  END LOOP;

  -- Determine final submission status
  SELECT
    COUNT(*) = array_length(p_item_ids, 1)
  INTO v_all_items_processed
  FROM submission_items
  WHERE submission_id = p_submission_id
    AND status IN ('approved', 'rejected');

  IF v_all_items_processed THEN
    v_new_status := 'approved';
  ELSE
    v_new_status := 'partially_approved';
  END IF;

  -- Update submission status
  UPDATE content_submissions
  SET
    status = v_new_status,
    processed_at = CASE WHEN v_new_status = 'approved' THEN NOW() ELSE processed_at END,
    assigned_to = NULL,
    lock_expires_at = NULL
  WHERE id = p_submission_id;

  -- Log completion
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "name": "transaction_complete", "timestamp": %, "attributes": {"items_processed": %, "new_status": "%"}}',
      p_trace_id,
      extract(epoch from clock_timestamp()) * 1000,
      v_approved_count,
      v_new_status;
  END IF;

  RETURN jsonb_build_object(
    'success', true,
    'status', v_new_status,
    'approved_count', v_approved_count,
    'total_items', v_total_items
  );
END;
$$;

-- Update process_rejection_transaction similarly
CREATE OR REPLACE FUNCTION process_rejection_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_rejection_reason TEXT,
  p_request_id TEXT DEFAULT NULL,
  p_trace_id TEXT DEFAULT NULL,
  p_parent_span_id TEXT DEFAULT NULL
)
RETURNS jsonb
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_rejected_count INTEGER := 0;
  v_total_items INTEGER;
  v_new_status TEXT;
  v_all_items_processed BOOLEAN;
BEGIN
  -- Log span start
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_rejection_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
      gen_random_uuid()::text,
      p_trace_id,
      p_parent_span_id,
      extract(epoch from clock_timestamp()) * 1000,
      p_submission_id,
      array_length(p_item_ids, 1);
  END IF;

  -- Get total items
  SELECT COUNT(*) INTO v_total_items
  FROM submission_items
  WHERE submission_id = p_submission_id;

  -- Reject items
  UPDATE submission_items
  SET
    status = 'rejected',
    rejection_reason = p_rejection_reason,
    processed_at = NOW(),
    processed_by = p_moderator_id
  WHERE id = ANY(p_item_ids);

  GET DIAGNOSTICS v_rejected_count = ROW_COUNT;

  -- Check if all items processed
  SELECT
    COUNT(*) = (SELECT COUNT(*) FROM submission_items WHERE submission_id = p_submission_id)
  INTO v_all_items_processed
  FROM submission_items
  WHERE submission_id = p_submission_id
    AND status IN ('approved', 'rejected');

  IF v_all_items_processed THEN
    -- Check if any items were approved
    SELECT EXISTS(
      SELECT 1 FROM submission_items
      WHERE submission_id = p_submission_id AND status = 'approved'
    ) INTO v_all_items_processed;
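    -- Note (editorial): the query above reuses v_all_items_processed as an
    -- "any items were approved" flag; the CASE expression below reads it with
    -- that meaning.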

    v_new_status := CASE
      WHEN v_all_items_processed THEN 'partially_approved'
      ELSE 'rejected'
    END;
  ELSE
    v_new_status := 'partially_approved';
  END IF;

  -- Update submission
  UPDATE content_submissions
  SET
    status = v_new_status,
    processed_at = CASE WHEN v_new_status = 'rejected' THEN NOW() ELSE processed_at END,
    assigned_to = NULL,
    lock_expires_at = NULL
  WHERE id = p_submission_id;

  -- Log completion
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "name": "rejection_complete", "timestamp": %, "attributes": {"items_rejected": %, "new_status": "%"}}',
      p_trace_id,
      extract(epoch from clock_timestamp()) * 1000,
      v_rejected_count,
      v_new_status;
  END IF;

  RETURN jsonb_build_object(
    'success', true,
    'status', v_new_status,
    'rejected_count', v_rejected_count,
    'total_items', v_total_items
  );
END;
$$;
@@ -1,444 +0,0 @@
-- ============================================================================
-- Phase 8: Fix RLS Policies, SQL Schema, and Rate Limiting
-- ============================================================================
-- This migration addresses critical test failures:
-- 1. Adds missing INSERT policies for submission tables (26 tests)
-- 2. Fixes ride_type → category column references (2 tests)
-- ============================================================================

-- ============================================================================
-- PART 1: Add INSERT Policies for Submission Tables
-- ============================================================================

-- Park Submissions: Users can insert their own submissions
CREATE POLICY "park_submissions_insert_own"
  ON public.park_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    EXISTS (
      SELECT 1 FROM content_submissions cs
      WHERE cs.id = submission_id
        AND cs.user_id = auth.uid()
    )
  );

-- Park Submissions: Moderators can insert any submissions
CREATE POLICY "park_submissions_insert_moderators"
  ON public.park_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    is_moderator(auth.uid())
  );

-- Ride Submissions: Users can insert their own submissions
CREATE POLICY "ride_submissions_insert_own"
  ON public.ride_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    EXISTS (
      SELECT 1 FROM content_submissions cs
      WHERE cs.id = submission_id
        AND cs.user_id = auth.uid()
    )
  );

-- Ride Submissions: Moderators can insert any submissions
CREATE POLICY "ride_submissions_insert_moderators"
  ON public.ride_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    is_moderator(auth.uid())
  );

-- Company Submissions: Users can insert their own submissions
CREATE POLICY "company_submissions_insert_own"
  ON public.company_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    EXISTS (
      SELECT 1 FROM content_submissions cs
      WHERE cs.id = submission_id
        AND cs.user_id = auth.uid()
    )
  );

-- Company Submissions: Moderators can insert any submissions
CREATE POLICY "company_submissions_insert_moderators"
  ON public.company_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    is_moderator(auth.uid())
  );

-- Ride Model Submissions: Users can insert their own submissions
CREATE POLICY "ride_model_submissions_insert_own"
  ON public.ride_model_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    EXISTS (
      SELECT 1 FROM content_submissions cs
      WHERE cs.id = submission_id
        AND cs.user_id = auth.uid()
    )
  );

-- Ride Model Submissions: Moderators can insert any submissions
CREATE POLICY "ride_model_submissions_insert_moderators"
  ON public.ride_model_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    is_moderator(auth.uid())
  );

-- Photo Submissions: Users can insert their own submissions
CREATE POLICY "photo_submissions_insert_own"
  ON public.photo_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    EXISTS (
      SELECT 1 FROM content_submissions cs
      WHERE cs.id = submission_id
        AND cs.user_id = auth.uid()
    )
  );

-- Photo Submissions: Moderators can insert any submissions
CREATE POLICY "photo_submissions_insert_moderators"
  ON public.photo_submissions
  FOR INSERT
  TO authenticated
  WITH CHECK (
    is_moderator(auth.uid())
  );
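
-- Illustrative check (editor's sketch, not part of the original migration):
-- the policies created above can be listed from the pg_policies catalog view:
--   SELECT policyname, tablename, cmd
--   FROM pg_policies
--   WHERE schemaname = 'public'
--     AND policyname LIKE '%_insert_%';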

-- ============================================================================
-- PART 2: Fix SQL Column Names (ride_type → category)
-- ============================================================================
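-- Illustrative check (editor's sketch, not part of the original migration):
-- the ride_type → category rename can be confirmed via information_schema:
--   SELECT table_name, column_name
--   FROM information_schema.columns
--   WHERE table_schema = 'public'
--     AND table_name IN ('ride_submissions', 'ride_model_submissions')
--     AND column_name IN ('ride_type', 'category');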

DROP FUNCTION IF EXISTS process_approval_transaction(UUID, UUID[], UUID, UUID, TEXT, TEXT, TEXT);

CREATE OR REPLACE FUNCTION process_approval_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_submitter_id UUID,
  p_request_id TEXT DEFAULT NULL,
  p_trace_id TEXT DEFAULT NULL,
  p_parent_span_id TEXT DEFAULT NULL
)
RETURNS JSONB
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_start_time TIMESTAMPTZ;
  v_result JSONB;
  v_item RECORD;
  v_item_data JSONB;
  v_resolved_refs JSONB;
  v_entity_id UUID;
  v_approval_results JSONB[] := ARRAY[]::JSONB[];
  v_final_status TEXT;
  v_all_approved BOOLEAN := TRUE;
  v_some_approved BOOLEAN := FALSE;
  v_items_processed INTEGER := 0;
  v_span_id TEXT;
BEGIN
  v_start_time := clock_timestamp();
  v_span_id := gen_random_uuid()::text;

  -- Log span start with trace context
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_approval_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
      v_span_id,
      p_trace_id,
      p_parent_span_id,
      EXTRACT(EPOCH FROM v_start_time) * 1000,
      p_submission_id,
      array_length(p_item_ids, 1);
  END IF;

  RAISE NOTICE '[%] Starting atomic approval transaction for submission %',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    p_submission_id;

  -- ========================================================================
  -- STEP 1: Set session variables (transaction-scoped with is_local=true)
  -- ========================================================================
  PERFORM set_config('app.current_user_id', p_submitter_id::text, true);
  PERFORM set_config('app.submission_id', p_submission_id::text, true);
  PERFORM set_config('app.moderator_id', p_moderator_id::text, true);

  -- ========================================================================
  -- STEP 2: Validate submission ownership and lock status
  -- ========================================================================
  IF NOT EXISTS (
    SELECT 1 FROM content_submissions
    WHERE id = p_submission_id
      AND (assigned_to = p_moderator_id OR assigned_to IS NULL)
      AND status IN ('pending', 'partially_approved')
  ) THEN
    RAISE EXCEPTION 'Submission not found, locked by another moderator, or already processed'
      USING ERRCODE = '42501';
  END IF;

  -- ========================================================================
  -- STEP 3: Process each item sequentially within this transaction
  -- ========================================================================
  FOR v_item IN
    SELECT
      si.*,
      ps.name as park_name,
      ps.slug as park_slug,
      ps.description as park_description,
      ps.park_type,
      ps.status as park_status,
      ps.location_id,
      ps.operator_id,
      ps.property_owner_id,
      ps.opening_date as park_opening_date,
      ps.closing_date as park_closing_date,
      ps.opening_date_precision as park_opening_date_precision,
      ps.closing_date_precision as park_closing_date_precision,
      ps.website_url as park_website_url,
      ps.phone as park_phone,
      ps.email as park_email,
      ps.banner_image_url as park_banner_image_url,
      ps.banner_image_id as park_banner_image_id,
      ps.card_image_url as park_card_image_url,
      ps.card_image_id as park_card_image_id,
      rs.name as ride_name,
      rs.slug as ride_slug,
      rs.park_id as ride_park_id,
      rs.category as ride_category,
      rs.status as ride_status,
      rs.manufacturer_id,
      rs.ride_model_id,
      rs.opening_date as ride_opening_date,
      rs.closing_date as ride_closing_date,
      rs.opening_date_precision as ride_opening_date_precision,
      rs.closing_date_precision as ride_closing_date_precision,
      rs.description as ride_description,
      rs.banner_image_url as ride_banner_image_url,
      rs.banner_image_id as ride_banner_image_id,
      rs.card_image_url as ride_card_image_url,
      rs.card_image_id as ride_card_image_id,
      cs.name as company_name,
      cs.slug as company_slug,
      cs.description as company_description,
      cs.website_url as company_website_url,
      cs.founded_year,
      cs.banner_image_url as company_banner_image_url,
      cs.banner_image_id as company_banner_image_id,
      cs.card_image_url as company_card_image_url,
      cs.card_image_id as company_card_image_id,
      rms.name as ride_model_name,
      rms.slug as ride_model_slug,
      rms.manufacturer_id as ride_model_manufacturer_id,
      rms.category as ride_model_category,
      rms.description as ride_model_description,
      rms.banner_image_url as ride_model_banner_image_url,
      rms.banner_image_id as ride_model_banner_image_id,
      rms.card_image_url as ride_model_card_image_url,
      rms.card_image_id as ride_model_card_image_id,
      phs.entity_id as photo_entity_id,
      phs.entity_type as photo_entity_type,
      phs.title as photo_title
    FROM submission_items si
    LEFT JOIN park_submissions ps ON si.park_submission_id = ps.id
    LEFT JOIN ride_submissions rs ON si.ride_submission_id = rs.id
    LEFT JOIN company_submissions cs ON si.company_submission_id = cs.id
    LEFT JOIN ride_model_submissions rms ON si.ride_model_submission_id = rms.id
    LEFT JOIN photo_submissions phs ON si.photo_submission_id = phs.id
    WHERE si.id = ANY(p_item_ids)
    ORDER BY si.order_index, si.created_at
  LOOP
    BEGIN
      v_items_processed := v_items_processed + 1;

      -- Log item processing span event
      IF p_trace_id IS NOT NULL THEN
        RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "process_item", "timestamp": %, "attributes": {"item.id": "%", "item.type": "%", "item.action": "%"}}',
          p_trace_id,
          v_span_id,
          EXTRACT(EPOCH FROM clock_timestamp()) * 1000,
          v_item.id,
          v_item.item_type,
          v_item.action_type;
      END IF;

      -- Build item data based on entity type
      IF v_item.item_type = 'park' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.park_name,
          'slug', v_item.park_slug,
          'description', v_item.park_description,
          'park_type', v_item.park_type,
          'status', v_item.park_status,
          'location_id', v_item.location_id,
          'operator_id', v_item.operator_id,
          'property_owner_id', v_item.property_owner_id,
          'opening_date', v_item.park_opening_date,
          'closing_date', v_item.park_closing_date,
          'opening_date_precision', v_item.park_opening_date_precision,
          'closing_date_precision', v_item.park_closing_date_precision,
          'website_url', v_item.park_website_url,
          'phone', v_item.park_phone,
          'email', v_item.park_email,
          'banner_image_url', v_item.park_banner_image_url,
          'banner_image_id', v_item.park_banner_image_id,
          'card_image_url', v_item.park_card_image_url,
          'card_image_id', v_item.park_card_image_id
        );
      ELSIF v_item.item_type = 'ride' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.ride_name,
          'slug', v_item.ride_slug,
          'park_id', v_item.ride_park_id,
          'category', v_item.ride_category,
          'status', v_item.ride_status,
          'manufacturer_id', v_item.manufacturer_id,
          'ride_model_id', v_item.ride_model_id,
          'opening_date', v_item.ride_opening_date,
          'closing_date', v_item.ride_closing_date,
          'opening_date_precision', v_item.ride_opening_date_precision,
          'closing_date_precision', v_item.ride_closing_date_precision,
          'description', v_item.ride_description,
          'banner_image_url', v_item.ride_banner_image_url,
          'banner_image_id', v_item.ride_banner_image_id,
          'card_image_url', v_item.ride_card_image_url,
          'card_image_id', v_item.ride_card_image_id
        );
      ELSIF v_item.item_type = 'company' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.company_name,
          'slug', v_item.company_slug,
          'description', v_item.company_description,
          'website_url', v_item.company_website_url,
          'founded_year', v_item.founded_year,
          'banner_image_url', v_item.company_banner_image_url,
          'banner_image_id', v_item.company_banner_image_id,
          'card_image_url', v_item.company_card_image_url,
          'card_image_id', v_item.company_card_image_id
        );
      ELSIF v_item.item_type = 'ride_model' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.ride_model_name,
          'slug', v_item.ride_model_slug,
          'manufacturer_id', v_item.ride_model_manufacturer_id,
          'category', v_item.ride_model_category,
          'description', v_item.ride_model_description,
          'banner_image_url', v_item.ride_model_banner_image_url,
          'banner_image_id', v_item.ride_model_banner_image_id,
          'card_image_url', v_item.ride_model_card_image_url,
          'card_image_id', v_item.ride_model_card_image_id
        );
      ELSIF v_item.item_type = 'photo' THEN
        v_item_data := jsonb_build_object(
          'entity_id', v_item.photo_entity_id,
          'entity_type', v_item.photo_entity_type,
          'title', v_item.photo_title
        );
      ELSE
        RAISE EXCEPTION 'Unknown item type: %', v_item.item_type;
      END IF;

      -- Resolve temporary references
      v_resolved_refs := resolve_temp_references(v_item_data, p_submission_id);

      -- Perform the action
      IF v_item.action_type = 'create' THEN
        v_entity_id := perform_create(v_item.item_type, v_resolved_refs, p_submitter_id, p_submission_id);
      ELSIF v_item.action_type = 'update' THEN
        IF v_item.entity_id IS NULL THEN
          RAISE EXCEPTION 'Update action requires entity_id';
        END IF;
        PERFORM perform_update(v_item.item_type, v_item.entity_id, v_resolved_refs, p_submitter_id, p_submission_id);
        v_entity_id := v_item.entity_id;
      ELSE
        RAISE EXCEPTION 'Unknown action type: %', v_item.action_type;
      END IF;

      -- Update submission_item with approved entity
      UPDATE submission_items
      SET approved_entity_id = v_entity_id,
          approved_at = now(),
          status = 'approved'
      WHERE id = v_item.id;

      -- Track approval results
      v_approval_results := array_append(v_approval_results, jsonb_build_object(
        'item_id', v_item.id,
        'status', 'approved',
        'entity_id', v_entity_id
      ));

      v_some_approved := TRUE;

    EXCEPTION
      WHEN OTHERS THEN
        -- Log the error
        RAISE WARNING 'Failed to process item %: % - %', v_item.id, SQLERRM, SQLSTATE;

        -- Track failure
        v_approval_results := array_append(v_approval_results, jsonb_build_object(
          'item_id', v_item.id,
          'status', 'failed',
          'error', SQLERRM
        ));

        v_all_approved := FALSE;

        -- Re-raise to rollback transaction
        RAISE;
    END;
  END LOOP;

  -- ========================================================================
  -- STEP 4: Update submission status
  -- ========================================================================
  IF v_all_approved THEN
    v_final_status := 'approved';
  ELSIF v_some_approved THEN
    v_final_status := 'partially_approved';
  ELSE
    v_final_status := 'rejected';
  END IF;

  UPDATE content_submissions
  SET status = v_final_status,
      resolved_at = CASE WHEN v_all_approved THEN now() ELSE NULL END,
      reviewer_id = p_moderator_id,
      reviewed_at = now()
  WHERE id = p_submission_id;

  -- Log span end
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "name": "process_approval_transaction_rpc", "kind": "INTERNAL", "endTime": %, "attributes": {"items_processed": %, "final_status": "%"}}',
      v_span_id,
      p_trace_id,
      EXTRACT(EPOCH FROM clock_timestamp()) * 1000,
      v_items_processed,
      v_final_status;
  END IF;

  -- Return result
  RETURN jsonb_build_object(
    'success', v_all_approved,
    'status', v_final_status,
    'items_processed', v_items_processed,
    'results', v_approval_results,
    'duration_ms', EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000
  );
END;
$$;
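
-- Editor's note: per the RETURN above, callers receive a JSONB document of the
-- shape {"success": ..., "status": ..., "items_processed": ...,
-- "results": [...], "duration_ms": ...}.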
@@ -1,474 +0,0 @@
-- ============================================================================
-- CRITICAL FIX: Restore complete approval pipeline with tracing
-- ============================================================================
-- This fixes the broken RPC that deleted all non-park entity handling
-- and restores full functionality for all entity types
-- ============================================================================

DROP FUNCTION IF EXISTS process_approval_transaction(UUID, UUID[], UUID, UUID, TEXT, TEXT, TEXT);

CREATE OR REPLACE FUNCTION process_approval_transaction(
  p_submission_id UUID,
  p_item_ids UUID[],
  p_moderator_id UUID,
  p_submitter_id UUID,
  p_request_id TEXT DEFAULT NULL,
  p_trace_id TEXT DEFAULT NULL,
  p_parent_span_id TEXT DEFAULT NULL
)
RETURNS JSONB
LANGUAGE plpgsql
SECURITY DEFINER
SET search_path = public
AS $$
DECLARE
  v_start_time TIMESTAMPTZ;
  v_result JSONB;
  v_item RECORD;
  v_item_data JSONB;
  v_resolved_refs JSONB;
  v_entity_id UUID;
  v_approval_results JSONB[] := ARRAY[]::JSONB[];
  v_final_status TEXT;
  v_all_approved BOOLEAN := TRUE;
  v_some_approved BOOLEAN := FALSE;
  v_items_processed INTEGER := 0;
  v_span_id TEXT;
BEGIN
  v_start_time := clock_timestamp();
  v_span_id := gen_random_uuid()::text;

  -- Log span start with trace context
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN: {"spanId": "%", "traceId": "%", "parentSpanId": "%", "name": "process_approval_transaction_rpc", "kind": "INTERNAL", "startTime": %, "attributes": {"submission.id": "%", "item_count": %}}',
      v_span_id,
      p_trace_id,
      p_parent_span_id,
      EXTRACT(EPOCH FROM v_start_time) * 1000,
      p_submission_id,
      array_length(p_item_ids, 1);
  END IF;

  RAISE NOTICE '[%] Starting atomic approval transaction for submission %',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    p_submission_id;

  -- ========================================================================
  -- STEP 1: Set session variables (transaction-scoped with is_local=true)
  -- ========================================================================
  PERFORM set_config('app.current_user_id', p_submitter_id::text, true);
  PERFORM set_config('app.submission_id', p_submission_id::text, true);
  PERFORM set_config('app.moderator_id', p_moderator_id::text, true);

  -- ========================================================================
  -- STEP 2: Validate submission ownership and lock status
  -- ========================================================================
  IF NOT EXISTS (
    SELECT 1 FROM content_submissions
    WHERE id = p_submission_id
      AND (assigned_to = p_moderator_id OR assigned_to IS NULL)
      AND status IN ('pending', 'partially_approved')
  ) THEN
    RAISE EXCEPTION 'Submission not found, locked by another moderator, or already processed'
      USING ERRCODE = '42501';
  END IF;

  -- ========================================================================
  -- STEP 3: Process each item sequentially within this transaction
  -- ========================================================================
  FOR v_item IN
    SELECT
      si.*,
      ps.name as park_name,
      ps.slug as park_slug,
      ps.description as park_description,
      ps.park_type,
      ps.status as park_status,
      ps.location_id,
      ps.operator_id,
      ps.property_owner_id,
      ps.opening_date as park_opening_date,
      ps.closing_date as park_closing_date,
      ps.opening_date_precision as park_opening_date_precision,
      ps.closing_date_precision as park_closing_date_precision,
      ps.website_url as park_website_url,
      ps.phone as park_phone,
      ps.email as park_email,
      ps.banner_image_url as park_banner_image_url,
      ps.banner_image_id as park_banner_image_id,
      ps.card_image_url as park_card_image_url,
      ps.card_image_id as park_card_image_id,
      rs.name as ride_name,
      rs.slug as ride_slug,
      rs.park_id as ride_park_id,
      rs.ride_type,
      rs.status as ride_status,
      rs.manufacturer_id,
      rs.ride_model_id,
      rs.opening_date as ride_opening_date,
      rs.closing_date as ride_closing_date,
      rs.opening_date_precision as ride_opening_date_precision,
      rs.closing_date_precision as ride_closing_date_precision,
      rs.description as ride_description,
      rs.banner_image_url as ride_banner_image_url,
      rs.banner_image_id as ride_banner_image_id,
      rs.card_image_url as ride_card_image_url,
      rs.card_image_id as ride_card_image_id,
      cs.name as company_name,
      cs.slug as company_slug,
      cs.description as company_description,
      cs.website_url as company_website_url,
      cs.founded_year,
      cs.banner_image_url as company_banner_image_url,
      cs.banner_image_id as company_banner_image_id,
      cs.card_image_url as company_card_image_url,
      cs.card_image_id as company_card_image_id,
      rms.name as ride_model_name,
      rms.slug as ride_model_slug,
      rms.manufacturer_id as ride_model_manufacturer_id,
      rms.ride_type as ride_model_ride_type,
      rms.description as ride_model_description,
      rms.banner_image_url as ride_model_banner_image_url,
      rms.banner_image_id as ride_model_banner_image_id,
      rms.card_image_url as ride_model_card_image_url,
      rms.card_image_id as ride_model_card_image_id,
      phs.entity_id as photo_entity_id,
      phs.entity_type as photo_entity_type,
      phs.title as photo_title
    FROM submission_items si
    LEFT JOIN park_submissions ps ON si.park_submission_id = ps.id
    LEFT JOIN ride_submissions rs ON si.ride_submission_id = rs.id
    LEFT JOIN company_submissions cs ON si.company_submission_id = cs.id
    LEFT JOIN ride_model_submissions rms ON si.ride_model_submission_id = rms.id
    LEFT JOIN photo_submissions phs ON si.photo_submission_id = phs.id
    WHERE si.id = ANY(p_item_ids)
    ORDER BY si.order_index, si.created_at
  LOOP
    BEGIN
      v_items_processed := v_items_processed + 1;

      -- Log item processing span event
      IF p_trace_id IS NOT NULL THEN
        RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "process_item", "timestamp": %, "attributes": {"item.id": "%", "item.type": "%", "item.action": "%"}}',
          p_trace_id,
          v_span_id,
          EXTRACT(EPOCH FROM clock_timestamp()) * 1000,
          v_item.id,
          v_item.item_type,
          v_item.action_type;
      END IF;

      -- Build item data based on entity type
      IF v_item.item_type = 'park' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.park_name,
          'slug', v_item.park_slug,
          'description', v_item.park_description,
          'park_type', v_item.park_type,
          'status', v_item.park_status,
          'location_id', v_item.location_id,
          'operator_id', v_item.operator_id,
          'property_owner_id', v_item.property_owner_id,
          'opening_date', v_item.park_opening_date,
          'closing_date', v_item.park_closing_date,
          'opening_date_precision', v_item.park_opening_date_precision,
          'closing_date_precision', v_item.park_closing_date_precision,
          'website_url', v_item.park_website_url,
          'phone', v_item.park_phone,
          'email', v_item.park_email,
          'banner_image_url', v_item.park_banner_image_url,
          'banner_image_id', v_item.park_banner_image_id,
          'card_image_url', v_item.park_card_image_url,
          'card_image_id', v_item.park_card_image_id
        );
      ELSIF v_item.item_type = 'ride' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.ride_name,
          'slug', v_item.ride_slug,
          'park_id', v_item.ride_park_id,
          'ride_type', v_item.ride_type,
          'status', v_item.ride_status,
          'manufacturer_id', v_item.manufacturer_id,
          'ride_model_id', v_item.ride_model_id,
          'opening_date', v_item.ride_opening_date,
          'closing_date', v_item.ride_closing_date,
          'opening_date_precision', v_item.ride_opening_date_precision,
          'closing_date_precision', v_item.ride_closing_date_precision,
          'description', v_item.ride_description,
          'banner_image_url', v_item.ride_banner_image_url,
          'banner_image_id', v_item.ride_banner_image_id,
          'card_image_url', v_item.ride_card_image_url,
          'card_image_id', v_item.ride_card_image_id
        );
      ELSIF v_item.item_type IN ('manufacturer', 'operator', 'property_owner', 'designer') THEN
        v_item_data := jsonb_build_object(
          'name', v_item.company_name,
          'slug', v_item.company_slug,
          'description', v_item.company_description,
          'website_url', v_item.company_website_url,
          'founded_year', v_item.founded_year,
          'banner_image_url', v_item.company_banner_image_url,
          'banner_image_id', v_item.company_banner_image_id,
          'card_image_url', v_item.company_card_image_url,
          'card_image_id', v_item.company_card_image_id
        );
      ELSIF v_item.item_type = 'ride_model' THEN
        v_item_data := jsonb_build_object(
          'name', v_item.ride_model_name,
          'slug', v_item.ride_model_slug,
          'manufacturer_id', v_item.ride_model_manufacturer_id,
          'ride_type', v_item.ride_model_ride_type,
          'description', v_item.ride_model_description,
          'banner_image_url', v_item.ride_model_banner_image_url,
          'banner_image_id', v_item.ride_model_banner_image_id,
          'card_image_url', v_item.ride_model_card_image_url,
          'card_image_id', v_item.ride_model_card_image_id
        );
      ELSIF v_item.item_type = 'photo' THEN
        v_item_data := jsonb_build_object(
          'entity_id', v_item.photo_entity_id,
          'entity_type', v_item.photo_entity_type,
          'title', v_item.photo_title,
          'photo_submission_id', v_item.photo_submission_id
        );
      ELSE
        RAISE EXCEPTION 'Unsupported item_type: %', v_item.item_type;
      END IF;

      -- ======================================================================
      -- Resolve temp refs and update v_item_data with actual entity IDs
      -- ======================================================================
      v_resolved_refs := resolve_temp_refs_for_item(v_item.id, p_submission_id);

      IF v_resolved_refs IS NOT NULL AND jsonb_typeof(v_resolved_refs) = 'object' THEN
        -- Replace NULL foreign keys with resolved entity IDs
        -- For parks: operator_id, property_owner_id
        IF v_item.item_type = 'park' THEN
          IF v_resolved_refs ? 'operator' AND (v_item_data->>'operator_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('operator_id', v_resolved_refs->>'operator');
            RAISE NOTICE 'Resolved park.operator_id → %', v_resolved_refs->>'operator';
          END IF;
          IF v_resolved_refs ? 'property_owner' AND (v_item_data->>'property_owner_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('property_owner_id', v_resolved_refs->>'property_owner');
            RAISE NOTICE 'Resolved park.property_owner_id → %', v_resolved_refs->>'property_owner';
          END IF;
        END IF;

        -- For rides: park_id, manufacturer_id, ride_model_id
        IF v_item.item_type = 'ride' THEN
          IF v_resolved_refs ? 'park' AND (v_item_data->>'park_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('park_id', v_resolved_refs->>'park');
            RAISE NOTICE 'Resolved ride.park_id → %', v_resolved_refs->>'park';
          END IF;
          IF v_resolved_refs ? 'manufacturer' AND (v_item_data->>'manufacturer_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('manufacturer_id', v_resolved_refs->>'manufacturer');
            RAISE NOTICE 'Resolved ride.manufacturer_id → %', v_resolved_refs->>'manufacturer';
          END IF;
          IF v_resolved_refs ? 'ride_model' AND (v_item_data->>'ride_model_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('ride_model_id', v_resolved_refs->>'ride_model');
            RAISE NOTICE 'Resolved ride.ride_model_id → %', v_resolved_refs->>'ride_model';
          END IF;
        END IF;

        -- For ride_models: manufacturer_id
        IF v_item.item_type = 'ride_model' THEN
          IF v_resolved_refs ? 'manufacturer' AND (v_item_data->>'manufacturer_id') IS NULL THEN
            v_item_data := v_item_data || jsonb_build_object('manufacturer_id', v_resolved_refs->>'manufacturer');
            RAISE NOTICE 'Resolved ride_model.manufacturer_id → %', v_resolved_refs->>'manufacturer';
          END IF;
        END IF;
      END IF;

      -- Execute action based on action_type and item_type
      IF v_item.action_type = 'create' THEN
        IF v_item.item_type = 'photo' THEN
          -- Insert all photo_submission_items as photos atomically
          INSERT INTO photos (
            entity_id, entity_type, cloudflare_image_id, cloudflare_image_url,
            title, caption, date_taken, date_taken_precision, order_index,
            submission_id, submitted_by, approved_by, approved_at
          )
          SELECT
            (v_item_data->>'entity_id')::UUID,
            v_item_data->>'entity_type',
            psi.cloudflare_image_id,
            psi.cloudflare_image_url,
            psi.title,
            psi.caption,
            psi.date_taken,
            psi.date_taken_precision,
            psi.order_index,
            p_submission_id,
            p_submitter_id,
            p_moderator_id,
            NOW()
          FROM photo_submission_items psi
          WHERE psi.photo_submission_id = (v_item_data->>'photo_submission_id')::UUID;

          -- Return the photo_submission entity_id as v_entity_id
          v_entity_id := (v_item_data->>'entity_id')::UUID;

          RAISE NOTICE '[%] Created % photos for entity % (type=%)',
            COALESCE(p_request_id, 'NO_REQUEST_ID'),
            (SELECT COUNT(*) FROM photo_submission_items WHERE photo_submission_id = (v_item_data->>'photo_submission_id')::UUID),
            v_entity_id,
            v_item_data->>'entity_type';
        ELSE
          -- Standard entity creation - FIXED: Pass v_item_data instead of v_item.id
          v_entity_id := create_entity_from_submission(
            v_item.item_type,
            v_item_data,
            p_submitter_id
          );
        END IF;
      ELSIF v_item.action_type = 'update' THEN
        v_entity_id := update_entity_from_submission(
          v_item.item_type,
          v_item_data,
          v_item.target_entity_id,
          p_submitter_id
        );
      ELSIF v_item.action_type = 'delete' THEN
        PERFORM delete_entity_from_submission(
          v_item.item_type,
          v_item.target_entity_id,
          p_submitter_id
        );
        v_entity_id := v_item.target_entity_id;
      ELSE
        RAISE EXCEPTION 'Unknown action_type: %', v_item.action_type;
      END IF;

      -- Update submission_item to approved status
      UPDATE submission_items
      SET
        status = 'approved',
        approved_entity_id = v_entity_id,
        updated_at = NOW()
      WHERE id = v_item.id;

      -- Track success
      v_approval_results := array_append(
        v_approval_results,
        jsonb_build_object(
          'itemId', v_item.id,
          'entityId', v_entity_id,
          'itemType', v_item.item_type,
          'actionType', v_item.action_type,
          'success', true
        )
      );

      v_some_approved := TRUE;

      RAISE NOTICE '[%] Approved item % (type=%, action=%, entityId=%)',
        COALESCE(p_request_id, 'NO_REQUEST_ID'),
        v_item.id,
        v_item.item_type,
        v_item.action_type,
        v_entity_id;

    EXCEPTION WHEN OTHERS THEN
      -- Log error but continue processing remaining items
      RAISE WARNING '[%] Item % failed: % (SQLSTATE: %)',
        COALESCE(p_request_id, 'NO_REQUEST_ID'),
        v_item.id,
        SQLERRM,
        SQLSTATE;

      -- Update submission_item to rejected status
      UPDATE submission_items
      SET
        status = 'rejected',
        rejection_reason = SQLERRM,
        updated_at = NOW()
      WHERE id = v_item.id;

      -- Track failure
      v_approval_results := array_append(
        v_approval_results,
        jsonb_build_object(
          'itemId', v_item.id,
          'itemType', v_item.item_type,
          'actionType', v_item.action_type,
          'success', false,
          'error', SQLERRM
        )
      );

      v_all_approved := FALSE;
    END;
  END LOOP;

  -- ========================================================================
  -- STEP 4: Determine final submission status
  -- ========================================================================
  v_final_status := CASE
    WHEN v_all_approved THEN 'approved'
    WHEN v_some_approved THEN 'partially_approved'
    ELSE 'rejected'
  END;

  -- ========================================================================
  -- STEP 5: Update submission status
  -- ========================================================================
  UPDATE content_submissions
  SET
    status = v_final_status,
    reviewer_id = p_moderator_id,
    reviewed_at = NOW(),
    assigned_to = NULL,
    locked_until = NULL
  WHERE id = p_submission_id;

  -- ========================================================================
  -- STEP 6: Log metrics
  -- ========================================================================
  INSERT INTO approval_transaction_metrics (
    submission_id,
    moderator_id,
    submitter_id,
    items_count,
    duration_ms,
    success,
    request_id
  ) VALUES (
    p_submission_id,
    p_moderator_id,
    p_submitter_id,
    v_items_processed,
    EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000,
    v_all_approved,
    p_request_id
  );

  -- ========================================================================
  -- STEP 7: Build result
  -- ========================================================================
  v_result := jsonb_build_object(
    'success', v_all_approved,
    'submissionId', p_submission_id,
    'finalStatus', v_final_status,
    'itemsProcessed', v_items_processed,
    'results', v_approval_results,
    'durationMs', EXTRACT(EPOCH FROM (clock_timestamp() - v_start_time)) * 1000
  );

  -- Log span completion
  IF p_trace_id IS NOT NULL THEN
    RAISE NOTICE 'SPAN_EVENT: {"traceId": "%", "parentSpanId": "%", "name": "transaction_complete", "timestamp": %, "attributes": {"items_processed": %, "final_status": "%"}}',
      p_trace_id,
      v_span_id,
      EXTRACT(EPOCH FROM clock_timestamp()) * 1000,
      v_items_processed,
      v_final_status;
  END IF;

  RAISE NOTICE '[%] Transaction complete: % items processed, status=%',
    COALESCE(p_request_id, 'NO_REQUEST_ID'),
    v_items_processed,
    v_final_status;

  RETURN v_result;
END;
$$;
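
-- A minimal invocation sketch for process_approval_transaction. The parameter
-- names are taken from the function body above; the exact signature (types,
-- defaults, argument order) is not shown in this hunk, so named notation and
-- placeholder UUIDs are used purely for illustration.
--
-- SELECT process_approval_transaction(
--   p_submission_id => '00000000-0000-0000-0000-000000000001'::uuid,
--   p_moderator_id  => '00000000-0000-0000-0000-000000000002'::uuid,
--   p_submitter_id  => '00000000-0000-0000-0000-000000000003'::uuid,
--   p_request_id    => 'req-example-1',
--   p_trace_id      => NULL
-- );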
@@ -1,245 +0,0 @@
# Integration Tests

This directory contains integration tests for the ThrillWiki submission pipeline and data integrity.

## Schema Validation Tests

**File**: `schema-validation.test.ts`

### Purpose

Automated tests that validate schema consistency across the entire submission pipeline:

- **Submission Tables**: Ensures submission tables match their corresponding main entity tables
- **Version Tables**: Validates version tables have all main table fields plus version metadata
- **Critical Fields**: Checks for known problematic fields (e.g., `ride_type` vs `category`)
- **Function Alignment**: Verifies critical database functions exist and are accessible

### Why This Matters

The submission pipeline depends on exact schema alignment between:
1. Main entity tables (`parks`, `rides`, `companies`, `ride_models`)
2. Submission tables (`park_submissions`, `ride_submissions`, etc.)
3. Version tables (`park_versions`, `ride_versions`, etc.)

**Without these tests**, schema mismatches can cause:
- ❌ Approval failures with cryptic "column does not exist" errors
- ❌ Data loss when fields are missing from submission tables
- ❌ Version history corruption when fields don't match
- ❌ Production incidents that are difficult to debug

**With these tests**, we catch issues:
- ✅ During development, before they reach production
- ✅ In CI/CD, preventing bad migrations from deploying
- ✅ Immediately after schema changes, with clear error messages

### Test Categories

#### 1. Entity Table Validation

Compares main entity tables with their submission counterparts:

```text
parks       ↔ park_submissions
rides       ↔ ride_submissions
companies   ↔ company_submissions
ride_models ↔ ride_model_submissions
```

**Checks** (the SQL sketch below shows the underlying idea):
- All fields from the main table exist in the submission table (except excluded metadata)
- Data types match exactly
- Required fields are marked NOT NULL in both
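
The core of each comparison can be reproduced by hand with a catalog query. A minimal sketch, assuming both tables live in the `public` schema; the real tests additionally skip the `EXCLUDED_FIELDS` listed in the test file:

```sql
-- Columns present in the main table but absent from the submission table.
-- An empty result means no fields are missing.
SELECT column_name
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'parks'
EXCEPT
SELECT column_name
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'park_submissions';
```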

#### 2. Version Table Validation

Ensures version tables have complete field coverage:

```text
parks       → park_versions
rides       → ride_versions
companies   → company_versions
ride_models → ride_model_versions
```

**Checks**:
- All main table fields exist (accounting for known name variations)
- Version metadata fields are present (`version_id`, `version_number`, etc.)
- Change tracking fields are properly defined

#### 3. Critical Field Validation

Tests specific known problem areas (a hand-run spot check follows the list below).

**Critical Test Cases**:
- ✅ `rides` table does NOT have `ride_type` (prevents "column does not exist" errors)
- ✅ `rides` table DOES have `category` as NOT NULL
- ✅ `ride_models` table has BOTH `category` and `ride_type`
- ✅ All entities have required base fields (`id`, `name`, `slug`, etc.)
- ✅ All submission tables have a `submission_id` foreign key
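
A minimal psql spot check for the first two cases (expected: the first query returns zero rows, the second returns one row with `is_nullable = 'NO'`):

```sql
-- rides must NOT have a ride_type column:
SELECT column_name
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'rides' AND column_name = 'ride_type';

-- rides must have category, and it must be NOT NULL:
SELECT column_name, is_nullable
FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'rides' AND column_name = 'category';
```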

#### 4. Function Alignment

Validates critical database functions:
- `create_entity_from_submission`
- `update_entity_from_submission`
- `process_approval_transaction`

#### 5. Field Name Variations

Documents and validates known column name differences (the sketch below shows one way to reconcile them in queries):

```text
ride_versions.height_requirement_cm ↔ rides.height_requirement
ride_versions.gforce_max            ↔ rides.max_g_force
ride_versions.inversions_count      ↔ rides.inversions
ride_versions.height_meters         ↔ rides.max_height_meters
ride_versions.drop_meters           ↔ rides.drop_height_meters
```
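
When version rows need to be read side by side with live rows, the variations can be bridged with plain column aliases. A sketch, assuming the mapping above is current (`is_current` is one of the version metadata fields):

```sql
-- Present ride_versions columns under the main rides table's names.
SELECT
  rv.height_requirement_cm AS height_requirement,
  rv.gforce_max            AS max_g_force,
  rv.inversions_count      AS inversions,
  rv.height_meters         AS max_height_meters,
  rv.drop_meters           AS drop_height_meters
FROM ride_versions rv
WHERE rv.is_current;
```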

### Running the Tests

**Run all schema validation tests:**
```bash
npm run test:schema
```

**Run a specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```

**Run in UI mode for debugging:**
```bash
npx playwright test schema-validation --ui
```

**Generate a detailed report:**
```bash
npx playwright test schema-validation --reporter=html
```

### Environment Setup

These tests require:
- the `SUPABASE_SERVICE_ROLE_KEY` environment variable
- access to the Supabase project database
- the Playwright test runner

**Example `.env.test`:**
```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```

### Expected Output

**✅ All passing (healthy schema):**
```
✓ parks: submission table matches main table schema (245ms)
✓ rides: submission table matches main table schema (198ms)
✓ companies: submission table matches main table schema (187ms)
✓ ride_models: submission table matches main table schema (203ms)
✓ park_versions: has all main table fields plus version metadata (256ms)
✓ ride_versions: has all main table fields plus version metadata (234ms)
✓ rides table does NOT have ride_type column (145ms)
✓ rides table DOES have category column (NOT NULL) (152ms)
```

**❌ Failure example (schema mismatch):**
```
✕ rides: submission table matches main table schema (203ms)

  Error: ride_submissions is missing fields: category

  Expected: 0
  Received: 1
```

### Continuous Integration

Add to your CI/CD pipeline:

```yaml
# .github/workflows/test.yml
- name: Run Schema Validation Tests
  run: npm run test:schema
  env:
    SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
```

This prevents schema mismatches from reaching production.

### When to Run

**Always run these tests:**
- ✅ After any database migration
- ✅ Before deploying submission pipeline changes
- ✅ After modifying entity schemas
- ✅ When adding new entity types
- ✅ In CI/CD for every pull request

**Especially critical after:**
- Adding or removing columns on entity tables
- Modifying data types
- Changing NOT NULL constraints
- Updating database functions

### Maintenance

**When adding new entity types:**
1. Add validation tests for the new entity
2. Add tests for the submission table
3. Add tests for the version table (if applicable)
4. Update this README

**When schema changes are intentional:**
1. Review the failing tests carefully
2. Update `EXCLUDED_FIELDS` or `VERSION_METADATA_FIELDS` if needed
3. Document any new field name variations in `normalizeColumnName()`
4. Update `docs/submission-pipeline/SCHEMA_REFERENCE.md`

### Debugging Failed Tests

**"Missing fields" error:**
1. Check whether the field was recently added to the main table
2. Verify the migration added it to the submission table too
3. Run a migration to add the missing field (see the sketch below)
4. Re-run the tests
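
For example (a sketch only; `ride_submissions` and `category` stand in for whatever the failing test reported, and the type must be copied from the main table's column):

```sql
-- Hypothetical repair migration: add the reported missing column, using the
-- same type and constraints as the main table's column.
ALTER TABLE ride_submissions
  ADD COLUMN IF NOT EXISTS category text;
```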

**"Type mismatch" error:**
1. Compare the data types in both tables
2. Check for an accidental type change in a migration
3. Fix the type inconsistency (see the sketch below)
4. Re-run the tests
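
A type fix is usually a single `ALTER TABLE`; the table, column, and target type below are placeholders:

```sql
-- Hypothetical repair migration: realign the submission table's column type
-- with the main table (here: integer -> numeric).
ALTER TABLE ride_submissions
  ALTER COLUMN max_speed_kmh TYPE numeric USING max_speed_kmh::numeric;
```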

**"Column does not exist" in production:**
1. Run the schema validation tests immediately
2. Identify which table is missing the field
3. Create an emergency migration to add the field
4. Deploy it with high priority

### Related Documentation

- [Schema Reference](../../docs/submission-pipeline/SCHEMA_REFERENCE.md) - Complete field mappings
- [Submission Pipeline](../../docs/submission-pipeline/README.md) - Pipeline overview
- [Versioning System](../../docs/versioning/README.md) - Version table details
- [Moderation Workflow](../../docs/moderation/README.md) - Approval process

---

## Other Integration Tests

### Moderation Security Tests

**File**: `moderation-security.test.ts`

Tests role validation, lock enforcement, and rate limiting in the moderation system.

**Run:**
```bash
npx playwright test moderation-security
```

---

## Contributing

When adding new integration tests:
1. Follow the existing test structure
2. Use descriptive test names
3. Add comments explaining what is being tested
4. Update this README
5. Ensure tests are idempotent (they can run multiple times)
6. Clean up test data after completion
@@ -1,545 +0,0 @@
import { test, expect } from '@playwright/test';
import { createClient } from '@supabase/supabase-js';

/**
 * Schema Validation Tests
 *
 * These tests validate that submission tables, version tables, and main entity tables
 * have consistent schemas to prevent field mismatches during the approval pipeline.
 *
 * Critical validations:
 * 1. Submission tables must have all fields from main tables (except auto-generated)
 * 2. Version tables must have all fields from main tables plus version metadata
 * 3. Critical functions must reference correct column names
 * 4. Required NOT NULL fields must be present in all tables
 */

const supabase = createClient(
  'https://ydvtmnrszybqnbcqbdcy.supabase.co',
  process.env.SUPABASE_SERVICE_ROLE_KEY || ''
);

interface ColumnDefinition {
  column_name: string;
  data_type: string;
  is_nullable: string;
  column_default: string | null;
}

interface TableSchema {
  [columnName: string]: ColumnDefinition;
}

// Fields that are expected to be different or missing in submission tables
const EXCLUDED_FIELDS = [
  'id', // Submission tables have their own ID
  'created_at', // Managed differently in submissions
  'updated_at', // Managed differently in submissions
  'view_count_all', // Calculated fields not in submissions
  'view_count_30d',
  'view_count_7d',
  'average_rating',
  'review_count',
  'installations_count', // Only for ride_models
  'is_test_data', // Test data flag
];

// Version-specific metadata fields (expected to be extra in version tables)
const VERSION_METADATA_FIELDS = [
  'version_id',
  'version_number',
  'change_type',
  'change_reason',
  'is_current',
  'created_by',
  'created_at',
  'submission_id',
  'is_test_data',
];

async function getTableSchema(tableName: string): Promise<TableSchema> {
  const { data, error } = await supabase
    .from('information_schema.columns' as any)
    .select('column_name, data_type, is_nullable, column_default')
    .eq('table_schema', 'public')
    .eq('table_name', tableName);

  if (error) throw error;

  const schema: TableSchema = {};
  data?.forEach((col: any) => {
    schema[col.column_name] = col;
  });

  return schema;
}

function normalizeColumnName(name: string): string {
  // Handle known version table variations
  const mapping: { [key: string]: string } = {
    height_requirement_cm: 'height_requirement',
    gforce_max: 'max_g_force',
    inversions_count: 'inversions',
    height_meters: 'max_height_meters',
    drop_meters: 'drop_height_meters',
  };

  return mapping[name] || name;
}

test.describe('Schema Validation - Entity Tables', () => {
  // The four entity tables share one comparison routine, so the test body is
  // generated per table pair instead of being copy-pasted four times. The
  // generated test titles are identical to the originals.
  const entityPairs: Array<[string, string]> = [
    ['parks', 'park_submissions'],
    ['rides', 'ride_submissions'],
    ['companies', 'company_submissions'],
    ['ride_models', 'ride_model_submissions'],
  ];

  for (const [mainTable, submissionTable] of entityPairs) {
    test(`${mainTable}: submission table matches main table schema`, async () => {
      const mainSchema = await getTableSchema(mainTable);
      const submissionSchema = await getTableSchema(submissionTable);

      const mismatches: string[] = [];
      const missingFields: string[] = [];

      // Check each field in the main table exists in the submission table
      for (const [fieldName, fieldDef] of Object.entries(mainSchema)) {
        if (EXCLUDED_FIELDS.includes(fieldName)) continue;

        if (!submissionSchema[fieldName]) {
          missingFields.push(fieldName);
        } else {
          // Check the data type matches
          const mainType = fieldDef.data_type;
          const submissionType = submissionSchema[fieldName].data_type;

          if (mainType !== submissionType) {
            mismatches.push(
              `${fieldName}: main=${mainType}, submission=${submissionType}`
            );
          }
        }
      }

      expect(missingFields,
        `${submissionTable} is missing fields: ${missingFields.join(', ')}`
      ).toHaveLength(0);

      expect(mismatches,
        `${submissionTable} has type mismatches: ${mismatches.join('; ')}`
      ).toHaveLength(0);
    });
  }
});

test.describe('Schema Validation - Version Tables', () => {
  // Same idea as above: one routine parameterized over the four version tables.
  const versionPairs: Array<[string, string]> = [
    ['parks', 'park_versions'],
    ['rides', 'ride_versions'],
    ['companies', 'company_versions'],
    ['ride_models', 'ride_model_versions'],
  ];

  for (const [mainTable, versionTable] of versionPairs) {
    test(`${versionTable}: has all main table fields plus version metadata`, async () => {
      const mainSchema = await getTableSchema(mainTable);
      const versionSchema = await getTableSchema(versionTable);

      const missingFields: string[] = [];

      // Check all main table fields exist in the version table
      for (const [fieldName] of Object.entries(mainSchema)) {
        if (EXCLUDED_FIELDS.includes(fieldName)) continue;

        const normalizedName = normalizeColumnName(fieldName);
        if (!versionSchema[fieldName] && !versionSchema[normalizedName]) {
          missingFields.push(fieldName);
        }
      }

      // Check all version metadata fields exist
      const missingMetadata: string[] = [];
      for (const metaField of VERSION_METADATA_FIELDS) {
        if (!versionSchema[metaField]) {
          missingMetadata.push(metaField);
        }
      }

      expect(missingFields,
        `${versionTable} is missing main table fields: ${missingFields.join(', ')}`
      ).toHaveLength(0);

      expect(missingMetadata,
        `${versionTable} is missing version metadata: ${missingMetadata.join(', ')}`
      ).toHaveLength(0);
    });
  }
});

test.describe('Schema Validation - Critical Fields', () => {
  test('rides table does NOT have ride_type column', async () => {
    const ridesSchema = await getTableSchema('rides');

    expect(ridesSchema['ride_type']).toBeUndefined();
  });

  test('rides table DOES have category column (NOT NULL)', async () => {
    const ridesSchema = await getTableSchema('rides');

    expect(ridesSchema['category']).toBeDefined();
    expect(ridesSchema['category'].is_nullable).toBe('NO');
  });

  test('ride_models table DOES have both category and ride_type columns', async () => {
    const rideModelsSchema = await getTableSchema('ride_models');

    expect(rideModelsSchema['category']).toBeDefined();
    expect(rideModelsSchema['category'].is_nullable).toBe('NO');
    expect(rideModelsSchema['ride_type']).toBeDefined();
  });

  test('all entity tables have required base fields', async () => {
    const requiredFields = ['id', 'name', 'slug', 'created_at', 'updated_at'];
    const tables = ['parks', 'rides', 'companies', 'ride_models'];

    for (const table of tables) {
      const schema = await getTableSchema(table);

      for (const field of requiredFields) {
        expect(schema[field],
          `${table} is missing required field: ${field}`
        ).toBeDefined();
      }
    }
  });

  test('all submission tables have submission_id foreign key', async () => {
    const submissionTables = [
      'park_submissions',
      'ride_submissions',
      'company_submissions',
      'ride_model_submissions',
      'photo_submissions',
    ];

    for (const table of submissionTables) {
      const schema = await getTableSchema(table);

      expect(schema['submission_id'],
        `${table} is missing submission_id foreign key`
      ).toBeDefined();
      expect(schema['submission_id'].is_nullable).toBe('NO');
    }
  });

  test('all version tables have version metadata fields', async () => {
    const versionTables = [
      'park_versions',
      'ride_versions',
      'company_versions',
      'ride_model_versions',
    ];

    const requiredVersionFields = [
      'version_id',
      'version_number',
      'change_type',
      'is_current',
    ];

    for (const table of versionTables) {
      const schema = await getTableSchema(table);

      for (const field of requiredVersionFields) {
        expect(schema[field],
          `${table} is missing required version field: ${field}`
        ).toBeDefined();
      }
    }
  });
});

test.describe('Schema Validation - Function Parameter Alignment', () => {
  test('verify create_entity_from_submission function exists', async () => {
    const { error } = await supabase
      .rpc('pg_get_functiondef', {
        funcid: 'create_entity_from_submission',
      } as any)
      .single();

    // Function should exist (will error if not)
    expect(error).toBeNull();
  });

  test('verify update_entity_from_submission function exists', async () => {
    const { error } = await supabase
      .rpc('pg_get_functiondef', {
        funcid: 'update_entity_from_submission',
      } as any)
      .single();

    // Function should exist (will error if not)
    expect(error).toBeNull();
  });

  test('verify process_approval_transaction function exists', async () => {
    const { data } = await supabase.rpc('pg_catalog.pg_function_is_visible', {
      funcid: 'process_approval_transaction',
    } as any);

    // Function should be visible
    expect(data).toBeTruthy();
  });
});
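
// Note: a lighter-weight existence check can also be run directly in SQL, e.g.
//   SELECT to_regproc('public.process_approval_transaction') IS NOT NULL;
// to_regproc() returns NULL when the name cannot be resolved. This is only a
// sketch of an alternative, not what the tests above execute.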

test.describe('Schema Validation - Known Field Name Variations', () => {
  test('ride_versions uses height_requirement_cm instead of height_requirement', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['height_requirement_cm']).toBeDefined();
    expect(versionSchema['height_requirement']).toBeUndefined();
  });

  test('ride_versions uses gforce_max instead of max_g_force', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['gforce_max']).toBeDefined();
    expect(versionSchema['max_g_force']).toBeUndefined();
  });

  test('ride_versions uses inversions_count instead of inversions', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['inversions_count']).toBeDefined();
    expect(versionSchema['inversions']).toBeUndefined();
  });

  test('ride_versions uses height_meters instead of max_height_meters', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['height_meters']).toBeDefined();
    expect(versionSchema['max_height_meters']).toBeUndefined();
  });

  test('ride_versions uses drop_meters instead of drop_height_meters', async () => {
    const versionSchema = await getTableSchema('ride_versions');

    expect(versionSchema['drop_meters']).toBeDefined();
    expect(versionSchema['drop_height_meters']).toBeUndefined();
  });
});

test.describe('Schema Validation - Submission Items', () => {
  test('submission_items has all required foreign key columns', async () => {
    const schema = await getTableSchema('submission_items');

    const requiredFKs = [
      'submission_id',
      'park_submission_id',
      'ride_submission_id',
      'company_submission_id',
      'ride_model_submission_id',
      'photo_submission_id',
      'timeline_event_submission_id',
      'depends_on', // For dependency chain
    ];

    for (const fk of requiredFKs) {
      expect(schema[fk],
        `submission_items is missing FK: ${fk}`
      ).toBeDefined();
    }
  });

  test('submission_items has required metadata fields', async () => {
    const schema = await getTableSchema('submission_items');

    const requiredFields = [
      'item_type',
      'action_type',
      'status',
      'order_index',
    ];

    for (const field of requiredFields) {
      expect(schema[field],
        `submission_items is missing field: ${field}`
      ).toBeDefined();
    }
  });
});