Compare commits

..

1 Commits

Author SHA1 Message Date
Claude
2c2a6c90f0 docs: Comprehensive Django migration audit and implementation plan
- Analyzed Supabase database schema (100+ tables)
- Reviewed Django implementation (~2,200 lines of models, ~3,700 lines of API)
- Catalogued 42 Edge Functions requiring migration
- Identified 325 frontend component files needing backend support
- Created 6-phase migration plan with timeline estimates
- Prioritized missing features by user impact
- Migration currently ~40% complete

Key Findings:
- Core entities (Parks, Rides, Companies) fully implemented
- Reviews, User Lists, Notifications systems not implemented
- 60+ database tables still needed
- Edge Functions need migration to Celery tasks
- Estimated 6-8 weeks for complete migration

See DJANGO_MIGRATION_AUDIT.md for detailed breakdown.
2025-11-08 20:44:02 +00:00
119 changed files with 2491 additions and 15110 deletions

View File

@@ -1,186 +0,0 @@
# CI workflow: validates database schema consistency whenever migrations,
# moderation code, or edge functions change. Three independent jobs:
#   1. validate-schema        — runs validation script + Playwright tests
#   2. migration-safety-check — warns on risky SQL patterns in PR diffs
#   3. documentation-check    — reminds authors to update schema docs
name: Schema Validation
on:
  pull_request:
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  push:
    branches:
      - main
      - develop
    paths:
      - 'supabase/migrations/**'
      - 'src/lib/moderation/**'
      - 'supabase/functions/**'
  workflow_dispatch: # Allow manual triggering

jobs:
  validate-schema:
    name: Validate Database Schema
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run schema validation script
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🔍 Running schema validation checks..."
          npm run validate-schema

      - name: Run Playwright schema validation tests
        env:
          SUPABASE_SERVICE_ROLE_KEY: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
        run: |
          echo "🧪 Running integration tests..."
          npx playwright test schema-validation --reporter=list

      - name: Upload test results
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: schema-validation-results
          path: |
            playwright-report/
            test-results/
          retention-days: 7

      - name: Comment PR with validation results
        if: failure() && github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            // Post a single failure summary on the PR; await so a failed
            // API call surfaces in the step log instead of being dropped.
            await github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `## ❌ Schema Validation Failed

            The schema validation checks have detected inconsistencies in your database changes.

            **Common issues:**
            - Missing fields in submission tables
            - Mismatched data types between tables
            - Missing version metadata fields
            - Invalid column names (e.g., \`ride_type\` in \`rides\` table)

            **Next steps:**
            1. Review the failed tests in the Actions log
            2. Check the [Schema Reference documentation](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/docs/submission-pipeline/SCHEMA_REFERENCE.md)
            3. Fix the identified issues
            4. Push your fixes to re-run validation

            **Need help?** Consult the [Integration Tests README](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/tests/integration/README.md).`
            })

  migration-safety-check:
    name: Migration Safety Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # Full history is required so `git diff origin/main...HEAD` works.
          fetch-depth: 0

      - name: Check for breaking changes in migrations
        run: |
          echo "🔍 Checking for potentially breaking migration patterns..."
          # Check if any migrations contain DROP COLUMN
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "DROP COLUMN"; then
            echo "⚠️ Warning: Migration contains DROP COLUMN"
            echo "::warning::Migration contains DROP COLUMN - ensure data migration plan exists"
          fi
          # Check if any migrations alter NOT NULL constraints
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "ALTER COLUMN.*NOT NULL"; then
            echo "⚠️ Warning: Migration alters NOT NULL constraints"
            echo "::warning::Migration alters NOT NULL constraints - ensure data backfill is complete"
          fi
          # Check if any migrations rename columns
          if git diff origin/main...HEAD -- 'supabase/migrations/**' | grep -i "RENAME COLUMN"; then
            echo "⚠️ Warning: Migration renames columns"
            echo "::warning::Migration renames columns - ensure all code references are updated"
          fi

      - name: Validate migration file naming
        run: |
          echo "🔍 Validating migration file names..."
          # Check that all migration files follow the timestamp pattern
          for file in supabase/migrations/*.sql; do
            # Guard against a literal, unexpanded glob when the dir is empty.
            [ -e "$file" ] || continue
            if [[ ! $(basename "$file") =~ ^[0-9]{14}_ ]]; then
              echo "❌ Invalid migration filename: $(basename "$file")"
              echo "::error::Migration files must start with a 14-digit timestamp (YYYYMMDDHHMMSS)"
              exit 1
            fi
          done
          echo "✅ All migration filenames are valid"

  documentation-check:
    name: Documentation Check
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Check if schema docs need updating
        run: |
          echo "📚 Checking if schema documentation is up to date..."
          # Check if migrations changed but SCHEMA_REFERENCE.md didn't
          MIGRATIONS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "supabase/migrations/" || true)
          SCHEMA_DOCS_CHANGED=$(git diff origin/main...HEAD --name-only | grep -c "docs/submission-pipeline/SCHEMA_REFERENCE.md" || true)
          if [ "$MIGRATIONS_CHANGED" -gt 0 ] && [ "$SCHEMA_DOCS_CHANGED" -eq 0 ]; then
            echo "⚠️ Warning: Migrations were changed but SCHEMA_REFERENCE.md was not updated"
            echo "::warning::Consider updating docs/submission-pipeline/SCHEMA_REFERENCE.md to reflect schema changes"
          else
            echo "✅ Documentation check passed"
          fi

      - name: Comment PR with documentation reminder
        if: success()
        uses: actions/github-script@v7
        with:
          script: |
            // Fetch the changed-file list once and derive both flags from it
            // (the original ran `git diff` twice and required 'fs' unused).
            const { stdout: changedFiles } = await exec.getExecOutput(
              'git', ['diff', 'origin/main...HEAD', '--name-only']
            );
            const migrationsChanged = changedFiles.includes('supabase/migrations/');
            const docsChanged = changedFiles.includes('docs/submission-pipeline/SCHEMA_REFERENCE.md');
            if (migrationsChanged && !docsChanged) {
              await github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: `## 📚 Documentation Reminder

            This PR includes database migrations but doesn't update the schema reference documentation.

            **If you added/modified fields**, please update:
            - \`docs/submission-pipeline/SCHEMA_REFERENCE.md\`

            **If this is a minor change** (e.g., fixing typos, adding indexes), you can ignore this message.`
              })
            }

963
DJANGO_MIGRATION_AUDIT.md Normal file
View File

@@ -0,0 +1,963 @@
# Django Migration Audit & Plan
**Date**: 2025-11-08
**Project**: ThrillTrack Explorer
**Objective**: Complete migration from Supabase to Django backend
## Executive Summary
This audit examines the current state of the Django migration for ThrillTrack Explorer, a comprehensive amusement park and roller coaster tracking platform. The migration is approximately **40% complete** in terms of core functionality.
**Key Findings:**
- ✅ Core entity models (Parks, Rides, Companies, RideModels) are implemented
- ✅ Photo/media system is implemented
- ✅ Versioning system is implemented
- ✅ Moderation workflow with FSM is implemented
- ✅ Basic API endpoints (~3,700 lines) are implemented
- ❌ Reviews system is NOT implemented
- ❌ User features (lists, credits, blocking) are NOT implemented
- ❌ Notifications system is NOT implemented (model file is empty)
- ❌ Admin features are NOT implemented
- ❌ 42 Edge Functions need migration to Django
- ❌ Blog/content features are NOT implemented
- ❌ Advanced submission features are partially missing
---
## 1. Database Schema Comparison
### 1.1 Core Entities - ✅ COMPLETE
| Entity | Supabase | Django | Status | Notes |
|--------|----------|--------|--------|-------|
| Companies | ✅ | ✅ | **DONE** | Includes manufacturers, operators, designers |
| Parks | ✅ | ✅ | **DONE** | Location tracking, operating status |
| Rides | ✅ | ✅ | **DONE** | Full specs, coaster stats |
| Ride Models | ✅ | ✅ | **DONE** | Manufacturer templates |
| Locations | ✅ | ✅ | **DONE** | Country, Subdivision, Locality hierarchy |
### 1.2 User & Profile - ⚠️ PARTIAL
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| User (auth.users) | ✅ | ✅ | **DONE** | Custom user model with OAuth, MFA |
| User Profile | ✅ (profiles) | ✅ (UserProfile) | **DONE** | Extended profile info |
| User Roles | ✅ (user_roles) | ✅ (UserRole) | **DONE** | admin/moderator/user |
| User Sessions | ✅ | ❌ | **MISSING** | Session tracking table |
| User Preferences | ✅ | ❌ | **MISSING** | Theme, notification settings |
| User Notification Preferences | ✅ | ❌ | **MISSING** | Per-channel notification prefs |
| User Blocks | ✅ | ❌ | **MISSING** | User blocking system |
### 1.3 User Content - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Reviews | ✅ (reviews) | ❌ | **MISSING** | Park & ride reviews |
| Review Photos | ✅ (review_photos) | ❌ | **MISSING** | Photos attached to reviews |
| Review Deletions | ✅ (review_deletions) | ❌ | **MISSING** | Soft delete tracking |
| User Ride Credits | ✅ (user_ride_credits) | ❌ | **MISSING** | Track rides users have been on |
| User Top Lists | ✅ (user_top_lists) | ❌ | **MISSING** | Custom ranked lists |
| List Items | ✅ (list_items) | ❌ | **MISSING** | Items within lists |
| User Top List Items | ✅ | ❌ | **MISSING** | Detailed list item data |
### 1.4 Media & Photos - ✅ COMPLETE
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Photos | ✅ | ✅ | **DONE** | CloudFlare Images integration |
| Photo Submissions | ✅ | ⚠️ | **PARTIAL** | Through moderation system |
| Generic Photo Relations | ✅ | ✅ | **DONE** | Photos attach to any entity |
### 1.5 Moderation & Submissions - ✅ MOSTLY COMPLETE
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Content Submissions | ✅ | ✅ | **DONE** | FSM-based workflow |
| Submission Items | ✅ | ✅ | **DONE** | Individual field changes |
| Moderation Locks | ✅ | ✅ | **DONE** | 15-minute review locks |
| Park Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Ride Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Company Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Ride Model Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Photo Submissions | ✅ | ⚠️ | **PARTIAL** | Need specialized submission types |
| Submission Dependencies | ✅ | ❌ | **MISSING** | Track dependent submissions |
| Submission Idempotency Keys | ✅ | ❌ | **MISSING** | Prevent duplicate submissions |
| Submission Item Temp Refs | ✅ | ❌ | **MISSING** | Temporary reference handling |
| Conflict Resolutions | ✅ | ❌ | **MISSING** | Handle edit conflicts |
### 1.6 Versioning & History - ✅ COMPLETE
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Entity Versions | ✅ | ✅ | **DONE** | Generic version tracking |
| Version Diffs | ✅ | ⚠️ | **PARTIAL** | Stored in changed_fields JSON |
| Company Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Park Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Ride Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Ride Model Versions | ✅ | ✅ | **DONE** | Via generic EntityVersion |
| Entity Versions Archive | ✅ | ❌ | **MISSING** | Old version archival |
| Item Edit History | ✅ | ❌ | **MISSING** | Detailed edit tracking |
| Item Field Changes | ✅ | ❌ | **MISSING** | Field-level change tracking |
| Entity Field History | ✅ | ❌ | **MISSING** | Historical field values |
| Entity Relationships History | ✅ | ❌ | **MISSING** | Track relationship changes |
### 1.7 Ride-Specific Details - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Ride Coaster Stats | ✅ | ❌ | **MISSING** | Detailed coaster statistics |
| Ride Technical Specs | ✅ | ⚠️ | **PARTIAL** | Using JSONField, need dedicated table |
| Ride Water Details | ✅ | ❌ | **MISSING** | Water ride specifics |
| Ride Dark Details | ✅ | ❌ | **MISSING** | Dark ride specifics |
| Ride Flat Details | ✅ | ❌ | **MISSING** | Flat ride specifics |
| Ride Kiddie Details | ✅ | ❌ | **MISSING** | Kiddie ride specifics |
| Ride Transportation Details | ✅ | ❌ | **MISSING** | Transport ride specifics |
| Ride Former Names | ✅ | ❌ | **MISSING** | Historical ride names |
| Ride Name History | ✅ | ❌ | **MISSING** | Track name changes |
| Ride Model Technical Specs | ✅ | ❌ | **MISSING** | Model-specific specs |
### 1.8 Notifications - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Notification Channels | ✅ | ❌ | **MISSING** | Email, push, in-app channels |
| Notification Templates | ✅ | ❌ | **MISSING** | Template system |
| Notification Logs | ✅ | ❌ | **MISSING** | Delivery tracking |
| Notification Event Data | ✅ | ❌ | **MISSING** | Event-specific data |
| Notification Duplicate Stats | ✅ | ❌ | **MISSING** | Prevent duplicate notifications |
### 1.9 Admin & Audit - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Admin Settings | ✅ | ❌ | **MISSING** | System-wide settings |
| Admin Audit Log | ✅ | ❌ | **MISSING** | Admin action tracking |
| Admin Audit Details | ✅ | ❌ | **MISSING** | Detailed audit data |
| Moderation Audit Log | ✅ | ❌ | **MISSING** | Moderation action tracking |
| Moderation Audit Metadata | ✅ | ❌ | **MISSING** | Additional audit context |
| Profile Audit Log | ✅ | ❌ | **MISSING** | Profile change tracking |
| Profile Change Fields | ✅ | ❌ | **MISSING** | Field-level profile changes |
### 1.10 Timeline & Events - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Entity Timeline Events | ✅ | ❌ | **MISSING** | Significant entity events |
| Timeline Event Submissions | ✅ | ❌ | **MISSING** | User-submitted events |
### 1.11 Reports & Contact - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Reports | ✅ (reports table) | ❌ | **MISSING** | User reports/flagging |
| Contact Submissions | ✅ | ❌ | **MISSING** | Contact form submissions |
| Contact Email Threads | ✅ | ❌ | **MISSING** | Email thread tracking |
| Contact Rate Limits | ✅ | ❌ | **MISSING** | Prevent spam |
### 1.12 Historical Data - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Historical Parks | ✅ | ❌ | **MISSING** | Closed/defunct parks |
| Historical Rides | ✅ | ❌ | **MISSING** | Closed/defunct rides |
| Park Location History | ✅ | ❌ | **MISSING** | Track relocations |
### 1.13 Content & Blog - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Blog Posts | ✅ | ❌ | **MISSING** | Blog/news system |
### 1.14 System Tables - ❌ NOT IMPLEMENTED
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Entity Page Views | ✅ | ❌ | **MISSING** | Analytics/view tracking |
| Rate Limits | ✅ | ❌ | **MISSING** | API rate limiting |
| Account Deletion Requests | ✅ | ❌ | **MISSING** | GDPR compliance |
| Cleanup Job Log | ✅ | ❌ | **MISSING** | Maintenance job tracking |
| Orphaned Images | ✅ | ❌ | **MISSING** | Media cleanup |
| Orphaned Images Log | ✅ | ❌ | **MISSING** | Cleanup history |
| Test Data Registry | ✅ | ❌ | **MISSING** | Test data management |
| Approval Transaction Metrics | ✅ | ❌ | **MISSING** | Performance tracking |
| Request Metadata | ✅ | ❌ | **MISSING** | Request tracking |
| Request Breadcrumbs | ✅ | ❌ | **MISSING** | Request flow tracking |
| System Alerts | ✅ | ❌ | **MISSING** | System-wide alerts |
### 1.15 Park Operating Details - ⚠️ PARTIAL
| Feature | Supabase | Django | Status | Notes |
|---------|----------|--------|--------|-------|
| Park Operating Hours | ✅ | ❌ | **MISSING** | Schedule by day/season |
---
## 2. API Endpoints Comparison
### 2.1 Implemented Endpoints - ✅ COMPLETE
| Category | Supabase | Django | Lines of Code | Status |
|----------|----------|--------|---------------|--------|
| Authentication | ✅ | ✅ | 596 | **DONE** - JWT, OAuth, MFA |
| Companies | ✅ | ✅ | 254 | **DONE** - CRUD + search |
| Ride Models | ✅ | ✅ | 247 | **DONE** - CRUD + search |
| Parks | ✅ | ✅ | 362 | **DONE** - CRUD + nearby search |
| Rides | ✅ | ✅ | 360 | **DONE** - CRUD + search |
| Photos | ✅ | ✅ | 600 | **DONE** - Upload + moderation |
| Moderation | ✅ | ✅ | 496 | **DONE** - Submission workflow |
| Versioning | ✅ | ✅ | 369 | **DONE** - History + diffs |
| Search | ✅ | ✅ | 438 | **DONE** - Full-text search |
**Total API Code**: ~3,725 lines across 9 endpoint modules
### 2.2 Missing Endpoints - ❌ NOT IMPLEMENTED
| Category | Required | Status | Priority |
|----------|----------|--------|----------|
| Reviews | ✅ | ❌ **MISSING** | **HIGH** |
| User Lists | ✅ | ❌ **MISSING** | **HIGH** |
| User Credits | ✅ | ❌ **MISSING** | **MEDIUM** |
| Notifications | ✅ | ❌ **MISSING** | **HIGH** |
| Admin | ✅ | ❌ **MISSING** | **MEDIUM** |
| Reports | ✅ | ❌ **MISSING** | **MEDIUM** |
| Contact | ✅ | ❌ **MISSING** | **LOW** |
| Blog | ✅ | ❌ **MISSING** | **LOW** |
| Analytics | ✅ | ❌ **MISSING** | **LOW** |
| Timeline Events | ✅ | ❌ **MISSING** | **LOW** |
---
## 3. Supabase Edge Functions Analysis
**Total Edge Functions**: 42 functions
### 3.1 Edge Function Categories
#### 3.1.1 Authentication & User Management (9 functions)
- `admin-delete-user` - Admin user deletion
- `cancel-account-deletion` - Cancel pending deletion
- `cancel-email-change` - Cancel email change
- `confirm-account-deletion` - Confirm account deletion
- `export-user-data` - GDPR data export
- `mfa-unenroll` - Disable MFA
- `process-oauth-profile` - OAuth profile sync
- `request-account-deletion` - Request account deletion
- `resend-deletion-code` - Resend deletion confirmation
**Migration Strategy**: Implement as Django management commands + API endpoints
#### 3.1.2 Notifications (11 functions)
- `create-novu-subscriber` - Create notification subscriber
- `migrate-novu-users` - Migrate notification users
- `notify-moderators-report` - Notify mods of reports
- `notify-moderators-submission` - Notify mods of submissions
- `notify-system-announcement` - System announcements
- `notify-user-submission-status` - Submission status updates
- `novu-webhook` - Webhook receiver
- `remove-novu-subscriber` - Remove subscriber
- `trigger-notification` - Generic notification trigger
- `update-novu-preferences` - Update notification prefs
- `update-novu-subscriber` - Update subscriber info
**Migration Strategy**: Replace Novu with Django + Celery + email/push service
#### 3.1.3 Moderation & Content (5 functions)
- `manage-moderator-topic` - Manage mod topics/assignments
- `process-selective-approval` - Selective item approval
- `send-escalation-notification` - Escalate to senior mods
- `sync-all-moderators-to-topic` - Sync mod assignments
- `check-transaction-status` - Transaction monitoring
**Migration Strategy**: Implement as Celery tasks + API endpoints
#### 3.1.4 Maintenance & Cleanup (5 functions)
- `cleanup-old-versions` - Version history cleanup
- `process-expired-bans` - Process ban expirations
- `process-scheduled-deletions` - Process scheduled deletions
- `run-cleanup-jobs` - General maintenance
- `scheduled-maintenance` - Scheduled maintenance tasks
**Migration Strategy**: Implement as Celery periodic tasks
#### 3.1.5 Communication (5 functions)
- `merge-contact-tickets` - Merge duplicate tickets
- `receive-inbound-email` - Email receiver
- `send-admin-email-reply` - Admin email responses
- `send-contact-message` - Send contact message
- `send-password-added-email` - Password set notification
**Migration Strategy**: Implement with Django email backend
#### 3.1.6 Utilities (6 functions)
- `detect-location` - IP geolocation
- `seed-test-data` - Test data generation
- `sitemap` - Generate sitemap
- `upload-image` - Image upload to CloudFlare
- `validate-email` - Email validation
- `validate-email-backend` - Backend email validation
**Migration Strategy**: Mix of Celery tasks, management commands, and API endpoints
---
## 4. Frontend Feature Analysis
**Total Component Files**: 325 TypeScript/TSX files
**Component Directories**: 36 directories
**Page Directories**: 43 directories
### 4.1 Frontend Components Requiring Backend Support
Based on directory structure, the following features need backend support:
#### ✅ Implemented in Django
- Companies (manufacturers, operators)
- Parks (listings, details, maps)
- Rides (listings, details, search)
- Moderation (submissions, approval workflow)
- Versioning (history, diffs)
- Photos (upload, gallery, moderation)
- Search (full-text, filters)
- Auth (login, register, OAuth, MFA)
#### ❌ Missing from Django
- **Reviews** (`src/components/reviews/`) - **HIGH PRIORITY**
- **User Lists** (`src/components/lists/`) - **HIGH PRIORITY**
- **Notifications** (`src/components/notifications/`) - **HIGH PRIORITY**
- **Profile** (full features in `src/components/profile/`) - **MEDIUM PRIORITY**
- **Analytics** (`src/components/analytics/`) - **LOW PRIORITY**
- **Blog** (`src/components/blog/`) - **LOW PRIORITY**
- **Contact** (`src/components/contact/`) - **LOW PRIORITY**
- **Settings** (full features in `src/components/settings/`) - **MEDIUM PRIORITY**
- **Timeline** (`src/components/timeline/`) - **LOW PRIORITY**
- **Designers** (`src/components/designers/`) - **LOW PRIORITY**
- **Park Owners** (`src/components/park-owners/`) - **LOW PRIORITY**
- **Operators** (`src/components/operators/`) - **MEDIUM PRIORITY**
- **Manufacturers** (`src/components/manufacturers/`) - **MEDIUM PRIORITY**
---
## 5. Critical Missing Features
### 5.1 HIGHEST PRIORITY (Core User Features)
#### Reviews System
**Impact**: Critical - core feature for users
**Tables Needed**:
- `reviews` - Main review table
- `review_photos` - Photo attachments
- `review_deletions` - Soft delete tracking
**API Endpoints Needed**:
- `POST /api/v1/reviews/` - Create review
- `GET /api/v1/reviews/` - List reviews
- `GET /api/v1/reviews/{id}/` - Get review
- `PATCH /api/v1/reviews/{id}/` - Update review
- `DELETE /api/v1/reviews/{id}/` - Delete review
- `POST /api/v1/reviews/{id}/helpful/` - Mark as helpful
- `GET /api/v1/parks/{id}/reviews/` - Park reviews
- `GET /api/v1/rides/{id}/reviews/` - Ride reviews
**Estimated Effort**: 2-3 days
#### User Lists System
**Impact**: Critical - popular feature for enthusiasts
**Tables Needed**:
- `user_top_lists` - List metadata
- `list_items` - List entries
- `user_top_list_items` - Extended item data
**API Endpoints Needed**:
- `POST /api/v1/lists/` - Create list
- `GET /api/v1/lists/` - List all lists
- `GET /api/v1/lists/{id}/` - Get list
- `PATCH /api/v1/lists/{id}/` - Update list
- `DELETE /api/v1/lists/{id}/` - Delete list
- `POST /api/v1/lists/{id}/items/` - Add item
- `DELETE /api/v1/lists/{id}/items/{item_id}/` - Remove item
- `PATCH /api/v1/lists/{id}/reorder/` - Reorder items
**Estimated Effort**: 2-3 days
#### Notifications System
**Impact**: Critical - user engagement
**Tables Needed**:
- `notification_channels` - Channel config
- `notification_templates` - Templates
- `notification_logs` - Delivery tracking
- `notification_event_data` - Event data
- `user_notification_preferences` - User preferences
**API Endpoints Needed**:
- `GET /api/v1/notifications/` - List notifications
- `PATCH /api/v1/notifications/{id}/read/` - Mark as read
- `PATCH /api/v1/notifications/read-all/` - Mark all as read
- `GET /api/v1/notifications/preferences/` - Get preferences
- `PATCH /api/v1/notifications/preferences/` - Update preferences
**Background Tasks**:
- Send email notifications (Celery)
- Send push notifications (Celery)
- Batch notification processing
**Estimated Effort**: 3-4 days
### 5.2 HIGH PRIORITY (Enhanced Features)
#### User Ride Credits
**Impact**: High - tracks user's ride history
**Tables Needed**:
- `user_ride_credits` - Credit tracking
**API Endpoints Needed**:
- `POST /api/v1/credits/` - Add credit
- `GET /api/v1/credits/` - List user's credits
- `GET /api/v1/users/{id}/credits/` - User's public credits
- `DELETE /api/v1/credits/{id}/` - Remove credit
**Estimated Effort**: 1 day
#### Ride Detail Tables
**Impact**: High - richer data for enthusiasts
**Tables Needed**:
- `ride_coaster_stats` - Coaster-specific stats
- `ride_water_details` - Water ride details
- `ride_dark_details` - Dark ride details
- `ride_flat_details` - Flat ride details
- `ride_kiddie_details` - Kiddie ride details
- `ride_transportation_details` - Transport details
- `ride_former_names` - Name history
- `ride_technical_specs` - Technical specifications
**API Endpoints**: Extend existing ride endpoints
**Estimated Effort**: 2 days
#### User Sessions & Preferences
**Impact**: High - better UX
**Tables Needed**:
- `user_sessions` - Session tracking
- `user_preferences` - User settings
**API Endpoints**:
- `GET /api/v1/auth/sessions/` - List sessions
- `DELETE /api/v1/auth/sessions/{id}/` - Revoke session
- `GET /api/v1/users/preferences/` - Get preferences
- `PATCH /api/v1/users/preferences/` - Update preferences
**Estimated Effort**: 1 day
### 5.3 MEDIUM PRIORITY (Operational Features)
#### Reports System
**Impact**: Medium - content moderation
**Tables Needed**:
- `reports` - User reports
**API Endpoints**:
- `POST /api/v1/reports/` - Submit report
- `GET /api/v1/moderation/reports/` - List reports (mods only)
- `PATCH /api/v1/moderation/reports/{id}/` - Process report
**Estimated Effort**: 1-2 days
#### Admin Audit System
**Impact**: Medium - admin oversight
**Tables Needed**:
- `admin_audit_log` - Admin actions
- `admin_audit_details` - Detailed audit data
- `moderation_audit_log` - Mod actions
- `profile_audit_log` - Profile changes
**API Endpoints**: Admin-only endpoints
**Estimated Effort**: 2 days
#### Account Management
**Impact**: Medium - GDPR compliance
**Tables Needed**:
- `account_deletion_requests` - Deletion workflow
**API Endpoints**:
- `POST /api/v1/auth/request-deletion/` - Request deletion
- `POST /api/v1/auth/confirm-deletion/` - Confirm deletion
- `POST /api/v1/auth/cancel-deletion/` - Cancel deletion
- `GET /api/v1/auth/export-data/` - Export user data
**Estimated Effort**: 2 days
#### Contact System
**Impact**: Medium - customer support
**Tables Needed**:
- `contact_submissions` - Contact messages
- `contact_email_threads` - Email threads
- `contact_rate_limits` - Spam prevention
**API Endpoints**:
- `POST /api/v1/contact/` - Submit message
- `GET /api/v1/admin/contact/` - List messages
**Estimated Effort**: 1 day
### 5.4 LOW PRIORITY (Nice-to-Have)
#### Blog System
**Impact**: Low - content marketing
**Tables Needed**:
- `blog_posts` - Blog content
**Estimated Effort**: 1-2 days
#### Analytics System
**Impact**: Low - insights
**Tables Needed**:
- `entity_page_views` - View tracking
**Estimated Effort**: 1 day
#### Timeline Events
**Impact**: Low - historical tracking
**Tables Needed**:
- `entity_timeline_events` - Events
- `timeline_event_submissions` - User submissions
**Estimated Effort**: 1-2 days
---
## 6. Migration Phases
### Phase 1: Critical User Features (1-2 weeks)
**Goal**: Enable core user functionality
1. **Reviews System** (3 days)
- Models: Review, ReviewPhoto, ReviewDeletion
- API: Full CRUD + helpful voting
- Frontend integration
2. **User Lists System** (3 days)
- Models: UserTopList, ListItem
- API: CRUD + reordering
- Frontend integration
3. **Notifications System** (4 days)
- Models: NotificationChannel, NotificationTemplate, NotificationLog, UserNotificationPreferences
- API: List, mark read, preferences
- Background tasks: Email, push notifications
- Replace Novu integration
4. **User Ride Credits** (1 day)
- Model: UserRideCredit
- API: CRUD
- Frontend integration
**Deliverable**: Users can review, create lists, track rides, receive notifications
### Phase 2: Enhanced Data & Features (1 week)
**Goal**: Richer data and improved UX
1. **Ride Detail Tables** (2 days)
- Models: RideCoasterStats, RideWaterDetails, RideDarkDetails, etc.
- API: Extend ride endpoints
- Frontend: Display detailed stats
2. **User Sessions & Preferences** (1 day)
- Models: UserSession, UserPreferences
- API: Session management, preferences
- Frontend: Settings page
3. **User Blocking** (1 day)
- Model: UserBlock
- API: Block/unblock users
- Frontend: Block UI
4. **Park Operating Hours** (1 day)
- Model: ParkOperatingHours
- API: CRUD
- Frontend: Display hours
**Deliverable**: Richer entity data, better user control
### Phase 3: Moderation & Admin (1 week)
**Goal**: Complete moderation tools
1. **Reports System** (2 days)
- Model: Report
- API: Submit + moderate reports
- Frontend: Report UI + mod queue
2. **Admin Audit System** (2 days)
- Models: AdminAuditLog, ModerationAuditLog, ProfileAuditLog
- API: Admin audit views
- Frontend: Audit log viewer
3. **Enhanced Submission Features** (3 days)
- Models: SubmissionDependency, SubmissionIdempotencyKey, ConflictResolution
- API: Dependency tracking, conflict resolution
- Frontend: Advanced submission UI
**Deliverable**: Complete moderation workflow
### Phase 4: Account & Compliance (3-4 days)
**Goal**: GDPR compliance and account management
1. **Account Deletion Workflow** (2 days)
- Model: AccountDeletionRequest
- API: Request, confirm, cancel deletion
- Management commands: Process deletions
- Frontend: Account settings
2. **Data Export** (1 day)
- API: Export user data (GDPR)
- Background task: Generate export
3. **Contact System** (1 day)
- Models: ContactSubmission, ContactEmailThread, ContactRateLimit
- API: Submit contact messages
- Frontend: Contact form
**Deliverable**: GDPR compliance, user account management
### Phase 5: Background Tasks & Automation (1 week)
**Goal**: Replace Edge Functions with Celery tasks
1. **Setup Celery** (1 day)
- Configure Celery with Redis/RabbitMQ
- Set up periodic tasks
2. **Authentication Tasks** (1 day)
- OAuth profile sync
- MFA management
- Session cleanup
3. **Moderation Tasks** (2 days)
- Selective approval processing
- Escalation notifications
- Transaction monitoring
4. **Maintenance Tasks** (2 days)
- Version cleanup
- Ban expiration
- Scheduled deletions
- Orphaned image cleanup
- Test data management
5. **Utility Tasks** (1 day)
- Sitemap generation
- Email validation
- Location detection
**Deliverable**: All Edge Functions migrated to Celery
### Phase 6: Content & Analytics (Optional - 1 week)
**Goal**: Content features and insights
1. **Blog System** (2 days)
- Model: BlogPost
- API: CRUD
- Frontend: Blog pages
2. **Analytics System** (2 days)
- Model: EntityPageView
- API: Analytics endpoints
- Frontend: Analytics dashboard
3. **Timeline Events** (2 days)
- Models: EntityTimelineEvent, TimelineEventSubmission
- API: CRUD
- Frontend: Timeline view
4. **Historical Data** (1 day)
- Models: HistoricalPark, HistoricalRide, ParkLocationHistory
- API: Historical queries
- Frontend: History display
**Deliverable**: Content management, user insights
---
## 7. Technical Debt & Architecture
### 7.1 What's Working Well ✅
1. **Clean Architecture**
- Separation of concerns (models, services, API endpoints)
- Generic versioning system using ContentType
- FSM-based moderation workflow
2. **Django Packages Used**
- `django-ninja`: Modern API framework (excellent choice)
- `django-fsm`: State machine for moderation
- `django-lifecycle`: Model lifecycle hooks
- `dirtyfields`: Track field changes
3. **Database Design**
- UUID primary keys
- Proper indexing
- JSON fields for flexibility
- PostGIS conditional support
4. **Code Quality**
- Well-documented models
- Type hints in API
- Consistent naming
### 7.2 Areas for Improvement ⚠️
1. **Empty Models**
- `apps/notifications/models.py` is essentially empty
- `apps/reviews/models.py` doesn't exist
2. **Missing Services**
- Need service layer for complex business logic
- Edge Function logic needs to be translated to services
3. **Testing**
- No Django tests found
- Need comprehensive test suite
4. **Background Tasks**
- Celery not yet configured
- All Edge Function logic currently synchronous
5. **Rate Limiting**
- Not implemented in Django yet
- Supabase has rate limiting tables
### 7.3 Recommended Architecture Changes
1. **Add Celery**
```
django/
celery.py # Celery app configuration
tasks/
__init__.py
notifications.py # Notification tasks
moderation.py # Moderation tasks
maintenance.py # Cleanup tasks
auth.py # Auth tasks
```
2. **Add Service Layer**
```
django/apps/*/services/
__init__.py
business_logic.py # Complex operations
email.py # Email sending
notifications.py # Notification logic
```
3. **Add Tests**
```
django/apps/*/tests/
__init__.py
test_models.py
test_services.py
test_api.py
```
---
## 8. Estimated Timeline
### Minimum Viable Migration (Core Features Only)
**Timeline**: 3-4 weeks
- Phase 1: Critical User Features (2 weeks)
- Phase 2: Enhanced Data (1 week)
- Phase 5: Background Tasks (1 week)
**Result**: Feature parity for 80% of users
### Complete Migration (All Features)
**Timeline**: 6-8 weeks
- Phase 1: Critical User Features (2 weeks)
- Phase 2: Enhanced Data (1 week)
- Phase 3: Moderation & Admin (1 week)
- Phase 4: Account & Compliance (4 days)
- Phase 5: Background Tasks (1 week)
- Phase 6: Content & Analytics (1 week)
- Testing & Polish (1 week)
**Result**: 100% feature parity with Supabase
---
## 9. Risk Assessment
### High Risk
1. **Notification System Migration**
- Currently using Novu (third-party service)
- Need to replace with Django + Celery + email/push provider
- Risk: Feature gap if not implemented properly
- Mitigation: Implement core notifications first, enhance later
2. **Background Task Migration**
- 42 Edge Functions to migrate
- Complex business logic in functions
- Risk: Missing functionality
- Mitigation: Systematic function-by-function migration
### Medium Risk
1. **Data Migration**
- No existing data (stated: "no data to be worried about")
- Risk: Low
2. **Frontend Integration**
- Frontend expects specific Supabase patterns
- Risk: API contract changes
- Mitigation: Maintain compatible API responses
### Low Risk
1. **Core Entity Models**
- Already implemented
- Well-tested architecture
2. **Authentication**
- Already implemented with JWT, OAuth, MFA
- Solid foundation
---
## 10. Recommendations
### Immediate Actions (This Week)
1. ✅ Complete this audit
2. Implement Reviews system (highest user impact)
3. Implement User Lists system (popular feature)
4. Set up Celery infrastructure
### Short Term (Next 2 Weeks)
1. Implement Notifications system
2. Implement User Ride Credits
3. Add Ride detail tables
4. Begin Edge Function migration
### Medium Term (Next Month)
1. Complete all moderation features
2. Implement GDPR compliance features
3. Add admin audit system
4. Complete Edge Function migration
### Long Term (Next 2 Months)
1. Add blog/content features
2. Implement analytics
3. Add timeline features
4. Comprehensive testing
5. Performance optimization
---
## 11. Success Criteria
### Migration Complete When:
- ✅ All core entity CRUD operations work
- ✅ All user features work (reviews, lists, credits)
- ✅ Notification system functional
- ✅ Moderation workflow complete
- ✅ All Edge Functions replaced
- ✅ GDPR compliance features implemented
- ✅ Test coverage >80%
- ✅ Frontend fully integrated
- ✅ Performance meets or exceeds Supabase
### Optional (Nice-to-Have):
- Blog system
- Analytics dashboard
- Timeline features
- Advanced admin features
---
## 12. Next Steps
1. **Review this audit** with stakeholders
2. **Prioritize phases** based on business needs
3. **Assign resources** to each phase
4. **Begin Phase 1** (Critical User Features)
5. **Set up CI/CD** for Django backend
6. **Create staging environment** for testing
7. **Plan data cutover** (when ready to switch from Supabase)
---
## Appendix A: File Structure Analysis
```
django/
├── api/
│ └── v1/
│ ├── endpoints/
│ │ ├── auth.py (596 lines) ✅
│ │ ├── companies.py (254 lines) ✅
│ │ ├── moderation.py (496 lines) ✅
│ │ ├── parks.py (362 lines) ✅
│ │ ├── photos.py (600 lines) ✅
│ │ ├── ride_models.py (247 lines) ✅
│ │ ├── rides.py (360 lines) ✅
│ │ ├── search.py (438 lines) ✅
│ │ └── versioning.py (369 lines) ✅
│ └── api.py (159 lines) ✅
├── apps/
│ ├── core/ ✅ Complete
│ │ └── models.py (265 lines)
│ ├── users/ ✅ Complete (basic)
│ │ └── models.py (258 lines)
│ ├── entities/ ✅ Complete
│ │ └── models.py (931 lines)
│ ├── media/ ✅ Complete
│ │ └── models.py (267 lines)
│ ├── moderation/ ✅ Complete
│ │ └── models.py (478 lines)
│ ├── versioning/ ✅ Complete
│ │ └── models.py (288 lines)
│ ├── notifications/ ❌ Empty (1 line)
│ └── reviews/ ❌ Missing
└── config/ ✅ Complete
└── settings/
```
## Appendix B: Database Table Checklist
**✅ Implemented (15 tables)**:
- users (via Django auth)
- user_roles
- user_profiles
- countries
- subdivisions
- localities
- companies
- parks
- rides
- ride_models
- photos
- content_submissions
- submission_items
- moderation_locks
- entity_versions
**❌ Missing (60+ tables)**:
- reviews & review_photos
- user_ride_credits
- user_top_lists & list_items
- user_blocks
- user_sessions
- user_preferences
- user_notification_preferences
- notification_channels, notification_templates, notification_logs
- ride_coaster_stats, ride_*_details (7 tables)
- ride_former_names, ride_name_history
- reports
- contact_submissions, contact_email_threads
- admin_audit_log, moderation_audit_log, profile_audit_log
- account_deletion_requests
- park_operating_hours
- historical_parks, historical_rides
- entity_timeline_events
- blog_posts
- entity_page_views
- And 30+ more system/tracking tables
---
**End of Audit**

View File

@@ -1,210 +0,0 @@
# Rate Limit Monitoring Setup
This document explains how to set up automated rate limit monitoring with alerts.
## Overview
The rate limit monitoring system consists of:
1. **Metrics Collection** - Tracks all rate limit checks in-memory
2. **Alert Configuration** - Database table with configurable thresholds
3. **Monitor Function** - Edge function that checks metrics and triggers alerts
4. **Cron Job** - Scheduled job that runs the monitor function periodically
## Setup Instructions
### Step 1: Enable Required Extensions
Run this SQL in your Supabase SQL Editor:
```sql
-- Enable pg_cron for scheduling
CREATE EXTENSION IF NOT EXISTS pg_cron;
-- Enable pg_net for HTTP requests
CREATE EXTENSION IF NOT EXISTS pg_net;
```
### Step 2: Create the Cron Job
Run this SQL to schedule the monitor to run every 5 minutes:
```sql
SELECT cron.schedule(
'monitor-rate-limits',
'*/5 * * * *', -- Every 5 minutes
$$
SELECT
net.http_post(
url:='https://api.thrillwiki.com/functions/v1/monitor-rate-limits',
        headers:='{"Content-Type": "application/json", "Authorization": "Bearer YOUR_SUPABASE_ANON_KEY"}'::jsonb,
body:='{}'::jsonb
) as request_id;
$$
);
```
### Step 3: Verify the Cron Job
Check that the cron job was created:
```sql
SELECT * FROM cron.job WHERE jobname = 'monitor-rate-limits';
```
### Step 4: Configure Alert Thresholds
Visit the admin dashboard at `/admin/rate-limit-metrics` and navigate to the "Configuration" tab to:
- Enable/disable specific alerts
- Adjust threshold values
- Modify time windows
Default configurations are automatically created:
- **Block Rate Alert**: Triggers when >50% of requests are blocked in 5 minutes
- **Total Requests Alert**: Triggers when >1000 requests/minute
- **Unique IPs Alert**: Triggers when >100 unique IPs in 5 minutes (disabled by default)
## How It Works
### 1. Metrics Collection
Every rate limit check (both allowed and blocked) is recorded with:
- Timestamp
- Function name
- Client IP
- User ID (if authenticated)
- Result (allowed/blocked)
- Remaining quota
- Rate limit tier
Metrics are stored in-memory for the last 10,000 checks.
### 2. Monitoring Process
Every 5 minutes, the monitor function:
1. Fetches enabled alert configurations from the database
2. Analyzes current metrics for each configuration's time window
3. Compares metrics against configured thresholds
4. For exceeded thresholds:
- Records the alert in `rate_limit_alerts` table
- Sends notification to moderators via Novu
- Skips if a recent unresolved alert already exists (prevents spam)
### 3. Alert Deduplication
Alerts are deduplicated using a 15-minute window. If an alert for the same configuration was triggered in the last 15 minutes and hasn't been resolved, no new alert is sent.
### 4. Notifications
Alerts are sent to all moderators via the "moderators" topic in Novu, including:
- Email notifications
- In-app notifications (if configured)
- Custom notification channels (if configured)
## Monitoring the Monitor
### Check Cron Job Status
```sql
-- View recent cron job runs
SELECT * FROM cron.job_run_details
WHERE jobid = (SELECT jobid FROM cron.job WHERE jobname = 'monitor-rate-limits')
ORDER BY start_time DESC
LIMIT 10;
```
### View Function Logs
Check the edge function logs in Supabase Dashboard:
`https://supabase.com/dashboard/project/ydvtmnrszybqnbcqbdcy/functions/monitor-rate-limits/logs`
### Test Manually
You can test the monitor function manually by calling it via HTTP:
```bash
curl -X POST https://api.thrillwiki.com/functions/v1/monitor-rate-limits \
-H "Content-Type: application/json"
```
## Adjusting the Schedule
To change how often the monitor runs, update the cron schedule:
```sql
-- Note: cron.alter_job takes a numeric job id, not the job name,
-- so look it up from cron.job first.

-- Update to run every 10 minutes instead
SELECT cron.alter_job(
  (SELECT jobid FROM cron.job WHERE jobname = 'monitor-rate-limits'),
  schedule := '*/10 * * * *'
);
-- Update to run every hour
SELECT cron.alter_job(
  (SELECT jobid FROM cron.job WHERE jobname = 'monitor-rate-limits'),
  schedule := '0 * * * *'
);
-- Update to run every minute (not recommended - may generate too many alerts)
SELECT cron.alter_job(
  (SELECT jobid FROM cron.job WHERE jobname = 'monitor-rate-limits'),
  schedule := '* * * * *'
);
```
## Removing the Cron Job
If you need to disable monitoring:
```sql
SELECT cron.unschedule('monitor-rate-limits');
```
## Troubleshooting
### No Alerts Being Triggered
1. Check if any alert configurations are enabled:
```sql
SELECT * FROM rate_limit_alert_config WHERE enabled = true;
```
2. Check if metrics are being collected:
- Visit `/admin/rate-limit-metrics` and check the "Recent Activity" tab
- If no activity, the rate limiter might not be in use
3. Check monitor function logs for errors
### Too Many Alerts
- Increase threshold values in the configuration
- Increase time windows for less sensitive detection
- Disable specific alert types that are too noisy
### Monitor Not Running
1. Verify cron job exists and is active
2. Check `cron.job_run_details` for error messages
3. Verify edge function deployed successfully
4. Check network connectivity between cron scheduler and edge function
## Database Tables
### `rate_limit_alert_config`
Stores alert threshold configurations. Only admins can modify.
### `rate_limit_alerts`
Stores history of all triggered alerts. Moderators can view and resolve.
## Security
- Alert configurations can only be modified by admin/superuser roles
- Alert history is only accessible to moderators and above
- The monitor function runs without JWT verification (as a cron job)
- All database operations respect Row Level Security policies
## Performance Considerations
- In-memory metrics store max 10,000 entries (auto-trimmed)
- Metrics older than the longest configured time window are not useful
- Monitor function typically runs in <500ms
- No significant database load (simple queries on small tables)
## Future Enhancements
Possible improvements:
- Function-specific alert thresholds
- Alert aggregation (daily/weekly summaries)
- Custom notification channels per alert type
- Machine learning-based anomaly detection
- Integration with external monitoring tools (Datadog, New Relic, etc.)

View File

@@ -1,636 +0,0 @@
# Submission Pipeline Schema Reference
**Critical Document**: This reference maps all entity types to their exact database schema fields across the entire submission pipeline to prevent schema mismatches.
**Last Updated**: 2025-11-08
**Status**: ✅ All schemas audited and verified
---
## Table of Contents
1. [Overview](#overview)
2. [Parks](#parks)
3. [Rides](#rides)
4. [Companies](#companies)
5. [Ride Models](#ride-models)
6. [Photos](#photos)
7. [Timeline Events](#timeline-events)
8. [Critical Functions Reference](#critical-functions-reference)
9. [Common Pitfalls](#common-pitfalls)
---
## Overview
### Pipeline Flow
```
User Input → *_submissions table → submission_items → Moderation →
process_approval_transaction → create/update_entity_from_submission →
Main entity table → Version trigger → *_versions table
```
### Entity Types
- `park` - Theme parks and amusement parks
- `ride` - Individual rides and attractions
- `company` - Used for: `manufacturer`, `operator`, `designer`, `property_owner`
- `ride_model` - Ride model templates
- `photo` - Entity photos
- `timeline_event` - Historical events
---
## Parks
### Main Table: `parks`
**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_type` (text, NOT NULL) - Values: `theme_park`, `amusement_park`, `water_park`, etc.
- `status` (text, NOT NULL) - Values: `operating`, `closed`, `under_construction`, etc.
**Optional Fields:**
- `description` (text)
- `location_id` (uuid, FK → locations)
- `operator_id` (uuid, FK → companies)
- `property_owner_id` (uuid, FK → companies)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text) - Values: `year`, `month`, `day`
- `closing_date_precision` (text)
- `website_url` (text)
- `phone` (text)
- `email` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)
### Submission Table: `park_submissions`
**Schema Identical to Main Table** (excluding auto-generated fields like `id`, timestamps)
**Additional Fields:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `temp_location_data` (jsonb) - For pending location creation
### Version Table: `park_versions`
**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `park_id` (uuid, NOT NULL, FK → parks)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL) - Values: `created`, `updated`, `restored`
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)
---
## Rides
### Main Table: `rides`
**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `park_id` (uuid, NOT NULL, FK → parks)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
- Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`
- `status` (text, NOT NULL)
- Values: `operating`, `closed`, `under_construction`, `sbno`, etc.
**⚠️ IMPORTANT: `rides` table does NOT have `ride_type` column!**
- `ride_type` only exists in `ride_models` table
- Using `ride_type` in rides updates will cause "column does not exist" error
**Optional Relationship Fields:**
- `manufacturer_id` (uuid, FK → companies)
- `designer_id` (uuid, FK → companies)
- `ride_model_id` (uuid, FK → ride_models)
**Optional Descriptive Fields:**
- `description` (text)
- `opening_date` (date)
- `closing_date` (date)
- `opening_date_precision` (text)
- `closing_date_precision` (text)
**Optional Technical Fields:**
- `height_requirement` (integer) - Height requirement in cm
- `age_requirement` (integer)
- `max_speed_kmh` (numeric)
- `duration_seconds` (integer)
- `capacity_per_hour` (integer)
- `max_g_force` (numeric)
- `inversions` (integer) - Number of inversions
- `length_meters` (numeric)
- `max_height_meters` (numeric)
- `drop_height_meters` (numeric)
**Category-Specific Fields:**
*Roller Coasters:*
- `ride_sub_type` (text)
- `coaster_type` (text)
- `seating_type` (text)
- `intensity_level` (text)
- `track_material` (text)
- `support_material` (text)
- `propulsion_method` (text)
*Water Rides:*
- `water_depth_cm` (integer)
- `splash_height_meters` (numeric)
- `wetness_level` (text)
- `flume_type` (text)
- `boat_capacity` (integer)
*Dark Rides:*
- `theme_name` (text)
- `story_description` (text)
- `show_duration_seconds` (integer)
- `animatronics_count` (integer)
- `projection_type` (text)
- `ride_system` (text)
- `scenes_count` (integer)
*Flat Rides:*
- `rotation_type` (text)
- `motion_pattern` (text)
- `platform_count` (integer)
- `swing_angle_degrees` (numeric)
- `rotation_speed_rpm` (numeric)
- `arm_length_meters` (numeric)
- `max_height_reached_meters` (numeric)
*Kids Rides:*
- `min_age` (integer)
- `max_age` (integer)
- `educational_theme` (text)
- `character_theme` (text)
*Transport:*
- `transport_type` (text)
- `route_length_meters` (numeric)
- `stations_count` (integer)
- `vehicle_capacity` (integer)
- `vehicles_count` (integer)
- `round_trip_duration_seconds` (integer)
**Image Fields:**
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
- `image_url` (text) - Legacy field
**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)
### Submission Table: `ride_submissions`
**Schema Identical to Main Table** (excluding auto-generated fields)
**Additional Fields:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
### Version Table: `ride_versions`
**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `ride_id` (uuid, NOT NULL, FK → rides)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)
**⚠️ Field Name Differences (Version Table vs Main Table):**
- `height_requirement_cm` in versions → `height_requirement` in rides
- `gforce_max` in versions → `max_g_force` in rides
- `inversions_count` in versions → `inversions` in rides
- `height_meters` in versions → `max_height_meters` in rides
- `drop_meters` in versions → `drop_height_meters` in rides
---
## Companies
**Used For**: `manufacturer`, `operator`, `designer`, `property_owner`
### Main Table: `companies`
**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `company_type` (text, NOT NULL)
- Values: `manufacturer`, `operator`, `designer`, `property_owner`
**Optional Fields:**
- `description` (text)
- `person_type` (text, default: 'company')
- Values: `company`, `individual`
- `founded_year` (integer)
- `founded_date` (date)
- `founded_date_precision` (text)
- `headquarters_location` (text)
- `website_url` (text)
- `logo_url` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)
### Submission Table: `company_submissions`
**Schema Identical to Main Table** (excluding auto-generated fields)
**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
### Version Table: `company_versions`
**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `company_id` (uuid, NOT NULL, FK → companies)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)
---
## Ride Models
### Main Table: `ride_models`
**Required Fields:**
- `id` (uuid, PK)
- `name` (text, NOT NULL)
- `slug` (text, NOT NULL, UNIQUE)
- `manufacturer_id` (uuid, NOT NULL, FK → companies)
- `category` (text, NOT NULL) ⚠️ **CRITICAL: This field is required**
- Values: `roller_coaster`, `water_ride`, `dark_ride`, `flat_ride`, `transport`, `kids_ride`
**Optional Fields:**
- `ride_type` (text) ⚠️ **This field exists in ride_models but NOT in rides**
- More specific classification than category
- Example: category = `roller_coaster`, ride_type = `inverted_coaster`
- `description` (text)
- `banner_image_url` (text)
- `banner_image_id` (text)
- `card_image_url` (text)
- `card_image_id` (text)
**Metadata Fields:**
- `view_count_all` (integer, default: 0)
- `view_count_30d` (integer, default: 0)
- `view_count_7d` (integer, default: 0)
- `average_rating` (numeric, default: 0.00)
- `review_count` (integer, default: 0)
- `installations_count` (integer, default: 0)
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)
### Submission Table: `ride_model_submissions`
**Schema Identical to Main Table** (excluding auto-generated fields)
**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
### Version Table: `ride_model_versions`
**All Main Table Fields PLUS:**
- `version_id` (uuid, PK)
- `ride_model_id` (uuid, NOT NULL, FK → ride_models)
- `version_number` (integer, NOT NULL)
- `change_type` (version_change_type, NOT NULL)
- `change_reason` (text)
- `is_current` (boolean, default: true)
- `created_by` (uuid, FK → auth.users)
- `created_at` (timestamptz)
- `submission_id` (uuid, FK → content_submissions)
---
## Photos
### Main Table: `photos`
**Required Fields:**
- `id` (uuid, PK)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `uploader_id` (uuid, NOT NULL, FK → auth.users)
**Optional Fields:**
- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer, default: 0)
- `is_primary` (boolean, default: false)
- `status` (text, default: 'active')
**Metadata Fields:**
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
- `is_test_data` (boolean, default: false)
### Submission Table: `photo_submissions`
**Required Fields:**
- `id` (uuid, PK)
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `cloudflare_id` (text, NOT NULL)
- `url` (text, NOT NULL)
**Optional Fields:**
- `title` (text)
- `caption` (text)
- `taken_date` (date)
- `taken_date_precision` (text)
- `photographer_name` (text)
- `order_index` (integer)
**Note**: Photos do NOT have version tables - they are immutable after approval
---
## Timeline Events
### Main Table: `entity_timeline_events`
**Required Fields:**
- `id` (uuid, PK)
- `entity_type` (text, NOT NULL)
- `entity_id` (uuid, NOT NULL)
- `event_type` (text, NOT NULL)
- Values: `opening`, `closing`, `relocation`, `renovation`, `name_change`, `ownership_change`, etc.
- `title` (text, NOT NULL)
- `event_date` (date, NOT NULL)
**Optional Fields:**
- `description` (text)
- `event_date_precision` (text, default: 'day')
- `from_value` (text)
- `to_value` (text)
- `from_entity_id` (uuid)
- `to_entity_id` (uuid)
- `from_location_id` (uuid)
- `to_location_id` (uuid)
- `is_public` (boolean, default: true)
- `display_order` (integer, default: 0)
**Approval Fields:**
- `created_by` (uuid, FK → auth.users)
- `approved_by` (uuid, FK → auth.users)
- `submission_id` (uuid, FK → content_submissions)
**Metadata Fields:**
- `created_at` (timestamptz)
- `updated_at` (timestamptz)
### Submission Table: `timeline_event_submissions`
**Schema Identical to Main Table** (excluding auto-generated fields)
**Additional Field:**
- `submission_id` (uuid, NOT NULL, FK → content_submissions)
**Note**: Timeline events do NOT have version tables
---
## Critical Functions Reference
### 1. `create_entity_from_submission`
**Purpose**: Creates new entities from approved submissions
**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Entity data from submission
- `p_created_by` (uuid) - User who created it
- `p_submission_id` (uuid) - Source submission
**Critical Requirements**:
- ✅ MUST extract `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides (doesn't exist)
- ✅ MUST use `ride_type` for ride_models (does exist)
- ✅ MUST handle all required NOT NULL fields
**Returns**: `uuid` - New entity ID
### 2. `update_entity_from_submission`
**Purpose**: Updates existing entities from approved edits
**Parameters**:
- `p_entity_type` (text) - Entity type identifier
- `p_data` (jsonb) - Updated entity data
- `p_entity_id` (uuid) - Existing entity ID
- `p_changed_by` (uuid) - User who changed it
**Critical Requirements**:
- ✅ MUST use COALESCE to preserve existing values
- ✅ MUST include `category` for rides and ride_models
- ✅ MUST NOT use `ride_type` for rides
- ✅ MUST use `ride_type` for ride_models
- ✅ MUST update `updated_at` timestamp
**Returns**: `uuid` - Updated entity ID
### 3. `process_approval_transaction`
**Purpose**: Atomic transaction for selective approval
**Parameters**:
- `p_submission_id` (uuid)
- `p_item_ids` (uuid[]) - Specific items to approve
- `p_moderator_id` (uuid)
- `p_change_reason` (text)
**Critical Requirements**:
- ✅ MUST validate all item dependencies first
- ✅ MUST extract correct fields from submission tables
- ✅ MUST set session variables for triggers
- ✅ MUST handle rollback on any error
**Called By**: Edge function `process-selective-approval`
### 4. `create_submission_with_items`
**Purpose**: Creates multi-item submissions atomically
**Parameters**:
- `p_submission_id` (uuid)
- `p_entity_type` (text)
- `p_action_type` (text) - `create` or `edit`
- `p_items` (jsonb) - Array of submission items
- `p_user_id` (uuid)
**Critical Requirements**:
- ✅ MUST resolve dependencies in order
- ✅ MUST validate all required fields per entity type
- ✅ MUST link items to submission correctly
---
## Common Pitfalls
### 1. ❌ Using `ride_type` for rides
```sql
-- WRONG
UPDATE rides SET ride_type = 'inverted_coaster' WHERE id = $1;
-- ERROR: column "ride_type" does not exist
-- CORRECT
UPDATE rides SET category = 'roller_coaster' WHERE id = $1;
```
### 2. ❌ Missing `category` field
```sql
-- WRONG - Missing required category
INSERT INTO rides (name, slug, park_id, status) VALUES (...);
-- ERROR: null value violates not-null constraint
-- CORRECT
INSERT INTO rides (name, slug, park_id, category, status) VALUES (..., 'roller_coaster', ...);
```
### 3. ❌ Wrong column names in version tables
```sql
-- WRONG
SELECT height_requirement FROM ride_versions WHERE ride_id = $1;
-- ERROR: column "height_requirement" does not exist
-- CORRECT
SELECT height_requirement_cm FROM ride_versions WHERE ride_id = $1;
```
### 4. ❌ Forgetting COALESCE in updates
```sql
-- WRONG - Overwrites fields with NULL
UPDATE rides SET
name = (p_data->>'name'),
description = (p_data->>'description')
WHERE id = $1;
-- CORRECT - Preserves existing values if not provided
UPDATE rides SET
name = COALESCE(p_data->>'name', name),
description = COALESCE(p_data->>'description', description)
WHERE id = $1;
```
### 5. ❌ Not handling submission_id in version triggers
```sql
-- WRONG - Version doesn't link back to submission
INSERT INTO ride_versions (ride_id, ...) VALUES (...);
-- CORRECT - Trigger must read session variable
v_submission_id := current_setting('app.submission_id', true)::uuid;
INSERT INTO ride_versions (ride_id, submission_id, ...) VALUES (..., v_submission_id, ...);
```
---
## Validation Checklist
Before deploying any submission pipeline changes:
- [ ] All entity tables have matching submission tables
- [ ] All required NOT NULL fields are included in CREATE functions
- [ ] All required NOT NULL fields are included in UPDATE functions
- [ ] `category` is extracted for rides and ride_models
- [ ] `ride_type` is NOT used for rides
- [ ] `ride_type` IS used for ride_models
- [ ] COALESCE is used for all UPDATE statements
- [ ] Version table column name differences are handled
- [ ] Session variables are set for version triggers
- [ ] Foreign key relationships are validated
- [ ] Dependency resolution works correctly
- [ ] Error handling and rollback logic is present
---
## Maintenance
**When adding new entity types:**
1. Create main table with all fields
2. Create matching submission table + `submission_id` FK
3. Create version table with all fields + version metadata
4. Add case to `create_entity_from_submission`
5. Add case to `update_entity_from_submission`
6. Add case to `process_approval_transaction`
7. Add case to `create_submission_with_items`
8. Create version trigger for main table
9. Update this documentation
10. Run full test suite
**When modifying schemas:**
1. Check if field exists in ALL three tables (main, submission, version)
2. Update ALL three tables in migration
3. Update ALL functions that reference the field
4. Update this documentation
5. Test create, update, and rollback flows
---
## Related Documentation
- [Submission Pipeline Overview](./README.md)
- [Versioning System](../versioning/README.md)
- [Moderation Workflow](../moderation/README.md)
- [Migration Guide](../versioning/MIGRATION.md)

View File

@@ -1,402 +0,0 @@
# Schema Validation Setup Guide
This guide explains how to set up and use the automated schema validation tools to prevent field mismatches in the submission pipeline.
## Overview
The validation system consists of three layers:
1. **Pre-migration Script** - Quick validation before deploying migrations
2. **Integration Tests** - Comprehensive Playwright tests for CI/CD
3. **GitHub Actions** - Automated checks on every pull request
## Quick Start
### 1. Add NPM Scripts
Add these scripts to your `package.json`:
```json
{
"scripts": {
"validate-schema": "tsx scripts/validate-schema.ts",
"test:schema": "playwright test schema-validation",
"test:schema:ui": "playwright test schema-validation --ui",
"pre-migrate": "npm run validate-schema"
}
}
```
### 2. Environment Variables
Create a `.env.test` file:
```env
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```
**⚠️ Important**: Never commit this file! Add it to `.gitignore`:
```gitignore
.env.test
.env.local
```
### 3. Install Dependencies
If not already installed:
```bash
npm install --save-dev @supabase/supabase-js @playwright/test tsx
```
## Using the Validation Tools
### Pre-Migration Validation Script
**When to use**: Before applying any database migration
**Run manually:**
```bash
npm run validate-schema
```
**What it checks:**
- ✅ Submission tables match main tables
- ✅ Version tables have all required fields
- ✅ Critical fields are correct (e.g., `category` vs `ride_type`)
- ✅ Database functions exist and are accessible
**Example output:**
```
🔍 Starting schema validation...
Submission Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: submission table matches main table
✅ Rides: submission table matches main table
✅ Companies: submission table matches main table
✅ Ride Models: submission table matches main table
Version Tables:
────────────────────────────────────────────────────────────────────────────────
✅ Parks: version table has all fields
✅ Rides: version table has all fields
✅ Companies: version table has all fields
✅ Ride Models: version table has all fields
Critical Fields:
────────────────────────────────────────────────────────────────────────────────
✅ rides table does NOT have ride_type column
✅ rides table has category column
✅ ride_models has both category and ride_type
Functions:
────────────────────────────────────────────────────────────────────────────────
✅ create_entity_from_submission exists and is accessible
✅ update_entity_from_submission exists and is accessible
✅ process_approval_transaction exists and is accessible
════════════════════════════════════════════════════════════════════════════════
Total: 14 passed, 0 failed
════════════════════════════════════════════════════════════════════════════════
✅ All schema validations passed. Safe to deploy.
```
### Integration Tests
**When to use**: In CI/CD, before merging PRs, after major changes
**Run all tests:**
```bash
npm run test:schema
```
**Run in UI mode (for debugging):**
```bash
npm run test:schema:ui
```
**Run specific test suite:**
```bash
npx playwright test schema-validation --grep "Entity Tables"
```
**What it tests:**
- All pre-migration script checks PLUS:
- Field-by-field data type comparison
- NOT NULL constraint validation
- Foreign key existence checks
- Known field name variations (e.g., `height_requirement_cm` vs `height_requirement`)
### GitHub Actions (Automated)
**Automatically runs on:**
- Every pull request that touches:
- `supabase/migrations/**`
- `src/lib/moderation/**`
- `supabase/functions/**`
- Pushes to `main` or `develop` branches
- Manual workflow dispatch
**What it does:**
1. Runs validation script
2. Runs integration tests
3. Checks for breaking migration patterns
4. Validates migration file naming
5. Comments on PRs with helpful guidance if tests fail
## Workflow Examples
### Before Creating a Migration
```bash
# 1. Make schema changes locally
# 2. Validate before creating migration
npm run validate-schema
# 3. If validation passes, create migration
supabase db diff -f add_new_field
# 4. Run validation again
npm run validate-schema
# 5. Commit and push
git add .
git commit -m "Add new field to rides table"
git push
```
### After Modifying Entity Schemas
```bash
# 1. Modified rides table schema
# 2. Run full test suite
npm run test:schema
# 3. Check specific validation
npx playwright test schema-validation --grep "rides"
# 4. Fix any issues
# 5. Re-run tests
npm run test:schema
```
### During Code Review
**PR Author:**
1. Ensure all validation tests pass locally
2. Push changes
3. Wait for GitHub Actions to complete
4. Address any automated feedback
**Reviewer:**
1. Check that GitHub Actions passed
2. Review schema changes in migrations
3. Verify documentation was updated
4. Approve if all checks pass
## Common Issues and Solutions
### Issue: "Missing fields" Error
**Symptom:**
```
❌ Rides: submission table matches main table
└─ Missing fields: category
```
**Cause**: Field was added to main table but not submission table
**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions ADD COLUMN category TEXT NOT NULL;
```
### Issue: "Type mismatch" Error
**Symptom:**
```
❌ Rides: submission table matches main table
└─ Type mismatches: max_speed_kmh: main=numeric, submission=integer
```
**Cause**: Data types don't match between tables
**Solution:**
```sql
-- In your migration file
ALTER TABLE ride_submissions
ALTER COLUMN max_speed_kmh TYPE NUMERIC USING max_speed_kmh::numeric;
```
### Issue: "Column does not exist" in Production
**Symptom**: Approval fails with `column "category" does not exist`
**Immediate action:**
1. Run validation script to identify issue
2. Create emergency migration to add missing field
3. Deploy immediately
4. Update functions if needed
**Prevention**: Always run validation before deploying
### Issue: Tests Pass Locally but Fail in CI
**Possible causes:**
- Different database state in CI vs local
- Missing environment variables
- Outdated schema in test database
**Solution:**
```bash
# Pull latest schema
supabase db pull
# Reset local database
supabase db reset
# Re-run tests
npm run test:schema
```
## Best Practices
### ✅ Do's
- ✅ Run validation script before every migration
- ✅ Run integration tests before merging PRs
- ✅ Update all three tables when adding fields (main, submission, version)
- ✅ Document field name variations in tests
- ✅ Check GitHub Actions results before merging
- ✅ Keep SCHEMA_REFERENCE.md up to date
### ❌ Don'ts
- ❌ Don't skip validation "because it's a small change"
- ❌ Don't add fields to only main tables
- ❌ Don't ignore failing tests
- ❌ Don't bypass CI checks
- ❌ Don't commit service role keys
- ❌ Don't modify submission pipeline functions without testing
## Continuous Integration Setup
### GitHub Secrets
Add to your repository secrets:
```
SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
```
**Steps:**
1. Go to repository Settings → Secrets and variables → Actions
2. Click "New repository secret"
3. Name: `SUPABASE_SERVICE_ROLE_KEY`
4. Value: Your service role key from Supabase dashboard
5. Save
### Branch Protection Rules
Recommended settings:
```
Branch: main
✓ Require status checks to pass before merging
✓ validate-schema (Schema Validation)
✓ migration-safety-check (Migration Safety Check)
✓ Require branches to be up to date before merging
```
## Troubleshooting
### Script Won't Run
**Error:** `tsx: command not found`
**Solution:**
```bash
npm install -g tsx
# or
npx tsx scripts/validate-schema.ts
```
### Authentication Errors
**Error:** `Invalid API key`
**Solution:**
1. Check `.env.test` has correct service role key
2. Verify key has not expired
3. Ensure environment variable is loaded:
```bash
set -a; source .env.test; set +a   # -a exports the vars so npm child processes can see them
npm run validate-schema
```
### Tests Timeout
**Error:** Tests timeout after 30 seconds
**Solution:**
```bash
# Increase timeout
npx playwright test schema-validation --timeout=60000
```
## Maintenance
### Adding New Entity Types
When adding a new entity type (e.g., `events`):
1. **Update validation script:**
```typescript
// In scripts/validate-schema.ts
await validateSubmissionTable('events', 'event_submissions', 'Events');
await validateVersionTable('events', 'event_versions', 'Events');
```
2. **Update integration tests:**
```typescript
// In tests/integration/schema-validation.test.ts
test('events: submission table matches main table schema', async () => {
// Add test logic
});
```
3. **Update documentation:**
- `docs/submission-pipeline/SCHEMA_REFERENCE.md`
- This file (`VALIDATION_SETUP.md`)
### Updating Field Mappings
When version tables use different field names:
```typescript
// In both script and tests
const fieldMapping: { [key: string]: string } = {
'new_main_field': 'version_field_name',
};
```
## Related Documentation
- [Schema Reference](./SCHEMA_REFERENCE.md) - Complete field mappings
- [Integration Tests README](../../tests/integration/README.md) - Detailed test documentation
- [Submission Pipeline](./README.md) - Pipeline overview
- [Versioning System](../versioning/README.md) - Version table details
## Support
**Questions?** Check the documentation above or review existing migration files.
**Found a bug in validation?** Open an issue with:
- Expected behavior
- Actual behavior
- Validation script output
- Database schema snippets

View File

@@ -1,332 +0,0 @@
#!/usr/bin/env tsx
/**
* Schema Validation Script
*
* Pre-migration validation script that checks schema consistency
* across the submission pipeline before deploying changes.
*
* Usage:
* npm run validate-schema
* or
* tsx scripts/validate-schema.ts
*
* Exit codes:
* 0 = All validations passed
* 1 = Validation failures detected
*/
import { createClient } from '@supabase/supabase-js';
const SUPABASE_URL = 'https://ydvtmnrszybqnbcqbdcy.supabase.co';
const SUPABASE_KEY = process.env.SUPABASE_SERVICE_ROLE_KEY;
if (!SUPABASE_KEY) {
console.error('❌ SUPABASE_SERVICE_ROLE_KEY environment variable is required');
process.exit(1);
}
const supabase = createClient(SUPABASE_URL, SUPABASE_KEY);
interface ValidationResult {
category: string;
test: string;
passed: boolean;
message?: string;
}
const results: ValidationResult[] = [];
async function getTableColumns(tableName: string): Promise<Set<string>> {
const { data, error } = await supabase
.from('information_schema.columns' as any)
.select('column_name')
.eq('table_schema', 'public')
.eq('table_name', tableName);
if (error) throw error;
return new Set(data?.map((row: any) => row.column_name) || []);
}
async function validateSubmissionTable(
mainTable: string,
submissionTable: string,
entityName: string
): Promise<void> {
const mainColumns = await getTableColumns(mainTable);
const submissionColumns = await getTableColumns(submissionTable);
const excludedFields = new Set([
'id', 'created_at', 'updated_at', 'is_test_data',
'view_count_all', 'view_count_30d', 'view_count_7d',
'average_rating', 'review_count', 'installations_count',
]);
const missingFields: string[] = [];
for (const field of mainColumns) {
if (excludedFields.has(field)) continue;
if (!submissionColumns.has(field)) {
missingFields.push(field);
}
}
if (missingFields.length === 0) {
results.push({
category: 'Submission Tables',
test: `${entityName}: submission table matches main table`,
passed: true,
});
} else {
results.push({
category: 'Submission Tables',
test: `${entityName}: submission table matches main table`,
passed: false,
message: `Missing fields: ${missingFields.join(', ')}`,
});
}
}
async function validateVersionTable(
mainTable: string,
versionTable: string,
entityName: string
): Promise<void> {
const mainColumns = await getTableColumns(mainTable);
const versionColumns = await getTableColumns(versionTable);
const excludedFields = new Set([
'id', 'created_at', 'updated_at', 'is_test_data',
'view_count_all', 'view_count_30d', 'view_count_7d',
'average_rating', 'review_count', 'installations_count',
]);
const fieldMapping: { [key: string]: string } = {
'height_requirement': 'height_requirement_cm',
'max_g_force': 'gforce_max',
'inversions': 'inversions_count',
'max_height_meters': 'height_meters',
'drop_height_meters': 'drop_meters',
};
const requiredVersionFields = new Set([
'version_id', 'version_number', 'change_type', 'change_reason',
'is_current', 'created_by', 'submission_id', 'is_test_data',
]);
const missingMainFields: string[] = [];
const missingVersionFields: string[] = [];
// Check main table fields exist in version table
for (const field of mainColumns) {
if (excludedFields.has(field)) continue;
const mappedField = fieldMapping[field] || field;
if (!versionColumns.has(field) && !versionColumns.has(mappedField)) {
missingMainFields.push(field);
}
}
// Check version metadata fields exist
for (const field of requiredVersionFields) {
if (!versionColumns.has(field)) {
missingVersionFields.push(field);
}
}
if (missingMainFields.length === 0 && missingVersionFields.length === 0) {
results.push({
category: 'Version Tables',
test: `${entityName}: version table has all fields`,
passed: true,
});
} else {
const messages: string[] = [];
if (missingMainFields.length > 0) {
messages.push(`Missing main fields: ${missingMainFields.join(', ')}`);
}
if (missingVersionFields.length > 0) {
messages.push(`Missing version fields: ${missingVersionFields.join(', ')}`);
}
results.push({
category: 'Version Tables',
test: `${entityName}: version table has all fields`,
passed: false,
message: messages.join('; '),
});
}
}
async function validateCriticalFields(): Promise<void> {
const ridesColumns = await getTableColumns('rides');
const rideModelsColumns = await getTableColumns('ride_models');
// Rides should NOT have ride_type
if (!ridesColumns.has('ride_type')) {
results.push({
category: 'Critical Fields',
test: 'rides table does NOT have ride_type column',
passed: true,
});
} else {
results.push({
category: 'Critical Fields',
test: 'rides table does NOT have ride_type column',
passed: false,
message: 'rides table incorrectly has ride_type column',
});
}
// Rides MUST have category
if (ridesColumns.has('category')) {
results.push({
category: 'Critical Fields',
test: 'rides table has category column',
passed: true,
});
} else {
results.push({
category: 'Critical Fields',
test: 'rides table has category column',
passed: false,
message: 'rides table is missing required category column',
});
}
// Ride models must have both category and ride_type
if (rideModelsColumns.has('category') && rideModelsColumns.has('ride_type')) {
results.push({
category: 'Critical Fields',
test: 'ride_models has both category and ride_type',
passed: true,
});
} else {
const missing: string[] = [];
if (!rideModelsColumns.has('category')) missing.push('category');
if (!rideModelsColumns.has('ride_type')) missing.push('ride_type');
results.push({
category: 'Critical Fields',
test: 'ride_models has both category and ride_type',
passed: false,
message: `ride_models is missing: ${missing.join(', ')}`,
});
}
}
async function validateFunctions(): Promise<void> {
const functionsToCheck = [
'create_entity_from_submission',
'update_entity_from_submission',
'process_approval_transaction',
];
for (const funcName of functionsToCheck) {
try {
const { data, error } = await supabase
.rpc('pg_catalog.pg_function_is_visible' as any, {
funcid: `public.${funcName}`::any
} as any);
if (!error) {
results.push({
category: 'Functions',
test: `${funcName} exists and is accessible`,
passed: true,
});
} else {
results.push({
category: 'Functions',
test: `${funcName} exists and is accessible`,
passed: false,
message: error.message,
});
}
} catch (err) {
results.push({
category: 'Functions',
test: `${funcName} exists and is accessible`,
passed: false,
message: err instanceof Error ? err.message : String(err),
});
}
}
}
function printResults(): void {
console.log('\n' + '='.repeat(80));
console.log('Schema Validation Results');
console.log('='.repeat(80) + '\n');
const categories = [...new Set(results.map(r => r.category))];
let totalPassed = 0;
let totalFailed = 0;
for (const category of categories) {
const categoryResults = results.filter(r => r.category === category);
const passed = categoryResults.filter(r => r.passed).length;
const failed = categoryResults.filter(r => !r.passed).length;
console.log(`\n${category}:`);
console.log('-'.repeat(80));
for (const result of categoryResults) {
const icon = result.passed ? '✅' : '❌';
console.log(`${icon} ${result.test}`);
if (result.message) {
console.log(` └─ ${result.message}`);
}
}
totalPassed += passed;
totalFailed += failed;
}
console.log('\n' + '='.repeat(80));
console.log(`Total: ${totalPassed} passed, ${totalFailed} failed`);
console.log('='.repeat(80) + '\n');
}
async function main(): Promise<void> {
console.log('🔍 Starting schema validation...\n');
try {
// Validate submission tables
await validateSubmissionTable('parks', 'park_submissions', 'Parks');
await validateSubmissionTable('rides', 'ride_submissions', 'Rides');
await validateSubmissionTable('companies', 'company_submissions', 'Companies');
await validateSubmissionTable('ride_models', 'ride_model_submissions', 'Ride Models');
// Validate version tables
await validateVersionTable('parks', 'park_versions', 'Parks');
await validateVersionTable('rides', 'ride_versions', 'Rides');
await validateVersionTable('companies', 'company_versions', 'Companies');
await validateVersionTable('ride_models', 'ride_model_versions', 'Ride Models');
// Validate critical fields
await validateCriticalFields();
// Validate functions
await validateFunctions();
// Print results
printResults();
// Exit with appropriate code
const hasFailures = results.some(r => !r.passed);
if (hasFailures) {
console.error('❌ Schema validation failed. Please fix the issues above before deploying.\n');
process.exit(1);
} else {
console.log('✅ All schema validations passed. Safe to deploy.\n');
process.exit(0);
}
} catch (error) {
console.error('❌ Fatal error during validation:');
console.error(error);
process.exit(1);
}
}
main();

View File

@@ -73,8 +73,6 @@ const AdminContact = lazy(() => import("./pages/admin/AdminContact"));
const AdminEmailSettings = lazy(() => import("./pages/admin/AdminEmailSettings"));
const ErrorMonitoring = lazy(() => import("./pages/admin/ErrorMonitoring"));
const ErrorLookup = lazy(() => import("./pages/admin/ErrorLookup"));
const TraceViewer = lazy(() => import("./pages/admin/TraceViewer"));
const RateLimitMetrics = lazy(() => import("./pages/admin/RateLimitMetrics"));
// User routes (lazy-loaded)
const Profile = lazy(() => import("./pages/Profile"));
@@ -389,22 +387,6 @@ function AppContent(): React.JSX.Element {
</AdminErrorBoundary>
}
/>
<Route
path="/admin/trace-viewer"
element={
<AdminErrorBoundary section="Trace Viewer">
<TraceViewer />
</AdminErrorBoundary>
}
/>
<Route
path="/admin/rate-limit-metrics"
element={
<AdminErrorBoundary section="Rate Limit Metrics">
<RateLimitMetrics />
</AdminErrorBoundary>
}
/>
{/* Utility routes - lazy loaded */}
<Route path="/force-logout" element={<ForceLogout />} />

View File

@@ -1,6 +1,5 @@
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog';
import { Badge } from '@/components/ui/badge';
import { Button } from '@/components/ui/button';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { Card, CardContent } from '@/components/ui/card';
import { format } from 'date-fns';
@@ -197,27 +196,6 @@ export function ApprovalFailureModal({ failure, onClose }: ApprovalFailureModalP
</Card>
</TabsContent>
</Tabs>
<div className="flex justify-end gap-2 mt-4">
{failure.request_id && (
<>
<Button
variant="outline"
size="sm"
onClick={() => window.open(`/admin/error-monitoring?tab=edge-functions&requestId=${failure.request_id}`, '_blank')}
>
View Edge Logs
</Button>
<Button
variant="outline"
size="sm"
onClick={() => window.open(`/admin/error-monitoring?tab=traces&traceId=${failure.request_id}`, '_blank')}
>
View Full Trace
</Button>
</>
)}
</div>
</DialogContent>
</Dialog>
);

View File

@@ -1,161 +0,0 @@
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Loader2, Clock } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';
interface CorrelatedLogsViewProps {
requestId: string;
traceId?: string;
}
interface TimelineEvent {
timestamp: Date;
type: 'error' | 'edge' | 'database' | 'approval';
message: string;
severity?: string;
metadata?: Record<string, any>;
}
export function CorrelatedLogsView({ requestId, traceId }: CorrelatedLogsViewProps) {
const { data: events, isLoading } = useQuery({
queryKey: ['correlated-logs', requestId, traceId],
queryFn: async () => {
const events: TimelineEvent[] = [];
// Fetch application error
const { data: error } = await supabase
.from('request_metadata')
.select('*')
.eq('request_id', requestId)
.single();
if (error) {
events.push({
timestamp: new Date(error.created_at),
type: 'error',
message: error.error_message || 'Unknown error',
severity: error.error_type || undefined,
metadata: {
endpoint: error.endpoint,
method: error.method,
status_code: error.status_code,
},
});
}
// Fetch approval metrics
const { data: approval } = await supabase
.from('approval_transaction_metrics')
.select('*')
.eq('request_id', requestId)
.maybeSingle();
if (approval && approval.created_at) {
events.push({
timestamp: new Date(approval.created_at),
type: 'approval',
message: approval.success ? 'Approval successful' : (approval.error_message || 'Approval failed'),
severity: approval.success ? 'success' : 'error',
metadata: {
items_count: approval.items_count,
duration_ms: approval.duration_ms || undefined,
},
});
}
// TODO: Fetch edge function logs (requires Management API access)
// TODO: Fetch database logs (requires analytics API access)
// Sort chronologically
events.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
return events;
},
});
const getTypeColor = (type: string): "default" | "destructive" | "outline" | "secondary" => {
switch (type) {
case 'error': return 'destructive';
case 'approval': return 'destructive';
case 'edge': return 'default';
case 'database': return 'secondary';
default: return 'outline';
}
};
if (isLoading) {
return (
<div className="flex items-center justify-center py-12">
<Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
</div>
);
}
if (!events || events.length === 0) {
return (
<Card>
<CardContent className="pt-6">
<p className="text-center text-muted-foreground">
No correlated logs found for this request.
</p>
</CardContent>
</Card>
);
}
return (
<Card>
<CardHeader>
<CardTitle className="text-lg flex items-center gap-2">
<Clock className="w-5 h-5" />
Timeline for Request {requestId.slice(0, 8)}
</CardTitle>
</CardHeader>
<CardContent>
<div className="relative space-y-4">
{/* Timeline line */}
<div className="absolute left-6 top-0 bottom-0 w-0.5 bg-border" />
{events.map((event, index) => (
<div key={index} className="relative pl-14">
{/* Timeline dot */}
<div className="absolute left-[18px] top-2 w-4 h-4 rounded-full bg-background border-2 border-primary" />
<Card>
<CardContent className="pt-4">
<div className="space-y-2">
<div className="flex items-center gap-2">
<Badge variant={getTypeColor(event.type)}>
{event.type.toUpperCase()}
</Badge>
{event.severity && (
<Badge variant="outline" className="text-xs">
{event.severity}
</Badge>
)}
<span className="text-xs text-muted-foreground">
{format(event.timestamp, 'HH:mm:ss.SSS')}
</span>
</div>
<p className="text-sm">{event.message}</p>
{event.metadata && Object.keys(event.metadata).length > 0 && (
<div className="text-xs text-muted-foreground space-y-1">
{Object.entries(event.metadata).map(([key, value]) => (
<div key={key}>
<span className="font-medium">{key}:</span> {String(value)}
</div>
))}
</div>
)}
</div>
</CardContent>
</Card>
</div>
))}
</div>
</CardContent>
</Card>
);
}

View File

@@ -1,172 +0,0 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Loader2, Search, ChevronDown, ChevronRight } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';
interface DatabaseLog {
id: string;
timestamp: number;
identifier: string;
error_severity: string;
event_message: string;
}
export function DatabaseLogs() {
const [searchTerm, setSearchTerm] = useState('');
const [severity, setSeverity] = useState<string>('all');
const [timeRange, setTimeRange] = useState<'1h' | '24h' | '7d'>('24h');
const [expandedLog, setExpandedLog] = useState<string | null>(null);
const { data: logs, isLoading } = useQuery({
queryKey: ['database-logs', severity, timeRange],
queryFn: async () => {
// For now, return empty array as we need proper permissions for analytics query
// In production, this would use Supabase Analytics API
// const hoursAgo = timeRange === '1h' ? 1 : timeRange === '24h' ? 24 : 168;
// const startTime = Date.now() * 1000 - (hoursAgo * 60 * 60 * 1000 * 1000);
return [] as DatabaseLog[];
},
refetchInterval: 30000,
});
const filteredLogs = logs?.filter(log => {
if (searchTerm && !log.event_message.toLowerCase().includes(searchTerm.toLowerCase())) {
return false;
}
return true;
}) || [];
const getSeverityColor = (severity: string): "default" | "destructive" | "outline" | "secondary" => {
switch (severity.toUpperCase()) {
case 'ERROR': return 'destructive';
case 'WARNING': return 'destructive';
case 'NOTICE': return 'default';
case 'LOG': return 'secondary';
default: return 'outline';
}
};
const isSpanLog = (message: string) => {
return message.includes('SPAN:') || message.includes('SPAN_EVENT:');
};
const toggleExpand = (logId: string) => {
setExpandedLog(expandedLog === logId ? null : logId);
};
return (
<div className="space-y-4">
<div className="flex flex-col md:flex-row gap-4">
<div className="flex-1">
<div className="relative">
<Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
<Input
placeholder="Search database logs..."
value={searchTerm}
onChange={(e) => setSearchTerm(e.target.value)}
className="pl-10"
/>
</div>
</div>
<Select value={severity} onValueChange={setSeverity}>
<SelectTrigger className="w-[150px]">
<SelectValue placeholder="Severity" />
</SelectTrigger>
<SelectContent>
<SelectItem value="all">All Levels</SelectItem>
<SelectItem value="ERROR">Error</SelectItem>
<SelectItem value="WARNING">Warning</SelectItem>
<SelectItem value="NOTICE">Notice</SelectItem>
<SelectItem value="LOG">Log</SelectItem>
</SelectContent>
</Select>
<Select value={timeRange} onValueChange={(v) => setTimeRange(v as any)}>
<SelectTrigger className="w-[120px]">
<SelectValue />
</SelectTrigger>
<SelectContent>
<SelectItem value="1h">Last Hour</SelectItem>
<SelectItem value="24h">Last 24h</SelectItem>
<SelectItem value="7d">Last 7 Days</SelectItem>
</SelectContent>
</Select>
</div>
{isLoading ? (
<div className="flex items-center justify-center py-12">
<Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
</div>
) : filteredLogs.length === 0 ? (
<Card>
<CardContent className="pt-6">
<p className="text-center text-muted-foreground">
No database logs found for the selected criteria.
</p>
</CardContent>
</Card>
) : (
<div className="space-y-2">
{filteredLogs.map((log) => (
<Card key={log.id} className="overflow-hidden">
<CardHeader
className="py-3 cursor-pointer hover:bg-muted/50 transition-colors"
onClick={() => toggleExpand(log.id)}
>
<div className="flex items-center justify-between">
<div className="flex items-center gap-3">
{expandedLog === log.id ? (
<ChevronDown className="w-4 h-4 text-muted-foreground" />
) : (
<ChevronRight className="w-4 h-4 text-muted-foreground" />
)}
<Badge variant={getSeverityColor(log.error_severity)}>
{log.error_severity}
</Badge>
{isSpanLog(log.event_message) && (
<Badge variant="outline" className="text-xs">
TRACE
</Badge>
)}
<span className="text-sm text-muted-foreground">
{format(log.timestamp / 1000, 'HH:mm:ss.SSS')}
</span>
</div>
<span className="text-sm truncate max-w-[500px]">
{log.event_message.slice(0, 100)}
{log.event_message.length > 100 && '...'}
</span>
</div>
</CardHeader>
{expandedLog === log.id && (
<CardContent className="pt-0 pb-4 border-t">
<div className="space-y-2 mt-4">
<div>
<span className="text-xs text-muted-foreground">Full Message:</span>
<pre className="text-xs font-mono mt-1 whitespace-pre-wrap break-all">
{log.event_message}
</pre>
</div>
<div>
<span className="text-xs text-muted-foreground">Timestamp:</span>
<p className="text-sm">{format(log.timestamp / 1000, 'PPpp')}</p>
</div>
<div>
<span className="text-xs text-muted-foreground">Identifier:</span>
<p className="text-sm font-mono">{log.identifier}</p>
</div>
</div>
</CardContent>
)}
</Card>
))}
</div>
)}
</div>
);
}

View File

@@ -1,168 +0,0 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Input } from '@/components/ui/input';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Loader2, Search, ChevronDown, ChevronRight } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';
interface EdgeFunctionLog {
id: string;
timestamp: number;
event_type: string;
event_message: string;
function_id: string;
level: string;
}
const FUNCTION_NAMES = [
'detect-location',
'process-selective-approval',
'process-selective-rejection',
];
/**
 * Admin panel listing recent Edge Function logs with free-text search,
 * per-function and time-range filters, and expandable detail rows.
 *
 * NOTE(review): the query below is currently a stub that always yields an
 * empty list — fetching real rows requires the Supabase Management API and
 * elevated permissions. The surrounding UI is fully functional once rows
 * are returned.
 */
export function EdgeFunctionLogs() {
  /** Selectable look-back windows for the log query. */
  type TimeRange = '1h' | '24h' | '7d';

  const [selectedFunction, setSelectedFunction] = useState<string>('all');
  const [searchTerm, setSearchTerm] = useState('');
  const [timeRange, setTimeRange] = useState<TimeRange>('24h');
  // id of the currently expanded log card; null when all are collapsed.
  const [expandedLog, setExpandedLog] = useState<string | null>(null);

  const { data: logs, isLoading } = useQuery({
    queryKey: ['edge-function-logs', selectedFunction, timeRange],
    queryFn: async () => {
      // TODO: call the Supabase Management API, constraining results to
      // Date.now() - windowMs, where windowMs derives from `timeRange`
      // (1h / 24h / 168h). Until then, return an empty list so the UI
      // renders its empty state.
      const allLogs: EdgeFunctionLog[] = [];
      return allLogs;
    },
    refetchInterval: 30000, // Refresh every 30 seconds
  });

  // Client-side message search. Filtering by `selectedFunction` is not yet
  // applied here — logs only carry `function_id`, and the id→name mapping
  // is not available until the real API is integrated.
  const filteredLogs = logs?.filter(log => {
    if (searchTerm && !log.event_message.toLowerCase().includes(searchTerm.toLowerCase())) {
      return false;
    }
    return true;
  }) || [];

  // Map a log level onto a Badge variant (case-insensitive).
  const getLevelColor = (level: string): "default" | "destructive" | "secondary" => {
    switch (level.toLowerCase()) {
      case 'error': return 'destructive';
      case 'warn': return 'destructive';
      case 'info': return 'default';
      default: return 'secondary';
    }
  };

  // Toggle one log card open/closed; at most one card is expanded at a time.
  const toggleExpand = (logId: string) => {
    setExpandedLog(expandedLog === logId ? null : logId);
  };

  return (
    <div className="space-y-4">
      <div className="flex flex-col md:flex-row gap-4">
        <div className="flex-1">
          <div className="relative">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search logs..."
              value={searchTerm}
              onChange={(e) => setSearchTerm(e.target.value)}
              className="pl-10"
            />
          </div>
        </div>
        <Select value={selectedFunction} onValueChange={setSelectedFunction}>
          <SelectTrigger className="w-[200px]">
            <SelectValue placeholder="Select function" />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="all">All Functions</SelectItem>
            {FUNCTION_NAMES.map(name => (
              <SelectItem key={name} value={name}>{name}</SelectItem>
            ))}
          </SelectContent>
        </Select>
        {/* Radix Select reports plain strings; the items below are all
            TimeRange members, so the narrowing cast is safe. */}
        <Select value={timeRange} onValueChange={(v) => setTimeRange(v as TimeRange)}>
          <SelectTrigger className="w-[120px]">
            <SelectValue />
          </SelectTrigger>
          <SelectContent>
            <SelectItem value="1h">Last Hour</SelectItem>
            <SelectItem value="24h">Last 24h</SelectItem>
            <SelectItem value="7d">Last 7 Days</SelectItem>
          </SelectContent>
        </Select>
      </div>
      {isLoading ? (
        <div className="flex items-center justify-center py-12">
          <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
        </div>
      ) : filteredLogs.length === 0 ? (
        <Card>
          <CardContent className="pt-6">
            <p className="text-center text-muted-foreground">
              No edge function logs found. Logs will appear here when edge functions are invoked.
            </p>
          </CardContent>
        </Card>
      ) : (
        <div className="space-y-2">
          {filteredLogs.map((log) => (
            <Card key={log.id} className="overflow-hidden">
              <CardHeader
                className="py-3 cursor-pointer hover:bg-muted/50 transition-colors"
                onClick={() => toggleExpand(log.id)}
              >
                <div className="flex items-center justify-between">
                  <div className="flex items-center gap-3">
                    {expandedLog === log.id ? (
                      <ChevronDown className="w-4 h-4 text-muted-foreground" />
                    ) : (
                      <ChevronRight className="w-4 h-4 text-muted-foreground" />
                    )}
                    <Badge variant={getLevelColor(log.level)}>
                      {log.level}
                    </Badge>
                    <span className="text-sm text-muted-foreground">
                      {format(log.timestamp, 'HH:mm:ss.SSS')}
                    </span>
                    <Badge variant="outline" className="text-xs">
                      {log.event_type}
                    </Badge>
                  </div>
                  <span className="text-sm truncate max-w-[400px]">
                    {log.event_message}
                  </span>
                </div>
              </CardHeader>
              {expandedLog === log.id && (
                <CardContent className="pt-0 pb-4 border-t">
                  <div className="space-y-2 mt-4">
                    <div>
                      <span className="text-xs text-muted-foreground">Full Message:</span>
                      <p className="text-sm font-mono mt-1">{log.event_message}</p>
                    </div>
                    <div>
                      <span className="text-xs text-muted-foreground">Timestamp:</span>
                      <p className="text-sm">{format(log.timestamp, 'PPpp')}</p>
                    </div>
                  </div>
                </CardContent>
              )}
            </Card>
          ))}
        </div>
      )}
    </div>
  );
}

View File

@@ -222,30 +222,12 @@ ${error.error_stack ? `Stack Trace:\n${error.error_stack}` : ''}
</TabsContent>
</Tabs>
<div className="flex justify-between items-center">
<div className="flex gap-2">
<Button
variant="outline"
size="sm"
onClick={() => window.open(`/admin/error-monitoring?tab=edge-functions&requestId=${error.request_id}`, '_blank')}
>
View Edge Logs
</Button>
<Button
variant="outline"
size="sm"
onClick={() => window.open(`/admin/error-monitoring?tab=database&requestId=${error.request_id}`, '_blank')}
>
View DB Logs
</Button>
</div>
<div className="flex gap-2">
<Button variant="outline" onClick={copyErrorReport}>
<Copy className="w-4 h-4 mr-2" />
Copy Report
</Button>
<Button onClick={onClose}>Close</Button>
</div>
<div className="flex justify-end gap-2">
<Button variant="outline" onClick={copyErrorReport}>
<Copy className="w-4 h-4 mr-2" />
Copy Report
</Button>
<Button onClick={onClose}>Close</Button>
</div>
</DialogContent>
</Dialog>

View File

@@ -14,11 +14,10 @@ import { ScrollArea } from '@/components/ui/scroll-area';
import { Badge } from '@/components/ui/badge';
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible';
import { useSuperuserGuard } from '@/hooks/useSuperuserGuard';
import { IntegrationTestRunner as TestRunner, allTestSuites, type TestResult, formatResultsAsMarkdown, formatSingleTestAsMarkdown } from '@/lib/integrationTests';
import { Play, Square, Download, ChevronDown, CheckCircle2, XCircle, Clock, SkipForward, Copy, ClipboardX } from 'lucide-react';
import { IntegrationTestRunner as TestRunner, allTestSuites, type TestResult } from '@/lib/integrationTests';
import { Play, Square, Download, ChevronDown, CheckCircle2, XCircle, Clock, SkipForward } from 'lucide-react';
import { toast } from 'sonner';
import { handleError } from '@/lib/errorHandler';
import { CleanupReport } from '@/components/ui/cleanup-report';
export function IntegrationTestRunner() {
const superuserGuard = useSuperuserGuard();
@@ -106,38 +105,6 @@ export function IntegrationTestRunner() {
toast.success('Test results exported');
}, [runner]);
const copyAllResults = useCallback(async () => {
const summary = runner.getSummary();
const results = runner.getResults();
const markdown = formatResultsAsMarkdown(results, summary);
await navigator.clipboard.writeText(markdown);
toast.success('All test results copied to clipboard');
}, [runner]);
const copyFailedTests = useCallback(async () => {
const summary = runner.getSummary();
const failedResults = runner.getResults().filter(r => r.status === 'fail');
if (failedResults.length === 0) {
toast.info('No failed tests to copy');
return;
}
const markdown = formatResultsAsMarkdown(failedResults, summary, true);
await navigator.clipboard.writeText(markdown);
toast.success(`${failedResults.length} failed test(s) copied to clipboard`);
}, [runner]);
const copyTestResult = useCallback(async (result: TestResult) => {
const markdown = formatSingleTestAsMarkdown(result);
await navigator.clipboard.writeText(markdown);
toast.success('Test result copied to clipboard');
}, []);
// Guard is handled by the route/page, no loading state needed here
const summary = runner.getSummary();
@@ -199,22 +166,10 @@ export function IntegrationTestRunner() {
</Button>
)}
{results.length > 0 && !isRunning && (
<>
<Button onClick={exportResults} variant="outline">
<Download className="w-4 h-4 mr-2" />
Export JSON
</Button>
<Button onClick={copyAllResults} variant="outline">
<Copy className="w-4 h-4 mr-2" />
Copy All
</Button>
{summary.failed > 0 && (
<Button onClick={copyFailedTests} variant="outline">
<ClipboardX className="w-4 h-4 mr-2" />
Copy Failed ({summary.failed})
</Button>
)}
</>
<Button onClick={exportResults} variant="outline">
<Download className="w-4 h-4 mr-2" />
Export Results
</Button>
)}
</div>
@@ -253,11 +208,6 @@ export function IntegrationTestRunner() {
</CardContent>
</Card>
{/* Cleanup Report */}
{!isRunning && summary.cleanup && (
<CleanupReport summary={summary.cleanup} />
)}
{/* Results */}
{results.length > 0 && (
<Card>
@@ -270,13 +220,11 @@ export function IntegrationTestRunner() {
{results.map(result => (
<Collapsible key={result.id}>
<div className="flex items-start gap-3 p-3 rounded-lg border bg-card">
<div className="pt-0.5">
<div className="pt-0.5">
{result.status === 'pass' && <CheckCircle2 className="w-4 h-4 text-green-500" />}
{result.status === 'fail' && <XCircle className="w-4 h-4 text-destructive" />}
{result.status === 'skip' && !result.name.includes('⏳') && <SkipForward className="w-4 h-4 text-muted-foreground" />}
{result.status === 'skip' && result.name.includes('⏳') && <Clock className="w-4 h-4 text-muted-foreground" />}
{result.status === 'running' && !result.name.includes('⏳') && <Clock className="w-4 h-4 text-blue-500 animate-pulse" />}
{result.status === 'running' && result.name.includes('⏳') && <Clock className="w-4 h-4 text-amber-500 animate-pulse" />}
{result.status === 'skip' && <SkipForward className="w-4 h-4 text-muted-foreground" />}
{result.status === 'running' && <Clock className="w-4 h-4 text-blue-500 animate-pulse" />}
</div>
<div className="flex-1 space-y-1">
<div className="flex items-start justify-between gap-2">
@@ -288,14 +236,6 @@ export function IntegrationTestRunner() {
<Badge variant="outline" className="text-xs">
{result.duration}ms
</Badge>
<Button
variant="ghost"
size="sm"
className="h-6 w-6 p-0"
onClick={() => copyTestResult(result)}
>
<Copy className="h-3 w-3" />
</Button>
{(result.error || result.details) && (
<CollapsibleTrigger asChild>
<Button variant="ghost" size="sm" className="h-6 w-6 p-0">

View File

@@ -1,203 +0,0 @@
import { useState } from 'react';
import { useQuery } from '@tanstack/react-query';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Button } from '@/components/ui/button';
import { Badge } from '@/components/ui/badge';
import { Search, Loader2, ExternalLink } from 'lucide-react';
import { format } from 'date-fns';
import { supabase } from '@/lib/supabaseClient';
/** A single hit from the cross-source log search, normalised across sources. */
interface SearchResult {
  // Backing log source; drives badge colour, label, and click navigation.
  type: 'error' | 'approval' | 'edge' | 'database';
  id: string;
  // ISO timestamp string; parsed with `new Date(...)` for sorting/display.
  timestamp: string;
  message: string;
  severity?: string;
  // Source-specific extras (e.g. { request_id }); values are untyped at the
  // boundary, so `unknown` forces narrowing at any future read site.
  metadata?: Record<string, unknown>;
}

/** Props: `onNavigate` switches the parent monitoring tab and applies filters. */
interface UnifiedLogSearchProps {
  onNavigate: (tab: string, filters: Record<string, string>) => void;
}
/**
 * Cross-source log search for the error-monitoring dashboard.
 *
 * One committed search term is run against multiple log sources
 * (application errors in `request_metadata`, failed approval transactions
 * in `approval_transaction_metrics`); hits are normalised to `SearchResult`,
 * merged newest-first, and clicking a hit hands off to `onNavigate` so the
 * parent can open the matching tab with filters applied.
 */
export function UnifiedLogSearch({ onNavigate }: UnifiedLogSearchProps) {
  // `searchQuery` mirrors the input field; `searchTerm` is the committed
  // term (Enter key or button click) that actually drives the query below.
  const [searchQuery, setSearchQuery] = useState('');
  const [searchTerm, setSearchTerm] = useState('');

  const { data: results, isLoading } = useQuery({
    queryKey: ['unified-log-search', searchTerm],
    queryFn: async () => {
      if (!searchTerm) return [];

      // Commas and parentheses are structural characters in PostgREST
      // `or=(...)` filter expressions, so strip them from user input before
      // interpolating — previously a term like "a,b" corrupted the filter
      // and broke the request.
      const term = searchTerm.replace(/[,()]/g, '');
      const results: SearchResult[] = [];

      // Search application errors by request id or error message.
      const { data: errors } = await supabase
        .from('request_metadata')
        .select('request_id, created_at, error_type, error_message')
        .or(`request_id.ilike.%${term}%,error_message.ilike.%${term}%`)
        .order('created_at', { ascending: false })
        .limit(10);

      if (errors) {
        results.push(...errors.map(e => ({
          type: 'error' as const,
          id: e.request_id,
          timestamp: e.created_at,
          message: e.error_message || 'Unknown error',
          severity: e.error_type || undefined,
        })));
      }

      // Search failed approval transactions (success = false only).
      const { data: approvals } = await supabase
        .from('approval_transaction_metrics')
        .select('id, created_at, error_message, request_id')
        .eq('success', false)
        .or(`request_id.ilike.%${term}%,error_message.ilike.%${term}%`)
        .order('created_at', { ascending: false })
        .limit(10);

      if (approvals) {
        // Rows without created_at are dropped so the non-null assertion
        // below is safe.
        results.push(...approvals
          .filter(a => a.created_at)
          .map(a => ({
            type: 'approval' as const,
            id: a.id,
            timestamp: a.created_at!,
            message: a.error_message || 'Approval failed',
            metadata: { request_id: a.request_id },
          })));
      }

      // Merge both sources newest-first.
      results.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
      return results;
    },
    enabled: !!searchTerm,
  });

  // Commit the typed query, kicking off the search.
  const handleSearch = () => {
    setSearchTerm(searchQuery);
  };

  // Badge variant per result source.
  const getTypeColor = (type: string): "default" | "destructive" | "outline" | "secondary" => {
    switch (type) {
      case 'error': return 'destructive';
      case 'approval': return 'destructive';
      case 'edge': return 'default';
      case 'database': return 'secondary';
      default: return 'outline';
    }
  };

  // Human-readable label per result source.
  const getTypeLabel = (type: string) => {
    switch (type) {
      case 'error': return 'Application Error';
      case 'approval': return 'Approval Failure';
      case 'edge': return 'Edge Function';
      case 'database': return 'Database Log';
      default: return type;
    }
  };

  // Route a clicked hit to the tab + filter combination for its source.
  const handleResultClick = (result: SearchResult) => {
    switch (result.type) {
      case 'error':
        onNavigate('errors', { requestId: result.id });
        break;
      case 'approval':
        onNavigate('approvals', { failureId: result.id });
        break;
      case 'edge':
        onNavigate('edge-functions', { search: result.message });
        break;
      case 'database':
        onNavigate('database', { search: result.message });
        break;
    }
  };

  return (
    <Card>
      <CardHeader>
        <CardTitle className="text-lg">Unified Log Search</CardTitle>
      </CardHeader>
      <CardContent className="space-y-4">
        <div className="flex gap-2">
          <div className="relative flex-1">
            <Search className="absolute left-3 top-1/2 -translate-y-1/2 w-4 h-4 text-muted-foreground" />
            <Input
              placeholder="Search across all logs (request ID, error message, trace ID...)"
              value={searchQuery}
              onChange={(e) => setSearchQuery(e.target.value)}
              onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
              className="pl-10"
            />
          </div>
          <Button onClick={handleSearch} disabled={!searchQuery || isLoading}>
            {isLoading ? (
              <Loader2 className="w-4 h-4 animate-spin" />
            ) : (
              <Search className="w-4 h-4" />
            )}
          </Button>
        </div>
        {searchTerm && (
          <div className="space-y-2">
            {isLoading ? (
              <div className="flex items-center justify-center py-8">
                <Loader2 className="w-6 h-6 animate-spin text-muted-foreground" />
              </div>
            ) : results && results.length > 0 ? (
              <>
                <div className="text-sm text-muted-foreground">
                  Found {results.length} results
                </div>
                {results.map((result) => (
                  <Card
                    key={`${result.type}-${result.id}`}
                    className="cursor-pointer hover:bg-muted/50 transition-colors"
                    onClick={() => handleResultClick(result)}
                  >
                    <CardContent className="pt-4 pb-3">
                      <div className="flex items-start justify-between gap-4">
                        <div className="flex-1 space-y-1">
                          <div className="flex items-center gap-2">
                            <Badge variant={getTypeColor(result.type)}>
                              {getTypeLabel(result.type)}
                            </Badge>
                            {result.severity && (
                              <Badge variant="outline" className="text-xs">
                                {result.severity}
                              </Badge>
                            )}
                            <span className="text-xs text-muted-foreground">
                              {format(new Date(result.timestamp), 'PPp')}
                            </span>
                          </div>
                          <p className="text-sm line-clamp-2">{result.message}</p>
                          <code className="text-xs text-muted-foreground">
                            {result.id.slice(0, 16)}...
                          </code>
                        </div>
                        <ExternalLink className="w-4 h-4 text-muted-foreground flex-shrink-0" />
                      </div>
                    </CardContent>
                  </Card>
                ))}
              </>
            ) : (
              <p className="text-center text-muted-foreground py-8">
                No results found for "{searchTerm}"
              </p>
            )}
          </div>
        )}
      </CardContent>
    </Card>
  );
}

View File

@@ -1,4 +1,4 @@
import { LayoutDashboard, FileText, Flag, Users, Settings, ArrowLeft, ScrollText, BookOpen, Inbox, Mail, AlertTriangle, Shield } from 'lucide-react';
import { LayoutDashboard, FileText, Flag, Users, Settings, ArrowLeft, ScrollText, BookOpen, Inbox, Mail, AlertTriangle } from 'lucide-react';
import { NavLink } from 'react-router-dom';
import { useUserRole } from '@/hooks/useUserRole';
import { useSidebar } from '@/hooks/useSidebar';
@@ -49,15 +49,10 @@ export function AdminSidebar() {
icon: ScrollText,
},
{
title: 'Monitoring & Logs',
title: 'Error Monitoring',
url: '/admin/error-monitoring',
icon: AlertTriangle,
},
{
title: 'Rate Limit Metrics',
url: '/admin/rate-limit-metrics',
icon: Shield,
},
{
title: 'Users',
url: '/admin/users',

View File

@@ -1,221 +0,0 @@
/**
* Cleanup Verification Report Component
*
* Displays detailed results of test data cleanup after integration tests complete.
* Shows tables cleaned, records deleted, errors, and verification status.
*/
import { CheckCircle2, XCircle, AlertCircle, Database, Trash2, Clock } from 'lucide-react';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { Progress } from '@/components/ui/progress';
import type { CleanupSummary } from '@/lib/integrationTests/testCleanup';
/** Shared props for both cleanup-report variants below. */
interface CleanupReportProps {
  // Aggregated cleanup results from the integration-test cleanup pass.
  summary: CleanupSummary;
  // Extra CSS classes appended to the root element.
  className?: string;
}
/**
 * Full cleanup report card: summary stats, success-rate bar, per-table
 * results, and a status banner (errors / success / nothing-to-clean).
 */
export function CleanupReport({ summary, className = '' }: CleanupReportProps) {
  // Per-table outcomes: a result with `error` set counts as a failure.
  const successCount = summary.results.filter(r => !r.error).length;
  const errorCount = summary.results.filter(r => r.error).length;
  // Share of tables cleaned without error, guarded against an empty run.
  const successRate = summary.results.length > 0
    ? (successCount / summary.results.length) * 100
    : 0;
  return (
    <Card className={`border-border ${className}`}>
      <CardHeader>
        <CardTitle className="flex items-center gap-2">
          <Trash2 className="h-5 w-5 text-muted-foreground" />
          Test Data Cleanup Report
        </CardTitle>
      </CardHeader>
      <CardContent className="space-y-4">
        {/* Summary Stats */}
        <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Total Deleted</p>
            <p className="text-2xl font-bold text-foreground">
              {summary.totalDeleted.toLocaleString()}
            </p>
          </div>
          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Tables Cleaned</p>
            <p className="text-2xl font-bold text-foreground">
              {successCount}/{summary.results.length}
            </p>
          </div>
          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Duration</p>
            <p className="text-2xl font-bold text-foreground flex items-center gap-1">
              <Clock className="h-4 w-4" />
              {(summary.totalDuration / 1000).toFixed(1)}s
            </p>
          </div>
          <div className="space-y-1">
            <p className="text-sm text-muted-foreground">Status</p>
            <Badge
              variant={summary.success ? "default" : "destructive"}
              className="text-base font-semibold"
            >
              {summary.success ? (
                <span className="flex items-center gap-1">
                  <CheckCircle2 className="h-4 w-4" />
                  Complete
                </span>
              ) : (
                <span className="flex items-center gap-1">
                  <XCircle className="h-4 w-4" />
                  Failed
                </span>
              )}
            </Badge>
          </div>
        </div>
        {/* Success Rate Progress */}
        <div className="space-y-2">
          <div className="flex justify-between text-sm">
            <span className="text-muted-foreground">Success Rate</span>
            <span className="font-medium text-foreground">{successRate.toFixed(1)}%</span>
          </div>
          <Progress value={successRate} className="h-2" />
        </div>
        {/* Table-by-Table Results */}
        <div className="space-y-2">
          <h3 className="text-sm font-semibold text-foreground flex items-center gap-2">
            <Database className="h-4 w-4" />
            Cleanup Details
          </h3>
          <div className="space-y-1 max-h-64 overflow-y-auto border border-border rounded-md">
            {summary.results.map((result, index) => (
              <div
                key={`${result.table}-${index}`}
                className="flex items-center justify-between p-3 hover:bg-accent/50 transition-colors border-b border-border last:border-b-0"
              >
                <div className="flex items-center gap-3 flex-1 min-w-0">
                  {/* Icon: error → X, rows deleted → check, nothing deleted → info */}
                  {result.error ? (
                    <XCircle className="h-4 w-4 text-destructive flex-shrink-0" />
                  ) : result.deleted > 0 ? (
                    <CheckCircle2 className="h-4 w-4 text-green-600 dark:text-green-400 flex-shrink-0" />
                  ) : (
                    <AlertCircle className="h-4 w-4 text-muted-foreground flex-shrink-0" />
                  )}
                  <div className="flex-1 min-w-0">
                    <p className="font-mono text-sm text-foreground truncate">
                      {result.table}
                    </p>
                    {result.error && (
                      <p className="text-xs text-destructive truncate">
                        {result.error}
                      </p>
                    )}
                  </div>
                </div>
                <div className="flex items-center gap-3 flex-shrink-0">
                  <Badge
                    variant={result.deleted > 0 ? "default" : "secondary"}
                    className="font-mono"
                  >
                    {result.deleted} deleted
                  </Badge>
                  <span className="text-xs text-muted-foreground font-mono w-16 text-right">
                    {result.duration}ms
                  </span>
                </div>
              </div>
            ))}
          </div>
        </div>
        {/* Error Summary (if any) */}
        {errorCount > 0 && (
          <div className="p-3 bg-destructive/10 border border-destructive/20 rounded-md">
            <div className="flex items-start gap-2">
              <AlertCircle className="h-5 w-5 text-destructive flex-shrink-0 mt-0.5" />
              <div>
                <p className="text-sm font-semibold text-destructive">
                  {errorCount} {errorCount === 1 ? 'table' : 'tables'} failed to clean
                </p>
                <p className="text-xs text-destructive/80 mt-1">
                  Check error messages above for details. Test data may remain in database.
                </p>
              </div>
            </div>
          </div>
        )}
        {/* Success Message */}
        {summary.success && summary.totalDeleted > 0 && (
          <div className="p-3 bg-green-500/10 border border-green-500/20 rounded-md">
            <div className="flex items-start gap-2">
              <CheckCircle2 className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0 mt-0.5" />
              <div>
                <p className="text-sm font-semibold text-green-700 dark:text-green-300">
                  Cleanup completed successfully
                </p>
                <p className="text-xs text-green-600 dark:text-green-400 mt-1">
                  All test data has been removed from the database.
                </p>
              </div>
            </div>
          </div>
        )}
        {/* No Data Message */}
        {summary.success && summary.totalDeleted === 0 && (
          <div className="p-3 bg-muted border border-border rounded-md">
            <div className="flex items-start gap-2">
              <AlertCircle className="h-5 w-5 text-muted-foreground flex-shrink-0 mt-0.5" />
              <div>
                <p className="text-sm font-semibold text-muted-foreground">
                  No test data found
                </p>
                <p className="text-xs text-muted-foreground mt-1">
                  Database is already clean or no test data was created during this run.
                </p>
              </div>
            </div>
          </div>
        )}
      </CardContent>
    </Card>
  );
}
/**
* Compact version for inline display in test results
*/
export function CleanupReportCompact({ summary }: CleanupReportProps) {
return (
<div className="flex items-center gap-3 p-3 bg-accent/50 rounded-md border border-border">
<Trash2 className="h-5 w-5 text-muted-foreground flex-shrink-0" />
<div className="flex-1 min-w-0">
<p className="text-sm font-medium text-foreground">
Cleanup: {summary.totalDeleted} records deleted
</p>
<p className="text-xs text-muted-foreground">
{summary.results.filter(r => !r.error).length}/{summary.results.length} tables cleaned
{' • '}
{(summary.totalDuration / 1000).toFixed(1)}s
</p>
</div>
{summary.success ? (
<CheckCircle2 className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0" />
) : (
<XCircle className="h-5 w-5 text-destructive flex-shrink-0" />
)}
</div>
);
}

View File

@@ -12,8 +12,6 @@ interface RetryStatus {
type: string;
state: 'retrying' | 'success' | 'failed';
errorId?: string;
isRateLimit?: boolean;
retryAfter?: number;
}
/**
@@ -26,22 +24,12 @@ export function RetryStatusIndicator() {
useEffect(() => {
const handleRetry = (event: Event) => {
const customEvent = event as CustomEvent<Omit<RetryStatus, 'state' | 'countdown'>>;
const { id, attempt, maxAttempts, delay, type, isRateLimit, retryAfter } = customEvent.detail;
const customEvent = event as CustomEvent<Omit<RetryStatus, 'state'>>;
const { id, attempt, maxAttempts, delay, type } = customEvent.detail;
setRetries(prev => {
const next = new Map(prev);
next.set(id, {
id,
attempt,
maxAttempts,
delay,
type,
state: 'retrying',
countdown: delay,
isRateLimit,
retryAfter
});
next.set(id, { id, attempt, maxAttempts, delay, type, state: 'retrying', countdown: delay });
return next;
});
};
@@ -173,17 +161,6 @@ function RetryCard({ retry }: { retry: RetryStatus & { countdown: number } }) {
// Retrying state
const progress = retry.delay > 0 ? ((retry.delay - retry.countdown) / retry.delay) * 100 : 0;
// Customize message based on rate limit status
const getMessage = () => {
if (retry.isRateLimit) {
if (retry.retryAfter) {
return `Rate limit reached. Waiting ${Math.ceil(retry.countdown / 1000)}s as requested by server...`;
}
return `Rate limit reached. Using smart backoff - retrying in ${Math.ceil(retry.countdown / 1000)}s...`;
}
return `Network issue detected. Retrying ${retry.type} submission in ${Math.ceil(retry.countdown / 1000)}s`;
};
return (
<Card className="p-4 shadow-lg border-amber-500 bg-amber-50 dark:bg-amber-950 w-80 animate-in slide-in-from-bottom-4">
<div className="flex items-start gap-3">
@@ -191,7 +168,7 @@ function RetryCard({ retry }: { retry: RetryStatus & { countdown: number } }) {
<div className="flex-1 space-y-2">
<div className="flex items-center justify-between">
<p className="text-sm font-medium text-amber-900 dark:text-amber-100">
{retry.isRateLimit ? 'Rate Limited' : 'Retrying submission...'}
Retrying submission...
</p>
<span className="text-xs font-mono text-amber-700 dark:text-amber-300">
{retry.attempt}/{retry.maxAttempts}
@@ -199,7 +176,7 @@ function RetryCard({ retry }: { retry: RetryStatus & { countdown: number } }) {
</div>
<p className="text-xs text-amber-700 dark:text-amber-300">
{getMessage()}
Network issue detected. Retrying {retry.type} submission in {Math.ceil(retry.countdown / 1000)}s
</p>
<Progress value={progress} className="h-1" />

View File

@@ -52,31 +52,6 @@ export function UppyPhotoSubmissionUpload({
const { user } = useAuth();
const { toast } = useToast();
/**
* ✅ CRITICAL FIX: Cleanup orphaned Cloudflare images
* Called when DB transaction fails after successful uploads
*/
const cleanupOrphanedImages = async (imageIds: string[]) => {
if (imageIds.length === 0) return;
logger.warn('Cleaning up orphaned images', { count: imageIds.length });
try {
await Promise.allSettled(
imageIds.map(id =>
invokeWithTracking('upload-image', { action: 'delete', imageId: id }, user?.id)
)
);
logger.info('Orphaned images cleaned up', { count: imageIds.length });
} catch (error) {
// Non-blocking cleanup - log but don't fail
logger.error('Failed to cleanup orphaned images', {
error: getErrorMessage(error),
imageIds
});
}
};
const handleFilesSelected = (files: File[]) => {
// Convert files to photo objects with object URLs for preview
const newPhotos: PhotoWithCaption[] = files.map((file, index) => ({
@@ -449,22 +424,6 @@ export function UppyPhotoSubmissionUpload({
throw photoSubmissionError || new Error("Failed to create photo submission");
}
// ✅ CRITICAL FIX: Create submission_items record for moderation queue
const { error: submissionItemError } = await supabase
.from('submission_items')
.insert({
submission_id: submissionData.id,
item_type: 'photo',
action_type: 'create',
status: 'pending',
order_index: 0,
photo_submission_id: photoSubmissionData.id
});
if (submissionItemError) {
throw submissionItemError;
}
// Insert only successful photo items
const photoItems = successfulPhotos.map((photo, index) => ({
photo_submission_id: photoSubmissionData.id,
@@ -568,13 +527,6 @@ export function UppyPhotoSubmissionUpload({
} catch (error: unknown) {
const errorMsg = sanitizeErrorMessage(error);
// ✅ CRITICAL FIX: Cleanup orphaned images on failure
if (orphanedCloudflareIds.length > 0) {
cleanupOrphanedImages(orphanedCloudflareIds).catch(() => {
// Non-blocking - log already handled in cleanupOrphanedImages
});
}
logger.error('Photo submission failed', {
error: errorMsg,
photoCount: photos.length,

View File

@@ -306,6 +306,75 @@ export function useModerationActions(config: ModerationActionsConfig): Moderatio
action: 'approved' | 'rejected';
moderatorNotes?: string;
}) => {
// Handle photo submissions
if (action === 'approved' && item.submission_type === 'photo') {
const { data: photoSubmission, error: fetchError } = await supabase
.from('photo_submissions')
.select(`
*,
items:photo_submission_items(*),
submission:content_submissions!inner(user_id)
`)
.eq('submission_id', item.id)
.single();
// Add explicit error handling
if (fetchError) {
throw new Error(`Failed to fetch photo submission: ${fetchError.message}`);
}
if (!photoSubmission) {
throw new Error('Photo submission not found');
}
// Type assertion with validation
const typedPhotoSubmission = photoSubmission as {
id: string;
entity_id: string;
entity_type: string;
items: Array<{
id: string;
cloudflare_image_id: string;
cloudflare_image_url: string;
caption?: string;
title?: string;
date_taken?: string;
date_taken_precision?: string;
order_index: number;
}>;
submission: { user_id: string };
};
// Validate required fields
if (!typedPhotoSubmission.items || typedPhotoSubmission.items.length === 0) {
throw new Error('No photo items found in submission');
}
const { data: existingPhotos } = await supabase
.from('photos')
.select('id')
.eq('submission_id', item.id);
if (!existingPhotos || existingPhotos.length === 0) {
const photoRecords = typedPhotoSubmission.items.map((photoItem) => ({
entity_id: typedPhotoSubmission.entity_id,
entity_type: typedPhotoSubmission.entity_type,
cloudflare_image_id: photoItem.cloudflare_image_id,
cloudflare_image_url: photoItem.cloudflare_image_url,
title: photoItem.title || null,
caption: photoItem.caption || null,
date_taken: photoItem.date_taken || null,
order_index: photoItem.order_index,
submission_id: item.id,
submitted_by: typedPhotoSubmission.submission?.user_id,
approved_by: user?.id,
approved_at: new Date().toISOString(),
}));
await supabase.from('photos').insert(photoRecords);
}
}
// Check for submission items
const { data: submissionItems } = await supabase
.from('submission_items')
@@ -374,61 +443,15 @@ export function useModerationActions(config: ModerationActionsConfig): Moderatio
});
return;
} else if (action === 'rejected') {
// Use atomic rejection transaction for submission items
const {
data,
error,
requestId,
attempts,
cached,
conflictRetries
} = await invokeWithResilience(
'process-selective-rejection',
{
itemIds: submissionItems.map((i) => i.id),
submissionId: item.id,
rejectionReason: moderatorNotes || 'Parent submission rejected',
},
'rejection',
submissionItems.map((i) => i.id),
config.user?.id,
3, // Max 3 conflict retries
30000 // 30s timeout
);
// Log retry attempts
if (attempts && attempts > 1) {
logger.log(`Rejection succeeded after ${attempts} network retries`, {
submissionId: item.id,
requestId,
});
}
if (conflictRetries && conflictRetries > 0) {
logger.log(`Resolved 409 conflict after ${conflictRetries} retries`, {
submissionId: item.id,
requestId,
cached: !!cached,
});
}
if (error) {
// Enhance error with context for better UI feedback
if (is409Conflict(error)) {
throw new Error(
'This rejection is being processed by another request. Please wait and try again if it does not complete.'
);
}
throw error;
}
toast({
title: cached ? 'Cached Result' : 'Submission Rejected',
description: cached
? `Returned cached result for ${submissionItems.length} item(s)`
: `Successfully rejected ${submissionItems.length} item(s)${requestId ? ` (Request: ${requestId.substring(0, 8)})` : ''}`,
});
return;
await supabase
.from('submission_items')
.update({
status: 'rejected',
rejection_reason: moderatorNotes || 'Parent submission rejected',
updated_at: new Date().toISOString(),
})
.eq('submission_id', item.id)
.eq('status', 'pending');
}
}

View File

@@ -1,173 +0,0 @@
import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { supabase } from '@/integrations/supabase/client';
import { toast } from 'sonner';
/**
 * A configurable alerting rule evaluated against rate-limit metrics.
 * Rows live in the `rate_limit_alert_config` table.
 */
export interface AlertConfig {
  id: string;
  // Which aggregate the rule watches; 'function_specific' scopes the rule
  // to a single function via `function_name`.
  metric_type: 'block_rate' | 'total_requests' | 'unique_ips' | 'function_specific';
  // Value that triggers an alert when crossed (units depend on metric_type).
  threshold_value: number;
  // Evaluation window, in milliseconds.
  time_window_ms: number;
  // Only meaningful for 'function_specific' rules.
  function_name?: string;
  enabled: boolean;
  created_at: string;
  updated_at: string;
}

/**
 * A fired alert row from `rate_limit_alerts`: a snapshot of the rule plus
 * the observed value at trigger time.
 */
export interface Alert {
  id: string;
  // Id of the originating alert configuration.
  config_id: string;
  metric_type: string;
  // Observed value that crossed the threshold.
  metric_value: number;
  threshold_value: number;
  time_window_ms: number;
  function_name?: string;
  alert_message: string;
  // ISO timestamp set by useResolveAlert; unset while the alert is open
  // (useUnresolvedAlerts filters on `resolved_at IS NULL`).
  resolved_at?: string;
  created_at: string;
}
/**
 * Fetch every rate-limit alert configuration, ordered by metric type.
 */
export function useAlertConfigs() {
  return useQuery({
    queryKey: ['rateLimitAlertConfigs'],
    queryFn: async (): Promise<AlertConfig[]> => {
      const response = await supabase
        .from('rate_limit_alert_config')
        .select('*')
        .order('metric_type');
      if (response.error) {
        throw response.error;
      }
      return response.data as AlertConfig[];
    },
  });
}
/**
 * Fetch the most recent rate-limit alerts (both resolved and open),
 * newest first, polling every 30 seconds to stay fresh.
 *
 * @param limit Maximum number of rows to return (default 50).
 */
export function useAlertHistory(limit: number = 50) {
  return useQuery({
    queryKey: ['rateLimitAlerts', limit],
    refetchInterval: 30000, // Refetch every 30 seconds
    queryFn: async (): Promise<Alert[]> => {
      const response = await supabase
        .from('rate_limit_alerts')
        .select('*')
        .order('created_at', { ascending: false })
        .limit(limit);
      if (response.error) {
        throw response.error;
      }
      return response.data as Alert[];
    },
  });
}
/**
 * Fetch only open alerts (`resolved_at IS NULL`), newest first.
 * Polls every 15 seconds so new alerts surface quickly.
 */
export function useUnresolvedAlerts() {
  return useQuery({
    queryKey: ['rateLimitAlertsUnresolved'],
    refetchInterval: 15000, // Refetch every 15 seconds
    queryFn: async (): Promise<Alert[]> => {
      const response = await supabase
        .from('rate_limit_alerts')
        .select('*')
        .is('resolved_at', null)
        .order('created_at', { ascending: false });
      if (response.error) {
        throw response.error;
      }
      return response.data as Alert[];
    },
  });
}
/**
 * Patch an existing alert configuration and refresh the config list.
 * Surfaces success/failure via toast.
 */
export function useUpdateAlertConfig() {
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: async ({ id, updates }: { id: string; updates: Partial<AlertConfig> }) => {
      const response = await supabase
        .from('rate_limit_alert_config')
        .update(updates)
        .eq('id', id)
        .select()
        .single();
      if (response.error) {
        throw response.error;
      }
      return response.data;
    },
    onError: (error) => {
      toast.error(`Failed to update alert config: ${error.message}`);
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ['rateLimitAlertConfigs'] });
      toast.success('Alert configuration updated');
    },
  });
}
/**
 * Insert a new alert configuration (server assigns id/timestamps) and
 * refresh the config list. Surfaces success/failure via toast.
 */
export function useCreateAlertConfig() {
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: async (config: Omit<AlertConfig, 'id' | 'created_at' | 'updated_at'>) => {
      const response = await supabase
        .from('rate_limit_alert_config')
        .insert(config)
        .select()
        .single();
      if (response.error) {
        throw response.error;
      }
      return response.data;
    },
    onError: (error) => {
      toast.error(`Failed to create alert config: ${error.message}`);
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ['rateLimitAlertConfigs'] });
      toast.success('Alert configuration created');
    },
  });
}
/**
 * Mutation hook that deletes an alert-config row by id.
 * Invalidates the config list cache and toasts on success/failure.
 */
export function useDeleteAlertConfig() {
  const queryClient = useQueryClient();
  const removeConfig = async (id: string): Promise<void> => {
    const response = await supabase
      .from('rate_limit_alert_config')
      .delete()
      .eq('id', id);
    if (response.error) throw response.error;
  };
  return useMutation({
    mutationFn: removeConfig,
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ['rateLimitAlertConfigs'] });
      toast.success('Alert configuration deleted');
    },
    onError: (error) => {
      toast.error(`Failed to delete alert config: ${error.message}`);
    },
  });
}
/**
 * Mutation hook that marks an alert as resolved by stamping `resolved_at`
 * with the current time. Invalidates both the full alert history and the
 * unresolved-alerts caches so every alert view refreshes.
 */
export function useResolveAlert() {
  const queryClient = useQueryClient();
  const resolveAlert = async (id: string) => {
    const response = await supabase
      .from('rate_limit_alerts')
      .update({ resolved_at: new Date().toISOString() })
      .eq('id', id)
      .select()
      .single();
    if (response.error) throw response.error;
    return response.data;
  };
  return useMutation({
    mutationFn: resolveAlert,
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ['rateLimitAlerts'] });
      queryClient.invalidateQueries({ queryKey: ['rateLimitAlertsUnresolved'] });
      toast.success('Alert resolved');
    },
    onError: (error) => {
      toast.error(`Failed to resolve alert: ${error.message}`);
    },
  });
}

View File

@@ -1,75 +0,0 @@
import { useQuery } from '@tanstack/react-query';
import { supabase } from '@/integrations/supabase/client';
/**
 * A single rate-limit decision recorded for one incoming request.
 */
export interface RateLimitMetric {
  // Epoch milliseconds when the decision was made — presumably; TODO confirm units against the edge function.
  timestamp: number;
  // Edge function that handled the request.
  functionName: string;
  // Caller's IP address as seen by the edge function.
  clientIP: string;
  // Authenticated user id, when the request carried one.
  userId?: string;
  // true if the request was allowed through; false if it was blocked.
  allowed: boolean;
  // Requests remaining in the current window for this caller.
  remaining: number;
  // Seconds to wait before retrying — only set when the request was blocked.
  retryAfter?: number;
  // Rate-limit tier applied to this caller (e.g. per-user vs. per-IP tier).
  tier: string;
}
/**
 * Aggregated rate-limit statistics over a time window, as returned by the
 * `rate-limit-metrics` edge function's `stats` action.
 */
export interface MetricsStats {
  // Total requests observed in the window.
  totalRequests: number;
  // Requests that passed the rate limiter.
  allowedRequests: number;
  // Requests rejected by the rate limiter.
  blockedRequests: number;
  // blockedRequests / totalRequests — presumably a 0..1 ratio; TODO confirm scale.
  blockRate: number;
  // Count of distinct client IPs seen.
  uniqueIPs: number;
  // Count of distinct authenticated users seen.
  uniqueUsers: number;
  // IPs with the most blocked requests, with their block counts.
  topBlockedIPs: Array<{ ip: string; count: number }>;
  // Users with the most blocked requests, with their block counts.
  topBlockedUsers: Array<{ userId: string; count: number }>;
  // Request counts keyed by rate-limit tier name.
  tierDistribution: Record<string, number>;
}
/**
 * Query parameters accepted by the `rate-limit-metrics` edge function.
 * `action` selects the report; the remaining fields filter/scope it.
 */
interface MetricsQueryParams {
  // Which report to run: aggregate stats, recent events, or events filtered
  // by function, user, or IP.
  action: 'stats' | 'recent' | 'function' | 'user' | 'ip';
  // Maximum number of events to return (for list-style actions).
  limit?: number;
  // Time window in milliseconds (for the 'stats' action).
  timeWindow?: number;
  // Filter to a single edge function (used with action: 'function').
  functionName?: string;
  // Filter to a single user (used with action: 'user').
  userId?: string;
  // Filter to a single client IP (used with action: 'ip').
  clientIP?: string;
}
/**
 * Queries the `rate-limit-metrics` edge function for rate-limit telemetry.
 *
 * @param params - Selects the report (`action`) and optional filters; the
 *   params object is part of the query key, so each distinct combination is
 *   cached independently.
 * @returns React Query result whose `data` is the edge function's JSON payload.
 *
 * Fix: the previous implementation sent `method: 'GET'` together with a
 * `body` — GET requests cannot carry a body, so the filters never reached
 * the function. The parameters are now encoded into the URL query string.
 */
export function useRateLimitMetrics(params: MetricsQueryParams) {
  return useQuery({
    queryKey: ['rateLimitMetrics', params],
    queryFn: async () => {
      const queryParams = new URLSearchParams();
      queryParams.set('action', params.action);
      if (params.limit) queryParams.set('limit', params.limit.toString());
      if (params.timeWindow) queryParams.set('timeWindow', params.timeWindow.toString());
      if (params.functionName) queryParams.set('functionName', params.functionName);
      if (params.userId) queryParams.set('userId', params.userId);
      if (params.clientIP) queryParams.set('clientIP', params.clientIP);
      // Append the filters to the function path so the edge function can read
      // them from the request URL; a GET request must not have a body.
      const { data, error } = await supabase.functions.invoke(
        `rate-limit-metrics?${queryParams.toString()}`,
        { method: 'GET' }
      );
      if (error) throw error;
      return data;
    },
    refetchInterval: 30000, // Refetch every 30 seconds
    staleTime: 15000, // Consider data stale after 15 seconds
  });
}
export function useRateLimitStats(timeWindow: number = 60000) {
return useRateLimitMetrics({ action: 'stats', timeWindow });
}
export function useRecentMetrics(limit: number = 100) {
return useRateLimitMetrics({ action: 'recent', limit });
}
export function useFunctionMetrics(functionName: string, limit: number = 100) {
return useRateLimitMetrics({ action: 'function', functionName, limit });
}

View File

@@ -2950,89 +2950,6 @@ export type Database = {
},
]
}
rate_limit_alert_config: {
Row: {
created_at: string
created_by: string | null
enabled: boolean
function_name: string | null
id: string
metric_type: string
threshold_value: number
time_window_ms: number
updated_at: string
}
Insert: {
created_at?: string
created_by?: string | null
enabled?: boolean
function_name?: string | null
id?: string
metric_type: string
threshold_value: number
time_window_ms?: number
updated_at?: string
}
Update: {
created_at?: string
created_by?: string | null
enabled?: boolean
function_name?: string | null
id?: string
metric_type?: string
threshold_value?: number
time_window_ms?: number
updated_at?: string
}
Relationships: []
}
rate_limit_alerts: {
Row: {
alert_message: string
config_id: string | null
created_at: string
function_name: string | null
id: string
metric_type: string
metric_value: number
resolved_at: string | null
threshold_value: number
time_window_ms: number
}
Insert: {
alert_message: string
config_id?: string | null
created_at?: string
function_name?: string | null
id?: string
metric_type: string
metric_value: number
resolved_at?: string | null
threshold_value: number
time_window_ms: number
}
Update: {
alert_message?: string
config_id?: string | null
created_at?: string
function_name?: string | null
id?: string
metric_type?: string
metric_value?: number
resolved_at?: string | null
threshold_value?: number
time_window_ms?: number
}
Relationships: [
{
foreignKeyName: "rate_limit_alerts_config_id_fkey"
columns: ["config_id"]
isOneToOne: false
referencedRelation: "rate_limit_alert_config"
referencedColumns: ["id"]
},
]
}
rate_limits: {
Row: {
action: string
@@ -6428,52 +6345,16 @@ export type Database = {
monitor_ban_attempts: { Args: never; Returns: undefined }
monitor_failed_submissions: { Args: never; Returns: undefined }
monitor_slow_approvals: { Args: never; Returns: undefined }
process_approval_transaction:
| {
Args: {
p_item_ids: string[]
p_moderator_id: string
p_request_id?: string
p_submission_id: string
p_submitter_id: string
}
Returns: Json
}
| {
Args: {
p_item_ids: string[]
p_moderator_id: string
p_parent_span_id?: string
p_request_id?: string
p_submission_id: string
p_submitter_id: string
p_trace_id?: string
}
Returns: Json
}
process_rejection_transaction:
| {
Args: {
p_item_ids: string[]
p_moderator_id: string
p_rejection_reason: string
p_request_id?: string
p_submission_id: string
}
Returns: Json
}
| {
Args: {
p_item_ids: string[]
p_moderator_id: string
p_parent_span_id?: string
p_rejection_reason: string
p_request_id?: string
p_submission_id: string
p_trace_id?: string
}
Returns: Json
}
process_approval_transaction: {
Args: {
p_item_ids: string[]
p_moderator_id: string
p_request_id?: string
p_submission_id: string
p_submitter_id: string
}
Returns: Json
}
release_expired_locks: { Args: never; Returns: number }
release_submission_lock: {
Args: { moderator_id: string; submission_id: string }

View File

@@ -33,7 +33,7 @@ export async function invokeWithTracking<T = any>(
timeout: number = 30000,
retryOptions?: Partial<RetryOptions>,
customHeaders?: Record<string, string>
): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number; traceId?: string }> {
): Promise<{ data: T | null; error: any; requestId: string; duration: number; attempts?: number; status?: number }> {
// Configure retry options with defaults
const effectiveRetryOptions: RetryOptions = {
maxAttempts: retryOptions?.maxAttempts ?? 3,
@@ -75,30 +75,11 @@ export async function invokeWithTracking<T = any>(
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeout);
// Generate W3C Trace Context header
const effectiveTraceId = context.traceId || crypto.randomUUID();
const spanId = crypto.randomUUID();
const traceparent = `00-${effectiveTraceId}-${spanId}-01`;
// Add breadcrumb with trace context
breadcrumb.apiCall(
`/functions/${functionName}`,
'POST',
undefined
);
try {
const { data, error } = await supabase.functions.invoke<T>(functionName, {
body: {
...payload,
clientRequestId: context.requestId,
traceId: effectiveTraceId,
},
body: { ...payload, clientRequestId: context.requestId },
signal: controller.signal,
headers: {
...customHeaders,
'traceparent': traceparent,
},
headers: customHeaders,
});
clearTimeout(timeoutId);
@@ -122,15 +103,7 @@ export async function invokeWithTracking<T = any>(
}
);
return {
data: result,
error: null,
requestId,
duration,
attempts: attemptCount,
status: 200,
traceId,
};
return { data: result, error: null, requestId, duration, attempts: attemptCount, status: 200 };
} catch (error: unknown) {
// Handle AbortError specifically
if (error instanceof Error && error.name === 'AbortError') {
@@ -144,22 +117,20 @@ export async function invokeWithTracking<T = any>(
duration: timeout,
attempts: attemptCount,
status: 408,
traceId: undefined,
};
}
const errorMessage = getErrorMessage(error);
return {
data: null,
error: { message: errorMessage, status: (error as any)?.status },
requestId: 'unknown',
duration: 0,
attempts: attemptCount,
status: (error as any)?.status,
traceId: undefined,
};
}
data: null,
error: { message: errorMessage, status: (error as any)?.status },
requestId: 'unknown',
duration: 0,
attempts: attemptCount,
status: (error as any)?.status,
};
}
}
/**
* Invoke multiple edge functions in parallel with batch tracking

View File

@@ -9,7 +9,7 @@ import { logger } from './logger';
import { handleError } from './errorHandler';
import type { TimelineEventFormData, EntityType } from '@/types/timeline';
import { breadcrumb } from './errorBreadcrumbs';
import { isRetryableError, isRateLimitError, extractRetryAfter } from './retryHelpers';
import { isRetryableError } from './retryHelpers';
import {
validateParkCreateFields,
validateRideCreateFields,
@@ -773,8 +773,6 @@ export async function submitParkCreation(
}
// Create submission with retry logic
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
// Create the main submission record
@@ -884,30 +882,12 @@ export async function submitParkCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying park submission', { attempt, delay });
logger.warn('Retrying park submission', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
// Emit event for UI indicator with rate limit info
// Emit event for UI indicator
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'park',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'park' }
}));
},
shouldRetry: (error) => {
@@ -916,35 +896,18 @@ export async function submitParkCreation(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
// Emit success event
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
).catch((error) => {
handleError(error, {
action: 'Park submission',
metadata: { retriesExhausted: true },
});
// Emit failure event
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
@@ -1140,31 +1103,17 @@ export async function submitParkUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying park update submission', {
attempt,
delay,
parkId,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
// Emit event for UI retry indicator with rate limit info
// Emit event for UI retry indicator
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'park update',
isRateLimit,
retryAfter
}
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'park update' }
}));
},
shouldRetry: (error) => {
@@ -1557,30 +1506,12 @@ export async function submitRideCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying ride submission', { attempt, delay });
logger.warn('Retrying ride submission', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
// Emit event for UI indicator with rate limit info
// Emit event for UI indicator
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'ride',
isRateLimit,
retryAfter
}
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'ride' }
}));
},
shouldRetry: (error) => {
@@ -1589,13 +1520,8 @@ export async function submitRideCreation(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
@@ -1788,31 +1714,17 @@ export async function submitRideUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying ride update submission', {
attempt,
delay,
rideId,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
// Emit event for UI retry indicator with rate limit info
// Emit event for UI retry indicator
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'ride update',
isRateLimit,
retryAfter
}
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'ride update' }
}));
},
shouldRetry: (error) => {
@@ -1821,13 +1733,8 @@ export async function submitRideUpdate(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
if (message.includes('not found')) return false;
if (message.includes('not allowed')) return false;
}
@@ -1931,8 +1838,6 @@ export async function submitRideModelCreation(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
// Create the main submission record
@@ -2020,28 +1925,10 @@ export async function submitRideModelCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying ride model submission', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying ride model submission', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'ride_model',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'ride_model' }
}));
},
shouldRetry: (error) => {
@@ -2049,36 +1936,12 @@ export async function submitRideModelCreation(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
// Emit success event
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: 'Ride model submission',
metadata: { retriesExhausted: true },
});
// Emit failure event
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
);
return result;
}
@@ -2143,8 +2006,6 @@ export async function submitRideModelUpdate(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
// Create the main submission record
@@ -2230,28 +2091,10 @@ export async function submitRideModelUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying ride model update', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying ride model update', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'ride_model_update',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'ride_model_update' }
}));
},
shouldRetry: (error) => {
@@ -2259,34 +2102,12 @@ export async function submitRideModelUpdate(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: 'Ride model update submission',
metadata: { retriesExhausted: true },
});
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
);
return result;
}
@@ -2349,8 +2170,6 @@ export async function submitManufacturerCreation(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -2390,28 +2209,10 @@ export async function submitManufacturerCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying manufacturer submission', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying manufacturer submission', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'manufacturer',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'manufacturer' }
}));
},
shouldRetry: (error) => {
@@ -2419,34 +2220,12 @@ export async function submitManufacturerCreation(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: 'Manufacturer submission',
metadata: { retriesExhausted: true },
});
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
);
return result;
}
@@ -2504,8 +2283,6 @@ export async function submitManufacturerUpdate(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -2543,28 +2320,10 @@ export async function submitManufacturerUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying manufacturer update', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying manufacturer update', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'manufacturer_update',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'manufacturer_update' }
}));
},
shouldRetry: (error) => {
@@ -2635,8 +2394,6 @@ export async function submitDesignerCreation(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -2676,28 +2433,10 @@ export async function submitDesignerCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying designer submission', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying designer submission', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'designer',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'designer' }
}));
},
shouldRetry: (error) => {
@@ -2768,8 +2507,6 @@ export async function submitDesignerUpdate(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -2807,28 +2544,10 @@ export async function submitDesignerUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying designer update', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying designer update', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'designer_update',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'designer_update' }
}));
},
shouldRetry: (error) => {
@@ -2899,8 +2618,6 @@ export async function submitOperatorCreation(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -2940,15 +2657,10 @@ export async function submitOperatorCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
logger.warn('Retrying operator submission', {
attempt,
delay,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying operator submission', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'operator' }
detail: { attempt, maxAttempts: 3, delay, type: 'operator' }
}));
},
shouldRetry: (error) => {
@@ -2956,34 +2668,12 @@ export async function submitOperatorCreation(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: 'Operator submission',
metadata: { retriesExhausted: true },
});
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
);
return result;
}
@@ -3041,8 +2731,6 @@ export async function submitOperatorUpdate(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -3080,28 +2768,10 @@ export async function submitOperatorUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying operator update', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying operator update', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'operator_update',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'operator_update' }
}));
},
shouldRetry: (error) => {
@@ -3172,8 +2842,6 @@ export async function submitPropertyOwnerCreation(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -3213,15 +2881,10 @@ export async function submitPropertyOwnerCreation(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
logger.warn('Retrying property owner submission', {
attempt,
delay,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying property owner submission', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: { id: retryId, attempt, maxAttempts: 3, delay, type: 'property_owner' }
detail: { attempt, maxAttempts: 3, delay, type: 'property_owner' }
}));
},
shouldRetry: (error) => {
@@ -3229,34 +2892,12 @@ export async function submitPropertyOwnerCreation(
const message = error.message.toLowerCase();
if (message.includes('required')) return false;
if (message.includes('banned')) return false;
if (message.includes('suspended')) return false;
if (message.includes('slug')) return false;
if (message.includes('already exists')) return false;
if (message.includes('duplicate')) return false;
if (message.includes('permission')) return false;
if (message.includes('forbidden')) return false;
if (message.includes('unauthorized')) return false;
}
return isRetryableError(error);
}
}
).then((data) => {
window.dispatchEvent(new CustomEvent('submission-retry-success', {
detail: { id: retryId }
}));
return data;
}).catch((error) => {
const errorId = handleError(error, {
action: 'Property owner submission',
metadata: { retriesExhausted: true },
});
window.dispatchEvent(new CustomEvent('submission-retry-failed', {
detail: { id: retryId, errorId }
}));
throw error;
});
);
return result;
}
@@ -3314,8 +2955,6 @@ export async function submitPropertyOwnerUpdate(
// Submit with retry logic
breadcrumb.apiCall('content_submissions', 'INSERT');
const retryId = crypto.randomUUID();
const result = await withRetry(
async () => {
const { data: submissionData, error: submissionError } = await supabase
@@ -3353,28 +2992,10 @@ export async function submitPropertyOwnerUpdate(
},
{
maxAttempts: 3,
baseDelay: 1000,
onRetry: (attempt, error, delay) => {
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
logger.warn('Retrying property owner update', {
attempt,
delay,
isRateLimit,
retryAfter,
error: error instanceof Error ? error.message : String(error)
});
logger.warn('Retrying property owner update', { attempt, delay });
window.dispatchEvent(new CustomEvent('submission-retry', {
detail: {
id: retryId,
attempt,
maxAttempts: 3,
delay,
type: 'property_owner_update',
isRateLimit,
retryAfter
}
detail: { attempt, maxAttempts: 3, delay, type: 'property_owner_update' }
}));
},
shouldRetry: (error) => {

View File

@@ -1,152 +0,0 @@
/**
* Test Error Formatting Utility
*
* Provides robust error formatting for test results to avoid "[object Object]" messages
* Includes pattern matching for common Supabase/Postgres constraint violations
*/
/**
* Error pattern matchers for common database constraint violations
*/
const ERROR_PATTERNS = [
  {
    // RLS policy violations — captures the table name.
    // NOTE(review): `\w+` only matches simple unquoted identifiers; schema-qualified
    // names (e.g. "public.table") would not match — confirm whether that case occurs.
    pattern: /new row violates row-level security policy for table "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `RLS Policy Violation: Cannot insert into table "${match[1]}". Check that RLS policies allow this operation and user has proper authentication.`
  },
  {
    // NOT NULL constraint violations — captures column (group 1) and table (group 2).
    pattern: /null value in column "(\w+)" of relation "(\w+)" violates not-null constraint/i,
    format: (match: RegExpMatchArray) =>
      `NOT NULL Constraint: Column "${match[1]}" in table "${match[2]}" cannot be null. Provide a value for this required field.`
  },
  {
    // UNIQUE constraint violations — captures the constraint name.
    pattern: /duplicate key value violates unique constraint "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `UNIQUE Constraint: Duplicate value violates constraint "${match[1]}". This value already exists in the database.`
  },
  {
    // Foreign key violations — captures table (group 1) and constraint (group 2).
    pattern: /insert or update on table "(\w+)" violates foreign key constraint "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `Foreign Key Violation: Table "${match[1]}" references non-existent record (constraint: "${match[2]}"). Ensure the referenced entity exists first.`
  },
  {
    // Foreign key violations (alternative format) — broad fallback with no capture
    // groups; kept after the more specific FK pattern above.
    pattern: /violates foreign key constraint/i,
    format: () =>
      `Foreign Key Violation: Referenced record does not exist. Create the parent entity before creating this dependent entity.`
  },
  {
    // Check constraint violations — captures table (group 1) and constraint (group 2).
    pattern: /new row for relation "(\w+)" violates check constraint "(\w+)"/i,
    format: (match: RegExpMatchArray) =>
      `Check Constraint: Validation failed for table "${match[1]}" (constraint: "${match[2]}"). The provided value does not meet validation requirements.`
  },
  {
    // Column does not exist — captures column (group 1) and table (group 2).
    pattern: /column "(\w+)" of relation "(\w+)" does not exist/i,
    format: (match: RegExpMatchArray) =>
      `Schema Error: Column "${match[1]}" does not exist in table "${match[2]}". Check database schema or migration status.`
  },
  {
    // Could not find column in schema cache (PostgREST-style message) —
    // captures column (group 1) and table (group 2).
    pattern: /Could not find the '(\w+)' column of '(\w+)' in the schema cache/i,
    format: (match: RegExpMatchArray) =>
      `Schema Cache Error: Column "${match[1]}" not found in table "${match[2]}". The schema may have changed - try refreshing the database connection.`
  },
  {
    // Table does not exist — captures the relation name.
    pattern: /relation "(\w+)" does not exist/i,
    format: (match: RegExpMatchArray) =>
      `Schema Error: Table "${match[1]}" does not exist. Run migrations or check database schema.`
  },
  {
    // Permission denied — table name may appear with or without quotes.
    pattern: /permission denied for (?:table|relation) "?(\w+)"?/i,
    format: (match: RegExpMatchArray) =>
      `Permission Denied: Insufficient permissions to access table "${match[1]}". Check RLS policies and user roles.`
  },
  {
    // Rate limit errors — captures the wait time in seconds.
    pattern: /Rate limit exceeded\. Please wait (\d+) seconds?/i,
    format: (match: RegExpMatchArray) =>
      `Rate Limited: Too many requests. Wait ${match[1]} seconds before retrying.`
  },
  {
    // Rate limit errors (alternative format) — captures the wait time in seconds.
    pattern: /Too many submissions in a short time\. Please wait (\d+) seconds?/i,
    format: (match: RegExpMatchArray) =>
      `Rate Limited: Submission throttled. Wait ${match[1]} seconds before submitting again.`
  }
];
/**
 * Format an error for test result display.
 *
 * Handles Error instances, Supabase/Postgres-style error objects, and plain
 * values, then maps known constraint-violation messages onto human-readable
 * explanations via ERROR_PATTERNS. Avoids the dreaded "[object Object]".
 *
 * @param error - Any error value thrown in a test
 * @returns Formatted, human-readable error string
 */
export function formatTestError(error: unknown): string {
  let message: string;

  if (error instanceof Error) {
    message = error.message;
  } else if (typeof error !== 'object' || error === null) {
    // Primitive values (strings, numbers, etc.)
    message = String(error);
  } else {
    const err = error as any;
    if (err.message && typeof err.message === 'string') {
      // Fold optional Supabase/Postgres detail fields into the message.
      message = err.message;
      if (err.details && typeof err.details === 'string') {
        message += ` | Details: ${err.details}`;
      }
      if (err.hint && typeof err.hint === 'string') {
        message += ` | Hint: ${err.hint}`;
      }
      if (err.code && typeof err.code === 'string') {
        message += ` | Code: ${err.code}`;
      }
    } else if (err.error) {
      // Some APIs nest the real error under an 'error' property — recurse.
      return formatTestError(err.error);
    } else if (err.msg && typeof err.msg === 'string') {
      // Some APIs use 'msg' instead of 'message'.
      message = err.msg;
    } else {
      // Last resort: stringify the whole object, capped so output stays readable.
      try {
        const json = JSON.stringify(error, null, 2);
        message = json.length > 500
          ? json.substring(0, 500) + '... (truncated)'
          : json;
      } catch {
        // JSON.stringify throws on circular references.
        message = String(error);
      }
    }
  }

  // Translate known constraint-violation messages; first matching pattern wins.
  for (const { pattern, format } of ERROR_PATTERNS) {
    const hit = message.match(pattern);
    if (hit) {
      return format(hit);
    }
  }

  // No pattern matched — return the extracted message as-is.
  return message;
}

View File

@@ -1,76 +0,0 @@
/**
* Test Result Formatters
*
* Utilities for formatting test results into different formats for easy sharing and debugging.
*/
import type { TestResult } from './testRunner';
/**
 * Render a full test run as a Markdown report.
 *
 * Always emits a summary header; then either every supplied result (when
 * `failedOnly` is true — callers are expected to pre-filter) or a "Failed
 * Tests" section followed by a compact "Passed Tests" section.
 *
 * @param results - Results to render
 * @param summary - Aggregate counts and total duration (ms)
 * @param failedOnly - When true, titles the report as failure-only and renders all supplied results in full
 * @returns Markdown document as a single string
 */
export function formatResultsAsMarkdown(
  results: TestResult[],
  summary: { total: number; passed: number; failed: number; skipped: number; totalDuration: number },
  failedOnly: boolean = false
): string {
  const timestamp = new Date().toISOString();
  const title = failedOnly ? 'Failed Test Results' : 'Test Results';

  // Build the document as segments and join once at the end.
  const parts: string[] = [
    `# ${title} - ${timestamp}\n\n`,
    `## Summary\n`,
    `✅ Passed: ${summary.passed}\n`,
    `❌ Failed: ${summary.failed}\n`,
    `⏭️ Skipped: ${summary.skipped}\n`,
    `⏱️ Duration: ${(summary.totalDuration / 1000).toFixed(2)}s\n\n`,
  ];

  if (failedOnly) {
    for (const result of results) {
      parts.push(formatTestResultMarkdown(result));
    }
  } else {
    if (summary.failed > 0) {
      parts.push(`## Failed Tests\n\n`);
      for (const result of results) {
        if (result.status === 'fail') {
          parts.push(formatTestResultMarkdown(result));
        }
      }
    }
    // Passed tests get a compact one-liner each rather than the full section.
    if (summary.passed > 0) {
      parts.push(`## Passed Tests\n\n`);
      for (const result of results) {
        if (result.status === 'pass') {
          parts.push(`### ✅ ${result.name} (${result.suite})\n`);
          parts.push(`**Duration:** ${result.duration}ms\n\n`);
        }
      }
    }
  }

  return parts.join('');
}
/**
 * Format a single test result as a standalone Markdown snippet.
 *
 * Thin public wrapper around the module-private formatTestResultMarkdown so
 * callers can render one result without building a full report.
 *
 * @param result - The test result to render
 * @returns Markdown section for this one result
 */
export function formatSingleTestAsMarkdown(result: TestResult): string {
  return formatTestResultMarkdown(result);
}
/**
 * Render one test result as a Markdown section.
 *
 * Always includes name/suite/duration/status; error, stack trace, and details
 * sections are appended only when present on the result.
 *
 * @param result - The test result to render
 * @returns Markdown text for this result
 */
function formatTestResultMarkdown(result: TestResult): string {
  // Status icon: cross for failures, check for passes, skip marker otherwise.
  let icon: string;
  if (result.status === 'fail') {
    icon = '❌';
  } else if (result.status === 'pass') {
    icon = '✅';
  } else {
    icon = '⏭️';
  }

  const sections = [
    `### ${icon} ${result.name} (${result.suite})\n`,
    `**Duration:** ${result.duration}ms\n`,
    `**Status:** ${result.status}\n`,
  ];
  if (result.error) {
    sections.push(`**Error:** ${result.error}\n\n`);
  }
  if (result.stack) {
    sections.push(`**Stack Trace:**\n\`\`\`\n${result.stack}\n\`\`\`\n\n`);
  }
  if (result.details) {
    sections.push(`**Details:**\n\`\`\`json\n${JSON.stringify(result.details, null, 2)}\n\`\`\`\n\n`);
  }
  return sections.join('');
}

View File

@@ -1,697 +0,0 @@
/**
* Approval Pipeline Test Helpers
*
* Reusable helper functions for approval pipeline integration tests.
* These helpers abstract common patterns for submission creation, approval,
* and verification across all entity types.
*/
import { supabase } from '@/lib/supabaseClient';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
import {
submitParkCreation,
submitRideCreation,
submitManufacturerCreation,
submitOperatorCreation,
submitDesignerCreation,
submitPropertyOwnerCreation,
submitRideModelCreation
} from '@/lib/entitySubmissionHelpers';
// Re-export formatTestError for use in test suites
export { formatTestError } from '../formatTestError';
// ============================================
// AUTHENTICATION
// ============================================
/**
 * Get the current user's access token for authenticating edge function calls.
 *
 * @returns The active session's access token
 * @throws Error when there is no active session (or session lookup fails)
 */
export async function getAuthToken(): Promise<string> {
  const result = await supabase.auth.getSession();
  const session = result.data.session;
  if (result.error || !session) {
    throw new Error('Not authenticated - cannot run approval tests');
  }
  return session.access_token;
}
/**
 * Get the id of the currently authenticated user.
 *
 * @returns The user's id
 * @throws Error when no user is authenticated (or the lookup fails)
 */
export async function getCurrentUserId(): Promise<string> {
  const result = await supabase.auth.getUser();
  const user = result.data.user;
  if (result.error || !user) {
    throw new Error('Not authenticated - cannot get user ID');
  }
  return user.id;
}
// ============================================
// EDGE FUNCTION CONFIGURATION
// ============================================
/**
 * Base URL for edge function calls (hardcoded per project requirements).
 *
 * @returns The edge function base URL, without a trailing slash
 */
export function getEdgeFunctionUrl(): string {
  const EDGE_FUNCTION_BASE_URL = 'https://api.thrillwiki.com/functions/v1';
  return EDGE_FUNCTION_BASE_URL;
}
/**
 * Supabase anon (public) API key, hardcoded per project requirements.
 * Anon keys are intentionally public; row access is still gated by RLS.
 *
 * @returns The anon key as a JWT string
 */
export function getSupabaseAnonKey(): string {
  const ANON_KEY =
    'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImRka2VueWdwcHlzZ3NlcmJ5aW9hIiwicm9sZSI6ImFub24iLCJpYXQiOjE3Mjg0ODY0MTIsImV4cCI6MjA0NDA2MjQxMn0.0qfDbOvh-Hs5n7HHZ0cRQzH5oEL_1D7kj7v6nh4PqgI';
  return ANON_KEY;
}
// ============================================
// TEST DATA GENERATORS
// ============================================
/**
 * Build a unique park submission payload.
 *
 * Uniqueness comes from combining the caller's testId with Date.now(), so two
 * calls in the same millisecond with the same testId would collide — fine for
 * test usage.
 *
 * @param testId - Identifier woven into name/slug/description for traceability
 * @returns Park payload marked with is_test_data for later cleanup
 */
export function generateUniqueParkData(testId: string): any {
  const now = Date.now();
  return {
    name: `Test Park ${testId} ${now}`,
    slug: `test-park-${testId}-${now}`,
    description: `Test park for ${testId}`,
    park_type: 'theme_park',
    status: 'operating',
    opening_date: '2000-01-01',
    opening_date_precision: 'year',
    location: {
      name: 'Test Location',
      city: 'Test City',
      country: 'US',
      latitude: 40.7128,
      longitude: -74.0060,
    },
    is_test_data: true,
  };
}
/**
 * Build a unique ride submission payload attached to an existing park.
 *
 * @param parkId - Id of the park this ride belongs to
 * @param testId - Identifier woven into name/slug/description for traceability
 * @returns Ride payload marked with is_test_data for later cleanup
 */
export function generateUniqueRideData(parkId: string, testId: string): any {
  const now = Date.now();
  return {
    name: `Test Ride ${testId} ${now}`,
    slug: `test-ride-${testId}-${now}`,
    description: `Test ride for ${testId}`,
    category: 'roller_coaster',
    status: 'operating',
    park_id: parkId,
    opening_date: '2005-01-01',
    opening_date_precision: 'year',
    max_speed_kmh: 100,
    max_height_meters: 50,
    length_meters: 1000,
    is_test_data: true,
  };
}
/**
 * Build a unique company submission payload.
 *
 * @param companyType - Free-form type label (e.g. 'manufacturer'), embedded in name/slug
 * @param testId - Identifier woven into name/slug/description for traceability
 * @returns Company payload marked with is_test_data for later cleanup
 */
export function generateUniqueCompanyData(companyType: string, testId: string): any {
  const now = Date.now();
  return {
    name: `Test ${companyType} ${testId} ${now}`,
    slug: `test-${companyType}-${testId}-${now}`,
    description: `Test ${companyType} for ${testId}`,
    person_type: 'company',
    founded_year: 1990,
    is_test_data: true,
  };
}
/**
 * Build a unique ride model submission payload for a given manufacturer.
 *
 * @param manufacturerId - Id of the manufacturer that owns the model
 * @param testId - Identifier woven into name/slug/description for traceability
 * @returns Ride-model payload marked with is_test_data for later cleanup
 */
export function generateUniqueRideModelData(manufacturerId: string, testId: string): any {
  const now = Date.now();
  return {
    name: `Test Model ${testId} ${now}`,
    slug: `test-model-${testId}-${now}`,
    manufacturer_id: manufacturerId,
    category: 'roller_coaster',
    ride_type: 'steel',
    description: `Test ride model for ${testId}`,
    is_test_data: true,
  };
}
// ============================================
// SUBMISSION CREATION HELPERS
// ============================================
/**
 * Create a park submission through the normal helper and return its ids.
 *
 * Tracks both the submission and its item with the tracker for cleanup.
 * Assumes the submission has exactly one item (uses .single()).
 *
 * @param data - Park payload (see generateUniqueParkData)
 * @param userId - Submitting user's id
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns Ids of the created submission and its single item
 * @throws Error when submission creation fails or the item cannot be found
 */
export async function createTestParkSubmission(
  data: any,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  const { submissionId } = await submitParkCreation(data, userId);
  if (!submissionId) {
    throw new Error('Park submission creation failed - no submission ID returned');
  }
  tracker.track('content_submissions', submissionId);

  // Look up the single submission_items row created for this submission.
  const itemQuery = await supabase
    .from('submission_items')
    .select('id')
    .eq('submission_id', submissionId)
    .single();
  const itemId = itemQuery.data?.id;
  if (!itemId) {
    throw new Error('Failed to get submission item ID');
  }
  tracker.track('submission_items', itemId);

  return { submissionId, itemId };
}
/**
 * Create a ride submission through the normal helper and return its ids.
 *
 * Mirrors createTestParkSubmission: tracks the submission and its single item
 * for cleanup.
 *
 * @param data - Ride payload (see generateUniqueRideData)
 * @param userId - Submitting user's id
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns Ids of the created submission and its single item
 * @throws Error when submission creation fails or the item cannot be found
 */
export async function createTestRideSubmission(
  data: any,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  const { submissionId } = await submitRideCreation(data, userId);
  if (!submissionId) {
    throw new Error('Ride submission creation failed - no submission ID returned');
  }
  tracker.track('content_submissions', submissionId);

  // Look up the single submission_items row created for this submission.
  const itemQuery = await supabase
    .from('submission_items')
    .select('id')
    .eq('submission_id', submissionId)
    .single();
  const itemId = itemQuery.data?.id;
  if (!itemId) {
    throw new Error('Failed to get submission item ID');
  }
  tracker.track('submission_items', itemId);

  return { submissionId, itemId };
}
/**
 * Create a company submission of the given type and return its ids.
 *
 * Dispatches to the type-specific submission helper, then tracks the
 * submission and its single item for cleanup.
 *
 * @param companyType - Which company flavor to submit
 * @param data - Company payload (see generateUniqueCompanyData)
 * @param userId - Submitting user's id
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns Ids of the created submission and its single item
 * @throws Error for unknown types, failed creation, or missing item
 */
export async function createTestCompanySubmission(
  companyType: 'manufacturer' | 'operator' | 'designer' | 'property_owner',
  data: any,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  // Map each company type to its dedicated submission helper.
  const submitters: Record<string, (d: any, u: string) => Promise<{ submitted: boolean; submissionId: string }>> = {
    manufacturer: submitManufacturerCreation,
    operator: submitOperatorCreation,
    designer: submitDesignerCreation,
    property_owner: submitPropertyOwnerCreation,
  };
  const submit = submitters[companyType];
  if (!submit) {
    // Guards against bad values arriving at runtime despite the union type.
    throw new Error(`Unknown company type: ${companyType}`);
  }

  const { submissionId } = await submit(data, userId);
  if (!submissionId) {
    throw new Error('Company submission creation failed - no submission ID returned');
  }
  tracker.track('content_submissions', submissionId);

  // Look up the single submission_items row created for this submission.
  const itemQuery = await supabase
    .from('submission_items')
    .select('id')
    .eq('submission_id', submissionId)
    .single();
  const itemId = itemQuery.data?.id;
  if (!itemId) {
    throw new Error('Failed to get submission item ID');
  }
  tracker.track('submission_items', itemId);

  return { submissionId, itemId };
}
/**
 * Create a ride model submission through the normal helper and return its ids.
 *
 * @param data - Ride-model payload (see generateUniqueRideModelData)
 * @param userId - Submitting user's id
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns Ids of the created submission and its single item
 * @throws Error when submission creation fails or the item cannot be found
 */
export async function createTestRideModelSubmission(
  data: any,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  const { submissionId } = await submitRideModelCreation(data, userId);
  if (!submissionId) {
    throw new Error('Ride model submission creation failed - no submission ID returned');
  }
  tracker.track('content_submissions', submissionId);

  // Look up the single submission_items row created for this submission.
  const itemQuery = await supabase
    .from('submission_items')
    .select('id')
    .eq('submission_id', submissionId)
    .single();
  const itemId = itemQuery.data?.id;
  if (!itemId) {
    throw new Error('Failed to get submission item ID');
  }
  tracker.track('submission_items', itemId);

  return { submissionId, itemId };
}
/**
 * Create a composite submission with dependencies.
 *
 * Inserts a single `content_submissions` row for the primary entity and tracks
 * it for cleanup.
 *
 * NOTE(review): this is a simplified stand-in for the real composite flow (as
 * the inline comments acknowledge). The `dependencies` parameter is accepted
 * but never used, and `itemIds` is populated with the submission id itself
 * rather than real submission_item ids — confirm callers only rely on
 * `submissionId`.
 *
 * @param primaryEntity - The main entity ('park' or 'ride') plus its payload
 * @param dependencies - Dependent entities keyed by tempId (currently unused)
 * @param userId - Submitting user's id
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns The submission id and a (simplified) list of "item" ids
 * @throws Error when the content_submissions insert fails
 */
export async function createCompositeSubmission(
  primaryEntity: { type: 'park' | 'ride'; data: any },
  dependencies: Array<{ type: string; data: any; tempId: string; companyType?: string }>,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemIds: string[] }> {
  // Create main submission
  const { data: submission, error: submissionError } = await supabase
    .from('content_submissions')
    .insert({
      user_id: userId,
      submission_type: primaryEntity.type === 'park' ? 'park' : 'ride',
      status: 'pending',
      is_test_data: true,
    })
    .select()
    .single();
  if (submissionError || !submission) {
    throw new Error(`Failed to create submission: ${submissionError?.message}`);
  }
  tracker.track('content_submissions', submission.id);
  const itemIds: string[] = [];
  // Note: This is a simplified composite submission creation.
  // The real implementation uses specialized submission tables
  // (park_submissions, company_submissions, etc.) which are more complex.
  // For testing purposes we only track the submission; the "item id" below
  // is the submission id, not a submission_items row.
  itemIds.push(submission.id);
  return {
    submissionId: submission.id,
    itemIds,
  };
}
// ============================================
// APPROVAL INVOCATION
// ============================================
/**
 * Approve a submission's items via the process-selective-approval edge function.
 *
 * Never throws: network and HTTP failures are folded into the returned object.
 * Duration is measured from call start to response headers (success path) or
 * to the thrown error (failure path).
 *
 * @param submissionId - Submission to approve
 * @param itemIds - Item ids within the submission to approve
 * @param authToken - Bearer token of the approving user
 * @param idempotencyKey - Optional; a random test key is generated when falsy
 * @returns Outcome with success flag, optional status/error, and duration (ms)
 */
export async function approveSubmission(
  submissionId: string,
  itemIds: string[],
  authToken: string,
  idempotencyKey?: string
): Promise<{
  success: boolean;
  status?: string;
  error?: string;
  duration: number;
}> {
  const startTime = performance.now();
  // `||` (not `??`) so an empty-string key is also replaced by a generated one.
  const key = idempotencyKey || `test-${Date.now()}-${Math.random()}`;

  try {
    const response = await fetch(
      `${getEdgeFunctionUrl()}/process-selective-approval`,
      {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${authToken}`,
          'Content-Type': 'application/json',
          'apikey': getSupabaseAnonKey(),
        },
        body: JSON.stringify({
          submissionId,
          itemIds,
          idempotencyKey: key,
        }),
      }
    );
    // Measure before reading the body, matching the intent of "time to respond".
    const duration = performance.now() - startTime;

    if (!response.ok) {
      return {
        success: false,
        error: `HTTP ${response.status}: ${await response.text()}`,
        duration,
      };
    }

    const payload = await response.json();
    return {
      success: true,
      status: payload.status || 'approved',
      duration,
    };
  } catch (error) {
    return {
      success: false,
      error: formatTestError(error),
      duration: performance.now() - startTime,
    };
  }
}
// ============================================
// POLLING & VERIFICATION
// ============================================
/**
 * Poll a table until a row with the given id exists or the timeout elapses.
 *
 * Polls every 200ms. A maxWaitMs of 0 (or negative) performs no queries.
 *
 * @param table - Entity table to poll
 * @param id - Primary key to look for
 * @param maxWaitMs - Total time budget in milliseconds (default 10s)
 * @returns The row when found, otherwise null after the timeout
 */
export async function pollForEntity(
  table: 'parks' | 'rides' | 'companies' | 'ride_models',
  id: string,
  maxWaitMs: number = 10000
): Promise<any | null> {
  const pollInterval = 200;
  const deadline = Date.now() + maxWaitMs;

  while (Date.now() < deadline) {
    const query = await supabase
      .from(table)
      .select('*')
      .eq('id', id)
      .single();
    if (query.data && !query.error) {
      return query.data;
    }
    await new Promise(resolve => setTimeout(resolve, pollInterval));
  }
  return null;
}
/**
 * Poll until a specific version row exists for an entity, or time out.
 *
 * Polls every 200ms against the entity's "<type>_versions" table, matching on
 * both the entity's foreign key column and the expected version number.
 *
 * @param entityType - Which entity family to check
 * @param entityId - The entity's id
 * @param expectedVersionNumber - Version number to wait for
 * @param maxWaitMs - Total time budget in milliseconds (default 10s)
 * @returns The version row when found, otherwise null after the timeout
 */
export async function pollForVersion(
  entityType: 'park' | 'ride' | 'company' | 'ride_model',
  entityId: string,
  expectedVersionNumber: number,
  maxWaitMs: number = 10000
): Promise<any | null> {
  // Version tables follow the "<entity>_versions" naming convention.
  const versionTable = `${entityType}_versions` as 'park_versions' | 'ride_versions' | 'company_versions' | 'ride_model_versions';
  const pollInterval = 200;
  const deadline = Date.now() + maxWaitMs;

  while (Date.now() < deadline) {
    const query = await supabase
      .from(versionTable)
      .select('*')
      .eq(`${entityType}_id`, entityId)
      .eq('version_number', expectedVersionNumber)
      .single();
    if (query.data && !query.error) {
      return query.data;
    }
    await new Promise(resolve => setTimeout(resolve, pollInterval));
  }
  return null;
}
/**
 * Check whether a submission item is approved and linked to a created entity.
 *
 * @param itemId - The submission_items row id
 * @returns approved=true only when status is 'approved' AND approved_entity_id is set;
 *          on query failure, approved=false with the error message attached
 */
export async function verifySubmissionItemApproved(
  itemId: string
): Promise<{ approved: boolean; entityId: string | null; error?: string }> {
  const { data, error } = await supabase
    .from('submission_items')
    .select('status, approved_entity_id')
    .eq('id', itemId)
    .single();

  if (error) {
    return { approved: false, entityId: null, error: error.message };
  }

  const approved = data.status === 'approved' && Boolean(data.approved_entity_id);
  return { approved, entityId: data.approved_entity_id };
}
/**
 * Check that a submission currently has the expected status.
 *
 * @param submissionId - The content_submissions row id
 * @param expectedStatus - Status to compare against
 * @returns true only when the row exists and its status matches; false on any error
 */
export async function verifySubmissionStatus(
  submissionId: string,
  expectedStatus: 'approved' | 'partially_approved' | 'pending'
): Promise<boolean> {
  const { data, error } = await supabase
    .from('content_submissions')
    .select('status')
    .eq('id', submissionId)
    .single();

  // Any lookup failure counts as "not in the expected state".
  return !error && !!data && data.status === expectedStatus;
}
/**
 * Insert a park row directly, bypassing the moderation pipeline (test setup only).
 *
 * When `data.location` is present, a locations row is created first and the
 * park is linked to it via location_id. Both rows are tracked for cleanup.
 *
 * @param data - Park payload; may include a nested `location` object
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns The new park's id
 * @throws Error when the location or park insert fails
 */
export async function createParkDirectly(
  data: any,
  tracker: TestDataTracker
): Promise<string> {
  // Split the nested location off from the park's own columns.
  const { location, ...parkData } = data;

  if (location) {
    const { data: loc, error: locError } = await supabase
      .from('locations')
      .insert({
        name: location.name,
        city: location.city,
        country: location.country,
        latitude: location.latitude,
        longitude: location.longitude,
      })
      .select()
      .single();
    if (locError || !loc) {
      throw new Error(`Failed to create location: ${locError?.message}`);
    }
    tracker.track('locations', loc.id);
    parkData.location_id = loc.id;
  }

  const { data: park, error } = await supabase
    .from('parks')
    .insert(parkData)
    .select()
    .single();
  if (error || !park) {
    throw new Error(`Failed to create park directly: ${error?.message}`);
  }
  tracker.track('parks', park.id);
  return park.id;
}
/**
 * Insert a ride row directly, bypassing the moderation pipeline (test setup only).
 *
 * @param data - Ride payload inserted as-is
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns The new ride's id
 * @throws Error when the insert fails
 */
export async function createRideDirectly(
  data: any,
  tracker: TestDataTracker
): Promise<string> {
  const insertResult = await supabase
    .from('rides')
    .insert(data)
    .select()
    .single();
  const ride = insertResult.data;
  if (insertResult.error || !ride) {
    throw new Error(`Failed to create ride directly: ${insertResult.error?.message}`);
  }
  tracker.track('rides', ride.id);
  return ride.id;
}
/**
 * Create a pending photo-gallery submission with N placeholder photos.
 *
 * Builds the full chain of rows: content_submissions -> photo_submissions ->
 * submission_items (one linking row) -> photo_submission_items (one per photo,
 * with fake Cloudflare ids/URLs). Every row is tracked for cleanup. Inserts
 * happen sequentially, so a failure partway leaves earlier rows in place —
 * the tracker is responsible for removing them.
 *
 * @param entityId - Id of the park or ride the photos are for
 * @param entityType - Which entity table entityId refers to
 * @param photoCount - Number of placeholder photo rows to create
 * @param userId - Submitting user's id
 * @param tracker - Test-data tracker used for post-test cleanup
 * @returns The content submission id and the linking submission item id
 * @throws Error when any insert in the chain fails
 */
export async function createTestPhotoGallerySubmission(
  entityId: string,
  entityType: 'park' | 'ride',
  photoCount: number,
  userId: string,
  tracker: TestDataTracker
): Promise<{ submissionId: string; itemId: string }> {
  // Create content submission first
  const { data: submission, error: submissionError } = await supabase
    .from('content_submissions')
    .insert({
      user_id: userId,
      submission_type: 'photo_gallery',
      status: 'pending',
      is_test_data: true,
    })
    .select()
    .single();
  if (submissionError || !submission) {
    throw new Error(`Failed to create content submission: ${submissionError?.message}`);
  }
  tracker.track('content_submissions', submission.id);
  // Create photo submission tied to the target entity
  const { data: photoSubmission, error: photoSubError } = await supabase
    .from('photo_submissions')
    .insert({
      entity_id: entityId,
      entity_type: entityType,
      submission_id: submission.id,
      is_test_data: true,
    })
    .select()
    .single();
  if (photoSubError || !photoSubmission) {
    throw new Error(`Failed to create photo submission: ${photoSubError?.message}`);
  }
  tracker.track('photo_submissions', photoSubmission.id);
  // Create submission item linking the submission to the photo submission
  const { data: item, error: itemError } = await supabase
    .from('submission_items')
    .insert({
      submission_id: submission.id,
      photo_submission_id: photoSubmission.id,
      item_type: 'photo_gallery',
      status: 'pending',
      is_test_data: true,
    })
    .select()
    .single();
  if (itemError || !item) {
    throw new Error(`Failed to create submission item: ${itemError?.message}`);
  }
  tracker.track('submission_items', item.id);
  // Create one photo_submission_items row per photo, with fake Cloudflare data
  for (let i = 0; i < photoCount; i++) {
    const { data: photoItem, error: photoItemError } = await supabase
      .from('photo_submission_items')
      .insert({
        photo_submission_id: photoSubmission.id,
        cloudflare_image_id: `test-image-${Date.now()}-${i}`,
        cloudflare_image_url: `https://test.com/image-${i}.jpg`,
        caption: `Test photo ${i + 1}`,
        order_index: i,
        is_test_data: true,
      })
      .select()
      .single();
    if (photoItemError || !photoItem) {
      throw new Error(`Failed to create photo item ${i}: ${photoItemError?.message}`);
    }
    tracker.track('photo_submission_items', photoItem.id);
  }
  return {
    submissionId: submission.id,
    itemId: item.id,
  };
}

View File

@@ -6,7 +6,5 @@
export { IntegrationTestRunner } from './testRunner';
export { allTestSuites } from './suites';
export { formatResultsAsMarkdown, formatSingleTestAsMarkdown } from './formatters';
export { formatTestError } from './formatTestError';
export type { TestResult, Test, TestSuite } from './testRunner';

File diff suppressed because it is too large Load Diff

View File

@@ -6,7 +6,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { formatTestError } from '../formatTestError';
export const authTestSuite: TestSuite = {
id: 'auth',
@@ -65,7 +64,7 @@ export const authTestSuite: TestSuite = {
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
@@ -138,7 +137,7 @@ export const authTestSuite: TestSuite = {
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
@@ -188,7 +187,7 @@ export const authTestSuite: TestSuite = {
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
@@ -249,7 +248,7 @@ export const authTestSuite: TestSuite = {
suite: 'Authentication & Authorization',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};

View File

@@ -7,7 +7,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
export const dataIntegrityTestSuite: TestSuite = {
id: 'data-integrity',
@@ -78,7 +77,7 @@ export const dataIntegrityTestSuite: TestSuite = {
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
@@ -140,7 +139,7 @@ export const dataIntegrityTestSuite: TestSuite = {
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
@@ -150,69 +149,52 @@ export const dataIntegrityTestSuite: TestSuite = {
{
id: 'integrity-003',
name: 'Unique Constraint Enforcement',
description: 'Tests unique constraints prevent duplicate slugs via approval pipeline',
description: 'Tests unique constraints prevent duplicate slugs',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Import necessary helpers
const {
getCurrentUserId,
getAuthToken,
generateUniqueParkData,
createTestParkSubmission,
approveSubmission
} = await import('../helpers/approvalTestHelpers');
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
// Create first park with unique slug
const baseSlug = `unique-test-${Date.now()}`;
const parkData1 = {
...generateUniqueParkData('integrity-003-1'),
slug: baseSlug // Override with our controlled slug
};
// Create and approve first submission
const { submissionId: sub1Id, itemId: item1Id } = await createTestParkSubmission(parkData1, userId, tracker);
const approval1 = await approveSubmission(sub1Id, [item1Id], authToken);
if (!approval1.success) {
throw new Error(`First park approval failed: ${approval1.error}`);
}
// Get first park ID
const { data: item1 } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', item1Id)
// Create a park
const slug = `unique-test-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Unique Test Park',
slug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
if (!item1?.approved_entity_id) throw new Error('First park not created');
tracker.track('parks', item1.approved_entity_id);
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
// Create second submission with SAME slug
const parkData2 = {
...generateUniqueParkData('integrity-003-2'),
slug: baseSlug // Same slug - should fail on approval
};
parkId = park.id;
tracker.track('parks', parkId);
const { submissionId: sub2Id, itemId: item2Id } = await createTestParkSubmission(parkData2, userId, tracker);
// Try to create another park with same slug
const { error: duplicateError } = await supabase
.from('parks')
.insert({
name: 'Duplicate Park',
slug, // Same slug
park_type: 'theme_park',
status: 'operating',
is_test_data: true
});
// Try to approve second submission (should fail due to unique constraint)
const approval2 = await approveSubmission(sub2Id, [item2Id], authToken);
// Approval should fail
if (approval2.success) {
throw new Error('Second approval succeeded when it should have failed (duplicate slug)');
// This SHOULD fail with unique violation
if (!duplicateError) {
throw new Error('Unique constraint not enforced - duplicate slug was accepted');
}
// Verify the error mentions unique constraint or duplicate
const errorMsg = approval2.error?.toLowerCase() || '';
if (!errorMsg.includes('unique') && !errorMsg.includes('duplicate') && !errorMsg.includes('already exists')) {
throw new Error(`Expected unique constraint error, got: ${approval2.error}`);
// Verify it's a unique violation
if (!duplicateError.message.includes('unique') && !duplicateError.message.includes('duplicate')) {
throw new Error(`Expected unique constraint error, got: ${duplicateError.message}`);
}
const duration = Date.now() - startTime;
@@ -226,10 +208,7 @@ export const dataIntegrityTestSuite: TestSuite = {
timestamp: new Date().toISOString(),
details: {
constraintEnforced: true,
firstParkCreated: true,
secondParkBlocked: true,
errorMessage: approval2.error,
followedPipeline: true
errorMessage: duplicateError.message
}
};
} catch (error) {
@@ -240,12 +219,16 @@ export const dataIntegrityTestSuite: TestSuite = {
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('integrity-003 cleanup incomplete:', remaining);
}
}
}
},
@@ -306,7 +289,7 @@ export const dataIntegrityTestSuite: TestSuite = {
suite: 'Data Integrity & Constraints',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};

View File

@@ -6,7 +6,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { formatTestError } from '../formatTestError';
export const edgeFunctionTestSuite: TestSuite = {
id: 'edge-functions',
@@ -69,7 +68,7 @@ export const edgeFunctionTestSuite: TestSuite = {
suite: 'Edge Function Tests',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
@@ -122,7 +121,7 @@ export const edgeFunctionTestSuite: TestSuite = {
suite: 'Edge Function Tests',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
@@ -188,7 +187,7 @@ export const edgeFunctionTestSuite: TestSuite = {
suite: 'Edge Function Tests',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}

View File

@@ -8,9 +8,7 @@ import { authTestSuite } from './authTests';
import { versioningTestSuite } from './versioningTests';
import { dataIntegrityTestSuite } from './dataIntegrityTests';
import { submissionTestSuite } from './submissionTests';
import { approvalPipelineTestSuite } from './approvalPipelineTests';
import { moderationTestSuite } from './moderationTests';
import { moderationDependencyTestSuite } from './moderationDependencyTests';
import { edgeFunctionTestSuite } from './edgeFunctionTests';
import { unitConversionTestSuite } from './unitConversionTests';
import { performanceTestSuite } from './performanceTests';
@@ -21,9 +19,7 @@ export const allTestSuites: TestSuite[] = [
versioningTestSuite,
dataIntegrityTestSuite,
submissionTestSuite,
approvalPipelineTestSuite,
moderationTestSuite,
moderationDependencyTestSuite,
edgeFunctionTestSuite,
unitConversionTestSuite,
performanceTestSuite,
@@ -34,9 +30,7 @@ export {
versioningTestSuite,
dataIntegrityTestSuite,
submissionTestSuite,
approvalPipelineTestSuite,
moderationTestSuite,
moderationDependencyTestSuite,
edgeFunctionTestSuite,
unitConversionTestSuite,
performanceTestSuite,

View File

@@ -5,9 +5,7 @@
*/
import { supabase } from '@/lib/supabaseClient';
import { submitParkCreation } from '@/lib/entitySubmissionHelpers';
import type { TestSuite, TestResult } from '../testRunner';
import { formatTestError } from '../formatTestError';
export const moderationDependencyTestSuite: TestSuite = {
id: 'moderation-dependencies',
@@ -25,55 +23,49 @@ export const moderationDependencyTestSuite: TestSuite = {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Create two independent park submissions using proper helpers
const park1Result = await submitParkCreation(
{
name: 'Test Park 1 Dependency',
slug: 'test-park-1-dep',
park_type: 'theme_park',
status: 'operating',
location: {
name: 'Test Location 1',
country: 'US',
latitude: 40.7128,
longitude: -74.0060,
display_name: 'Test Location 1, US'
// Create submission with 2 independent park items
const { data: submission, error: createError } = await supabase
.from('content_submissions')
.insert({
user_id: userData.user.id,
submission_type: 'park',
status: 'pending',
content: { test: true }
})
.select()
.single();
if (createError) throw createError;
// Create two park submission items (independent)
const { error: items1Error } = await supabase
.from('submission_items')
.insert([
{
submission_id: submission.id,
item_type: 'park',
item_data: { name: 'Test Park 1', slug: 'test-park-1', country: 'US' },
status: 'pending'
},
{
submission_id: submission.id,
item_type: 'park',
item_data: { name: 'Test Park 2', slug: 'test-park-2', country: 'US' },
status: 'pending'
}
},
userData.user.id
);
]);
const park2Result = await submitParkCreation(
{
name: 'Test Park 2 Dependency',
slug: 'test-park-2-dep',
park_type: 'theme_park',
status: 'operating',
location: {
name: 'Test Location 2',
country: 'US',
latitude: 34.0522,
longitude: -118.2437,
display_name: 'Test Location 2, US'
}
},
userData.user.id
);
if (items1Error) throw items1Error;
if (!park1Result.submitted || !park2Result.submitted) {
throw new Error('Failed to create park submissions');
}
// Get submission items for both parks
// Get items
const { data: items } = await supabase
.from('submission_items')
.select('id, submission_id')
.in('submission_id', [park1Result.submissionId!, park2Result.submissionId!])
.eq('item_type', 'park')
.select('id')
.eq('submission_id', submission.id)
.order('created_at', { ascending: true });
if (!items || items.length < 2) {
throw new Error('Failed to find submission items');
if (!items || items.length !== 2) {
throw new Error('Failed to create submission items');
}
// Approve second item first (should work - no dependencies)
@@ -93,10 +85,7 @@ export const moderationDependencyTestSuite: TestSuite = {
if (approve1Error) throw new Error('Failed to approve first item second');
// Cleanup
await supabase.from('content_submissions').delete().in('id', [
park1Result.submissionId!,
park2Result.submissionId!
]);
await supabase.from('content_submissions').delete().eq('id', submission.id);
return {
id: 'dep-001',
@@ -113,7 +102,7 @@ export const moderationDependencyTestSuite: TestSuite = {
suite: 'Multi-Item Dependency Resolution',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
@@ -122,77 +111,40 @@ export const moderationDependencyTestSuite: TestSuite = {
{
id: 'dep-002',
name: 'Verify Submission Item Relational Structure',
description: 'Verifies that submission items use proper relational foreign keys',
name: 'Verify Submission Item Dependencies Exist',
description: 'Verifies that submission items have proper dependency tracking',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
try {
const { data: userData } = await supabase.auth.getUser();
if (!userData.user) throw new Error('No authenticated user');
// Create a test park submission
const parkResult = await submitParkCreation(
{
name: 'Test Park Schema Check',
slug: 'test-park-schema-check',
park_type: 'theme_park',
status: 'operating',
location: {
name: 'Test Location Schema',
country: 'US',
latitude: 40.7128,
longitude: -74.0060,
display_name: 'Test Location Schema, US'
}
},
userData.user.id
);
if (!parkResult.submitted) {
throw new Error('Failed to create test park submission');
}
// Verify submission item has proper structure
const { data: item, error: itemError } = await supabase
// Verify submission_items table has dependency columns
const { data: testItem } = await supabase
.from('submission_items')
.select('id, status, depends_on, order_index, item_type, action_type')
.eq('submission_id', parkResult.submissionId!)
.eq('item_type', 'park')
.single();
if (itemError) throw itemError;
if (!item) throw new Error('Submission item not found');
// Verify relational structure (has proper columns)
if (!item.item_type || !item.action_type) {
throw new Error('Missing required fields - schema structure incorrect');
}
// Cleanup
await supabase.from('content_submissions').delete().eq('id', parkResult.submissionId!);
.select('id, status')
.limit(1)
.maybeSingle();
// If query succeeds, table exists and is accessible
return {
id: 'dep-002',
name: 'Verify Submission Item Relational Structure',
name: 'Verify Submission Item Dependencies Exist',
suite: 'Multi-Item Dependency Resolution',
status: 'pass',
duration: Date.now() - startTime,
timestamp: new Date().toISOString(),
details: {
relationalStructure: true,
hasForeignKeys: true,
message: 'Submission items properly use relational foreign keys'
tableAccessible: true,
testQuery: 'submission_items table verified'
}
};
} catch (error) {
return {
id: 'dep-002',
name: 'Verify Submission Item Relational Structure',
name: 'Verify Submission Item Dependencies Exist',
suite: 'Multi-Item Dependency Resolution',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}

View File

@@ -6,7 +6,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { formatTestError } from '../formatTestError';
export const moderationLockTestSuite: TestSuite = {
id: 'moderation-locks',
@@ -98,7 +97,7 @@ export const moderationLockTestSuite: TestSuite = {
suite: 'Moderation Lock Management',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
@@ -184,7 +183,7 @@ export const moderationLockTestSuite: TestSuite = {
suite: 'Moderation Lock Management',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
@@ -285,7 +284,7 @@ export const moderationLockTestSuite: TestSuite = {
suite: 'Moderation Lock Management',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}

View File

@@ -6,7 +6,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { formatTestError } from '../formatTestError';
export const moderationTestSuite: TestSuite = {
id: 'moderation',
@@ -54,7 +53,7 @@ export const moderationTestSuite: TestSuite = {
suite: 'Moderation Queue & Workflow',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}

View File

@@ -7,7 +7,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
export const performanceTestSuite: TestSuite = {
id: 'performance',
@@ -97,7 +96,7 @@ export const performanceTestSuite: TestSuite = {
suite: 'Performance & Scalability',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}
@@ -113,36 +112,22 @@ export const performanceTestSuite: TestSuite = {
let parkId: string | null = null;
try {
// Import helpers and create park via pipeline
const {
getCurrentUserId,
getAuthToken,
generateUniqueParkData,
createTestParkSubmission,
approveSubmission
} = await import('../helpers/approvalTestHelpers');
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const parkData = generateUniqueParkData('perf-002');
const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Park creation failed: ${approval.error || 'Unknown error'}`);
}
// Get park ID from submission item
const { data: parkItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
// Create test park
const parkSlug = `test-park-perf-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park Performance',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
parkId = parkItem?.approved_entity_id || null;
if (!parkId) throw new Error('No park ID after approval');
if (parkError) throw parkError;
parkId = park.id;
tracker.track('parks', parkId);
// Create multiple versions (updates)
@@ -197,7 +182,7 @@ export const performanceTestSuite: TestSuite = {
suite: 'Performance & Scalability',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
@@ -229,7 +214,7 @@ export const performanceTestSuite: TestSuite = {
const modDuration = Date.now() - modStart;
if (modError) throw new Error(`Moderator check failed: ${modError.message}`);
if (modError) throw modError;
// Test is_user_banned function performance
const banStart = Date.now();
@@ -240,7 +225,7 @@ export const performanceTestSuite: TestSuite = {
const banDuration = Date.now() - banStart;
if (banError) throw new Error(`Ban check failed: ${banError.message}`);
if (banError) throw banError;
// Performance threshold: 200ms for simple functions
const threshold = 200;
@@ -280,7 +265,7 @@ export const performanceTestSuite: TestSuite = {
suite: 'Performance & Scalability',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}

View File

@@ -1,96 +1,71 @@
/**
* Submission Pipeline Validation Tests
* Entity Submission & Validation Integration Tests
*
* Tests submission creation, validation, and the full approval flow.
* All tests follow the sacred pipeline architecture.
* Tests for submission validation, schema validation, and entity creation.
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
import {
generateUniqueParkData,
generateUniqueRideData,
generateUniqueCompanyData,
generateUniqueRideModelData,
createTestParkSubmission,
createTestRideSubmission,
createTestCompanySubmission,
createTestRideModelSubmission,
approveSubmission,
pollForEntity,
getAuthToken,
getCurrentUserId,
} from '../helpers/approvalTestHelpers';
export const submissionTestSuite: TestSuite = {
id: 'submission',
name: 'Entity Submission & Validation',
description: 'Tests submission creation, validation, and approval pipeline',
description: 'Tests for entity submission workflows and validation schemas',
tests: [
{
id: 'submission-001',
name: 'Park Creation Validation',
description: 'Validates park submission and approval creates entity',
description: 'Validates park submission and creation',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const parkData = generateUniqueParkData('submission-001');
const parkSlug = `test-park-submit-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
// Create submission
const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
// Verify submission was created
const { data: submission } = await supabase
.from('content_submissions')
.select('status, submission_type')
.eq('id', submissionId)
// Create park with valid data
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Test Park Submission',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
description: 'Test park for submission validation'
})
.select('id, name, slug, park_type, status')
.single();
if (!submission) throw new Error('Submission not found');
if (submission.status !== 'pending') {
throw new Error(`Expected status "pending", got "${submission.status}"`);
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('Park not returned after creation');
parkId = park.id;
// Validate created park has correct data
if (park.name !== 'Test Park Submission') {
throw new Error(`Expected name "Test Park Submission", got "${park.name}"`);
}
if (submission.submission_type !== 'park') {
throw new Error(`Expected type "park", got "${submission.submission_type}"`);
if (park.slug !== parkSlug) {
throw new Error(`Expected slug "${parkSlug}", got "${park.slug}"`);
}
if (park.park_type !== 'theme_park') {
throw new Error(`Expected park_type "theme_park", got "${park.park_type}"`);
}
// Approve submission
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Approval failed: ${approval.error}`);
}
// Test slug uniqueness constraint
const { error: duplicateError } = await supabase
.from('parks')
.insert({
name: 'Duplicate Slug Park',
slug: parkSlug, // Same slug
park_type: 'theme_park',
status: 'operating'
});
// Verify entity was created
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id, status')
.eq('id', itemId)
.single();
if (!item?.approved_entity_id) {
throw new Error('No entity created after approval');
}
if (item.status !== 'approved') {
throw new Error(`Expected item status "approved", got "${item.status}"`);
}
tracker.track('parks', item.approved_entity_id);
// Verify park data
const park = await pollForEntity('parks', item.approved_entity_id);
if (!park) throw new Error('Park entity not found');
if (park.name !== parkData.name) {
throw new Error(`Expected name "${parkData.name}", got "${park.name}"`);
}
if (park.slug !== parkData.slug) {
throw new Error(`Expected slug "${parkData.slug}", got "${park.slug}"`);
if (!duplicateError) {
throw new Error('Duplicate slug was allowed (uniqueness constraint failed)');
}
const duration = Date.now() - startTime;
@@ -103,9 +78,9 @@ export const submissionTestSuite: TestSuite = {
duration,
timestamp: new Date().toISOString(),
details: {
submissionId,
parkId: item.approved_entity_id,
validationsPassed: ['submission_created', 'approval_succeeded', 'entity_created']
parkId,
parkSlug,
validationsPassed: ['name', 'slug', 'park_type', 'uniqueness_constraint']
}
};
@@ -116,72 +91,81 @@ export const submissionTestSuite: TestSuite = {
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('submission-001 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'submission-002',
name: 'Ride Creation with Dependencies',
description: 'Validates ride submission requires valid park and creates correctly',
description: 'Validates ride submission requires valid park_id',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
let rideId: string | null = null;
try {
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
// First create and approve a park
const parkData = generateUniqueParkData('submission-002-park');
const { submissionId: parkSubId, itemId: parkItemId } = await createTestParkSubmission(parkData, userId, tracker);
const parkApproval = await approveSubmission(parkSubId, [parkItemId], authToken);
if (!parkApproval.success) {
throw new Error(`Park approval failed: ${parkApproval.error}`);
}
const { data: parkItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', parkItemId)
// First create a park
const parkSlug = `test-park-ride-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park for Ride',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
const parkId = parkItem?.approved_entity_id;
if (!parkId) throw new Error('Park not created');
tracker.track('parks', parkId);
if (parkError) throw new Error(`Park creation failed: ${parkError.message}`);
parkId = park.id;
// Now create ride submission
const rideData = generateUniqueRideData(parkId, 'submission-002');
const { submissionId: rideSubId, itemId: rideItemId } = await createTestRideSubmission(rideData, userId, tracker);
// Try to create ride with invalid park_id (should fail)
const invalidParkId = '00000000-0000-0000-0000-000000000000';
const { error: invalidError } = await supabase
.from('rides')
.insert({
name: 'Test Ride Invalid Park',
slug: `test-ride-invalid-${Date.now()}`,
park_id: invalidParkId,
category: 'roller_coaster',
status: 'operating'
});
// Approve ride
const rideApproval = await approveSubmission(rideSubId, [rideItemId], authToken);
if (!rideApproval.success) {
throw new Error(`Ride approval failed: ${rideApproval.error}`);
if (!invalidError) {
throw new Error('Ride with invalid park_id was allowed (foreign key constraint failed)');
}
// Verify ride created
const { data: rideItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', rideItemId)
// Create ride with valid park_id (should succeed)
const rideSlug = `test-ride-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: ride, error: rideError } = await supabase
.from('rides')
.insert({
name: 'Test Ride Valid Park',
slug: rideSlug,
park_id: parkId,
category: 'roller_coaster',
status: 'operating'
})
.select('id, name, park_id')
.single();
const rideId = rideItem?.approved_entity_id;
if (!rideId) throw new Error('Ride not created after approval');
if (rideError) throw new Error(`Ride creation failed: ${rideError.message}`);
if (!ride) throw new Error('Ride not returned after creation');
tracker.track('rides', rideId);
rideId = ride.id;
// Verify ride data
const ride = await pollForEntity('rides', rideId);
if (!ride) throw new Error('Ride entity not found');
if (ride.park_id !== parkId) {
throw new Error(`Expected park_id "${parkId}", got "${ride.park_id}"`);
}
@@ -198,7 +182,7 @@ export const submissionTestSuite: TestSuite = {
details: {
parkId,
rideId,
validationsPassed: ['park_created', 'ride_created', 'dependency_valid']
validationsPassed: ['foreign_key_constraint', 'valid_dependency']
}
};
@@ -209,68 +193,57 @@ export const submissionTestSuite: TestSuite = {
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('submission-002 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'submission-003',
name: 'Company Creation All Types',
description: 'Validates company submission for all company types',
description: 'Validates company creation for all company types',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
const companyIds: string[] = [];
try {
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const companyTypes = ['manufacturer', 'operator', 'designer', 'property_owner'] as const;
const createdCompanies: Array<{ type: string; id: string }> = [];
for (const companyType of companyTypes) {
const companyData = generateUniqueCompanyData(companyType, `submission-003-${companyType}`);
const slug = `test-company-${companyType}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
// Create submission
const { submissionId, itemId } = await createTestCompanySubmission(
companyType,
companyData,
userId,
tracker
);
// Approve submission
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`${companyType} approval failed: ${approval.error}`);
}
// Verify entity created
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
const { data: company, error: createError } = await supabase
.from('companies')
.insert({
name: `Test ${companyType} Company`,
slug,
company_type: companyType,
description: `Test company of type ${companyType}`
})
.select('id, company_type')
.single();
const companyId = item?.approved_entity_id;
if (!companyId) {
throw new Error(`${companyType} not created after approval`);
if (createError) {
throw new Error(`${companyType} creation failed: ${createError.message}`);
}
if (!company) {
throw new Error(`${companyType} not returned after creation`);
}
tracker.track('companies', companyId);
companyIds.push(company.id);
tracker.track('companies', company.id);
// Verify company type
const company = await pollForEntity('companies', companyId);
if (!company) throw new Error(`${companyType} entity not found`);
if (company.company_type !== companyType) {
throw new Error(`Expected company_type "${companyType}", got "${company.company_type}"`);
}
createdCompanies.push({ type: companyType, id: companyId });
}
const duration = Date.now() - startTime;
@@ -283,9 +256,9 @@ export const submissionTestSuite: TestSuite = {
duration,
timestamp: new Date().toISOString(),
details: {
companiesCreated: createdCompanies.length,
companiesCreated: companyIds.length,
companyTypes: companyTypes,
companies: createdCompanies
companyIds
}
};
@@ -296,90 +269,105 @@ export const submissionTestSuite: TestSuite = {
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('submission-003 cleanup incomplete:', remaining);
}
}
}
},
{
id: 'submission-004',
name: 'Ride Model with Images',
description: 'Validates ride model submission with image fields',
description: 'Validates ride model creation with image fields',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let manufacturerId: string | null = null;
let modelId: string | null = null;
try {
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
// Create and approve manufacturer
const mfgData = generateUniqueCompanyData('manufacturer', 'submission-004-mfg');
const { submissionId: mfgSubId, itemId: mfgItemId } = await createTestCompanySubmission(
'manufacturer',
mfgData,
userId,
tracker
);
const mfgApproval = await approveSubmission(mfgSubId, [mfgItemId], authToken);
if (!mfgApproval.success) {
throw new Error(`Manufacturer approval failed: ${mfgApproval.error}`);
}
const { data: mfgItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', mfgItemId)
// Create manufacturer first
const mfgSlug = `test-mfg-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: manufacturer, error: mfgError } = await supabase
.from('companies')
.insert({
name: 'Test Manufacturer',
slug: mfgSlug,
company_type: 'manufacturer'
})
.select('id')
.single();
const manufacturerId = mfgItem?.approved_entity_id;
if (!manufacturerId) throw new Error('Manufacturer not created');
tracker.track('companies', manufacturerId);
if (mfgError) throw new Error(`Manufacturer creation failed: ${mfgError.message}`);
manufacturerId = manufacturer.id;
// Create ride model submission
const modelData = generateUniqueRideModelData(manufacturerId, 'submission-004');
const { submissionId, itemId } = await createTestRideModelSubmission(modelData, userId, tracker);
// Create ride model with images
const modelSlug = `test-model-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const testImageUrl = 'https://imagedelivery.net/test-account/test-image-id/public';
const testImageId = 'test-image-id';
// Approve ride model
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Ride model approval failed: ${approval.error}`);
}
// Verify entity created
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
const { data: model, error: modelError } = await supabase
.from('ride_models')
.insert({
name: 'Test Ride Model',
slug: modelSlug,
manufacturer_id: manufacturerId,
category: 'roller_coaster',
ride_type: 'steel_coaster',
banner_image_url: testImageUrl,
banner_image_id: testImageId,
card_image_url: testImageUrl,
card_image_id: testImageId
})
.select('id, banner_image_url, banner_image_id, card_image_url, card_image_id')
.single();
const modelId = item?.approved_entity_id;
if (!modelId) throw new Error('Ride model not created after approval');
if (modelError) throw new Error(`Ride model creation failed: ${modelError.message}`);
if (!model) throw new Error('Ride model not returned after creation');
tracker.track('ride_models', modelId);
modelId = model.id;
// Verify model data
const model = await pollForEntity('ride_models', modelId);
if (!model) throw new Error('Ride model entity not found');
if (model.manufacturer_id !== manufacturerId) {
throw new Error(`Expected manufacturer_id "${manufacturerId}", got "${model.manufacturer_id}"`);
// Validate image fields
if (model.banner_image_url !== testImageUrl) {
throw new Error(`banner_image_url mismatch: expected "${testImageUrl}", got "${model.banner_image_url}"`);
}
if (model.banner_image_id !== testImageId) {
throw new Error(`banner_image_id mismatch: expected "${testImageId}", got "${model.banner_image_id}"`);
}
if (model.card_image_url !== testImageUrl) {
throw new Error(`card_image_url mismatch`);
}
if (model.card_image_id !== testImageId) {
throw new Error(`card_image_id mismatch`);
}
// Verify version created
const { data: version } = await supabase
.from('ride_model_versions')
.select('version_number')
.eq('ride_model_id', modelId)
.eq('version_number', 1)
.single();
// Verify version was created with images
let version: any = null;
const pollStart = Date.now();
while (!version && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('ride_model_versions')
.select('banner_image_url, banner_image_id, card_image_url, card_image_id')
.eq('ride_model_id', modelId)
.eq('version_number', 1)
.single();
if (data) {
version = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
if (!version) throw new Error('Version not created for ride model');
if (!version) throw new Error('Version not created after 5s timeout');
if (version.banner_image_url !== testImageUrl) {
throw new Error('Version missing banner_image_url');
}
const duration = Date.now() - startTime;
@@ -393,8 +381,8 @@ export const submissionTestSuite: TestSuite = {
details: {
modelId,
manufacturerId,
versionCreated: true,
followedPipeline: true
imageFieldsValidated: ['banner_image_url', 'banner_image_id', 'card_image_url', 'card_image_id'],
versionCreated: true
}
};
@@ -405,11 +393,16 @@ export const submissionTestSuite: TestSuite = {
suite: 'Entity Submission & Validation',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
if (modelId) {
await supabase.from('ride_models').delete().eq('id', modelId);
}
if (manufacturerId) {
await supabase.from('companies').delete().eq('id', manufacturerId);
}
}
}
}

View File

@@ -7,7 +7,6 @@
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
export const unitConversionTestSuite: TestSuite = {
id: 'unit-conversion',
@@ -25,93 +24,65 @@ export const unitConversionTestSuite: TestSuite = {
let rideId: string | null = null;
try {
// Import helpers and create via pipeline
const {
getCurrentUserId,
getAuthToken,
generateUniqueParkData,
generateUniqueRideData,
createTestParkSubmission,
createTestRideSubmission,
approveSubmission
} = await import('../helpers/approvalTestHelpers');
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
// Create and approve park
const parkData = generateUniqueParkData('unit-001-park');
const { submissionId: parkSubId, itemId: parkItemId } = await createTestParkSubmission(parkData, userId, tracker);
const parkApproval = await approveSubmission(parkSubId, [parkItemId], authToken);
if (!parkApproval.success) {
throw new Error(`Park creation failed: ${parkApproval.error || 'Unknown error'}`);
}
// Get park ID from submission item
const { data: parkItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', parkItemId)
// Create test park
const parkSlug = `test-park-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park Units',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
parkId = parkItem?.approved_entity_id || null;
if (!parkId) throw new Error('No park ID after approval');
if (parkError) throw parkError;
parkId = park.id;
tracker.track('parks', parkId);
// Create and approve ride with metric values
const rideData = {
...generateUniqueRideData(parkId, 'unit-001-ride'),
max_speed_kmh: 100.0,
max_height_meters: 50.0,
length_meters: 1000.0,
drop_height_meters: 45.0,
height_requirement: 120
// Create ride with metric values
const rideSlug = `test-ride-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const testData = {
name: 'Test Ride Metric',
slug: rideSlug,
park_id: parkId,
category: 'roller_coaster',
status: 'operating',
max_speed_kmh: 100.0, // km/h (metric)
max_height_meters: 50.0, // meters (metric)
length_meters: 1000.0, // meters (metric)
drop_height_meters: 45.0, // meters (metric)
height_requirement: 120 // cm (metric)
};
const { submissionId: rideSubId, itemId: rideItemId } = await createTestRideSubmission(rideData, userId, tracker);
const rideApproval = await approveSubmission(rideSubId, [rideItemId], authToken);
if (!rideApproval.success) {
throw new Error(`Ride creation failed: ${rideApproval.error || 'Unknown error'}`);
}
// Get ride ID from submission item
const { data: rideItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', rideItemId)
.single();
rideId = rideItem?.approved_entity_id || null;
if (!rideId) throw new Error('No ride ID after approval');
tracker.track('rides', rideId);
// Fetch ride data for validation
const { data: ride, error: rideError } = await supabase
.from('rides')
.insert({ ...testData, is_test_data: true })
.select('id, max_speed_kmh, max_height_meters, length_meters, drop_height_meters, height_requirement')
.eq('id', rideId)
.single();
if (rideError || !ride) throw new Error('Ride not found after creation');
if (rideError) throw new Error(`Ride creation failed: ${rideError.message}`);
if (!ride) throw new Error('Ride not returned');
rideId = ride.id;
tracker.track('rides', rideId);
// Validate values are stored in metric
const tolerance = 0.01;
const tolerance = 0.01; // Allow small floating point differences
if (Math.abs((ride.max_speed_kmh ?? 0) - 100.0) > tolerance) {
throw new Error(`max_speed_kmh mismatch: expected 100.0, got ${ride.max_speed_kmh}`);
if (Math.abs((ride.max_speed_kmh ?? 0) - testData.max_speed_kmh) > tolerance) {
throw new Error(`max_speed_kmh mismatch: expected ${testData.max_speed_kmh}, got ${ride.max_speed_kmh}`);
}
if (Math.abs((ride.max_height_meters ?? 0) - 50.0) > tolerance) {
throw new Error(`max_height_meters mismatch: expected 50.0, got ${ride.max_height_meters}`);
if (Math.abs((ride.max_height_meters ?? 0) - testData.max_height_meters) > tolerance) {
throw new Error(`max_height_meters mismatch: expected ${testData.max_height_meters}, got ${ride.max_height_meters}`);
}
if (Math.abs((ride.length_meters ?? 0) - 1000.0) > tolerance) {
throw new Error(`length_meters mismatch: expected 1000.0, got ${ride.length_meters}`);
if (Math.abs((ride.length_meters ?? 0) - testData.length_meters) > tolerance) {
throw new Error(`length_meters mismatch: expected ${testData.length_meters}, got ${ride.length_meters}`);
}
if (Math.abs((ride.height_requirement ?? 0) - 120) > tolerance) {
throw new Error(`height_requirement mismatch: expected 120 cm, got ${ride.height_requirement}`);
if (Math.abs((ride.height_requirement ?? 0) - testData.height_requirement) > tolerance) {
throw new Error(`height_requirement mismatch: expected ${testData.height_requirement} cm, got ${ride.height_requirement}`);
}
const duration = Date.now() - startTime;
@@ -137,7 +108,7 @@ export const unitConversionTestSuite: TestSuite = {
suite: 'Unit Conversion Tests',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
@@ -160,66 +131,44 @@ export const unitConversionTestSuite: TestSuite = {
let rideId: string | null = null;
try {
// Import helpers and create via pipeline
const {
getCurrentUserId,
getAuthToken,
generateUniqueParkData,
generateUniqueRideData,
createTestParkSubmission,
createTestRideSubmission,
approveSubmission
} = await import('../helpers/approvalTestHelpers');
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
// Create and approve park
const parkData = generateUniqueParkData('unit-002-park');
const { submissionId: parkSubId, itemId: parkItemId } = await createTestParkSubmission(parkData, userId, tracker);
const parkApproval = await approveSubmission(parkSubId, [parkItemId], authToken);
if (!parkApproval.success) {
throw new Error(`Park creation failed: ${parkApproval.error || 'Unknown error'}`);
}
// Get park ID from submission item
const { data: parkItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', parkItemId)
// Create test park
const parkSlug = `test-park-ver-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: park, error: parkError } = await supabase
.from('parks')
.insert({
name: 'Test Park Version Units',
slug: parkSlug,
park_type: 'theme_park',
status: 'operating',
is_test_data: true
})
.select('id')
.single();
parkId = parkItem?.approved_entity_id || null;
if (!parkId) throw new Error('No park ID after approval');
if (parkError) throw parkError;
parkId = park.id;
tracker.track('parks', parkId);
// Create and approve ride with metric values
const rideData = {
...generateUniqueRideData(parkId, 'unit-002-ride'),
max_speed_kmh: 120.0,
max_height_meters: 60.0,
height_requirement: 140
};
const { submissionId: rideSubId, itemId: rideItemId } = await createTestRideSubmission(rideData, userId, tracker);
const rideApproval = await approveSubmission(rideSubId, [rideItemId], authToken);
if (!rideApproval.success) {
throw new Error(`Ride creation failed: ${rideApproval.error || 'Unknown error'}`);
}
// Get ride ID from submission item
const { data: rideItem } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', rideItemId)
// Create ride with metric values
const rideSlug = `test-ride-ver-units-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const { data: ride, error: rideError } = await supabase
.from('rides')
.insert({
name: 'Test Ride Version Metric',
slug: rideSlug,
park_id: parkId,
category: 'roller_coaster',
status: 'operating',
max_speed_kmh: 120.0,
max_height_meters: 60.0,
height_requirement: 140,
is_test_data: true
})
.select('id')
.single();
rideId = rideItem?.approved_entity_id || null;
if (!rideId) throw new Error('No ride ID after approval');
if (rideError) throw rideError;
rideId = ride.id;
tracker.track('rides', rideId);
// Poll for version creation
@@ -277,7 +226,7 @@ export const unitConversionTestSuite: TestSuite = {
suite: 'Unit Conversion Tests',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
} finally {
@@ -358,7 +307,7 @@ export const unitConversionTestSuite: TestSuite = {
suite: 'Unit Conversion Tests',
status: 'fail',
duration: Date.now() - startTime,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString()
};
}

View File

@@ -3,83 +3,76 @@
*
* Tests the complete versioning system end-to-end including automatic
* version creation, attribution, and rollback functionality.
*
* All tests follow the sacred pipeline: submitParkCreation → approve → verify versioning
*/
import { supabase } from '@/lib/supabaseClient';
import type { TestSuite, TestResult } from '../testRunner';
import { TestDataTracker } from '../TestDataTracker';
import { formatTestError } from '../formatTestError';
import {
generateUniqueParkData,
createTestParkSubmission,
approveSubmission,
pollForEntity,
pollForVersion,
getAuthToken,
getCurrentUserId,
} from '../helpers/approvalTestHelpers';
export const versioningTestSuite: TestSuite = {
id: 'versioning',
name: 'Versioning & Rollback',
description: 'Tests version creation, attribution, rollback, and cleanup via sacred pipeline',
description: 'Tests version creation, attribution, rollback, and cleanup',
tests: [
{
id: 'version-001',
name: 'Automatic Version Creation on Insert',
description: 'Verifies version 1 is created automatically when entity is approved',
description: 'Verifies version 1 is created automatically when entity is created',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Follow sacred pipeline: Form → Submission → Approval → Versioning
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const parkData = generateUniqueParkData('version-001');
// Create submission
const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
// Approve submission
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Approval failed: ${approval.error}`);
}
// Get approved entity ID
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
// Create a park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Version Test Park',
slug,
park_type: 'theme_park',
status: 'operating'
})
.select('id')
.single();
if (!item?.approved_entity_id) {
throw new Error('No entity ID returned after approval');
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned from insert');
parkId = park.id;
// Poll for version creation
let v1: any = null;
const pollStart = Date.now();
while (!v1 && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('park_versions')
.select('version_id')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (data) {
v1 = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
const parkId = item.approved_entity_id;
tracker.track('parks', parkId);
// Check version was created
const { data: version, error: versionError } = await supabase
.from('park_versions')
.select('*')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
// Poll for park entity
const park = await pollForEntity('parks', parkId);
if (!park) throw new Error('Park not created after approval');
// Verify version 1 was created automatically
const version = await pollForVersion('park', parkId, 1);
if (versionError) throw new Error(`Version query failed: ${versionError.message}`);
if (!version) throw new Error('Version 1 not created');
if (version.name !== parkData.name) {
throw new Error(`Version has incorrect name: expected "${parkData.name}", got "${version.name}"`);
}
if (version.change_type !== 'created') {
throw new Error(`Expected change_type "created", got "${version.change_type}"`);
}
if (!version.is_current) {
throw new Error('Version is not marked as current');
}
if (version.name !== 'Version Test Park') throw new Error('Version has incorrect name');
if (version.change_type !== 'created') throw new Error(`Expected change_type "created", got "${version.change_type}"`);
if (!version.is_current) throw new Error('Version is not marked as current');
const duration = Date.now() - startTime;
@@ -91,12 +84,10 @@ export const versioningTestSuite: TestSuite = {
duration,
timestamp: new Date().toISOString(),
details: {
parkId,
submissionId,
parkId: park.id,
versionNumber: version.version_number,
changeType: version.change_type,
isCurrent: version.is_current,
followedPipeline: true
isCurrent: version.is_current
}
};
} catch (error) {
@@ -107,86 +98,84 @@ export const versioningTestSuite: TestSuite = {
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
// Cleanup
if (parkId) {
await supabase.from('parks').delete().eq('id', parkId);
}
}
}
},
{
id: 'version-002',
name: 'Automatic Version Creation on Update',
description: 'Verifies version 2 is created when entity is updated via pipeline',
description: 'Verifies version 2 is created when entity is updated',
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create and approve initial park
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const parkData = generateUniqueParkData('version-002');
const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Initial approval failed: ${approval.error}`);
}
// Get park ID
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
// Create a park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Original Name',
slug,
park_type: 'theme_park',
status: 'operating'
})
.select('id')
.single();
const parkId = item?.approved_entity_id;
if (!parkId) throw new Error('No park ID after approval');
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
tracker.track('parks', parkId);
parkId = park.id;
// Wait for version 1
const v1 = await pollForVersion('park', parkId, 1);
if (!v1) throw new Error('Version 1 not created');
await new Promise(resolve => setTimeout(resolve, 100));
// Update park directly (simulating approved edit)
// In production, this would go through edit submission pipeline
// Update the park
const { error: updateError } = await supabase
.from('parks')
.update({ name: 'Updated Name', description: 'Updated Description' })
.eq('id', parkId);
.update({ name: 'Updated Name' })
.eq('id', park.id);
if (updateError) throw new Error(`Park update failed: ${updateError.message}`);
// Verify version 2 created
const v2 = await pollForVersion('park', parkId, 2);
if (!v2) throw new Error('Version 2 not created after update');
// Wait for version 2
await new Promise(resolve => setTimeout(resolve, 100));
if (v2.name !== 'Updated Name') {
throw new Error(`Version 2 has incorrect name: expected "Updated Name", got "${v2.name}"`);
}
if (v2.change_type !== 'updated') {
throw new Error(`Expected change_type "updated", got "${v2.change_type}"`);
}
if (!v2.is_current) {
throw new Error('Version 2 is not marked as current');
}
// Check version 2 exists
const { data: v2, error: v2Error } = await supabase
.from('park_versions')
.select('*')
.eq('park_id', park.id)
.eq('version_number', 2)
.single();
// Verify version 1 is no longer current
const { data: v1Updated } = await supabase
if (v2Error) throw new Error(`Version 2 query failed: ${v2Error.message}`);
if (!v2) throw new Error('Version 2 not created');
if (v2.name !== 'Updated Name') throw new Error('Version 2 has incorrect name');
if (v2.change_type !== 'updated') throw new Error(`Expected change_type "updated", got "${v2.change_type}"`);
if (!v2.is_current) throw new Error('Version 2 is not marked as current');
// Check version 1 is no longer current
const { data: v1, error: v1Error } = await supabase
.from('park_versions')
.select('is_current')
.eq('park_id', parkId)
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (v1Updated?.is_current) {
throw new Error('Version 1 is still marked as current');
}
if (v1Error) throw new Error(`Version 1 query failed: ${v1Error.message}`);
if (v1?.is_current) throw new Error('Version 1 is still marked as current');
const duration = Date.now() - startTime;
@@ -198,8 +187,8 @@ export const versioningTestSuite: TestSuite = {
duration,
timestamp: new Date().toISOString(),
details: {
parkId,
v1IsCurrent: v1Updated?.is_current,
parkId: park.id,
v1IsCurrent: v1?.is_current,
v2IsCurrent: v2.is_current,
v2ChangeType: v2.change_type
}
@@ -212,12 +201,16 @@ export const versioningTestSuite: TestSuite = {
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('version-001 cleanup incomplete:', remaining);
}
}
}
},
@@ -228,37 +221,48 @@ export const versioningTestSuite: TestSuite = {
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Create and approve park
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const parkData = generateUniqueParkData('version-003');
const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Approval failed: ${approval.error}`);
}
// Get park ID
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
// Create a park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Rollback Test Park',
slug,
park_type: 'theme_park',
status: 'operating'
})
.select('id')
.single();
const parkId = item?.approved_entity_id;
if (!parkId) throw new Error('No park ID after approval');
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
tracker.track('parks', parkId);
parkId = park.id;
// Wait for version 1
const v1 = await pollForVersion('park', parkId, 1);
if (!v1) throw new Error('Version 1 not created');
// Poll for version creation
let v1: any = null;
const pollStart = Date.now();
while (!v1 && Date.now() - pollStart < 5000) {
const { data } = await supabase
.from('park_versions')
.select('version_id')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (data) {
v1 = data;
break;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
// Check current user role
if (!v1) throw new Error('Version 1 not created after 5s timeout');
// Check current user is moderator
const { data: { user } } = await supabase.auth.getUser();
if (!user) throw new Error('No authenticated user');
@@ -267,13 +271,14 @@ export const versioningTestSuite: TestSuite = {
// Try rollback
const { error: rollbackError } = await supabase.rpc('rollback_to_version', {
p_entity_type: 'park',
p_entity_id: parkId,
p_entity_id: park.id,
p_target_version_id: v1.version_id,
p_changed_by: user.id,
p_reason: 'Authorization test'
});
// Verify authorization enforcement
// If user is moderator, rollback should succeed
// If not, rollback should fail with permission error
if (isMod && rollbackError) {
throw new Error(`Rollback failed for moderator: ${rollbackError.message}`);
}
@@ -305,12 +310,16 @@ export const versioningTestSuite: TestSuite = {
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('version-002 cleanup incomplete:', remaining);
}
}
}
},
@@ -321,6 +330,7 @@ export const versioningTestSuite: TestSuite = {
run: async (): Promise<TestResult> => {
const startTime = Date.now();
const tracker = new TestDataTracker();
let parkId: string | null = null;
try {
// Check if user is moderator
@@ -330,6 +340,7 @@ export const versioningTestSuite: TestSuite = {
const { data: isMod } = await supabase.rpc('is_moderator', { _user_id: user.id });
if (!isMod) {
// Skip test if not moderator
const duration = Date.now() - startTime;
return {
id: 'version-004',
@@ -342,54 +353,61 @@ export const versioningTestSuite: TestSuite = {
};
}
// Create and approve park
const userId = await getCurrentUserId();
const authToken = await getAuthToken();
const parkData = {
...generateUniqueParkData('version-004'),
description: 'Original Description'
};
const { submissionId, itemId } = await createTestParkSubmission(parkData, userId, tracker);
const approval = await approveSubmission(submissionId, [itemId], authToken);
if (!approval.success) {
throw new Error(`Approval failed: ${approval.error}`);
}
// Get park ID
const { data: item } = await supabase
.from('submission_items')
.select('approved_entity_id')
.eq('id', itemId)
// Create park
const slug = `test-park-${Date.now()}`;
const { data: park, error: createError } = await supabase
.from('parks')
.insert({
name: 'Original Name',
slug,
park_type: 'theme_park',
status: 'operating',
description: 'Original Description'
})
.select('id')
.single();
const parkId = item?.approved_entity_id;
if (!parkId) throw new Error('No park ID after approval');
if (createError) throw new Error(`Park creation failed: ${createError.message}`);
if (!park) throw new Error('No park returned');
tracker.track('parks', parkId);
parkId = park.id;
await new Promise(resolve => setTimeout(resolve, 100));
// Wait for version 1
const v1 = await pollForVersion('park', parkId, 1);
if (!v1) throw new Error('Version 1 not created');
// Get version 1
const { data: v1, error: v1Error } = await supabase
.from('park_versions')
.select('version_id, name, description')
.eq('park_id', park.id)
.eq('version_number', 1)
.single();
if (v1Error || !v1) throw new Error('Version 1 not found');
// Update park
const { error: updateError } = await supabase
.from('parks')
.update({ name: 'Modified Name', description: 'Modified Description' })
.eq('id', parkId);
.eq('id', park.id);
if (updateError) throw new Error(`Park update failed: ${updateError.message}`);
// Wait for version 2
const v2 = await pollForVersion('park', parkId, 2);
await new Promise(resolve => setTimeout(resolve, 100));
// Verify version 2
const { data: v2 } = await supabase
.from('park_versions')
.select('version_number, name')
.eq('park_id', park.id)
.eq('version_number', 2)
.single();
if (!v2) throw new Error('Version 2 not created');
if (v2.name !== 'Modified Name') throw new Error('Version 2 has incorrect data');
// Rollback to version 1
const { error: rollbackError } = await supabase.rpc('rollback_to_version', {
p_entity_type: 'park',
p_entity_id: parkId,
p_entity_id: park.id,
p_target_version_id: v1.version_id,
p_changed_by: user.id,
p_reason: 'Integration test rollback'
@@ -397,24 +415,37 @@ export const versioningTestSuite: TestSuite = {
if (rollbackError) throw new Error(`Rollback failed: ${rollbackError.message}`);
// Verify park data restored
const restored = await pollForEntity('parks', parkId, 3000);
if (!restored) throw new Error('Could not fetch restored park');
await new Promise(resolve => setTimeout(resolve, 200));
if (restored.name !== parkData.name) {
throw new Error(`Rollback failed: expected "${parkData.name}", got "${restored.name}"`);
// Verify park data restored
const { data: restored, error: restoredError } = await supabase
.from('parks')
.select('name, description')
.eq('id', park.id)
.single();
if (restoredError) throw new Error(`Failed to fetch restored park: ${restoredError.message}`);
if (!restored) throw new Error('Restored park not found');
if (restored.name !== 'Original Name') {
throw new Error(`Rollback failed: expected "Original Name", got "${restored.name}"`);
}
if (restored.description !== 'Original Description') {
throw new Error(`Description not restored: got "${restored.description}"`);
throw new Error(`Description not restored: expected "Original Description", got "${restored.description}"`);
}
// Verify version 3 created with change_type = 'restored'
const v3 = await pollForVersion('park', parkId, 3, 3000);
if (!v3) throw new Error('Version 3 (restored) not created');
const { data: v3, error: v3Error } = await supabase
.from('park_versions')
.select('*')
.eq('park_id', park.id)
.eq('version_number', 3)
.single();
if (v3Error || !v3) throw new Error('Version 3 (restored) not created');
if (v3.change_type !== 'restored') {
throw new Error(`Expected change_type "restored", got "${v3.change_type}"`);
}
if (v3.name !== parkData.name) throw new Error('Version 3 has incorrect data');
if (v3.name !== 'Original Name') throw new Error('Version 3 has incorrect data');
if (!v3.is_current) throw new Error('Version 3 is not marked as current');
const duration = Date.now() - startTime;
@@ -427,7 +458,7 @@ export const versioningTestSuite: TestSuite = {
duration,
timestamp: new Date().toISOString(),
details: {
parkId,
parkId: park.id,
versionsCreated: 3,
dataRestored: true,
v3ChangeType: v3.change_type,
@@ -442,12 +473,16 @@ export const versioningTestSuite: TestSuite = {
suite: 'Versioning & Rollback',
status: 'fail',
duration,
error: formatTestError(error),
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
timestamp: new Date().toISOString()
};
} finally {
await tracker.cleanup();
const remaining = await tracker.verifyCleanup();
if (remaining.length > 0) {
console.warn('version-003 cleanup incomplete:', remaining);
}
}
}
}

View File

@@ -1,441 +0,0 @@
/**
* Test Data Cleanup Utility
*
* Safely removes test fixtures created during integration tests.
*
* SAFETY FEATURES:
* - Only deletes records marked with is_test_data = true
* - Only deletes records with test-specific naming patterns
* - Cascading deletes handled by database foreign keys
* - Detailed logging of all deletions
* - Rollback support via transactions
*/
import { supabase } from '@/lib/supabaseClient';
import { handleError } from '@/lib/errorHandler';
// Outcome of cleaning a single table.
export interface CleanupResult {
  table: string; // name of the table the delete ran against
  deleted: number; // number of rows removed (0 when the delete failed)
  duration: number; // wall-clock milliseconds spent on this table
  error?: string; // present only when the delete failed
}
// Aggregate outcome across all cleanup phases.
export interface CleanupSummary {
  totalDeleted: number; // sum of `deleted` across all results
  totalDuration: number; // total wall-clock milliseconds for the run
  results: CleanupResult[]; // one entry per table/phase attempted
  success: boolean; // true only when no result carries an error
}
/**
 * Delete all test-flagged rows from `parks`.
 *
 * Only rows with is_test_data = true are removed; dependent rows are
 * expected to be handled by database cascade rules.
 *
 * @returns CleanupResult with deleted-row count and duration; on failure
 *          `deleted` is 0 and `error` holds the message.
 */
async function cleanupParks(): Promise<CleanupResult> {
  const startTime = Date.now();
  try {
    // Request an exact count: without { count: 'exact' } supabase-js
    // returns count = null, so the report would always show 0 deletions.
    const { error, count } = await supabase
      .from('parks')
      .delete({ count: 'exact' })
      .eq('is_test_data', true);
    if (error) throw error;
    console.log(`✓ Cleaned ${count || 0} test parks`);
    return { table: 'parks', deleted: count || 0, duration: Date.now() - startTime };
  } catch (error) {
    return {
      table: 'parks',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
/**
 * Delete all test-flagged rows from `rides`.
 *
 * @returns CleanupResult with deleted-row count and duration; on failure
 *          `deleted` is 0 and `error` holds the message.
 */
async function cleanupRides(): Promise<CleanupResult> {
  const startTime = Date.now();
  try {
    // { count: 'exact' } is required for supabase-js to report how many
    // rows the delete removed; otherwise count is null.
    const { error, count } = await supabase
      .from('rides')
      .delete({ count: 'exact' })
      .eq('is_test_data', true);
    if (error) throw error;
    console.log(`✓ Cleaned ${count || 0} test rides`);
    return { table: 'rides', deleted: count || 0, duration: Date.now() - startTime };
  } catch (error) {
    return {
      table: 'rides',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
/**
 * Delete all test-flagged rows from `companies`.
 *
 * @returns CleanupResult with deleted-row count and duration; on failure
 *          `deleted` is 0 and `error` holds the message.
 */
async function cleanupCompanies(): Promise<CleanupResult> {
  const startTime = Date.now();
  try {
    // { count: 'exact' } is required for supabase-js to report how many
    // rows the delete removed; otherwise count is null.
    const { error, count } = await supabase
      .from('companies')
      .delete({ count: 'exact' })
      .eq('is_test_data', true);
    if (error) throw error;
    console.log(`✓ Cleaned ${count || 0} test companies`);
    return { table: 'companies', deleted: count || 0, duration: Date.now() - startTime };
  } catch (error) {
    return {
      table: 'companies',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
/**
 * Delete all test-flagged rows from `ride_models`.
 *
 * @returns CleanupResult with deleted-row count and duration; on failure
 *          `deleted` is 0 and `error` holds the message.
 */
async function cleanupRideModels(): Promise<CleanupResult> {
  const startTime = Date.now();
  try {
    // { count: 'exact' } is required for supabase-js to report how many
    // rows the delete removed; otherwise count is null.
    const { error, count } = await supabase
      .from('ride_models')
      .delete({ count: 'exact' })
      .eq('is_test_data', true);
    if (error) throw error;
    console.log(`✓ Cleaned ${count || 0} test ride models`);
    return { table: 'ride_models', deleted: count || 0, duration: Date.now() - startTime };
  } catch (error) {
    return {
      table: 'ride_models',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
/**
 * Delete all test-flagged rows from `locations`.
 *
 * @returns CleanupResult with deleted-row count and duration; on failure
 *          `deleted` is 0 and `error` holds the message.
 */
async function cleanupLocations(): Promise<CleanupResult> {
  const startTime = Date.now();
  try {
    // { count: 'exact' } is required for supabase-js to report how many
    // rows the delete removed; otherwise count is null.
    const { error, count } = await supabase
      .from('locations')
      .delete({ count: 'exact' })
      .eq('is_test_data', true);
    if (error) throw error;
    console.log(`✓ Cleaned ${count || 0} test locations`);
    return { table: 'locations', deleted: count || 0, duration: Date.now() - startTime };
  } catch (error) {
    return {
      table: 'locations',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
/**
 * Delete test content_submissions. Database FK cascades remove the
 * dependent submission rows, so only the parent table is touched here.
 * Must run before entity cleanup because submissions reference entities.
 *
 * @returns a single-element array of CleanupResult (array shape kept so
 *          it composes with the other multi-table phases).
 */
async function cleanupSubmissions(): Promise<CleanupResult[]> {
  const results: CleanupResult[] = [];
  const startTime = Date.now();
  try {
    // { count: 'exact' } is required for supabase-js to report how many
    // rows the delete removed; otherwise count is null and the summary
    // would always show 0 deletions.
    const { error, count } = await supabase
      .from('content_submissions')
      .delete({ count: 'exact' })
      .eq('is_test_data', true);
    if (!error) {
      results.push({
        table: 'content_submissions',
        deleted: count || 0,
        duration: Date.now() - startTime
      });
      console.log(`✓ Cleaned ${count || 0} test submissions (cascade cleanup)`);
    } else {
      results.push({
        table: 'content_submissions',
        deleted: 0,
        duration: Date.now() - startTime,
        error: error.message
      });
    }
  } catch (error) {
    results.push({
      table: 'content_submissions',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    });
  }
  return results;
}
/**
 * Delete test-flagged rows from every *_versions history table.
 *
 * Each table is cleaned independently: a failure on one table is
 * recorded in its own CleanupResult and does not abort the others.
 *
 * @returns one CleanupResult per version table.
 */
async function cleanupVersions(): Promise<CleanupResult[]> {
  // The four entity-version tables share the same cleanup shape, so
  // iterate instead of copy-pasting one try/catch per table.
  const versionTables = [
    'park_versions',
    'ride_versions',
    'company_versions',
    'ride_model_versions'
  ] as const;
  const results: CleanupResult[] = [];
  for (const table of versionTables) {
    const startTime = Date.now();
    try {
      // { count: 'exact' } makes supabase-js report the deleted-row
      // count; without it count is null and we'd always report 0.
      const { error, count } = await supabase
        .from(table)
        .delete({ count: 'exact' })
        .eq('is_test_data', true);
      results.push({
        table,
        deleted: error ? 0 : (count || 0),
        duration: Date.now() - startTime, // was hard-coded 0 before
        error: error?.message
      });
    } catch (e) {
      results.push({ table, deleted: 0, duration: Date.now() - startTime, error: String(e) });
    }
  }
  console.log(`✓ Cleaned ${results.reduce((sum, r) => sum + r.deleted, 0)} version records`);
  return results;
}
/**
 * Remove test rows from the primary entity tables.
 *
 * Dependent tables (rides) are cleared before their parents (parks),
 * and the steps run strictly one after another to respect foreign-key
 * constraints.
 */
async function cleanupEntities(): Promise<CleanupResult[]> {
  // Ordered list of per-table cleanup steps; sequence matters.
  const orderedSteps = [
    cleanupRides,
    cleanupParks,
    cleanupRideModels,
    cleanupCompanies,
    cleanupLocations
  ];
  const outcomes: CleanupResult[] = [];
  for (const step of orderedSteps) {
    outcomes.push(await step());
  }
  return outcomes;
}
/**
 * Clean up test-related metadata and tracking tables.
 *
 * Currently removes approval_transaction_metrics rows that belong to
 * test submissions. Failures are recorded in the returned results so
 * the overall summary's success flag reflects them.
 *
 * @returns zero or more CleanupResult entries (empty when there are no
 *          test submissions to clean up after).
 */
async function cleanupMetadata(): Promise<CleanupResult[]> {
  const results: CleanupResult[] = [];
  const startTime = Date.now();
  try {
    // Find the test submissions whose metrics should be removed.
    const { data: testSubmissions } = await supabase
      .from('content_submissions')
      .select('id')
      .eq('is_test_data', true);
    if (testSubmissions && testSubmissions.length > 0) {
      const submissionIds = testSubmissions.map(s => s.id);
      // { count: 'exact' } is required for supabase-js to report the
      // deleted-row count; otherwise count is null.
      const { error, count } = await supabase
        .from('approval_transaction_metrics')
        .delete({ count: 'exact' })
        .in('submission_id', submissionIds);
      if (!error) {
        results.push({
          table: 'approval_transaction_metrics',
          deleted: count || 0,
          duration: Date.now() - startTime
        });
      } else {
        // Previously a non-throwing delete error was silently dropped;
        // record it so CleanupSummary.success reflects the failure.
        results.push({
          table: 'approval_transaction_metrics',
          deleted: 0,
          duration: Date.now() - startTime,
          error: error.message
        });
      }
    }
  } catch (error) {
    console.error('Failed to cleanup metadata:', error);
    results.push({
      table: 'approval_transaction_metrics',
      deleted: 0,
      duration: Date.now() - startTime,
      error: error instanceof Error ? error.message : String(error)
    });
  }
  return results;
}
/**
 * Run complete test data cleanup.
 *
 * Phases execute in foreign-key-safe order:
 *   1. Submissions (they reference entities)
 *   2. Versions (historical records)
 *   3. Metadata (metrics, audit logs)
 *   4. Entities (main tables)
 *
 * @returns a CleanupSummary aggregating every phase's results; success
 *          is true only when no individual result reported an error.
 */
export async function cleanupTestData(): Promise<CleanupSummary> {
  const begunAt = Date.now();
  const collected: CleanupResult[] = [];
  console.log('🧹 Starting test data cleanup...');
  try {
    // Phase 1: submissions reference entities, so they go first.
    console.log('\n📋 Phase 1: Cleaning submissions...');
    collected.push(...await cleanupSubmissions());
    // Phase 2: historical version rows.
    console.log('\n📚 Phase 2: Cleaning version history...');
    collected.push(...await cleanupVersions());
    // Phase 3: metrics and other tracking rows.
    console.log('\n📊 Phase 3: Cleaning metadata...');
    collected.push(...await cleanupMetadata());
    // Phase 4: the entity tables themselves.
    console.log('\n🏗 Phase 4: Cleaning entities...');
    collected.push(...await cleanupEntities());
    const removed = collected.reduce((sum, r) => sum + r.deleted, 0);
    const elapsed = Date.now() - begunAt;
    console.log(`\n✅ Cleanup complete: ${removed} records deleted in ${elapsed}ms`);
    return {
      totalDeleted: removed,
      totalDuration: elapsed,
      results: collected,
      success: collected.every(r => !r.error)
    };
  } catch (error) {
    console.error('❌ Cleanup failed:', error);
    return {
      totalDeleted: collected.reduce((sum, r) => sum + r.deleted, 0),
      totalDuration: Date.now() - begunAt,
      results: collected,
      success: false
    };
  }
}
/**
 * Clean up only a specific entity type (selective cleanup).
 *
 * @param entityType - Which test-data table to purge.
 * @returns The CleanupResult for that single table.
 */
export async function cleanupEntityType(
  entityType: 'parks' | 'rides' | 'companies' | 'ride_models' | 'locations'
): Promise<CleanupResult> {
  console.log(`🧹 Cleaning test ${entityType}...`);
  // Dispatch table instead of a switch; keys are exhaustive over the union,
  // so the compiler flags any missing entity type.
  const handlers: Record<
    'parks' | 'rides' | 'companies' | 'ride_models' | 'locations',
    () => Promise<CleanupResult>
  > = {
    parks: cleanupParks,
    rides: cleanupRides,
    companies: cleanupCompanies,
    ride_models: cleanupRideModels,
    locations: cleanupLocations,
  };
  return handlers[entityType]();
}
/**
 * Verify cleanup was successful (safety check).
 *
 * Counts rows still flagged `is_test_data` in each entity table.
 * Refactored from five copy-pasted query stanzas into a single loop over a
 * literal-typed table list — identical queries and result shape.
 *
 * @returns Total remaining test rows plus a per-table breakdown. Tables whose
 *          count query returns null (e.g. on error) are omitted, matching the
 *          original behavior.
 */
export async function verifyCleanup(): Promise<{
  remainingTestData: number;
  tables: Record<string, number>;
}> {
  // Tables that carry the is_test_data flag; `as const` keeps literal types
  // so supabase.from() sees the same string literals as before.
  const tablesToCheck = [
    'parks',
    'rides',
    'companies',
    'ride_models',
    'locations',
  ] as const;
  const counts: Record<string, number> = {};
  let total = 0;
  for (const table of tablesToCheck) {
    // head: true skips fetching rows — only the exact count comes back.
    const { count } = await supabase
      .from(table)
      .select('*', { count: 'exact', head: true })
      .eq('is_test_data', true);
    if (count !== null) {
      counts[table] = count;
      total += count;
    }
  }
  return {
    remainingTestData: total,
    tables: counts
  };
}

View File

@@ -8,8 +8,6 @@
import { moderationTestSuite } from './suites/moderationTests';
import { moderationLockTestSuite } from './suites/moderationLockTests';
import { moderationDependencyTestSuite } from './suites/moderationDependencyTests';
import { approvalPipelineTestSuite } from './suites/approvalPipelineTests';
import { cleanupTestData, type CleanupSummary } from './testCleanup';
/**
* Registry of all available test suites
@@ -17,8 +15,7 @@ import { cleanupTestData, type CleanupSummary } from './testCleanup';
export const ALL_TEST_SUITES = [
moderationTestSuite,
moderationLockTestSuite,
moderationDependencyTestSuite,
approvalPipelineTestSuite,
moderationDependencyTestSuite
];
export interface TestResult {
@@ -52,25 +49,9 @@ export class IntegrationTestRunner {
private isRunning = false;
private shouldStop = false;
private onProgress?: (result: TestResult) => void;
private delayBetweenTests: number;
private cleanupEnabled: boolean;
private cleanupSummary?: CleanupSummary;
constructor(
onProgress?: (result: TestResult) => void,
delayBetweenTests: number = 8000,
cleanupEnabled: boolean = true
) {
constructor(onProgress?: (result: TestResult) => void) {
this.onProgress = onProgress;
this.delayBetweenTests = delayBetweenTests; // Default 8 seconds to prevent rate limiting
this.cleanupEnabled = cleanupEnabled;
}
/**
* Wait for specified milliseconds (for rate limiting prevention)
*/
private async delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
@@ -140,50 +121,13 @@ export class IntegrationTestRunner {
async runSuite(suite: TestSuite): Promise<TestResult[]> {
const suiteResults: TestResult[] = [];
for (let i = 0; i < suite.tests.length; i++) {
const test = suite.tests[i];
for (const test of suite.tests) {
const result = await this.runTest(test, suite.name);
suiteResults.push(result);
if (this.shouldStop) {
break;
}
// Add delay between tests to prevent rate limiting (except after the last test)
if (i < suite.tests.length - 1 && this.delayBetweenTests > 0) {
// Report delay status with countdown
const delaySeconds = this.delayBetweenTests / 1000;
const delayResult: TestResult = {
id: `delay-${Date.now()}`,
name: `⏳ Rate limit delay: ${delaySeconds}s`,
suite: suite.name,
status: 'running',
duration: 0,
timestamp: new Date().toISOString(),
details: {
reason: 'Pausing to prevent rate limiting',
delayMs: this.delayBetweenTests
}
};
if (this.onProgress) {
this.onProgress(delayResult);
}
await this.delay(this.delayBetweenTests);
// Mark delay as complete
const delayCompleteResult: TestResult = {
...delayResult,
status: 'skip',
duration: this.delayBetweenTests,
details: { reason: 'Rate limit delay completed' }
};
if (this.onProgress) {
this.onProgress(delayCompleteResult);
}
}
}
return suiteResults;
@@ -197,145 +141,12 @@ export class IntegrationTestRunner {
this.isRunning = true;
this.shouldStop = false;
// Track submission-heavy suites for adaptive delays
const submissionHeavySuites = [
'Entity Submission & Validation',
'Approval Pipeline',
'Unit Conversion Tests',
'Performance & Scalability'
];
for (let i = 0; i < suites.length; i++) {
const isHeavySuite = submissionHeavySuites.includes(suites[i].name);
// PREEMPTIVE delay BEFORE heavy suites start (prevents rate limit buildup)
if (isHeavySuite && i > 0) {
const preemptiveDelayMs = 8000; // 8s "cooldown" before heavy suite
const delaySeconds = preemptiveDelayMs / 1000;
const delayResult: TestResult = {
id: `preemptive-delay-${Date.now()}`,
name: `⏳ Pre-suite cooldown: ${delaySeconds}s (preparing for ${suites[i].name})`,
suite: 'System',
status: 'running',
duration: 0,
timestamp: new Date().toISOString(),
details: {
reason: 'Preemptive rate limit prevention before submission-heavy suite',
nextSuite: suites[i].name
}
};
if (this.onProgress) {
this.onProgress(delayResult);
}
await this.delay(preemptiveDelayMs);
if (this.onProgress) {
this.onProgress({
...delayResult,
status: 'skip',
duration: preemptiveDelayMs,
details: { reason: 'Cooldown completed' }
});
}
}
await this.runSuite(suites[i]);
for (const suite of suites) {
await this.runSuite(suite);
if (this.shouldStop) {
break;
}
// REACTIVE delay AFTER suites complete
if (i < suites.length - 1 && this.delayBetweenTests > 0) {
// Longer delay after submission-heavy suites
const delayMs = isHeavySuite
? this.delayBetweenTests * 2.25 // 18s delay after heavy suites (increased from 12s)
: this.delayBetweenTests; // 8s delay after others (increased from 6s)
const delaySeconds = delayMs / 1000;
const delayResult: TestResult = {
id: `suite-delay-${Date.now()}`,
name: `⏳ Suite completion delay: ${delaySeconds}s${isHeavySuite ? ' (submission-heavy)' : ''}`,
suite: 'System',
status: 'running',
duration: 0,
timestamp: new Date().toISOString(),
details: {
reason: 'Pausing between suites to prevent rate limiting',
isSubmissionHeavy: isHeavySuite
}
};
if (this.onProgress) {
this.onProgress(delayResult);
}
await this.delay(delayMs);
if (this.onProgress) {
this.onProgress({
...delayResult,
status: 'skip',
duration: delayMs,
details: { reason: 'Suite delay completed' }
});
}
}
}
// Run cleanup after all tests complete (if enabled)
if (this.cleanupEnabled && !this.shouldStop) {
const cleanupStartResult: TestResult = {
id: `cleanup-start-${Date.now()}`,
name: '🧹 Starting test data cleanup...',
suite: 'System',
status: 'running',
duration: 0,
timestamp: new Date().toISOString(),
details: { reason: 'Removing test fixtures to prevent database bloat' }
};
if (this.onProgress) {
this.onProgress(cleanupStartResult);
}
try {
this.cleanupSummary = await cleanupTestData();
const cleanupCompleteResult: TestResult = {
id: `cleanup-complete-${Date.now()}`,
name: `✅ Cleanup complete: ${this.cleanupSummary.totalDeleted} records deleted`,
suite: 'System',
status: this.cleanupSummary.success ? 'pass' : 'fail',
duration: this.cleanupSummary.totalDuration,
timestamp: new Date().toISOString(),
details: {
totalDeleted: this.cleanupSummary.totalDeleted,
results: this.cleanupSummary.results,
success: this.cleanupSummary.success
}
};
if (this.onProgress) {
this.onProgress(cleanupCompleteResult);
}
} catch (error) {
const cleanupErrorResult: TestResult = {
id: `cleanup-error-${Date.now()}`,
name: '❌ Cleanup failed',
suite: 'System',
status: 'fail',
duration: 0,
timestamp: new Date().toISOString(),
error: error instanceof Error ? error.message : String(error)
};
if (this.onProgress) {
this.onProgress(cleanupErrorResult);
}
}
}
this.isRunning = false;
@@ -366,7 +177,6 @@ export class IntegrationTestRunner {
skipped: number;
running: number;
totalDuration: number;
cleanup?: CleanupSummary;
} {
const total = this.results.length;
const passed = this.results.filter(r => r.status === 'pass').length;
@@ -375,15 +185,7 @@ export class IntegrationTestRunner {
const running = this.results.filter(r => r.status === 'running').length;
const totalDuration = this.results.reduce((sum, r) => sum + r.duration, 0);
return {
total,
passed,
failed,
skipped,
running,
totalDuration,
cleanup: this.cleanupSummary
};
return { total, passed, failed, skipped, running, totalDuration };
}
/**
@@ -400,20 +202,5 @@ export class IntegrationTestRunner {
this.results = [];
this.isRunning = false;
this.shouldStop = false;
this.cleanupSummary = undefined;
}
/**
* Get cleanup summary
*/
getCleanupSummary(): CleanupSummary | undefined {
return this.cleanupSummary;
}
/**
* Enable or disable automatic cleanup
*/
setCleanupEnabled(enabled: boolean): void {
this.cleanupEnabled = enabled;
}
}

View File

@@ -57,6 +57,126 @@ export interface ModerationActionResult {
shouldRemoveFromQueue: boolean;
}
/**
* Configuration for photo approval
*/
interface PhotoApprovalConfig {
submissionId: string;
moderatorId: string;
moderatorNotes?: string;
}
/**
* Approve a photo submission
*
* Creates photo records in the database and updates submission status.
* Handles both new approvals and re-approvals (where photos already exist).
*
* @param supabase - Supabase client
* @param config - Photo approval configuration
* @returns Action result with success status and message
*/
export async function approvePhotoSubmission(
supabase: SupabaseClient,
config: PhotoApprovalConfig
): Promise<ModerationActionResult> {
try {
// Fetch photo submission from relational tables
const { data: photoSubmission, error: fetchError } = await supabase
.from('photo_submissions')
.select(`
*,
items:photo_submission_items(*),
submission:content_submissions!inner(user_id, status)
`)
.eq('submission_id', config.submissionId)
.single();
if (fetchError || !photoSubmission) {
throw new Error('Failed to fetch photo submission data');
}
if (!photoSubmission.items || photoSubmission.items.length === 0) {
throw new Error('No photos found in submission');
}
// Check if photos already exist for this submission (re-approval case)
const { data: existingPhotos } = await supabase
.from('photos')
.select('id')
.eq('submission_id', config.submissionId);
if (!existingPhotos || existingPhotos.length === 0) {
// Create new photo records from photo_submission_items
const photoRecords = photoSubmission.items.map((item: any) => ({
entity_id: photoSubmission.entity_id,
entity_type: photoSubmission.entity_type,
cloudflare_image_id: item.cloudflare_image_id,
cloudflare_image_url: item.cloudflare_image_url,
title: item.title || null,
caption: item.caption || null,
date_taken: item.date_taken || null,
order_index: item.order_index,
submission_id: photoSubmission.submission_id,
submitted_by: photoSubmission.submission?.user_id,
approved_by: config.moderatorId,
approved_at: new Date().toISOString(),
}));
const { error: insertError } = await supabase
.from('photos')
.insert(photoRecords);
if (insertError) {
throw insertError;
}
}
// Update submission status
const { error: updateError } = await supabase
.from('content_submissions')
.update({
status: 'approved' as const,
reviewer_id: config.moderatorId,
reviewed_at: new Date().toISOString(),
reviewer_notes: config.moderatorNotes,
})
.eq('id', config.submissionId);
if (updateError) {
throw updateError;
}
return {
success: true,
message: `Successfully approved and published ${photoSubmission.items.length} photo(s)`,
shouldRemoveFromQueue: true,
};
} catch (error: unknown) {
handleError(error, {
action: 'Approve Photo Submission',
userId: config.moderatorId,
metadata: { submissionId: config.submissionId }
});
return {
success: false,
message: 'Failed to approve photo submission',
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Approve a submission with submission_items
*
* Uses the edge function to process all pending submission items.
*
* @param supabase - Supabase client
* @param submissionId - Submission ID
* @param itemIds - Array of item IDs to approve
* @returns Action result
*/
/**
* Approve submission items using atomic transaction RPC.
*
@@ -118,6 +238,194 @@ export async function approveSubmissionItems(
}
}
/**
* Reject a submission with submission_items
*
* Cascades rejection to all pending items.
*
* @param supabase - Supabase client
* @param submissionId - Submission ID
* @param rejectionReason - Reason for rejection
* @returns Action result
*/
export async function rejectSubmissionItems(
supabase: SupabaseClient,
submissionId: string,
rejectionReason?: string
): Promise<ModerationActionResult> {
try {
const { error: rejectError } = await supabase
.from('submission_items')
.update({
status: 'rejected' as const,
rejection_reason: rejectionReason || 'Parent submission rejected',
updated_at: new Date().toISOString(),
})
.eq('submission_id', submissionId)
.eq('status', 'pending');
if (rejectError) {
handleError(rejectError, {
action: 'Reject Submission Items (Cascade)',
metadata: { submissionId }
});
}
return {
success: true,
message: 'Submission items rejected',
shouldRemoveFromQueue: false, // Parent rejection will handle removal
};
} catch (error: unknown) {
handleError(error, {
action: 'Reject Submission Items',
metadata: { submissionId }
});
return {
success: false,
message: 'Failed to reject submission items',
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Configuration for standard moderation actions
*/
export interface ModerationConfig {
item: ModerationItem;
action: 'approved' | 'rejected';
moderatorId: string;
moderatorNotes?: string;
}
/**
* Perform a standard moderation action (approve/reject)
*
* Updates the submission or review status in the database.
* Handles both content_submissions and reviews.
*
* @param supabase - Supabase client
* @param config - Moderation configuration
* @returns Action result
*/
export async function performModerationAction(
supabase: SupabaseClient,
config: ModerationConfig
): Promise<ModerationActionResult> {
const { item, action, moderatorId, moderatorNotes } = config;
try {
// Handle photo submissions specially
if (
action === 'approved' &&
item.type === 'content_submission' &&
item.submission_type === 'photo'
) {
return await approvePhotoSubmission(supabase, {
submissionId: item.id,
moderatorId,
moderatorNotes,
});
}
// Check if this submission has submission_items
if (item.type === 'content_submission') {
const { data: submissionItems, error: itemsError } = await supabase
.from('submission_items')
.select('id, status')
.eq('submission_id', item.id)
.in('status', ['pending', 'rejected']);
if (!itemsError && submissionItems && submissionItems.length > 0) {
if (action === 'approved') {
return await approveSubmissionItems(
supabase,
item.id,
submissionItems.map(i => i.id)
);
} else if (action === 'rejected') {
await rejectSubmissionItems(supabase, item.id, moderatorNotes);
}
}
}
// Standard moderation flow - Build update object with type-appropriate fields
let error: any = null;
let data: any = null;
// Use type-safe table queries based on item type
if (item.type === 'review') {
const reviewUpdate: {
moderation_status: 'approved' | 'rejected' | 'pending';
moderated_at: string;
moderated_by: string;
reviewer_notes?: string;
} = {
moderation_status: action,
moderated_at: new Date().toISOString(),
moderated_by: moderatorId,
...(moderatorNotes && { reviewer_notes: moderatorNotes }),
};
const result = await createTableQuery('reviews')
.update(reviewUpdate)
.eq('id', item.id)
.select();
error = result.error;
data = result.data;
} else {
const submissionUpdate: {
status: 'approved' | 'rejected' | 'pending';
reviewed_at: string;
reviewer_id: string;
reviewer_notes?: string;
} = {
status: action,
reviewed_at: new Date().toISOString(),
reviewer_id: moderatorId,
...(moderatorNotes && { reviewer_notes: moderatorNotes }),
};
const result = await createTableQuery('content_submissions')
.update(submissionUpdate)
.eq('id', item.id)
.select();
error = result.error;
data = result.data;
}
if (error) {
throw error;
}
// Check if the update actually affected any rows
if (!data || data.length === 0) {
throw new Error(
'Failed to update item - no rows affected. You might not have permission to moderate this content.'
);
}
return {
success: true,
message: `Content ${action}`,
shouldRemoveFromQueue: action === 'approved' || action === 'rejected',
};
} catch (error: unknown) {
handleError(error, {
action: `${config.action === 'approved' ? 'Approve' : 'Reject'} Content`,
userId: config.moderatorId,
metadata: { itemType: item.type, itemId: item.id }
});
return {
success: false,
message: `Failed to ${config.action} content`,
error: error instanceof Error ? error : new Error(getErrorMessage(error)),
shouldRemoveFromQueue: false,
};
}
}
/**
* Configuration for submission deletion

View File

@@ -28,12 +28,16 @@ export type { ResolvedEntityNames } from './entities';
// Moderation actions
export {
approvePhotoSubmission,
approveSubmissionItems,
rejectSubmissionItems,
performModerationAction,
deleteSubmission,
} from './actions';
export type {
ModerationActionResult,
ModerationConfig,
DeleteSubmissionConfig,
} from './actions';

View File

@@ -23,97 +23,6 @@ export interface RetryOptions {
shouldRetry?: (error: unknown) => boolean;
}
/**
* Extract Retry-After value from error headers
* @param error - The error object
* @returns Delay in milliseconds, or null if not found
*/
export function extractRetryAfter(error: unknown): number | null {
if (!error || typeof error !== 'object') return null;
// Check for Retry-After in error object
const errorWithHeaders = error as { headers?: Headers | Record<string, string>; retryAfter?: number | string };
// Direct retryAfter property
if (errorWithHeaders.retryAfter) {
const retryAfter = errorWithHeaders.retryAfter;
if (typeof retryAfter === 'number') {
return retryAfter * 1000; // Convert seconds to milliseconds
}
if (typeof retryAfter === 'string') {
// Try parsing as number first (delay-seconds)
const seconds = parseInt(retryAfter, 10);
if (!isNaN(seconds)) {
return seconds * 1000;
}
// Try parsing as HTTP-date
const date = new Date(retryAfter);
if (!isNaN(date.getTime())) {
const delay = date.getTime() - Date.now();
return Math.max(0, delay);
}
}
}
// Check headers object
if (errorWithHeaders.headers) {
let retryAfterValue: string | null = null;
if (errorWithHeaders.headers instanceof Headers) {
retryAfterValue = errorWithHeaders.headers.get('retry-after');
} else if (typeof errorWithHeaders.headers === 'object') {
// Check both lowercase and capitalized versions
retryAfterValue = errorWithHeaders.headers['retry-after']
|| errorWithHeaders.headers['Retry-After']
|| null;
}
if (retryAfterValue) {
// Try parsing as number first (delay-seconds)
const seconds = parseInt(retryAfterValue, 10);
if (!isNaN(seconds)) {
return seconds * 1000;
}
// Try parsing as HTTP-date
const date = new Date(retryAfterValue);
if (!isNaN(date.getTime())) {
const delay = date.getTime() - Date.now();
return Math.max(0, delay);
}
}
}
return null;
}
/**
* Check if error is a rate limit (429) error
* @param error - The error to check
* @returns true if error is a rate limit error
*/
export function isRateLimitError(error: unknown): boolean {
if (!error || typeof error !== 'object') return false;
const errorWithStatus = error as { status?: number; code?: string };
// HTTP 429 status
if (errorWithStatus.status === 429) return true;
// Check error message for rate limit indicators
if (error instanceof Error) {
const message = error.message.toLowerCase();
if (message.includes('rate limit') ||
message.includes('too many requests') ||
message.includes('quota exceeded')) {
return true;
}
}
return false;
}
/**
* Determines if an error is transient and retryable
* @param error - The error to check
@@ -147,7 +56,7 @@ export function isRetryableError(error: unknown): boolean {
if (supabaseError.code === 'PGRST000') return true; // Connection error
// HTTP status codes indicating transient failures
if (supabaseError.status === 429) return true; // Rate limit - ALWAYS retry
if (supabaseError.status === 429) return true; // Rate limit
if (supabaseError.status === 503) return true; // Service unavailable
if (supabaseError.status === 504) return true; // Gateway timeout
if (supabaseError.status && supabaseError.status >= 500 && supabaseError.status < 600) {
@@ -169,46 +78,12 @@ export function isRetryableError(error: unknown): boolean {
}
/**
* Calculates delay for next retry attempt using exponential backoff or Retry-After header
* Calculates delay for next retry attempt using exponential backoff
* @param attempt - Current attempt number (0-indexed)
* @param options - Retry configuration
* @param error - The error that triggered the retry (to check for Retry-After)
* @returns Delay in milliseconds
*/
function calculateBackoffDelay(
attempt: number,
options: Required<RetryOptions>,
error?: unknown
): number {
// Check for rate limit with Retry-After header
if (error && isRateLimitError(error)) {
const retryAfter = extractRetryAfter(error);
if (retryAfter !== null) {
// Respect the Retry-After header, but cap it at maxDelay
const cappedRetryAfter = Math.min(retryAfter, options.maxDelay);
logger.info('[Retry] Rate limit detected - respecting Retry-After header', {
retryAfterMs: retryAfter,
cappedMs: cappedRetryAfter,
attempt
});
return cappedRetryAfter;
}
// No Retry-After header but is rate limit - use aggressive backoff
const rateLimitDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt + 1);
const cappedDelay = Math.min(rateLimitDelay, options.maxDelay);
logger.info('[Retry] Rate limit detected - using aggressive backoff', {
delayMs: cappedDelay,
attempt
});
return cappedDelay;
}
// Standard exponential backoff
function calculateBackoffDelay(attempt: number, options: Required<RetryOptions>): number {
const exponentialDelay = options.baseDelay * Math.pow(options.backoffMultiplier, attempt);
const cappedDelay = Math.min(exponentialDelay, options.maxDelay);
@@ -371,23 +246,18 @@ export async function withRetry<T>(
throw error;
}
// Calculate delay for next attempt (respects Retry-After for rate limits)
const delay = calculateBackoffDelay(attempt, config, error);
// Log retry attempt with rate limit detection
const isRateLimit = isRateLimitError(error);
const retryAfter = isRateLimit ? extractRetryAfter(error) : null;
// Calculate delay for next attempt
const delay = calculateBackoffDelay(attempt, config);
// Log retry attempt
logger.warn('Retrying after error', {
attempt: attempt + 1,
maxAttempts: config.maxAttempts,
delay,
isRateLimit,
retryAfterMs: retryAfter,
error: error instanceof Error ? error.message : String(error)
});
// Invoke callback with additional context
// Invoke callback
config.onRetry(attempt + 1, error, delay);
// Wait before retrying

View File

@@ -1,150 +0,0 @@
/**
* Span Visualizer
* Reconstructs span trees from logs for debugging distributed traces
*/
import type { Span } from '@/types/tracing';
export interface SpanTree {
span: Span;
children: SpanTree[];
totalDuration: number;
selfDuration: number;
}
/**
* Build span tree from flat span logs
*/
export function buildSpanTree(spans: Span[]): SpanTree | null {
const spanMap = new Map<string, Span>();
const childrenMap = new Map<string, Span[]>();
// Index spans
for (const span of spans) {
spanMap.set(span.spanId, span);
if (span.parentSpanId) {
if (!childrenMap.has(span.parentSpanId)) {
childrenMap.set(span.parentSpanId, []);
}
childrenMap.get(span.parentSpanId)!.push(span);
}
}
// Find root span
const rootSpan = spans.find(s => !s.parentSpanId);
if (!rootSpan) return null;
// Build tree recursively
function buildTree(span: Span): SpanTree {
const children = childrenMap.get(span.spanId) || [];
const childTrees = children.map(buildTree);
const totalDuration = span.duration || 0;
const childrenDuration = childTrees.reduce((sum, child) => sum + child.totalDuration, 0);
const selfDuration = totalDuration - childrenDuration;
return {
span,
children: childTrees,
totalDuration,
selfDuration,
};
}
return buildTree(rootSpan);
}
/**
* Format span tree as ASCII art
*/
export function formatSpanTree(tree: SpanTree, indent: number = 0): string {
const prefix = ' '.repeat(indent);
const status = tree.span.status === 'error' ? '❌' : tree.span.status === 'ok' ? '✅' : '⏳';
const line = `${prefix}${status} ${tree.span.name} (${tree.span.duration}ms / self: ${tree.selfDuration}ms)`;
const childLines = tree.children.map(child => formatSpanTree(child, indent + 1));
return [line, ...childLines].join('\n');
}
/**
* Calculate span statistics for a tree
*/
export function calculateSpanStats(tree: SpanTree): {
totalSpans: number;
errorCount: number;
maxDepth: number;
totalDuration: number;
criticalPath: string[];
} {
let totalSpans = 0;
let errorCount = 0;
let maxDepth = 0;
function traverse(node: SpanTree, depth: number) {
totalSpans++;
if (node.span.status === 'error') errorCount++;
maxDepth = Math.max(maxDepth, depth);
node.children.forEach(child => traverse(child, depth + 1));
}
traverse(tree, 0);
// Find critical path (longest duration path)
function findCriticalPath(node: SpanTree): string[] {
if (node.children.length === 0) {
return [node.span.name];
}
const longestChild = node.children.reduce((longest, child) =>
child.totalDuration > longest.totalDuration ? child : longest
);
return [node.span.name, ...findCriticalPath(longestChild)];
}
return {
totalSpans,
errorCount,
maxDepth,
totalDuration: tree.totalDuration,
criticalPath: findCriticalPath(tree),
};
}
/**
* Extract all events from a span tree
*/
export function extractAllEvents(tree: SpanTree): Array<{
spanName: string;
eventName: string;
timestamp: number;
attributes?: Record<string, unknown>;
}> {
const events: Array<{
spanName: string;
eventName: string;
timestamp: number;
attributes?: Record<string, unknown>;
}> = [];
function traverse(node: SpanTree) {
node.span.events.forEach(event => {
events.push({
spanName: node.span.name,
eventName: event.name,
timestamp: event.timestamp,
attributes: event.attributes,
});
});
node.children.forEach(child => traverse(child));
}
traverse(tree);
// Sort by timestamp
return events.sort((a, b) => a.timestamp - b.timestamp);
}

View File

@@ -1,7 +1,6 @@
import { supabase } from '@/lib/supabaseClient';
import { handleError, handleNonCriticalError, getErrorMessage } from './errorHandler';
import { extractCloudflareImageId } from './cloudflareImageUtils';
import { invokeWithTracking } from './edgeFunctionTracking';
// Core submission item interface with dependencies
// NOTE: item_data and original_data use `unknown` because they contain dynamic structures
@@ -1368,24 +1367,32 @@ export async function rejectSubmissionItems(
}
}
// Update all items to rejected status
const updates = Array.from(itemsToReject).map(async (itemId) => {
const { error } = await supabase
.from('submission_items')
.update({
status: 'rejected' as const,
rejection_reason: reason,
updated_at: new Date().toISOString(),
})
.eq('id', itemId);
if (error) {
handleNonCriticalError(error, {
action: 'Reject Submission Item',
metadata: { itemId }
});
throw error;
}
});
await Promise.all(updates);
// Update parent submission status
const submissionId = items[0]?.submission_id;
if (!submissionId) {
throw new Error('Cannot reject items: missing submission ID');
}
// Use atomic edge function for rejection
const { data, error } = await invokeWithTracking(
'process-selective-rejection',
{
itemIds: Array.from(itemsToReject),
submissionId,
rejectionReason: reason,
},
userId
);
if (error) {
throw new Error(`Failed to reject items: ${error.message}`);
if (submissionId) {
await updateSubmissionStatusAfterRejection(submissionId);
}
}
@@ -1509,21 +1516,27 @@ export async function editSubmissionItem(
// Update relational table with new data based on item type
if (currentItem.item_type === 'park') {
// First, get the park_submission_id
const { data: parkSub, error: parkSubError } = await supabase
.from('park_submissions')
.select('id')
.eq('submission_id', currentItem.submission_id)
.single();
if (parkSubError) throw parkSubError;
// Prepare update data (remove location from main update)
// For parks, store location in temp_location_data if provided
const updateData: any = { ...newData };
const locationData = updateData.location;
delete updateData.location; // Remove nested object before updating park_submissions
// Update park_submissions table (without temp_location_data!)
// If location object is provided, store it in temp_location_data
if (newData.location) {
updateData.temp_location_data = {
name: newData.location.name,
street_address: newData.location.street_address || null,
city: newData.location.city || null,
state_province: newData.location.state_province || null,
country: newData.location.country,
latitude: newData.location.latitude,
longitude: newData.location.longitude,
timezone: newData.location.timezone || null,
postal_code: newData.location.postal_code || null,
display_name: newData.location.display_name
};
delete updateData.location; // Remove the nested object
}
// Update park_submissions table
const { error: parkUpdateError } = await supabase
.from('park_submissions')
.update(updateData)
@@ -1531,29 +1544,6 @@ export async function editSubmissionItem(
if (parkUpdateError) throw parkUpdateError;
// Handle location separately in relational table
if (locationData) {
const { error: locationError } = await supabase
.from('park_submission_locations' as any)
.upsert({
park_submission_id: parkSub.id,
name: locationData.name,
street_address: locationData.street_address || null,
city: locationData.city || null,
state_province: locationData.state_province || null,
country: locationData.country,
postal_code: locationData.postal_code || null,
latitude: locationData.latitude,
longitude: locationData.longitude,
timezone: locationData.timezone || null,
display_name: locationData.display_name || null
}, {
onConflict: 'park_submission_id'
});
if (locationError) throw locationError;
}
} else if (currentItem.item_type === 'ride') {
const { error: rideUpdateError } = await supabase
.from('ride_submissions')

View File

@@ -14,7 +14,7 @@ import { useAdminSettings } from '@/hooks/useAdminSettings';
import { NovuMigrationUtility } from '@/components/admin/NovuMigrationUtility';
import { TestDataGenerator } from '@/components/admin/TestDataGenerator';
import { IntegrationTestRunner } from '@/components/admin/IntegrationTestRunner';
import { Loader2, Save, Clock, Users, Bell, Shield, Settings, Trash2, Plug, AlertTriangle, Lock, TestTube, RefreshCw, Info, AlertCircle, Database } from 'lucide-react';
import { Loader2, Save, Clock, Users, Bell, Shield, Settings, Trash2, Plug, AlertTriangle, Lock, TestTube, RefreshCw, Info, AlertCircle } from 'lucide-react';
import { useDocumentTitle } from '@/hooks/useDocumentTitle';
export default function AdminSettings() {
@@ -772,9 +772,13 @@ export default function AdminSettings() {
<span className="hidden sm:inline">Integrations</span>
</TabsTrigger>
<TabsTrigger value="testing" className="flex items-center gap-2">
<TestTube className="w-4 h-4" />
<Loader2 className="w-4 h-4" />
<span className="hidden sm:inline">Testing</span>
</TabsTrigger>
<TabsTrigger value="integration-tests" className="flex items-center gap-2">
<TestTube className="w-4 h-4" />
<span className="hidden sm:inline">Integration Tests</span>
</TabsTrigger>
</TabsList>
<TabsContent value="moderation">
@@ -967,31 +971,11 @@ export default function AdminSettings() {
</TabsContent>
<TabsContent value="testing">
<div className="space-y-6">
{/* Test Data Generator Section */}
<div>
<h2 className="text-2xl font-bold mb-4 flex items-center gap-2">
<Database className="w-6 h-6" />
Test Data Generator
</h2>
<p className="text-muted-foreground mb-4">
Generate realistic test data for parks, rides, companies, and submissions.
</p>
<TestDataGenerator />
</div>
<TestDataGenerator />
</TabsContent>
{/* Integration Test Runner Section */}
<div>
<h2 className="text-2xl font-bold mb-4 flex items-center gap-2">
<TestTube className="w-6 h-6" />
Integration Test Runner
</h2>
<p className="text-muted-foreground mb-4">
Run automated integration tests against your approval pipeline, moderation system, and data integrity checks.
</p>
<IntegrationTestRunner />
</div>
</div>
<TabsContent value="integration-tests">
<IntegrationTestRunner />
</TabsContent>
</Tabs>
</div>

View File

@@ -13,10 +13,6 @@ import { ErrorDetailsModal } from '@/components/admin/ErrorDetailsModal';
import { ApprovalFailureModal } from '@/components/admin/ApprovalFailureModal';
import { ErrorAnalytics } from '@/components/admin/ErrorAnalytics';
import { PipelineHealthAlerts } from '@/components/admin/PipelineHealthAlerts';
import { EdgeFunctionLogs } from '@/components/admin/EdgeFunctionLogs';
import { DatabaseLogs } from '@/components/admin/DatabaseLogs';
import { UnifiedLogSearch } from '@/components/admin/UnifiedLogSearch';
import TraceViewer from './TraceViewer';
import { format } from 'date-fns';
// Helper to calculate date threshold for filtering
@@ -63,14 +59,6 @@ export default function ErrorMonitoring() {
const [searchTerm, setSearchTerm] = useState('');
const [errorTypeFilter, setErrorTypeFilter] = useState<string>('all');
const [dateRange, setDateRange] = useState<'1h' | '24h' | '7d' | '30d'>('24h');
const [activeTab, setActiveTab] = useState('errors');
const handleNavigate = (tab: string, filters: Record<string, string>) => {
setActiveTab(tab);
if (filters.requestId) {
setSearchTerm(filters.requestId);
}
};
// Fetch recent errors
const { data: errors, isLoading, refetch, isFetching } = useQuery({
@@ -182,8 +170,8 @@ export default function ErrorMonitoring() {
<div className="space-y-6">
<div className="flex justify-between items-center">
<div>
<h1 className="text-3xl font-bold tracking-tight">Monitoring & Logs</h1>
<p className="text-muted-foreground">Unified monitoring hub for errors, logs, and distributed traces</p>
<h1 className="text-3xl font-bold tracking-tight">Error Monitoring</h1>
<p className="text-muted-foreground">Track and analyze application errors</p>
</div>
<RefreshButton
onRefresh={async () => { await refetch(); }}
@@ -193,23 +181,17 @@ export default function ErrorMonitoring() {
/>
</div>
{/* Unified Log Search */}
<UnifiedLogSearch onNavigate={handleNavigate} />
{/* Pipeline Health Alerts */}
<PipelineHealthAlerts />
{/* Analytics Section */}
<ErrorAnalytics errorSummary={errorSummary} approvalMetrics={approvalMetrics} />
{/* Tabs for All Log Types */}
<Tabs value={activeTab} onValueChange={setActiveTab} className="w-full">
<TabsList className="grid w-full grid-cols-5">
{/* Tabs for Errors and Approval Failures */}
<Tabs defaultValue="errors" className="w-full">
<TabsList>
<TabsTrigger value="errors">Application Errors</TabsTrigger>
<TabsTrigger value="approvals">Approval Failures</TabsTrigger>
<TabsTrigger value="edge-functions">Edge Functions</TabsTrigger>
<TabsTrigger value="database">Database Logs</TabsTrigger>
<TabsTrigger value="traces">Distributed Traces</TabsTrigger>
</TabsList>
<TabsContent value="errors" className="space-y-4">
@@ -368,18 +350,6 @@ export default function ErrorMonitoring() {
</CardContent>
</Card>
</TabsContent>
<TabsContent value="edge-functions">
<EdgeFunctionLogs />
</TabsContent>
<TabsContent value="database">
<DatabaseLogs />
</TabsContent>
<TabsContent value="traces">
<TraceViewer />
</TabsContent>
</Tabs>
</div>

View File

@@ -1,539 +0,0 @@
import { useState } from 'react';
import { useNavigate } from 'react-router-dom';
import { useAuth } from '@/hooks/useAuth';
import { useUserRole } from '@/hooks/useUserRole';
import { useRateLimitStats, useRecentMetrics } from '@/hooks/useRateLimitMetrics';
import { useAlertConfigs, useAlertHistory, useUnresolvedAlerts, useUpdateAlertConfig, useResolveAlert } from '@/hooks/useRateLimitAlerts';
import { useDocumentTitle } from '@/hooks/useDocumentTitle';
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Badge } from '@/components/ui/badge';
import { Button } from '@/components/ui/button';
import { Switch } from '@/components/ui/switch';
import { Input } from '@/components/ui/input';
import { Label } from '@/components/ui/label';
import { BarChart, Bar, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer, PieChart, Pie, Cell, LineChart, Line, Legend } from 'recharts';
import { Activity, Shield, TrendingUp, Users, Clock, AlertTriangle, Bell, BellOff, CheckCircle } from 'lucide-react';
import { Skeleton } from '@/components/ui/skeleton';
import { Alert, AlertDescription } from '@/components/ui/alert';
import { ConfirmationDialog } from '@/components/moderation/ConfirmationDialog';
import { format } from 'date-fns';
const COLORS = ['hsl(var(--primary))', 'hsl(var(--secondary))', 'hsl(var(--accent))', 'hsl(var(--muted))', 'hsl(var(--destructive))'];
export default function RateLimitMetrics() {
useDocumentTitle('Rate Limit Metrics');
const navigate = useNavigate();
const { user } = useAuth();
const { isModerator, loading: rolesLoading } = useUserRole();
const [timeWindow, setTimeWindow] = useState(60000); // 1 minute default
const [resolvingAlertId, setResolvingAlertId] = useState<string | null>(null);
const { data: stats, isLoading: statsLoading, error: statsError } = useRateLimitStats(timeWindow);
const { data: recentData, isLoading: recentLoading } = useRecentMetrics(50);
const { data: alertConfigs, isLoading: alertConfigsLoading } = useAlertConfigs();
const { data: alertHistory, isLoading: alertHistoryLoading } = useAlertHistory(50);
const { data: unresolvedAlerts } = useUnresolvedAlerts();
const updateConfig = useUpdateAlertConfig();
const resolveAlert = useResolveAlert();
// Redirect if not authorized
if (!rolesLoading && !isModerator()) {
navigate('/');
return null;
}
if (!user || rolesLoading) {
return (
<div className="container mx-auto p-6 space-y-6">
<Skeleton className="h-12 w-64" />
<div className="grid gap-6 md:grid-cols-2 lg:grid-cols-4">
{[1, 2, 3, 4].map((i) => (
<Skeleton key={i} className="h-32" />
))}
</div>
</div>
);
}
const recentMetrics = recentData?.metrics || [];
// Prepare chart data
const tierData = stats?.tierDistribution ? Object.entries(stats.tierDistribution).map(([name, value]) => ({
name,
value,
})) : [];
const topBlockedIPsData = stats?.topBlockedIPs || [];
const topBlockedUsersData = stats?.topBlockedUsers || [];
// Calculate block rate percentage
const blockRatePercentage = stats?.blockRate ? (stats.blockRate * 100).toFixed(1) : '0.0';
return (
<div className="container mx-auto p-6 space-y-6">
{/* Header */}
<div className="flex items-center justify-between">
<div>
<h1 className="text-3xl font-bold tracking-tight">Rate Limit Metrics</h1>
<p className="text-muted-foreground">Monitor rate limiting activity and patterns</p>
</div>
<Select value={timeWindow.toString()} onValueChange={(v) => setTimeWindow(parseInt(v))}>
<SelectTrigger className="w-[180px]">
<SelectValue placeholder="Time window" />
</SelectTrigger>
<SelectContent>
<SelectItem value="60000">Last minute</SelectItem>
<SelectItem value="300000">Last 5 minutes</SelectItem>
<SelectItem value="900000">Last 15 minutes</SelectItem>
<SelectItem value="3600000">Last hour</SelectItem>
</SelectContent>
</Select>
</div>
{statsError && (
<Alert variant="destructive">
<AlertTriangle className="h-4 w-4" />
<AlertDescription>
Failed to load metrics: {statsError instanceof Error ? statsError.message : 'Unknown error'}
</AlertDescription>
</Alert>
)}
{/* Overview Stats */}
{statsLoading ? (
<div className="grid gap-6 md:grid-cols-2 lg:grid-cols-4">
{[1, 2, 3, 4].map((i) => (
<Skeleton key={i} className="h-32" />
))}
</div>
) : (
<div className="grid gap-6 md:grid-cols-2 lg:grid-cols-4">
<Card>
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
<CardTitle className="text-sm font-medium">Total Requests</CardTitle>
<Activity className="h-4 w-4 text-muted-foreground" />
</CardHeader>
<CardContent>
<div className="text-2xl font-bold">{stats?.totalRequests || 0}</div>
<p className="text-xs text-muted-foreground">
{stats?.allowedRequests || 0} allowed, {stats?.blockedRequests || 0} blocked
</p>
</CardContent>
</Card>
<Card>
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
<CardTitle className="text-sm font-medium">Block Rate</CardTitle>
<Shield className="h-4 w-4 text-muted-foreground" />
</CardHeader>
<CardContent>
<div className="text-2xl font-bold">{blockRatePercentage}%</div>
<p className="text-xs text-muted-foreground">
Percentage of blocked requests
</p>
</CardContent>
</Card>
<Card>
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
<CardTitle className="text-sm font-medium">Unique IPs</CardTitle>
<TrendingUp className="h-4 w-4 text-muted-foreground" />
</CardHeader>
<CardContent>
<div className="text-2xl font-bold">{stats?.uniqueIPs || 0}</div>
<p className="text-xs text-muted-foreground">
Distinct client addresses
</p>
</CardContent>
</Card>
<Card>
<CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
<CardTitle className="text-sm font-medium">Unique Users</CardTitle>
<Users className="h-4 w-4 text-muted-foreground" />
</CardHeader>
<CardContent>
<div className="text-2xl font-bold">{stats?.uniqueUsers || 0}</div>
<p className="text-xs text-muted-foreground">
Authenticated users
</p>
</CardContent>
</Card>
</div>
)}
<Tabs defaultValue="overview" className="space-y-6">
<TabsList>
<TabsTrigger value="overview">Overview</TabsTrigger>
<TabsTrigger value="blocked">Blocked Requests</TabsTrigger>
<TabsTrigger value="recent">Recent Activity</TabsTrigger>
<TabsTrigger value="alerts">
Alerts
{unresolvedAlerts && unresolvedAlerts.length > 0 && (
<Badge variant="destructive" className="ml-2">{unresolvedAlerts.length}</Badge>
)}
</TabsTrigger>
<TabsTrigger value="config">Configuration</TabsTrigger>
</TabsList>
<TabsContent value="overview" className="space-y-6">
<div className="grid gap-6 md:grid-cols-2">
{/* Tier Distribution */}
<Card>
<CardHeader>
<CardTitle>Tier Distribution</CardTitle>
<CardDescription>Requests by rate limit tier</CardDescription>
</CardHeader>
<CardContent>
{tierData.length > 0 ? (
<ResponsiveContainer width="100%" height={300}>
<PieChart>
<Pie
data={tierData}
cx="50%"
cy="50%"
labelLine={false}
label={({ name, percent }) => `${name}: ${(percent * 100).toFixed(0)}%`}
outerRadius={80}
fill="hsl(var(--primary))"
dataKey="value"
>
{tierData.map((entry, index) => (
<Cell key={`cell-${index}`} fill={COLORS[index % COLORS.length]} />
))}
</Pie>
<Tooltip />
</PieChart>
</ResponsiveContainer>
) : (
<div className="flex h-[300px] items-center justify-center text-muted-foreground">
No data available
</div>
)}
</CardContent>
</Card>
{/* Request Status */}
<Card>
<CardHeader>
<CardTitle>Request Status</CardTitle>
<CardDescription>Allowed vs blocked requests</CardDescription>
</CardHeader>
<CardContent>
<ResponsiveContainer width="100%" height={300}>
<BarChart
data={[
{ name: 'Allowed', count: stats?.allowedRequests || 0 },
{ name: 'Blocked', count: stats?.blockedRequests || 0 },
]}
>
<CartesianGrid strokeDasharray="3 3" />
<XAxis dataKey="name" />
<YAxis />
<Tooltip />
<Bar dataKey="count" fill="hsl(var(--primary))" />
</BarChart>
</ResponsiveContainer>
</CardContent>
</Card>
</div>
</TabsContent>
<TabsContent value="blocked" className="space-y-6">
<div className="grid gap-6 md:grid-cols-2">
{/* Top Blocked IPs */}
<Card>
<CardHeader>
<CardTitle>Top Blocked IPs</CardTitle>
<CardDescription>Most frequently blocked IP addresses</CardDescription>
</CardHeader>
<CardContent>
{topBlockedIPsData.length > 0 ? (
<ResponsiveContainer width="100%" height={300}>
<BarChart data={topBlockedIPsData} layout="vertical">
<CartesianGrid strokeDasharray="3 3" />
<XAxis type="number" />
<YAxis dataKey="ip" type="category" width={100} />
<Tooltip />
<Bar dataKey="count" fill="hsl(var(--destructive))" />
</BarChart>
</ResponsiveContainer>
) : (
<div className="flex h-[300px] items-center justify-center text-muted-foreground">
No blocked IPs in this time window
</div>
)}
</CardContent>
</Card>
{/* Top Blocked Users */}
<Card>
<CardHeader>
<CardTitle>Top Blocked Users</CardTitle>
<CardDescription>Most frequently blocked authenticated users</CardDescription>
</CardHeader>
<CardContent>
{topBlockedUsersData.length > 0 ? (
<div className="space-y-2">
{topBlockedUsersData.map((user, idx) => (
<div key={idx} className="flex items-center justify-between p-2 border rounded">
<span className="text-sm font-mono truncate flex-1">{user.userId}</span>
<Badge variant="destructive">{user.count}</Badge>
</div>
))}
</div>
) : (
<div className="flex h-[300px] items-center justify-center text-muted-foreground">
No blocked users in this time window
</div>
)}
</CardContent>
</Card>
</div>
</TabsContent>
<TabsContent value="recent" className="space-y-6">
<Card>
<CardHeader>
<CardTitle>Recent Activity</CardTitle>
<CardDescription>Last 50 rate limit checks</CardDescription>
</CardHeader>
<CardContent>
{recentLoading ? (
<div className="space-y-2">
{[1, 2, 3].map((i) => (
<Skeleton key={i} className="h-16" />
))}
</div>
) : recentMetrics.length > 0 ? (
<div className="space-y-2 max-h-[600px] overflow-y-auto">
{recentMetrics.map((metric, idx) => (
<div
key={idx}
className={`flex items-center justify-between p-3 border rounded ${
metric.allowed ? 'border-border' : 'border-destructive/50 bg-destructive/5'
}`}
>
<div className="flex items-center gap-4 flex-1">
<Clock className="h-4 w-4 text-muted-foreground" />
<div className="flex-1">
<div className="flex items-center gap-2">
<span className="font-mono text-sm">{metric.functionName}</span>
<Badge variant={metric.allowed ? 'secondary' : 'destructive'}>
{metric.allowed ? 'Allowed' : 'Blocked'}
</Badge>
<Badge variant="outline">{metric.tier}</Badge>
</div>
<div className="text-xs text-muted-foreground mt-1">
IP: {metric.clientIP} {metric.userId && `• User: ${metric.userId.slice(0, 8)}...`}
</div>
</div>
</div>
<div className="text-right">
<div className="text-sm font-medium">
{metric.allowed ? `${metric.remaining} left` : `Retry: ${metric.retryAfter}s`}
</div>
<div className="text-xs text-muted-foreground">
{format(new Date(metric.timestamp), 'HH:mm:ss')}
</div>
</div>
</div>
))}
</div>
) : (
<div className="flex h-[300px] items-center justify-center text-muted-foreground">
No recent activity
</div>
)}
</CardContent>
</Card>
</TabsContent>
<TabsContent value="alerts" className="space-y-6">
<Card>
<CardHeader>
<CardTitle>Alert History</CardTitle>
<CardDescription>Recent rate limit threshold violations</CardDescription>
</CardHeader>
<CardContent>
{alertHistoryLoading ? (
<div className="space-y-2">
{[1, 2, 3].map((i) => (
<Skeleton key={i} className="h-20" />
))}
</div>
) : alertHistory && alertHistory.length > 0 ? (
<div className="space-y-2 max-h-[600px] overflow-y-auto">
{alertHistory.map((alert) => (
<div
key={alert.id}
className={`flex items-start justify-between p-4 border rounded ${
alert.resolved_at ? 'border-border bg-muted/30' : 'border-destructive/50 bg-destructive/5'
}`}
>
<div className="flex-1">
<div className="flex items-center gap-2 mb-2">
{alert.resolved_at ? (
<CheckCircle className="h-4 w-4 text-muted-foreground" />
) : (
<AlertTriangle className="h-4 w-4 text-destructive" />
)}
<Badge variant={alert.resolved_at ? 'secondary' : 'destructive'}>
{alert.metric_type}
</Badge>
<span className="text-xs text-muted-foreground">
{format(new Date(alert.created_at), 'PPp')}
</span>
</div>
<p className="text-sm mb-2">{alert.alert_message}</p>
<div className="flex gap-4 text-xs text-muted-foreground">
<span>Value: {alert.metric_value.toFixed(2)}</span>
<span>Threshold: {alert.threshold_value.toFixed(2)}</span>
<span>Window: {alert.time_window_ms / 1000}s</span>
</div>
{alert.resolved_at && (
<p className="text-xs text-muted-foreground mt-1">
Resolved: {format(new Date(alert.resolved_at), 'PPp')}
</p>
)}
</div>
{!alert.resolved_at && (
<Button
size="sm"
variant="ghost"
onClick={() => setResolvingAlertId(alert.id)}
loading={resolveAlert.isPending && resolvingAlertId === alert.id}
disabled={resolveAlert.isPending}
className="gap-2"
>
<CheckCircle className="h-4 w-4" />
Resolve
</Button>
)}
</div>
))}
</div>
) : (
<div className="flex h-[300px] items-center justify-center text-muted-foreground">
No alerts triggered yet
</div>
)}
</CardContent>
</Card>
<ConfirmationDialog
open={resolvingAlertId !== null}
onOpenChange={(open) => !open && setResolvingAlertId(null)}
title="Resolve Alert"
description="Are you sure you want to mark this alert as resolved? This action cannot be undone."
confirmLabel="Resolve"
onConfirm={() => {
if (resolvingAlertId) {
resolveAlert.mutate(resolvingAlertId);
setResolvingAlertId(null);
}
}}
/>
</TabsContent>
<TabsContent value="config" className="space-y-6">
<Card>
<CardHeader>
<CardTitle>Alert Configuration</CardTitle>
<CardDescription>Configure thresholds for automated alerts</CardDescription>
</CardHeader>
<CardContent>
{alertConfigsLoading ? (
<div className="space-y-4">
{[1, 2, 3].map((i) => (
<Skeleton key={i} className="h-24" />
))}
</div>
) : alertConfigs && alertConfigs.length > 0 ? (
<div className="space-y-4">
{alertConfigs.map((config) => (
<div key={config.id} className="p-4 border rounded space-y-3">
<div className="flex items-center justify-between">
<div className="flex items-center gap-3">
<Badge variant="outline">{config.metric_type}</Badge>
<Switch
checked={config.enabled}
onCheckedChange={(enabled) =>
updateConfig.mutate({ id: config.id, updates: { enabled } })
}
/>
{config.enabled ? (
<span className="text-sm text-muted-foreground flex items-center gap-1">
<Bell className="h-3 w-3" /> Enabled
</span>
) : (
<span className="text-sm text-muted-foreground flex items-center gap-1">
<BellOff className="h-3 w-3" /> Disabled
</span>
)}
</div>
</div>
<div className="grid grid-cols-2 gap-4">
<div>
<Label className="text-xs">Threshold Value</Label>
<Input
type="number"
step="0.01"
value={config.threshold_value}
onChange={(e) => {
const value = parseFloat(e.target.value);
if (!isNaN(value)) {
updateConfig.mutate({
id: config.id,
updates: { threshold_value: value }
});
}
}}
className="mt-1"
/>
<p className="text-xs text-muted-foreground mt-1">
{config.metric_type === 'block_rate' && 'Value between 0 and 1 (e.g., 0.5 = 50%)'}
{config.metric_type === 'total_requests' && 'Number of requests'}
{config.metric_type === 'unique_ips' && 'Number of unique IPs'}
</p>
</div>
<div>
<Label className="text-xs">Time Window (ms)</Label>
<Input
type="number"
step="1000"
value={config.time_window_ms}
onChange={(e) => {
const value = parseInt(e.target.value);
if (!isNaN(value)) {
updateConfig.mutate({
id: config.id,
updates: { time_window_ms: value }
});
}
}}
className="mt-1"
/>
<p className="text-xs text-muted-foreground mt-1">
Currently: {config.time_window_ms / 1000}s
</p>
</div>
</div>
</div>
))}
</div>
) : (
<div className="flex h-[200px] items-center justify-center text-muted-foreground">
No alert configurations found
</div>
)}
</CardContent>
</Card>
</TabsContent>
</Tabs>
</div>
);
}

View File

@@ -1,255 +0,0 @@
import { useState } from 'react';
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card';
import { Input } from '@/components/ui/input';
import { Button } from '@/components/ui/button';
import { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from '@/components/ui/accordion';
import { Alert, AlertDescription } from '@/components/ui/alert';
import { Badge } from '@/components/ui/badge';
import { buildSpanTree, formatSpanTree, calculateSpanStats, extractAllEvents } from '@/lib/spanVisualizer';
import type { Span } from '@/types/tracing';
import type { SpanTree } from '@/lib/spanVisualizer';
/**
 * Admin Trace Viewer
 *
 * Visual tool for debugging distributed traces across the approval pipeline.
 * Reconstructs and displays span hierarchies from edge function logs.
 *
 * NOTE(review): loadTrace() currently fabricates hard-coded mock spans (see
 * TODO inside); no log backend is queried yet.
 */
export default function TraceViewer() {
  // Trace ID typed by the admin; spans/tree hold the currently loaded trace.
  const [traceId, setTraceId] = useState('');
  const [spans, setSpans] = useState<Span[]>([]);
  const [tree, setTree] = useState<SpanTree | null>(null);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  // Fetch spans for the entered trace ID and rebuild the span tree.
  const loadTrace = async () => {
    if (!traceId.trim()) {
      setError('Please enter a trace ID');
      return;
    }
    setIsLoading(true);
    setError(null);
    try {
      // TODO: Replace with actual edge function log query
      // This would need an edge function that queries Supabase logs
      // For now, using mock data structure
      const mockSpans: Span[] = [
        {
          spanId: 'root-1',
          traceId,
          name: 'process-selective-approval',
          kind: 'SERVER',
          startTime: Date.now() - 5000,
          endTime: Date.now(),
          duration: 5000,
          attributes: {
            'http.method': 'POST',
            'user.id': 'user-123',
            'submission.id': 'sub-456',
          },
          events: [
            { timestamp: Date.now() - 4900, name: 'authentication_start' },
            { timestamp: Date.now() - 4800, name: 'authentication_success' },
            { timestamp: Date.now() - 4700, name: 'validation_complete' },
          ],
          status: 'ok',
        },
        {
          spanId: 'child-1',
          traceId,
          parentSpanId: 'root-1',
          name: 'process_approval_transaction',
          kind: 'DATABASE',
          startTime: Date.now() - 4500,
          endTime: Date.now() - 500,
          duration: 4000,
          attributes: {
            'db.operation': 'rpc',
            'submission.id': 'sub-456',
          },
          events: [
            { timestamp: Date.now() - 4400, name: 'rpc_call_start' },
            { timestamp: Date.now() - 600, name: 'rpc_call_success' },
          ],
          status: 'ok',
        },
      ];
      setSpans(mockSpans);
      const builtTree = buildSpanTree(mockSpans);
      setTree(builtTree);
      if (!builtTree) {
        setError('No root span found for this trace ID');
      }
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to load trace');
    } finally {
      setIsLoading(false);
    }
  };
  // Derived views; recomputed from the current tree on each render.
  const stats = tree ? calculateSpanStats(tree) : null;
  const events = tree ? extractAllEvents(tree) : [];
  return (
    <div className="container mx-auto p-6 space-y-6">
      <div>
        <h1 className="text-3xl font-bold mb-2">Distributed Trace Viewer</h1>
        <p className="text-muted-foreground">
          Debug moderation pipeline execution by visualizing span hierarchies
        </p>
      </div>
      <Card>
        <CardHeader>
          <CardTitle>Load Trace</CardTitle>
          <CardDescription>
            Enter a trace ID from edge function logs to visualize the execution tree
          </CardDescription>
        </CardHeader>
        <CardContent>
          <div className="flex gap-2">
            <Input
              value={traceId}
              onChange={(e) => setTraceId(e.target.value)}
              placeholder="Enter trace ID (e.g., abc-123-def-456)"
              className="flex-1"
            />
            <Button onClick={loadTrace} disabled={isLoading}>
              {isLoading ? 'Loading...' : 'Load Trace'}
            </Button>
          </div>
          {error && (
            <Alert variant="destructive" className="mt-4">
              <AlertDescription>{error}</AlertDescription>
            </Alert>
          )}
        </CardContent>
      </Card>
      {tree && stats && (
        <>
          <Card>
            <CardHeader>
              <CardTitle>Trace Statistics</CardTitle>
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
                <div>
                  <div className="text-sm text-muted-foreground">Total Duration</div>
                  <div className="text-2xl font-bold">{stats.totalDuration}ms</div>
                </div>
                <div>
                  <div className="text-sm text-muted-foreground">Total Spans</div>
                  <div className="text-2xl font-bold">{stats.totalSpans}</div>
                </div>
                <div>
                  <div className="text-sm text-muted-foreground">Max Depth</div>
                  <div className="text-2xl font-bold">{stats.maxDepth}</div>
                </div>
                <div>
                  <div className="text-sm text-muted-foreground">Errors</div>
                  <div className="text-2xl font-bold text-destructive">{stats.errorCount}</div>
                </div>
              </div>
              <div className="mt-4">
                <div className="text-sm text-muted-foreground mb-2">Critical Path (Longest Duration):</div>
                <div className="flex gap-2 flex-wrap">
                  {stats.criticalPath.map((spanName, i) => (
                    <Badge key={i} variant="secondary">
                      {spanName}
                    </Badge>
                  ))}
                </div>
              </div>
            </CardContent>
          </Card>
          <Card>
            <CardHeader>
              <CardTitle>Span Tree</CardTitle>
              <CardDescription>
                Hierarchical view of span execution with timing breakdown
              </CardDescription>
            </CardHeader>
            <CardContent>
              <pre className="bg-muted p-4 rounded-lg overflow-x-auto text-sm">
                {formatSpanTree(tree)}
              </pre>
            </CardContent>
          </Card>
          <Card>
            <CardHeader>
              <CardTitle>Events Timeline</CardTitle>
              <CardDescription>
                Chronological list of all events across all spans
              </CardDescription>
            </CardHeader>
            <CardContent>
              <div className="space-y-2">
                {events.map((event, i) => (
                  <div key={i} className="flex gap-2 text-sm border-l-2 border-primary pl-4 py-1">
                    <Badge variant="outline">{event.spanName}</Badge>
                    {/* NOTE(review): this separator span renders empty — looks like a glyph
                        (e.g. an arrow) was lost in encoding; confirm intended content. */}
                    <span className="text-muted-foreground"></span>
                    <span className="font-medium">{event.eventName}</span>
                    <span className="text-muted-foreground ml-auto">
                      {new Date(event.timestamp).toISOString()}
                    </span>
                  </div>
                ))}
              </div>
            </CardContent>
          </Card>
          <Card>
            <CardHeader>
              <CardTitle>Span Details</CardTitle>
              <CardDescription>
                Detailed breakdown of each span with attributes and events
              </CardDescription>
            </CardHeader>
            <CardContent>
              <Accordion type="single" collapsible className="w-full">
                {spans.map((span) => (
                  <AccordionItem key={span.spanId} value={span.spanId}>
                    <AccordionTrigger>
                      <div className="flex items-center gap-2">
                        <Badge variant={span.status === 'error' ? 'destructive' : 'default'}>
                          {span.kind}
                        </Badge>
                        <span>{span.name}</span>
                        <span className="text-muted-foreground ml-2">
                          ({span.duration}ms)
                        </span>
                      </div>
                    </AccordionTrigger>
                    <AccordionContent>
                      <pre className="bg-muted p-4 rounded-lg overflow-x-auto text-xs">
                        {JSON.stringify(span, null, 2)}
                      </pre>
                    </AccordionContent>
                  </AccordionItem>
                ))}
              </Accordion>
            </CardContent>
          </Card>
        </>
      )}
      {!tree && !isLoading && !error && (
        <Alert>
          <AlertDescription>
            Enter a trace ID to visualize the distributed trace. You can find trace IDs in edge function logs
            under the "Span completed" messages.
          </AlertDescription>
        </Alert>
      )}
    </div>
  );
}

View File

@@ -22,6 +22,7 @@ export interface ParkSubmissionData {
operator_id?: string | null;
property_owner_id?: string | null;
location_id?: string | null;
temp_location_data?: LocationData | null;
banner_image_url?: string | null;
banner_image_id?: string | null;
card_image_url?: string | null;

View File

@@ -1,35 +0,0 @@
/**
 * Distributed Tracing Types
 * Mirrors the types defined in edge function logger
 */

/**
 * One unit of work in a distributed trace. All spans of a trace share the
 * same traceId; the root span has no parentSpanId.
 */
export interface Span {
  spanId: string;
  traceId: string;
  parentSpanId?: string; // absent on the root span
  name: string;
  kind: 'SERVER' | 'CLIENT' | 'INTERNAL' | 'DATABASE';
  startTime: number; // presumably epoch milliseconds (callers use Date.now()) — confirm against logger
  endTime?: number; // absent while the span is still open
  duration?: number; // presumably endTime - startTime in ms — confirm against logger
  attributes: Record<string, unknown>;
  events: SpanEvent[];
  status: 'ok' | 'error' | 'unset';
  // Populated when status === 'error' — TODO confirm the logger guarantees this.
  error?: {
    type: string;
    message: string;
    stack?: string;
  };
}

/** A timestamped event recorded within a span's lifetime. */
export interface SpanEvent {
  timestamp: number;
  name: string;
  attributes?: Record<string, unknown>;
}

/** Minimal propagation context identifying a span within a trace. */
export interface SpanContext {
  traceId: string;
  spanId: string;
  traceFlags?: number; // W3C-style trace flags — TODO confirm semantics with the logger
}

View File

@@ -45,10 +45,7 @@ verify_jwt = false
verify_jwt = true
[functions.process-selective-approval]
verify_jwt = true
[functions.process-selective-rejection]
verify_jwt = true
verify_jwt = false
[functions.send-escalation-notification]
verify_jwt = true
@@ -85,9 +82,3 @@ verify_jwt = false
[functions.scheduled-maintenance]
verify_jwt = false
[functions.rate-limit-metrics]
verify_jwt = true
[functions.monitor-rate-limits]
verify_jwt = false

View File

@@ -1,277 +0,0 @@
# Rate Limiting Guide for Edge Functions
This guide helps you choose the appropriate rate limit tier for each edge function and explains how to implement rate limiting consistently across the application.
## Quick Reference
### Rate Limit Tiers
| Tier | Requests/Min | Use Case |
|------|--------------|----------|
| **STRICT** | 5 | Expensive operations (uploads, exports, batch processing) |
| **MODERATE** | 10 | Moderation actions, content submission, security operations |
| **STANDARD** | 20 | Typical read/write operations, account management |
| **LENIENT** | 30 | Lightweight reads, public data, validation |
| **GENEROUS** | 60 | High-frequency operations (webhooks, polling, health checks) |
### Per-User Tiers (Rate limits by user ID instead of IP)
| Tier | Requests/Min | Use Case |
|------|--------------|----------|
| **PER_USER_STRICT** | 5 | User-specific expensive operations |
| **PER_USER_MODERATE** | 10 | User-specific moderation actions |
| **PER_USER_STANDARD** | 20 | User-specific standard operations |
| **PER_USER_LENIENT** | 40 | User-specific frequent operations |
## How to Implement Rate Limiting
### Basic Implementation
```typescript
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
// Your handler function
const handler = async (req: Request): Promise<Response> => {
// Your edge function logic here
return new Response(JSON.stringify({ success: true }), {
headers: { ...corsHeaders, 'Content-Type': 'application/json' }
});
};
// Apply rate limiting with appropriate tier
serve(withRateLimit(handler, rateLimiters.moderate, corsHeaders));
```
### Per-User Rate Limiting
```typescript
// Rate limit by user ID instead of IP address
serve(withRateLimit(handler, rateLimiters.perUserModerate, corsHeaders));
```
### Custom Rate Limiting
```typescript
import { createRateLimiter } from '../_shared/rateLimiter.ts';
// Create a custom rate limiter
const customLimiter = createRateLimiter({
windowMs: 60000,
maxRequests: 15,
keyGenerator: (req) => {
// Custom key logic
return req.headers.get('x-custom-key') || 'default';
}
});
serve(withRateLimit(handler, customLimiter, corsHeaders));
```
## Recommended Rate Limits by Function Category
### 🔴 STRICT (5 req/min)
**Currently Implemented:**
- `upload-image` - CloudFlare image upload
**Recommended:**
- `export-user-data` - Data export operations
- Any function that makes expensive external API calls
- Batch data processing operations
- Functions that manipulate large datasets
### 🟠 MODERATE (10 req/min)
**Currently Implemented:**
- `process-selective-approval` - Moderation approvals
- `process-selective-rejection` - Moderation rejections
**Recommended:**
- `admin-delete-user` - Admin user deletion
- `manage-moderator-topic` - Admin moderation management
- `merge-contact-tickets` - Admin ticket management
- `mfa-unenroll` - Security operations
- `resend-deletion-code` - Prevent code spam
- `send-escalation-notification` - Admin escalations
- `send-password-added-email` - Security emails
- User submission functions (parks, rides, edits)
### 🟡 STANDARD (20 req/min)
**Recommended:**
- `cancel-account-deletion` - Account management
- `cancel-email-change` - Account management
- `confirm-account-deletion` - Account management
- `request-account-deletion` - Account management
- `create-novu-subscriber` - User registration
- `send-contact-message` - Contact form submissions
- Email validation functions
- Authentication-related functions
### 🟢 LENIENT (30 req/min)
**Recommended:**
- `detect-location` - Lightweight location lookup
- `check-transaction-status` - Status polling
- `validate-email-backend` - Email validation
- `sitemap` - Public sitemap generation
- Read-only public endpoints
### 🔵 GENEROUS (60 req/min)
**Recommended:**
- `novu-webhook` - External webhook receiver
- `scheduled-maintenance` - Health checks
- Internal service-to-service communication
- Real-time status endpoints
### ⚫ NO RATE LIMITING NEEDED
These functions are typically called internally or on a schedule:
- `cleanup-old-versions` - Scheduled cleanup
- `process-expired-bans` - Scheduled task
- `process-scheduled-deletions` - Scheduled task
- `run-cleanup-jobs` - Scheduled task
- `migrate-novu-users` - One-time migration
- Internal notification functions (notify-*)
- `seed-test-data` - Development only
## Best Practices
### 1. Choose the Right Tier
- **Start restrictive**: Begin with a lower tier and increase if needed
- **Consider cost**: Match the rate limit to the operation's resource cost
- **Think about abuse**: Higher abuse risk = stricter limits
- **Monitor usage**: Use edge function logs to track rate limit hits
### 2. Use Per-User Limits for Authenticated Endpoints
```typescript
// ✅ Good: Rate limit authenticated operations per user
serve(withRateLimit(handler, rateLimiters.perUserModerate, corsHeaders));
// ❌ Less effective: Rate limit authenticated operations per IP
// (Multiple users behind same IP can hit each other's limits)
serve(withRateLimit(handler, rateLimiters.moderate, corsHeaders));
```
### 3. Handle Rate Limit Errors Gracefully
Rate limit responses automatically include:
- `429 Too Many Requests` status code
- `Retry-After` header (seconds to wait)
- `X-RateLimit-Limit` header (max requests allowed)
- `X-RateLimit-Remaining` header (requests remaining)
### 4. Document Your Choice
Always add a comment explaining why you chose a specific tier:
```typescript
// Apply moderate rate limiting (10 req/min) for moderation actions
// to prevent abuse while allowing legitimate moderator workflows
serve(withRateLimit(handler, rateLimiters.moderate, corsHeaders));
```
### 5. Test Rate Limits
```bash
# Test rate limiting locally
for i in {1..15}; do
curl -X POST https://your-project.supabase.co/functions/v1/your-function \
-H "Authorization: Bearer YOUR_ANON_KEY" \
-H "Content-Type: application/json" \
-d '{"test": true}'
echo " - Request $i"
sleep 1
done
```
## Migration Checklist
When adding rate limiting to an existing function:
1. ✅ Determine the appropriate tier based on operation cost
2. ✅ Import `rateLimiters` and `withRateLimit` from `_shared/rateLimiter.ts`
3. ✅ Import `corsHeaders` from `_shared/cors.ts`
4. ✅ Wrap your handler with `withRateLimit(handler, rateLimiters.TIER, corsHeaders)`
5. ✅ Add a comment explaining the tier choice
6. ✅ Test the rate limit works correctly
7. ✅ Monitor edge function logs for rate limit hits
8. ✅ Adjust tier if needed based on real usage
## Troubleshooting
### Rate Limits Too Strict
**Symptoms:** Legitimate users hitting rate limits frequently
**Solutions:**
- Increase to next tier up (strict → moderate → standard → lenient)
- Consider per-user rate limiting instead of per-IP
- Check if the operation can be optimized to reduce frequency
### Rate Limits Too Lenient
**Symptoms:** Abuse patterns, high costs, slow performance
**Solutions:**
- Decrease to next tier down
- Add additional validation before expensive operations
- Consider implementing captcha for public endpoints
### Per-User Rate Limiting Not Working
**Check:**
- Is the Authorization header being sent?
- Is the JWT valid and parsable?
- Are logs showing IP-based limits instead of user-based?
## Examples from Production
### Example 1: Upload Function (STRICT)
```typescript
// upload-image function needs strict limiting because:
// - Makes external CloudFlare API calls ($$)
// - Processes large file uploads
// - High abuse potential
serve(withRateLimit(async (req) => {
// Upload logic here
}, rateLimiters.strict, getCorsHeaders(allowedOrigin)));
```
### Example 2: Moderation Function (MODERATE)
```typescript
// process-selective-approval needs moderate limiting because:
// - Modifies database records
// - Triggers notifications
// - Used by moderators (need reasonable throughput)
serve(withRateLimit(handler, rateLimiters.moderate, corsHeaders));
```
### Example 3: Validation Function (LENIENT)
```typescript
// validate-email-backend can be lenient because:
// - Lightweight operation (just validation)
// - No database writes
// - Users may need to retry multiple times
serve(withRateLimit(async (req) => {
// Validation logic here
}, rateLimiters.lenient, corsHeaders));
```
## Future Enhancements
Potential improvements to consider:
1. **Dynamic Rate Limits**: Adjust limits based on user role/tier
2. **Distributed Rate Limiting**: Use Redis for multi-region support
3. **Rate Limit Analytics**: Track and visualize rate limit metrics
4. **Custom Error Messages**: Provide context-specific retry guidance
5. **Whitelist Support**: Bypass limits for trusted IPs/users

View File

@@ -1,142 +0,0 @@
/**
* Authentication Helper Functions
*
* Utilities for extracting user information from requests,
* handling JWTs, and generating rate limit keys.
*/
import { createClient } from 'jsr:@supabase/supabase-js@2';
/**
 * Extract user ID from Authorization header JWT
 * Returns null if not authenticated or invalid token
 */
export function extractUserIdFromAuth(req: Request): string | null {
  try {
    const authHeader = req.headers.get('Authorization');
    if (!authHeader || !authHeader.startsWith('Bearer ')) {
      return null;
    }

    const token = authHeader.substring(7);

    // Decode JWT (just the payload, no verification needed for ID extraction)
    const parts = token.split('.');
    if (parts.length !== 3) {
      return null;
    }

    // JWT segments are base64url-encoded (RFC 7515). atob() only accepts
    // standard base64 and throws on '-'/'_', which made every token whose
    // payload contained those characters fall into the catch path.
    // Translate to standard base64 and restore padding first.
    let b64 = parts[1].replace(/-/g, '+').replace(/_/g, '/');
    const rem = b64.length % 4;
    if (rem === 2) b64 += '==';
    else if (rem === 3) b64 += '=';

    const payload = JSON.parse(atob(b64));
    return payload.sub || null;
  } catch (error) {
    console.error('Error extracting user ID from auth:', error);
    return null;
  }
}
/**
 * Resolve the client IP address for a request, honouring common proxy
 * headers in order of preference: x-forwarded-for (first entry),
 * x-real-ip, then cf-connecting-ip. Returns 'unknown' when none is set.
 */
export function getClientIP(req: Request): string {
  const forwarded = req.headers.get('x-forwarded-for');
  if (forwarded) {
    // May hold a comma-separated chain; the first hop is the original client.
    const [first] = forwarded.split(',');
    return first.trim();
  }

  for (const header of ['x-real-ip', 'cf-connecting-ip']) {
    const value = req.headers.get(header);
    if (value) {
      return value.trim();
    }
  }

  // Fallback when no proxy header is present.
  return 'unknown';
}
/**
* Generate a rate limit key for the request
* Prefers user ID, falls back to IP
*/
export function getRateLimitKey(req: Request, prefix: string = 'user'): string {
const userId = extractUserIdFromAuth(req);
if (userId) {
return `${prefix}:${userId}`;
}
const clientIP = getClientIP(req);
return `${prefix}:ip:${clientIP}`;
}
/**
 * Verify JWT token and get user ID using Supabase client.
 * More robust than manual decoding: the token signature is actually
 * verified server-side. Returns null when the header is absent,
 * malformed, or verification fails.
 */
export async function verifyAuthAndGetUserId(
  req: Request,
  supabaseUrl: string,
  supabaseServiceKey: string
): Promise<string | null> {
  try {
    const authHeader = req.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return null;
    }

    const token = authHeader.slice('Bearer '.length);

    // Service-role client used purely for token verification.
    const supabase = createClient(supabaseUrl, supabaseServiceKey);
    const { data, error } = await supabase.auth.getUser(token);

    return error || !data.user ? null : data.user.id;
  } catch (error) {
    console.error('Error verifying auth token:', error);
    return null;
  }
}
/**
 * True when the request carries a Bearer token in its Authorization
 * header. Does not validate the token itself.
 */
export function hasValidAuth(req: Request): boolean {
  const header = req.headers.get('Authorization');
  return header?.startsWith('Bearer ') ?? false;
}
/**
 * Snapshot of request details useful for structured logging.
 */
export interface RequestMetadata {
  userId: string | null;
  clientIP: string;
  userAgent: string | null;
  referer: string | null;
  method: string;
  path: string;
}

/**
 * Collect logging metadata (identity, origin, route) from a request.
 */
export function extractRequestMetadata(req: Request): RequestMetadata {
  const { pathname } = new URL(req.url);
  const headers = req.headers;

  return {
    userId: extractUserIdFromAuth(req),
    clientIP: getClientIP(req),
    userAgent: headers.get('user-agent'),
    referer: headers.get('referer'),
    method: req.method,
    path: pathname,
  };
}

View File

@@ -1,119 +0,0 @@
/**
* Centralized CORS configuration for all edge functions
* Provides consistent header handling across the application
*/
// Headers every function must accept (Supabase client requests send these).
const STANDARD_HEADERS = [
  'authorization',
  'x-client-info',
  'apikey',
  'content-type',
];

// Distributed-tracing / request-correlation headers.
const TRACING_HEADERS = [
  'traceparent',
  'x-request-id',
];

// Full allowlist: standard plus tracing headers.
const ALL_HEADERS = [...STANDARD_HEADERS, ...TRACING_HEADERS];

// Shared building blocks for the exported header sets.
const ANY_ORIGIN = { 'Access-Control-Allow-Origin': '*' };
const allowHeaders = (headers: string[]) => ({
  'Access-Control-Allow-Headers': headers.join(', '),
});

/**
 * Basic CORS headers - allows all origins.
 * Use for most edge functions that need public access.
 */
export const corsHeaders = {
  ...ANY_ORIGIN,
  ...allowHeaders(STANDARD_HEADERS),
};

/**
 * Extended CORS headers - includes tracing headers.
 * Use for functions that participate in distributed tracing.
 */
export const corsHeadersWithTracing = {
  ...ANY_ORIGIN,
  ...allowHeaders(ALL_HEADERS),
};

/**
 * CORS headers with methods - for functions with multiple HTTP verbs.
 */
export const corsHeadersWithMethods = {
  ...ANY_ORIGIN,
  ...allowHeaders(ALL_HEADERS),
  'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, PATCH, OPTIONS',
};

/**
 * CORS headers with credentials - for authenticated requests requiring cookies.
 */
export const corsHeadersWithCredentials = {
  ...ANY_ORIGIN,
  ...allowHeaders(ALL_HEADERS),
  'Access-Control-Allow-Credentials': 'true',
};
/**
 * Environment-aware CORS configuration.
 * Validates the request Origin against localhost/Replit hosts in
 * development, or the comma-separated ALLOWED_ORIGINS env var in
 * production. Returns the origin to echo back, or null when the request
 * is disallowed (or not a CORS request at all).
 */
export const getAllowedOrigin = (requestOrigin: string | null): string | null => {
  // If no origin header, it's not a CORS request (same-origin or server-to-server)
  if (!requestOrigin) {
    return null;
  }

  const environment = Deno.env.get('ENVIRONMENT') || 'development';

  // Production allowlist - configure via ALLOWED_ORIGINS environment variable.
  // Entries are trimmed so values like "a.com, b.com" (space after the comma)
  // still match; the previous code only filtered, leaving " b.com" unmatched.
  const allowedOriginsEnv = Deno.env.get('ALLOWED_ORIGINS') || '';
  const allowedOrigins = allowedOriginsEnv
    .split(',')
    .map(origin => origin.trim())
    .filter(origin => origin.length > 0);

  // In development, only allow localhost and Replit domains
  if (environment === 'development') {
    const devHosts = ['localhost', '127.0.0.1', '.repl.co', '.replit.dev'];
    return devHosts.some(host => requestOrigin.includes(host))
      ? requestOrigin
      : null;
  }

  // In production, only allow specific domains from the environment variable
  return allowedOrigins.includes(requestOrigin) ? requestOrigin : null;
};
/**
 * Get CORS headers for a validated origin.
 * Use for functions requiring strict origin validation (e.g. upload-image).
 * Returns an empty object when the origin was rejected, so callers emit
 * no CORS headers at all for disallowed requests.
 */
export const getCorsHeaders = (allowedOrigin: string | null): Record<string, string> =>
  allowedOrigin
    ? {
        'Access-Control-Allow-Origin': allowedOrigin,
        'Access-Control-Allow-Headers': ALL_HEADERS.join(', '),
        'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, PATCH, OPTIONS',
        'Access-Control-Allow-Credentials': 'true',
      }
    : {};
/**
 * Respond to an OPTIONS preflight request: an empty body with the
 * provided CORS headers attached.
 */
export const handleCorsPreFlight = (corsHeaders: Record<string, string>): Response =>
  new Response(null, { headers: corsHeaders });

View File

@@ -1,94 +0,0 @@
/**
* Error Formatting Utility for Edge Functions
*
* Provides robust error message extraction from various error types:
* - Standard Error objects
* - Supabase PostgresError objects (plain objects with message/details/code/hint)
* - Raw objects and primitives
*
* Eliminates "[object Object]" errors by properly extracting error details.
*/
/**
 * Format error objects for logging.
 * Handles Error objects, Supabase PostgresError-style plain objects,
 * nested `{ error: ... }` wrappers, `msg`-style APIs, and primitives.
 *
 * @param error - Any error value
 * @returns Formatted, human-readable error message string
 */
export function formatEdgeError(error: unknown): string {
  // Standard Error objects
  if (error instanceof Error) {
    return error.message;
  }

  // Primitive values (strings, numbers, etc.)
  if (typeof error !== 'object' || error === null) {
    return String(error);
  }

  const err = error as any;

  // Supabase-style objects: message plus optional details/hint/code.
  if (typeof err.message === 'string' && err.message) {
    const parts = [err.message];
    for (const [field, label] of [
      ['details', 'Details'],
      ['hint', 'Hint'],
      ['code', 'Code'],
    ] as const) {
      const value = err[field];
      if (typeof value === 'string' && value) {
        parts.push(`${label}: ${value}`);
      }
    }
    return parts.join(' | ');
  }

  // Some errors nest the actual error in an 'error' property
  if (err.error) {
    return formatEdgeError(err.error);
  }

  // Some APIs use 'msg' instead of 'message'
  if (typeof err.msg === 'string' && err.msg) {
    return err.msg;
  }

  // Last resort: stringify the whole object, guarding against cycles.
  try {
    const json = JSON.stringify(error, null, 2);
    return json.length > 500
      ? `${json.substring(0, 500)}... (truncated)`
      : json;
  } catch {
    // JSON.stringify can fail on circular references
    return 'Unknown error (could not stringify)';
  }
}
/**
 * Convert any error to a proper Error instance.
 * Use this before throwing to ensure proper stack traces.
 *
 * @param error - Any error value
 * @returns Error instance with formatted message
 */
export function toError(error: unknown): Error {
  if (error instanceof Error) {
    return error;
  }

  const wrapped = new Error(formatEdgeError(error));
  // Keep the raw value around for debugging.
  (wrapped as any).originalError = error;
  return wrapped;
}

View File

@@ -3,8 +3,6 @@
* Prevents sensitive data exposure and provides consistent log format
*/
import { formatEdgeError } from './errorFormatter.ts';
type LogLevel = 'info' | 'warn' | 'error' | 'debug';
interface LogContext {
@@ -16,39 +14,7 @@ interface LogContext {
[key: string]: unknown;
}
// Span types for distributed tracing
/**
 * A single unit of traced work. Spans in the same trace share a traceId,
 * nest via parentSpanId, and carry timing, attributes, events, and a
 * final status set when the span ends.
 */
export interface Span {
  spanId: string;
  traceId: string;
  parentSpanId?: string;          // absent for root spans
  name: string;
  kind: 'SERVER' | 'CLIENT' | 'INTERNAL' | 'DATABASE';
  startTime: number;              // epoch ms (Date.now())
  endTime?: number;               // set by endSpan()
  duration?: number;              // endTime - startTime, ms
  attributes: Record<string, unknown>;
  events: SpanEvent[];
  status: 'ok' | 'error' | 'unset';
  error?: {                       // populated when the span ended with an error
    type: string;
    message: string;
    stack?: string;
  };
}

/** A timestamped annotation attached to a span. */
export interface SpanEvent {
  timestamp: number;              // epoch ms
  name: string;
  attributes?: Record<string, unknown>;
}

/** Minimal identifiers needed to propagate a trace across services. */
export interface SpanContext {
  traceId: string;
  spanId: string;
  traceFlags?: number;            // W3C trace-flags byte, parsed from traceparent
}
// Request tracking utilities (legacy - use spans instead)
// Request tracking utilities
export interface RequestTracking {
requestId: string;
start: number;
@@ -67,135 +33,6 @@ export function endRequest(tracking: RequestTracking): number {
return Date.now() - tracking.start;
}
// ============================================================================
// Span Lifecycle Functions
// ============================================================================
/**
 * Start a new span. When a parent context is given, the new span joins
 * that trace; otherwise a fresh trace ID is minted.
 */
export function startSpan(
  name: string,
  kind: Span['kind'],
  parentSpan?: SpanContext,
  attributes?: Record<string, unknown>
): Span {
  return {
    spanId: crypto.randomUUID(),
    traceId: parentSpan?.traceId || crypto.randomUUID(),
    parentSpanId: parentSpan?.spanId,
    name,
    kind,
    startTime: Date.now(),
    attributes: attributes || {},
    events: [],
    status: 'unset',
  };
}
/**
 * End a span: stamps endTime/duration, records the final status
 * (defaults to 'ok'), and captures error details when one is supplied.
 * Mutates and returns the same span object.
 */
export function endSpan(span: Span, status?: 'ok' | 'error', error?: unknown): Span {
  const finishedAt = Date.now();
  span.endTime = finishedAt;
  span.duration = finishedAt - span.startTime;
  span.status = status ?? 'ok';

  if (error) {
    // Wrap non-Error values so name/message/stack are always available.
    const err = error instanceof Error ? error : new Error(formatEdgeError(error));
    span.error = {
      type: err.name,
      message: err.message,
      stack: err.stack,
    };
  }
  return span;
}
/**
* Add event to span
*/
export function addSpanEvent(
span: Span,
name: string,
attributes?: Record<string, unknown>
): void {
span.events.push({
timestamp: Date.now(),
name,
attributes,
});
}
/**
 * Merge attributes into a span; later values win over existing keys.
 */
export function setSpanAttributes(
  span: Span,
  attributes: Record<string, unknown>
): void {
  span.attributes = Object.assign({}, span.attributes, attributes);
}
/**
 * Extract the propagation context (trace + span IDs) from a span.
 */
export function getSpanContext(span: Span): SpanContext {
  const { traceId, spanId } = span;
  return { traceId, spanId };
}
/**
 * Parse a W3C `traceparent` header (version-traceId-spanId-flags) into a
 * SpanContext. Returns undefined when the header is absent or does not
 * have exactly four dash-separated segments.
 */
export function extractSpanContextFromHeaders(headers: Headers): SpanContext | undefined {
  const traceparent = headers.get('traceparent');
  if (!traceparent) return undefined;

  const [, traceId, spanId, flags, ...rest] = traceparent.split('-');
  if (spanId === undefined || flags === undefined || rest.length > 0) {
    return undefined; // malformed: not exactly 4 segments
  }

  return {
    traceId,
    spanId,
    traceFlags: parseInt(flags, 16),
  };
}
/**
 * Serialise a SpanContext into a W3C `traceparent` header
 * (version 00, flags 01).
 */
export function injectSpanContextIntoHeaders(spanContext: SpanContext): Record<string, string> {
  const { traceId, spanId } = spanContext;
  return { traceparent: `00-${traceId}-${spanId}-01` };
}
/**
 * Emit a completed span through the edge logger, scrubbing sensitive
 * fields from both the span attributes and each event's attributes.
 */
export function logSpan(span: Span): void {
  const scrubbedEvents = span.events.map((event) => ({
    ...event,
    attributes: event.attributes ? sanitizeContext(event.attributes) : undefined,
  }));

  edgeLogger.info('Span completed', {
    span: {
      ...span,
      attributes: sanitizeContext(span.attributes),
      events: scrubbedEvents,
    },
  });
}
// Fields that should never be logged
const SENSITIVE_FIELDS = [
'password',
@@ -215,7 +52,7 @@ const SENSITIVE_FIELDS = [
/**
* Sanitize context to remove sensitive data
*/
export function sanitizeContext(context: LogContext): LogContext {
function sanitizeContext(context: LogContext): LogContext {
const sanitized: LogContext = {};
for (const [key, value] of Object.entries(context)) {

View File

@@ -1,174 +0,0 @@
/**
* Centralized Rate Limiting Configuration for Edge Functions
*
* Provides standardized rate limit tiers that can be imported by any edge function.
* This ensures consistent rate limiting behavior across the application.
*/
import { RateLimitConfig } from './rateLimiter.ts';
/**
 * Rate Limit Tier Definitions
 *
 * Choose the appropriate tier based on the operation cost and abuse risk:
 *
 * - **STRICT**: For expensive operations (uploads, exports, data modifications)
 * - **MODERATE**: For standard API operations (moderation actions, content creation)
 * - **STANDARD**: For typical read/write operations (most endpoints)
 * - **LENIENT**: For lightweight read operations (cached data, public endpoints)
 * - **GENEROUS**: For high-frequency operations (polling, real-time updates)
 */

// Base time window for all rate limiters (1 minute)
const RATE_LIMIT_WINDOW_MS = 60000;

/** Build a one-minute, IP-keyed tier with the given request budget. */
const makeTier = (maxRequests: number): RateLimitConfig => ({
  windowMs: RATE_LIMIT_WINDOW_MS,
  maxRequests,
});

/**
 * STRICT: 5 requests per minute.
 * For file uploads, data exports, batch operations, resource-intensive
 * computations, and CloudFlare API calls.
 * Examples: upload-image, export-user-data.
 */
export const RATE_LIMIT_STRICT: RateLimitConfig = makeTier(5);

/**
 * MODERATE: 10 requests per minute.
 * For moderation actions (approve/reject), content submission, user
 * profile updates, email sending, and notification triggers.
 * Examples: process-selective-approval, process-selective-rejection,
 * submit-entity-edit.
 */
export const RATE_LIMIT_MODERATE: RateLimitConfig = makeTier(10);

/**
 * STANDARD: 20 requests per minute.
 * For standard read/write operations, search endpoints, contact forms,
 * account management, and authentication operations.
 * Examples: send-contact-message, request-account-deletion, validate-email.
 */
export const RATE_LIMIT_STANDARD: RateLimitConfig = makeTier(20);

/**
 * LENIENT: 30 requests per minute.
 * For lightweight reads, cached data retrieval, public endpoint queries,
 * status checks, and location detection.
 * Examples: detect-location, check-transaction-status.
 */
export const RATE_LIMIT_LENIENT: RateLimitConfig = makeTier(30);

/**
 * GENEROUS: 60 requests per minute.
 * For high-frequency polling, real-time updates, webhook receivers,
 * health checks, and internal service-to-service calls.
 * Examples: novu-webhook, scheduled-maintenance.
 */
export const RATE_LIMIT_GENEROUS: RateLimitConfig = makeTier(60);
/**
 * PER_USER: rate limit keyed by user ID (extracted from the
 * Authorization JWT) rather than per IP address, falling back to the
 * client IP when the request is unauthenticated or the token is
 * unparsable. Useful for:
 * - User-specific operations
 * - Preventing account abuse
 * - Per-user quotas
 *
 * Variants:
 * - perUserStrict:   5 req/min
 * - perUserModerate: 10 req/min
 * - perUserStandard: 20 req/min (default)
 * - perUserLenient:  40 req/min
 */
export const RATE_LIMIT_PER_USER_STRICT: RateLimitConfig = {
  windowMs: 60000, // 1 minute (same as RATE_LIMIT_WINDOW_MS)
  maxRequests: 5,
  keyGenerator: (req: Request) => {
    // Extract user ID from Authorization header JWT
    const authHeader = req.headers.get('Authorization');
    if (authHeader) {
      try {
        const token = authHeader.replace('Bearer ', '');
        // JWT segments are base64url (RFC 7515). atob() rejects '-'/'_',
        // so tokens containing them used to throw here and silently fall
        // back to IP keying. Translate to standard base64 + padding first.
        let seg = token.split('.')[1].replace(/-/g, '+').replace(/_/g, '/');
        const rem = seg.length % 4;
        if (rem === 2) seg += '==';
        else if (rem === 3) seg += '=';
        const payload = JSON.parse(atob(seg));
        return `user:${payload.sub}`;
      } catch {
        // Fall back to IP if JWT parsing fails
        return req.headers.get('x-forwarded-for')?.split(',')[0] || '0.0.0.0';
      }
    }
    return req.headers.get('x-forwarded-for')?.split(',')[0] || '0.0.0.0';
  }
};

export const RATE_LIMIT_PER_USER_MODERATE: RateLimitConfig = {
  ...RATE_LIMIT_PER_USER_STRICT,
  maxRequests: 10,
};

export const RATE_LIMIT_PER_USER_STANDARD: RateLimitConfig = {
  ...RATE_LIMIT_PER_USER_STRICT,
  maxRequests: 20,
};

export const RATE_LIMIT_PER_USER_LENIENT: RateLimitConfig = {
  ...RATE_LIMIT_PER_USER_STRICT,
  maxRequests: 40,
};
/**
* Rate Limit Tier Summary
*
* | Tier | Requests/Min | Use Case |
* |-------------------|--------------|-----------------------------------|
* | STRICT | 5 | Expensive operations, uploads |
* | MODERATE | 10 | Moderation, submissions |
* | STANDARD | 20 | Standard read/write operations |
* | LENIENT | 30 | Lightweight reads, public data |
* | GENEROUS | 60 | Polling, webhooks, health checks |
* | PER_USER_STRICT | 5/user | User-specific expensive ops |
* | PER_USER_MODERATE | 10/user | User-specific moderation |
* | PER_USER_STANDARD | 20/user | User-specific standard ops |
* | PER_USER_LENIENT | 40/user | User-specific frequent ops |
*/

View File

@@ -1,144 +0,0 @@
/**
* Rate Limit Metrics Tracking
*
* In-memory metrics collection for rate limiting operations.
* Tracks accepted/rejected requests, patterns, and provides analytics.
*/
/**
 * One rate-limit decision, as recorded by the rate limiter.
 */
export interface RateLimitMetric {
  timestamp: number;        // epoch ms when the check happened
  functionName: string;     // edge function that performed the check
  clientIP: string;         // resolved client IP
  userId?: string;          // present when the request was authenticated
  allowed: boolean;         // true = request passed the limiter
  remaining: number;        // requests left in the current window
  retryAfter?: number;      // seconds to wait; set when the request was blocked
  tier: string;             // rate-limit tier name (e.g. 'strict')
}

/**
 * Aggregated rate-limit statistics over a time window
 * (produced by getMetricsStats).
 */
export interface MetricsStats {
  totalRequests: number;
  allowedRequests: number;
  blockedRequests: number;
  blockRate: number;        // blockedRequests / totalRequests (0 when no traffic)
  uniqueIPs: number;
  uniqueUsers: number;
  topBlockedIPs: Array<{ ip: string; count: number }>;        // top 10, most-blocked first
  topBlockedUsers: Array<{ userId: string; count: number }>;  // top 10, most-blocked first
  tierDistribution: Record<string, number>;                   // requests seen per tier
}
// In-memory buffer of recent rate-limit decisions (newest at the end).
const metricsStore: RateLimitMetric[] = [];
const MAX_METRICS = 10000; // retain only the most recent 10k entries

/**
 * Record one rate-limit check result, evicting the oldest entries
 * whenever the store grows past MAX_METRICS.
 */
export function recordRateLimitMetric(metric: RateLimitMetric): void {
  metricsStore.push(metric);
  const overflow = metricsStore.length - MAX_METRICS;
  if (overflow > 0) {
    metricsStore.splice(0, overflow);
  }
}

/**
 * Most recently recorded metrics, oldest first (up to `limit`).
 */
export function getRecentMetrics(limit: number = 100): RateLimitMetric[] {
  return metricsStore.slice(-limit);
}

/**
 * Aggregate statistics over the trailing time window (default: 1 minute).
 */
export function getMetricsStats(timeWindowMs: number = 60000): MetricsStats {
  const cutoff = Date.now() - timeWindowMs;
  const recent = metricsStore.filter((m) => m.timestamp >= cutoff);

  const totalRequests = recent.length;
  let blockedRequests = 0;

  const ips = new Set<string>();
  const users = new Set<string>();
  const ipBlockCounts = new Map<string, number>();
  const userBlockCounts = new Map<string, number>();
  const tierCounts = new Map<string, number>();

  // Single pass: uniqueness, per-key block counts, tier distribution.
  for (const m of recent) {
    ips.add(m.clientIP);
    if (m.userId) users.add(m.userId);
    if (!m.allowed) {
      blockedRequests++;
      ipBlockCounts.set(m.clientIP, (ipBlockCounts.get(m.clientIP) ?? 0) + 1);
      if (m.userId) {
        userBlockCounts.set(m.userId, (userBlockCounts.get(m.userId) ?? 0) + 1);
      }
    }
    tierCounts.set(m.tier, (tierCounts.get(m.tier) ?? 0) + 1);
  }

  // Top-10 entries of a counter map, highest count first.
  const topTen = (counts: Map<string, number>): Array<[string, number]> =>
    [...counts.entries()].sort((a, b) => b[1] - a[1]).slice(0, 10);

  return {
    totalRequests,
    allowedRequests: totalRequests - blockedRequests,
    blockedRequests,
    blockRate: totalRequests > 0 ? blockedRequests / totalRequests : 0,
    uniqueIPs: ips.size,
    uniqueUsers: users.size,
    topBlockedIPs: topTen(ipBlockCounts).map(([ip, count]) => ({ ip, count })),
    topBlockedUsers: topTen(userBlockCounts).map(([userId, count]) => ({ userId, count })),
    tierDistribution: Object.fromEntries(tierCounts),
  };
}

/**
 * Drop every stored metric (useful for testing).
 */
export function clearMetrics(): void {
  metricsStore.length = 0;
}

/**
 * Most recent metrics recorded for one edge function (up to `limit`).
 */
export function getFunctionMetrics(functionName: string, limit: number = 100): RateLimitMetric[] {
  return metricsStore.filter((m) => m.functionName === functionName).slice(-limit);
}

/**
 * Most recent metrics for one user (up to `limit`).
 */
export function getUserMetrics(userId: string, limit: number = 100): RateLimitMetric[] {
  return metricsStore.filter((m) => m.userId === userId).slice(-limit);
}

/**
 * Most recent metrics for one client IP (up to `limit`).
 */
export function getIPMetrics(clientIP: string, limit: number = 100): RateLimitMetric[] {
  return metricsStore.filter((m) => m.clientIP === clientIP).slice(-limit);
}

View File

@@ -3,9 +3,6 @@
* Prevents abuse and DoS attacks with in-memory rate limiting
*/
import { recordRateLimitMetric } from './rateLimitMetrics.ts';
import { extractUserIdFromAuth, getClientIP } from './authHelpers.ts';
export interface RateLimitConfig {
windowMs: number; // Time window in milliseconds
maxRequests: number; // Max requests per window
@@ -24,12 +21,8 @@ class RateLimiter {
private rateLimitMap = new Map<string, { count: number; resetAt: number }>();
private config: Required<RateLimitConfig>;
private cleanupInterval: number;
private tierName: string;
private functionName?: string;
constructor(config: RateLimitConfig, tierName: string = 'custom', functionName?: string) {
this.tierName = tierName;
this.functionName = functionName;
constructor(config: RateLimitConfig) {
this.config = {
maxMapSize: 10000,
keyGenerator: (req: Request) => this.getClientIP(req),
@@ -45,8 +38,16 @@ class RateLimiter {
}
private getClientIP(req: Request): string {
// Use centralized auth helper for consistent IP extraction
return getClientIP(req);
if (this.config.trustProxy) {
const forwarded = req.headers.get('x-forwarded-for');
if (forwarded) return forwarded.split(',')[0].trim();
const realIP = req.headers.get('x-real-ip');
if (realIP) return realIP;
}
// Fallback for testing
return '0.0.0.0';
}
private cleanupExpiredEntries(): void {
@@ -72,33 +73,15 @@ class RateLimiter {
}
}
check(req: Request, functionName?: string): RateLimitResult {
check(req: Request): RateLimitResult {
const key = this.config.keyGenerator(req);
const now = Date.now();
const existing = this.rateLimitMap.get(key);
// Extract metadata for metrics
const clientIP = getClientIP(req);
const userId = extractUserIdFromAuth(req);
const actualFunctionName = functionName || this.functionName || 'unknown';
// Check existing entry
if (existing && now <= existing.resetAt) {
if (existing.count >= this.config.maxRequests) {
const retryAfter = Math.ceil((existing.resetAt - now) / 1000);
// Record blocked request metric
recordRateLimitMetric({
timestamp: now,
functionName: actualFunctionName,
clientIP,
userId: userId || undefined,
allowed: false,
remaining: 0,
retryAfter,
tier: this.tierName,
});
return {
allowed: false,
retryAfter,
@@ -106,22 +89,9 @@ class RateLimiter {
};
}
existing.count++;
const remaining = this.config.maxRequests - existing.count;
// Record allowed request metric
recordRateLimitMetric({
timestamp: now,
functionName: actualFunctionName,
clientIP,
userId: userId || undefined,
allowed: true,
remaining,
tier: this.tierName,
});
return {
allowed: true,
remaining
remaining: this.config.maxRequests - existing.count
};
}
@@ -147,22 +117,9 @@ class RateLimiter {
resetAt: now + this.config.windowMs
});
const remaining = this.config.maxRequests - 1;
// Record allowed request metric
recordRateLimitMetric({
timestamp: now,
functionName: actualFunctionName,
clientIP,
userId: userId || undefined,
allowed: true,
remaining,
tier: this.tierName,
});
return {
allowed: true,
remaining
remaining: this.config.maxRequests - 1
};
}
@@ -172,68 +129,61 @@ class RateLimiter {
}
}
// Import centralized rate limit configurations
import {
RATE_LIMIT_STRICT,
RATE_LIMIT_MODERATE,
RATE_LIMIT_STANDARD,
RATE_LIMIT_LENIENT,
RATE_LIMIT_GENEROUS,
RATE_LIMIT_PER_USER_STRICT,
RATE_LIMIT_PER_USER_MODERATE,
RATE_LIMIT_PER_USER_STANDARD,
RATE_LIMIT_PER_USER_LENIENT,
} from './rateLimitConfig.ts';
// Export factory function for creating custom rate limiters
export function createRateLimiter(config: RateLimitConfig, tierName?: string, functionName?: string): RateLimiter {
return new RateLimiter(config, tierName, functionName);
// Export factory function for different rate limit tiers
export function createRateLimiter(config: RateLimitConfig): RateLimiter {
return new RateLimiter(config);
}
/**
* Pre-configured rate limiters using centralized tier definitions
*
* These are singleton instances that should be imported and used by edge functions.
* See rateLimitConfig.ts for detailed documentation on when to use each tier.
*/
// Pre-configured rate limiters for common use cases
export const rateLimiters = {
// Strict: 5 requests/minute - For expensive operations
strict: createRateLimiter(RATE_LIMIT_STRICT, 'strict'),
// Strict: For expensive operations (file uploads, data exports)
strict: createRateLimiter({
windowMs: 60000, // 1 minute
maxRequests: 5, // 5 requests per minute
}),
// Moderate: 10 requests/minute - For moderation and submissions
moderate: createRateLimiter(RATE_LIMIT_MODERATE, 'moderate'),
// Standard: For most API endpoints
standard: createRateLimiter({
windowMs: 60000, // 1 minute
maxRequests: 10, // 10 requests per minute
}),
// Standard: 20 requests/minute - For typical operations (DEPRECATED: use 'moderate' for 10/min or 'standard' for 20/min)
standard: createRateLimiter(RATE_LIMIT_MODERATE, 'standard'), // Keeping for backward compatibility
// Lenient: For read-only, cached endpoints
lenient: createRateLimiter({
windowMs: 60000, // 1 minute
maxRequests: 30, // 30 requests per minute
}),
// Lenient: 30 requests/minute - For lightweight reads
lenient: createRateLimiter(RATE_LIMIT_LENIENT, 'lenient'),
// Generous: 60 requests/minute - For high-frequency operations
generous: createRateLimiter(RATE_LIMIT_GENEROUS, 'generous'),
// Per-user rate limiters (key by user ID instead of IP)
perUserStrict: createRateLimiter(RATE_LIMIT_PER_USER_STRICT, 'perUserStrict'),
perUserModerate: createRateLimiter(RATE_LIMIT_PER_USER_MODERATE, 'perUserModerate'),
perUserStandard: createRateLimiter(RATE_LIMIT_PER_USER_STANDARD, 'perUserStandard'),
perUserLenient: createRateLimiter(RATE_LIMIT_PER_USER_LENIENT, 'perUserLenient'),
// Legacy per-user factory function (DEPRECATED: use perUserStrict, perUserModerate, etc.)
// Per-user: For authenticated endpoints (uses user ID as key)
perUser: (maxRequests: number = 20) => createRateLimiter({
...RATE_LIMIT_PER_USER_STANDARD,
windowMs: 60000,
maxRequests,
}, 'perUser'),
keyGenerator: (req: Request) => {
// Extract user ID from Authorization header JWT
const authHeader = req.headers.get('Authorization');
if (authHeader) {
try {
const token = authHeader.replace('Bearer ', '');
const payload = JSON.parse(atob(token.split('.')[1]));
return `user:${payload.sub}`;
} catch {
// Fall back to IP if JWT parsing fails
return req.headers.get('x-forwarded-for')?.split(',')[0] || '0.0.0.0';
}
}
return req.headers.get('x-forwarded-for')?.split(',')[0] || '0.0.0.0';
}
}),
};
// Middleware helper
export function withRateLimit(
handler: (req: Request) => Promise<Response>,
limiter: RateLimiter,
corsHeaders: Record<string, string> = {},
functionName?: string
corsHeaders: Record<string, string> = {}
): (req: Request) => Promise<Response> {
return async (req: Request) => {
const result = limiter.check(req, functionName);
const result = limiter.check(req);
if (!result.allowed) {
return new Response(

View File

@@ -1,8 +1,10 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface DeleteUserRequest {
targetUserId: string;
@@ -14,9 +16,7 @@ interface DeleteUserResponse {
errorCode?: 'aal2_required' | 'permission_denied' | 'invalid_request' | 'deletion_failed';
}
// Apply moderate rate limiting (10 req/min) for admin user deletion
// Prevents abuse of this sensitive administrative operation
Deno.serve(withRateLimit(async (req) => {
Deno.serve(async (req) => {
if (req.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });
}
@@ -348,7 +348,7 @@ Deno.serve(withRateLimit(async (req) => {
edgeLogger.warn('Error deleting avatar from Cloudflare', {
requestId: tracking.requestId,
targetUserId,
error: formatEdgeError(error),
error: error instanceof Error ? error.message : String(error),
action: 'admin_delete_user'
});
}
@@ -417,7 +417,7 @@ Deno.serve(withRateLimit(async (req) => {
edgeLogger.warn('Error removing Novu subscriber', {
requestId: tracking.requestId,
targetUserId,
error: formatEdgeError(error),
error: error instanceof Error ? error.message : String(error),
action: 'admin_delete_user'
});
}
@@ -521,7 +521,7 @@ Deno.serve(withRateLimit(async (req) => {
edgeLogger.warn('Error sending deletion notification email', {
requestId: tracking.requestId,
targetUserId,
error: formatEdgeError(error),
error: error instanceof Error ? error.message : String(error),
action: 'admin_delete_user'
});
}
@@ -546,7 +546,7 @@ Deno.serve(withRateLimit(async (req) => {
edgeLogger.error('Unexpected error in admin delete user', {
requestId: tracking.requestId,
duration,
error: formatEdgeError(error),
error: error instanceof Error ? error.message : String(error),
action: 'admin_delete_user'
});
@@ -559,4 +559,4 @@ Deno.serve(withRateLimit(async (req) => {
{ status: 500, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
}, rateLimiters.moderate, corsHeaders));
});

View File

@@ -1,8 +1,11 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
serve(async (req) => {
const tracking = startRequest();
@@ -134,7 +137,7 @@ serve(async (req) => {
);
} catch (error) {
const duration = endRequest(tracking);
edgeLogger.error('Error cancelling deletion', { action: 'cancel_deletion_error', error: formatEdgeError(error), requestId: tracking.requestId, duration });
edgeLogger.error('Error cancelling deletion', { action: 'cancel_deletion_error', error: error instanceof Error ? error.message : String(error), requestId: tracking.requestId, duration });
return new Response(
JSON.stringify({ error: error.message, requestId: tracking.requestId }),
{

View File

@@ -1,7 +1,10 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
Deno.serve(async (req) => {
const tracking = startRequest();
@@ -146,7 +149,7 @@ Deno.serve(async (req) => {
action: 'cancel_email_change',
requestId: tracking.requestId,
duration,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
return new Response(
JSON.stringify({

View File

@@ -8,9 +8,13 @@
*/
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface StatusRequest {
idempotencyKey: string;
}

View File

@@ -1,7 +1,10 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface CleanupStats {
item_edit_history_deleted: number;
@@ -180,7 +183,7 @@ Deno.serve(async (req) => {
}
);
} catch (error) {
edgeLogger.error('Cleanup job failed', { error: formatEdgeError(error) });
edgeLogger.error('Cleanup job failed', { error: error instanceof Error ? error.message : String(error) });
return new Response(
JSON.stringify({

View File

@@ -1,8 +1,12 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
serve(async (req) => {
const tracking = startRequest();

View File

@@ -1,8 +1,11 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { Novu } from "npm:@novu/api@1.6.0";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
// Simple request tracking
const startRequest = () => ({ requestId: crypto.randomUUID(), start: Date.now() });
@@ -215,7 +218,7 @@ serve(async (req) => {
} catch (topicError: unknown) {
// Non-blocking - log error but don't fail the request
edgeLogger.error('Failed to add subscriber to users topic', {
error: formatEdgeError(topicError),
error: topicError instanceof Error ? topicError.message : String(topicError),
subscriberId,
requestId: tracking.requestId
});
@@ -235,7 +238,7 @@ serve(async (req) => {
} catch (error: unknown) {
const duration = endRequest(tracking);
edgeLogger.error('Error creating Novu subscriber', {
error: formatEdgeError(error),
error: error instanceof Error ? error.message : String(error),
requestId: tracking.requestId,
duration
});

View File

@@ -1,7 +1,10 @@
import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
import { formatEdgeError } from "../_shared/errorFormatter.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
interface IPLocationResponse {
country: string;
@@ -42,7 +45,7 @@ function cleanupExpiredEntries() {
// CRITICAL: Increment failure counter and log detailed error information
cleanupFailureCount++;
const errorMessage = formatEdgeError(error);
const errorMessage = error instanceof Error ? error.message : String(error);
edgeLogger.error('Cleanup error', {
attempt: cleanupFailureCount,
@@ -281,7 +284,7 @@ serve(async (req) => {
} catch (error: unknown) {
// Enhanced error logging for better visibility and debugging
const errorMessage = formatEdgeError(error);
const errorMessage = error instanceof Error ? error.message : String(error);
edgeLogger.error('Location detection error', {
error: errorMessage,

View File

@@ -1,10 +1,12 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import { sanitizeError } from '../_shared/errorSanitizer.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface ExportOptions {
include_reviews: boolean;
@@ -14,9 +16,7 @@ interface ExportOptions {
format: 'json';
}
// Apply strict rate limiting (5 req/min) for expensive data export operations
// This prevents abuse and manages server load from large data exports
serve(withRateLimit(async (req) => {
serve(async (req) => {
const tracking = startRequest();
// Handle CORS preflight requests
@@ -348,7 +348,7 @@ serve(withRateLimit(async (req) => {
action: 'export_error',
requestId: tracking.requestId,
duration,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
const sanitized = sanitizeError(error, 'export-user-data');
return new Response(
@@ -367,4 +367,4 @@ serve(withRateLimit(async (req) => {
}
);
}
}, rateLimiters.strict, corsHeaders));
});

View File

@@ -1,10 +1,14 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { Novu } from "npm:@novu/api@1.6.0";
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
import { withEdgeRetry } from '../_shared/retryHelper.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
const TOPICS = {
MODERATION_SUBMISSIONS: 'moderation-submissions',
MODERATION_REPORTS: 'moderation-reports',

View File

@@ -1,9 +1,13 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { createErrorResponse, sanitizeError } from '../_shared/errorSanitizer.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface MergeTicketsRequest {
primaryTicketId: string;
mergeTicketIds: string[];

View File

@@ -1,7 +1,10 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
Deno.serve(async (req) => {
const tracking = startRequest();
@@ -183,7 +186,7 @@ Deno.serve(async (req) => {
action: 'mfa_unenroll_error',
requestId: tracking.requestId,
duration,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
return new Response(
JSON.stringify({

View File

@@ -1,9 +1,13 @@
import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { Novu } from "npm:@novu/api@1.6.0";
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
serve(async (req) => {
if (req.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });

View File

@@ -1,282 +0,0 @@
/**
* Rate Limit Monitor
*
* Periodically checks rate limit metrics against configured thresholds
* and triggers alerts when limits are exceeded.
*
* Designed to run as a cron job every 5 minutes.
*/
import { createClient } from 'jsr:@supabase/supabase-js@2';
import { getMetricsStats } from '../_shared/rateLimitMetrics.ts';
// CORS headers spread into every JSON response below and returned as-is for
// OPTIONS preflight requests. Wide-open origin ('*') with the request headers
// Supabase clients send.
const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
// One threshold rule, loaded from the `rate_limit_alert_config` table and
// evaluated on each monitor run (only rows with enabled = true are fetched).
interface AlertConfig {
  id: string;
  // Which aggregate from getMetricsStats() this rule compares against.
  // 'function_specific' is recognized but not yet implemented.
  metric_type: 'block_rate' | 'total_requests' | 'unique_ips' | 'function_specific';
  // Value that triggers an alert when exceeded. For 'block_rate' this is a
  // 0-1 fraction (the alert message multiplies it by 100 for display).
  threshold_value: number;
  // Lookback window in milliseconds, passed to getMetricsStats().
  time_window_ms: number;
  // Only meaningful for 'function_specific' rules — TODO confirm once implemented.
  function_name?: string;
  // Disabled rules are skipped by checkAlertConditions().
  enabled: boolean;
}
// Result of evaluating a single AlertConfig against current metrics.
interface AlertCheck {
  // Mirrors AlertConfig.id; used for dedup lookups and the alert row.
  configId: string;
  metricType: string;
  // Observed value of the metric over the configured window.
  metricValue: number;
  thresholdValue: number;
  timeWindowMs: number;
  functionName?: string;
  // True when metricValue > thresholdValue; only exceeded checks are processed.
  exceeded: boolean;
  // Human-readable summary, stored as alert_message and sent in notifications.
  message: string;
}
/**
 * Evaluates each enabled alert configuration against the current rate-limit
 * metrics and returns one AlertCheck per evaluated rule.
 *
 * Disabled rules are skipped entirely; 'function_specific' rules are logged
 * and skipped because per-function metrics are not implemented yet.
 *
 * @param configs - Rows from `rate_limit_alert_config`.
 * @returns Checks with `exceeded` set when the metric crossed its threshold.
 */
async function checkAlertConditions(configs: AlertConfig[]): Promise<AlertCheck[]> {
  const results: AlertCheck[] = [];

  for (const rule of configs) {
    if (!rule.enabled) continue;

    // Aggregate stats over this rule's lookback window.
    const stats = getMetricsStats(rule.time_window_ms);

    let observed = 0;
    let summary = '';

    if (rule.metric_type === 'block_rate') {
      observed = stats.blockRate;
      summary = `Rate limit block rate (${(observed * 100).toFixed(1)}%) exceeded threshold (${(rule.threshold_value * 100).toFixed(1)}%) in last ${rule.time_window_ms / 1000}s. ${stats.blockedRequests} of ${stats.totalRequests} requests blocked.`;
    } else if (rule.metric_type === 'total_requests') {
      observed = stats.totalRequests;
      summary = `Total requests (${observed}) exceeded threshold (${rule.threshold_value}) in last ${rule.time_window_ms / 1000}s.`;
    } else if (rule.metric_type === 'unique_ips') {
      observed = stats.uniqueIPs;
      summary = `Unique IPs (${observed}) exceeded threshold (${rule.threshold_value}) in last ${rule.time_window_ms / 1000}s. Possible DDoS attack.`;
    } else {
      // 'function_specific': would require per-function metric tracking.
      console.log('Function-specific alerts not yet implemented');
      continue;
    }

    results.push({
      configId: rule.id,
      metricType: rule.metric_type,
      metricValue: observed,
      thresholdValue: rule.threshold_value,
      timeWindowMs: rule.time_window_ms,
      functionName: rule.function_name,
      exceeded: observed > rule.threshold_value,
      message: summary,
    });
  }

  return results;
}
/**
 * Persists a threshold violation as a row in `rate_limit_alerts`.
 *
 * Never throws: both insert errors and unexpected exceptions are logged and
 * reported through the returned `{ success, error }` shape.
 *
 * @param supabase - Supabase client (service-role).
 * @param check - The evaluated alert to record.
 */
async function recordAlert(
  supabase: any,
  check: AlertCheck
): Promise<{ success: boolean; error?: string }> {
  try {
    // Column names follow the rate_limit_alerts schema.
    const row = {
      config_id: check.configId,
      metric_type: check.metricType,
      metric_value: check.metricValue,
      threshold_value: check.thresholdValue,
      time_window_ms: check.timeWindowMs,
      function_name: check.functionName,
      alert_message: check.message,
    };

    const { error } = await supabase.from('rate_limit_alerts').insert(row);
    if (!error) {
      return { success: true };
    }

    console.error('Failed to record alert:', error);
    return { success: false, error: error.message };
  } catch (error) {
    console.error('Exception recording alert:', error);
    const message = error instanceof Error ? error.message : 'Unknown error';
    return { success: false, error: message };
  }
}
/**
 * Notifies moderators about a threshold violation by invoking the
 * `trigger-notification` edge function with the `rate-limit-alert` workflow
 * on the `moderators` topic.
 *
 * Never throws: invocation errors and unexpected exceptions are logged and
 * reported through the returned `{ success, error }` shape.
 *
 * @param supabase - Supabase client with `functions.invoke` available.
 * @param check - The evaluated alert whose metrics populate the payload.
 */
async function sendNotification(
  supabase: any,
  check: AlertCheck
): Promise<{ success: boolean; error?: string }> {
  try {
    // Send notification to moderators via the moderator topic.
    // Fix: the original destructured `data` from the invoke result but never
    // used it; only `error` is inspected.
    const { error } = await supabase.functions.invoke('trigger-notification', {
      body: {
        workflowId: 'rate-limit-alert',
        topicKey: 'moderators',
        payload: {
          message: check.message,
          metricType: check.metricType,
          metricValue: check.metricValue,
          thresholdValue: check.thresholdValue,
          // Fall back to 'all' when the rule is not function-specific.
          functionName: check.functionName || 'all',
        },
        overrides: {
          email: {
            subject: '🚨 Rate Limit Alert',
          },
        },
      },
    });
    if (error) {
      console.error('Failed to send notification:', error);
      return { success: false, error: error.message };
    }
    return { success: true };
  } catch (error) {
    console.error('Exception sending notification:', error);
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error'
    };
  }
}
/**
 * HTTP entry point for the rate-limit monitor (designed to run on a cron
 * schedule — see the file header).
 *
 * Flow:
 *  1. Load enabled rows from `rate_limit_alert_config`.
 *  2. Evaluate each rule via checkAlertConditions().
 *  3. For every exceeded threshold that has no unresolved alert in the last
 *     15 minutes, persist an alert row (recordAlert) and notify moderators
 *     (sendNotification).
 *
 * @param req - Incoming request; only OPTIONS is special-cased (CORS preflight).
 * @returns 200 with a JSON run summary, or 500 on config-fetch / unexpected errors.
 */
async function handler(req: Request): Promise<Response> {
  // Handle CORS preflight
  if (req.method === 'OPTIONS') {
    return new Response(null, { headers: corsHeaders });
  }
  const startTime = Date.now();
  console.log('Rate limit monitor starting...');
  try {
    // NOTE(review): '!' assumes both env vars are set; a missing value would
    // only surface when the client is used — confirm deploy config.
    const supabaseUrl = Deno.env.get('SUPABASE_URL')!;
    const supabaseServiceKey = Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!;
    const supabase = createClient(supabaseUrl, supabaseServiceKey);
    // Fetch enabled alert configurations
    const { data: configs, error: configError } = await supabase
      .from('rate_limit_alert_config')
      .select('*')
      .eq('enabled', true);
    if (configError) {
      console.error('Failed to fetch alert configs:', configError);
      return new Response(
        JSON.stringify({
          success: false,
          error: 'Failed to fetch alert configurations',
          details: configError.message
        }),
        { status: 500, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
      );
    }
    // Nothing to evaluate: still a successful (no-op) run.
    if (!configs || configs.length === 0) {
      console.log('No enabled alert configurations found');
      return new Response(
        JSON.stringify({
          success: true,
          message: 'No enabled alert configurations',
          checked: 0
        }),
        { status: 200, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
      );
    }
    console.log(`Checking ${configs.length} alert configurations...`);
    // Check all alert conditions
    const checks = await checkAlertConditions(configs);
    const exceededChecks = checks.filter(c => c.exceeded);
    console.log(`Found ${exceededChecks.length} threshold violations`);
    // Process exceeded thresholds
    const alertResults = [];
    for (const check of exceededChecks) {
      console.log(`Processing alert: ${check.message}`);
      // Dedup: check if we've already sent a recent alert for this config.
      // An unresolved alert (resolved_at IS NULL) created in the last 15
      // minutes suppresses a new one for the same config.
      const { data: recentAlerts } = await supabase
        .from('rate_limit_alerts')
        .select('created_at')
        .eq('config_id', check.configId)
        .is('resolved_at', null)
        .gte('created_at', new Date(Date.now() - 15 * 60 * 1000).toISOString()) // Last 15 minutes
        .order('created_at', { ascending: false })
        .limit(1);
      if (recentAlerts && recentAlerts.length > 0) {
        console.log(`Skipping alert - recent unresolved alert exists for config ${check.configId}`);
        alertResults.push({
          configId: check.configId,
          skipped: true,
          reason: 'Recent alert exists',
        });
        continue;
      }
      // Record the alert (DB row). Both calls report failure via their
      // return values rather than throwing, so one failing doesn't skip
      // the other.
      const recordResult = await recordAlert(supabase, check);
      // Send notification to moderators.
      const notifyResult = await sendNotification(supabase, check);
      alertResults.push({
        configId: check.configId,
        metricType: check.metricType,
        recorded: recordResult.success,
        notified: notifyResult.success,
        recordError: recordResult.error,
        notifyError: notifyResult.error,
      });
    }
    const duration = Date.now() - startTime;
    console.log(`Monitor completed in ${duration}ms`);
    return new Response(
      JSON.stringify({
        success: true,
        checked: configs.length,
        exceeded: exceededChecks.length,
        alerts: alertResults,
        duration_ms: duration,
      }),
      { status: 200, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
    );
  } catch (error) {
    // Catch-all: any unexpected exception becomes a generic 500.
    console.error('Error in rate limit monitor:', error);
    return new Response(
      JSON.stringify({
        success: false,
        error: 'Internal server error',
        message: error instanceof Error ? error.message : 'Unknown error',
      }),
      { status: 500, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
    );
  }
}
Deno.serve(handler);

View File

@@ -1,9 +1,13 @@
import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
import { withEdgeRetry } from '../_shared/retryHelper.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
interface NotificationPayload {
reportId: string;
reportType: string;

View File

@@ -1,9 +1,13 @@
import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { withEdgeRetry } from '../_shared/retryHelper.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface NotificationPayload {
submission_id: string;
submission_type: string;

View File

@@ -1,8 +1,12 @@
import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
interface AnnouncementPayload {
title: string;
message: string;

View File

@@ -1,8 +1,12 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
interface RequestBody {
submission_id: string;
user_id: string;

View File

@@ -1,8 +1,12 @@
import { serve } from "https://deno.land/std@0.168.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
// Simple request tracking
const startRequest = () => ({ requestId: crypto.randomUUID(), start: Date.now() });
const endRequest = (tracking: { start: number }) => Date.now() - tracking.start;

View File

@@ -1,7 +1,11 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
Deno.serve(async (req) => {
// Handle CORS preflight
if (req.method === 'OPTIONS') {

View File

@@ -1,8 +1,12 @@
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
const CLOUDFLARE_ACCOUNT_ID = Deno.env.get('CLOUDFLARE_ACCOUNT_ID');
const CLOUDFLARE_API_TOKEN = Deno.env.get('CLOUDFLARE_IMAGES_API_TOKEN');

View File

@@ -1,8 +1,12 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
serve(async (req) => {
if (req.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });

View File

@@ -0,0 +1,4 @@
// Shared CORS headers for edge functions: wide-open origin ('*') plus the
// request headers Supabase clients send (authorization, apikey,
// x-client-info, content-type).
export const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};

View File

@@ -1,46 +1,10 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { corsHeaders } from './cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import {
edgeLogger,
startSpan,
endSpan,
addSpanEvent,
setSpanAttributes,
getSpanContext,
logSpan,
extractSpanContextFromHeaders,
type Span
} from '../_shared/logger.ts';
import { formatEdgeError, toError } from '../_shared/errorFormatter.ts';
const SUPABASE_URL = Deno.env.get('SUPABASE_URL') || 'https://api.thrillwiki.com';
const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY');
// ============================================================================
// CRITICAL: Validate environment variables at startup
// ============================================================================
if (!SUPABASE_ANON_KEY) {
const errorMsg = 'CRITICAL: SUPABASE_ANON_KEY environment variable is not set!';
console.error(errorMsg, {
timestamp: new Date().toISOString(),
hasUrl: !!SUPABASE_URL,
url: SUPABASE_URL,
availableEnvVars: Object.keys(Deno.env.toObject()).filter(k =>
k.includes('SUPABASE') || k.includes('URL')
)
});
throw new Error('Missing required environment variable: SUPABASE_ANON_KEY');
}
console.log('Edge function initialized successfully', {
timestamp: new Date().toISOString(),
function: 'process-selective-approval',
hasUrl: !!SUPABASE_URL,
hasKey: !!SUPABASE_ANON_KEY,
keyLength: SUPABASE_ANON_KEY.length
});
const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY')!;
interface ApprovalRequest {
submissionId: string;
@@ -50,20 +14,6 @@ interface ApprovalRequest {
// Main handler function
const handler = async (req: Request) => {
// ============================================================================
// Log every incoming request immediately
// ============================================================================
console.log('Request received', {
timestamp: new Date().toISOString(),
method: req.method,
url: req.url,
headers: {
authorization: req.headers.has('Authorization') ? '[PRESENT]' : '[MISSING]',
contentType: req.headers.get('Content-Type'),
traceparent: req.headers.get('traceparent') || '[NONE]'
}
});
// Handle CORS preflight requests
if (req.method === 'OPTIONS') {
return new Response(null, {
@@ -72,29 +22,13 @@ const handler = async (req: Request) => {
});
}
// Extract parent span context from headers (if present)
const parentSpanContext = extractSpanContextFromHeaders(req.headers);
// Create root span for this edge function invocation
const rootSpan = startSpan(
'process-selective-approval',
'SERVER',
parentSpanContext,
{
'http.method': 'POST',
'function.name': 'process-selective-approval',
}
);
const requestId = rootSpan.spanId;
// Generate request ID for tracking
const requestId = crypto.randomUUID();
try {
// STEP 1: Authentication
addSpanEvent(rootSpan, 'authentication_start');
const authHeader = req.headers.get('Authorization');
if (!authHeader) {
addSpanEvent(rootSpan, 'authentication_failed', { reason: 'missing_header' });
endSpan(rootSpan, 'error');
logSpan(rootSpan);
return new Response(
JSON.stringify({ error: 'Missing Authorization header' }),
{
@@ -113,14 +47,6 @@ const handler = async (req: Request) => {
const { data: { user }, error: authError } = await supabase.auth.getUser();
if (authError || !user) {
addSpanEvent(rootSpan, 'authentication_failed', { error: authError?.message });
edgeLogger.warn('Authentication failed', {
requestId,
error: authError?.message,
action: 'process_approval'
});
endSpan(rootSpan, 'error', authError || new Error('Unauthorized'));
logSpan(rootSpan);
return new Response(
JSON.stringify({ error: 'Unauthorized' }),
{
@@ -133,34 +59,13 @@ const handler = async (req: Request) => {
);
}
setSpanAttributes(rootSpan, { 'user.id': user.id });
addSpanEvent(rootSpan, 'authentication_success');
edgeLogger.info('Approval request received', {
requestId,
moderatorId: user.id,
action: 'process_approval'
});
console.log(`[${requestId}] Approval request from moderator ${user.id}`);
// STEP 2: Parse request
addSpanEvent(rootSpan, 'validation_start');
const body: ApprovalRequest = await req.json();
const { submissionId, itemIds, idempotencyKey } = body;
if (!submissionId || !itemIds || itemIds.length === 0) {
addSpanEvent(rootSpan, 'validation_failed', {
hasSubmissionId: !!submissionId,
hasItemIds: !!itemIds,
itemCount: itemIds?.length || 0,
});
edgeLogger.warn('Invalid request payload', {
requestId,
hasSubmissionId: !!submissionId,
hasItemIds: !!itemIds,
itemCount: itemIds?.length || 0,
action: 'process_approval'
});
endSpan(rootSpan, 'error');
logSpan(rootSpan);
return new Response(
JSON.stringify({ error: 'Missing required fields: submissionId, itemIds' }),
{
@@ -173,21 +78,7 @@ const handler = async (req: Request) => {
);
}
setSpanAttributes(rootSpan, {
'submission.id': submissionId,
'submission.item_count': itemIds.length,
'idempotency.key': idempotencyKey,
});
addSpanEvent(rootSpan, 'validation_complete');
edgeLogger.info('Request validated', {
requestId,
submissionId,
itemCount: itemIds.length,
action: 'process_approval'
});
// STEP 3: Idempotency check
addSpanEvent(rootSpan, 'idempotency_check_start');
const { data: existingKey } = await supabase
.from('submission_idempotency_keys')
.select('*')
@@ -195,16 +86,7 @@ const handler = async (req: Request) => {
.single();
if (existingKey?.status === 'completed') {
addSpanEvent(rootSpan, 'idempotency_cache_hit');
setSpanAttributes(rootSpan, { 'cache.hit': true });
edgeLogger.info('Idempotency cache hit', {
requestId,
idempotencyKey,
cached: true,
action: 'process_approval'
});
endSpan(rootSpan, 'ok');
logSpan(rootSpan);
console.log(`[${requestId}] Idempotency key already processed, returning cached result`);
return new Response(
JSON.stringify(existingKey.result_data),
{
@@ -226,15 +108,7 @@ const handler = async (req: Request) => {
.single();
if (submissionError || !submission) {
addSpanEvent(rootSpan, 'submission_fetch_failed', { error: submissionError?.message });
edgeLogger.error('Submission not found', {
requestId,
submissionId,
error: submissionError?.message,
action: 'process_approval'
});
endSpan(rootSpan, 'error', submissionError || new Error('Submission not found'));
logSpan(rootSpan);
console.error(`[${requestId}] Submission not found:`, submissionError);
return new Response(
JSON.stringify({ error: 'Submission not found' }),
{
@@ -249,13 +123,7 @@ const handler = async (req: Request) => {
// STEP 5: Verify moderator can approve this submission
if (submission.assigned_to && submission.assigned_to !== user.id) {
edgeLogger.warn('Lock conflict', {
requestId,
submissionId,
lockedBy: submission.assigned_to,
attemptedBy: user.id,
action: 'process_approval'
});
console.error(`[${requestId}] Submission locked by another moderator`);
return new Response(
JSON.stringify({ error: 'Submission is locked by another moderator' }),
{
@@ -269,13 +137,7 @@ const handler = async (req: Request) => {
}
if (!['pending', 'partially_approved'].includes(submission.status)) {
edgeLogger.warn('Invalid submission status', {
requestId,
submissionId,
currentStatus: submission.status,
expectedStatuses: ['pending', 'partially_approved'],
action: 'process_approval'
});
console.error(`[${requestId}] Invalid submission status: ${submission.status}`);
return new Response(
JSON.stringify({ error: 'Submission already processed' }),
{
@@ -288,59 +150,17 @@ const handler = async (req: Request) => {
);
}
// STEP 6: Register idempotency key as processing (atomic upsert)
// ✅ CRITICAL FIX: Use ON CONFLICT to prevent race conditions
// STEP 6: Register idempotency key as processing
if (!existingKey) {
const { data: insertedKey, error: idempotencyError } = await supabase
.from('submission_idempotency_keys')
.insert({
idempotency_key: idempotencyKey,
submission_id: submissionId,
moderator_id: user.id,
item_ids: itemIds,
status: 'processing'
})
.select()
.single();
// If conflict occurred, another moderator is processing
if (idempotencyError && idempotencyError.code === '23505') {
edgeLogger.warn('Idempotency key conflict - another request processing', {
requestId,
idempotencyKey,
moderatorId: user.id
});
return new Response(
JSON.stringify({ error: 'Another moderator is processing this submission' }),
{ status: 409, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
);
}
if (idempotencyError) {
throw toError(idempotencyError);
}
await supabase.from('submission_idempotency_keys').insert({
idempotency_key: idempotencyKey,
submission_id: submissionId,
moderator_id: user.id,
status: 'processing'
});
}
// Create child span for RPC transaction
const rpcSpan = startSpan(
'process_approval_transaction',
'DATABASE',
getSpanContext(rootSpan),
{
'db.operation': 'rpc',
'db.function': 'process_approval_transaction',
'submission.id': submissionId,
'submission.item_count': itemIds.length,
}
);
addSpanEvent(rpcSpan, 'rpc_call_start');
edgeLogger.info('Calling approval transaction RPC', {
requestId,
submissionId,
itemCount: itemIds.length,
action: 'process_approval'
});
console.log(`[${requestId}] Calling process_approval_transaction RPC`);
// ============================================================================
// STEP 7: Call RPC function with deadlock retry logic
@@ -358,9 +178,7 @@ const handler = async (req: Request) => {
p_item_ids: itemIds,
p_moderator_id: user.id,
p_submitter_id: submission.user_id,
p_request_id: requestId,
p_trace_id: rootSpan.traceId,
p_parent_span_id: rpcSpan.spanId
p_request_id: requestId
}
);
@@ -369,10 +187,6 @@ const handler = async (req: Request) => {
if (!rpcError) {
// Success!
addSpanEvent(rpcSpan, 'rpc_call_success', {
'result.status': data?.status,
'items.processed': itemIds.length,
});
break;
}
@@ -380,51 +194,23 @@ const handler = async (req: Request) => {
if (rpcError.code === '40P01' || rpcError.code === '40001') {
retryCount++;
if (retryCount > MAX_DEADLOCK_RETRIES) {
addSpanEvent(rpcSpan, 'max_retries_exceeded', { attempt: retryCount });
edgeLogger.error('Max deadlock retries exceeded', {
requestId,
submissionId,
attempt: retryCount,
action: 'process_approval'
});
console.error(`[${requestId}] Max deadlock retries exceeded`);
break;
}
const backoffMs = 100 * Math.pow(2, retryCount);
addSpanEvent(rpcSpan, 'deadlock_retry', { attempt: retryCount, backoffMs });
edgeLogger.warn('Deadlock detected, retrying', {
requestId,
attempt: retryCount,
maxAttempts: MAX_DEADLOCK_RETRIES,
backoffMs,
action: 'process_approval'
});
console.log(`[${requestId}] Deadlock detected, retrying in ${backoffMs}ms (attempt ${retryCount}/${MAX_DEADLOCK_RETRIES})`);
await new Promise(r => setTimeout(r, backoffMs));
continue;
}
// Non-retryable error, break immediately
addSpanEvent(rpcSpan, 'rpc_call_failed', {
error: rpcError.message,
errorCode: rpcError.code
});
break;
}
if (rpcError) {
// Transaction failed - EVERYTHING rolled back automatically by PostgreSQL
endSpan(rpcSpan, 'error', rpcError);
logSpan(rpcSpan);
edgeLogger.error('Transaction failed', {
requestId,
duration: rpcSpan.duration,
submissionId,
error: rpcError.message,
errorCode: rpcError.code,
retries: retryCount,
action: 'process_approval'
});
console.error(`[${requestId}] Approval transaction failed:`, rpcError);
// Update idempotency key to failed
try {
@@ -437,19 +223,10 @@ const handler = async (req: Request) => {
})
.eq('idempotency_key', idempotencyKey);
} catch (updateError) {
edgeLogger.warn('Failed to update idempotency key', {
requestId,
idempotencyKey,
status: 'failed',
error: formatEdgeError(updateError),
action: 'process_approval'
});
console.error(`[${requestId}] Failed to update idempotency key to failed:`, updateError);
// Non-blocking - continue with error response even if idempotency update fails
}
endSpan(rootSpan, 'error', rpcError);
logSpan(rootSpan);
return new Response(
JSON.stringify({
error: 'Approval transaction failed',
@@ -467,24 +244,7 @@ const handler = async (req: Request) => {
);
}
// RPC succeeded
endSpan(rpcSpan, 'ok');
logSpan(rpcSpan);
setSpanAttributes(rootSpan, {
'result.status': result?.status,
'result.final_status': result?.status,
'retries': retryCount,
});
edgeLogger.info('Transaction completed successfully', {
requestId,
duration: rpcSpan.duration,
submissionId,
itemCount: itemIds.length,
retries: retryCount,
newStatus: result?.status,
action: 'process_approval'
});
console.log(`[${requestId}] Transaction completed successfully:`, result);
// STEP 8: Success - update idempotency key
try {
@@ -497,19 +257,10 @@ const handler = async (req: Request) => {
})
.eq('idempotency_key', idempotencyKey);
} catch (updateError) {
edgeLogger.warn('Failed to update idempotency key', {
requestId,
idempotencyKey,
status: 'completed',
error: formatEdgeError(updateError),
action: 'process_approval'
});
console.error(`[${requestId}] Failed to update idempotency key to completed:`, updateError);
// Non-blocking - transaction succeeded, so continue with success response
}
endSpan(rootSpan, 'ok');
logSpan(rootSpan);
return new Response(
JSON.stringify(result),
{
@@ -523,29 +274,11 @@ const handler = async (req: Request) => {
);
} catch (error) {
// Enhanced error logging with full details
const errorDetails = {
timestamp: new Date().toISOString(),
requestId: rootSpan?.spanId || 'unknown',
duration: rootSpan?.duration || 0,
error: formatEdgeError(error),
errorType: error instanceof Error ? error.constructor.name : typeof error,
stack: error instanceof Error ? error.stack : undefined,
action: 'process_approval'
};
console.error('Uncaught error in handler', errorDetails);
endSpan(rootSpan, 'error', error instanceof Error ? error : toError(error));
logSpan(rootSpan);
edgeLogger.error('Unexpected error', errorDetails);
console.error(`[${requestId}] Unexpected error:`, error);
return new Response(
JSON.stringify({
error: 'Internal server error',
message: error instanceof Error ? error.message : 'Unknown error',
requestId: rootSpan?.spanId || 'unknown'
message: error instanceof Error ? error.message : 'Unknown error'
}),
{
status: 500,
@@ -558,5 +291,5 @@ const handler = async (req: Request) => {
}
};
// Apply rate limiting: 10 requests per minute per IP (moderate tier for moderation actions)
serve(withRateLimit(handler, rateLimiters.moderate, corsHeaders));
// Apply rate limiting: 10 requests per minute per IP (standard tier)
serve(withRateLimit(handler, rateLimiters.standard, corsHeaders));

View File

@@ -1,518 +0,0 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeadersWithTracing as corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import {
edgeLogger,
startSpan,
endSpan,
addSpanEvent,
setSpanAttributes,
getSpanContext,
logSpan,
extractSpanContextFromHeaders,
type Span
} from '../_shared/logger.ts';
import { formatEdgeError, toError } from '../_shared/errorFormatter.ts';
// Supabase connection settings; URL falls back to the production API host when unset.
const SUPABASE_URL = Deno.env.get('SUPABASE_URL') || 'https://api.thrillwiki.com';
// NOTE(review): `!` assumes SUPABASE_ANON_KEY is always set in the function's
// environment — a missing secret surfaces later as an auth failure, not here.
const SUPABASE_ANON_KEY = Deno.env.get('SUPABASE_ANON_KEY')!;
// Request body accepted by the process-selective-rejection endpoint.
interface RejectionRequest {
  // ID of the content submission being moderated.
  submissionId: string;
  // Submission item IDs to reject (must be non-empty; see handler validation).
  itemIds: string[];
  // Human-readable reason recorded with the rejection (required).
  rejectionReason: string;
  // Client-supplied key used to deduplicate retried requests.
  idempotencyKey: string;
}
// Main handler function
/**
 * Edge-function handler for selectively rejecting items of a content submission.
 *
 * Flow (each step can short-circuit with an HTTP error response):
 *   1. Authenticate the caller via the forwarded Authorization header.
 *   2. Validate the JSON body (submissionId, non-empty itemIds, rejectionReason).
 *   3. Idempotency check — a previously completed key returns the cached result.
 *   4. Fetch the submission (user_id, status, assigned_to).
 *   5. Guard: submission must not be locked by another moderator and must be
 *      in 'pending' or 'partially_approved' status.
 *   6. Register the idempotency key as 'processing' (unique-violation 23505
 *      means another request holds it → 409).
 *   7. Run the process_rejection_transaction RPC with exponential-backoff
 *      retries on Postgres deadlock (40P01) / serialization failure (40001).
 *   8. Mark the idempotency key 'completed' (or 'failed') and respond.
 *
 * Tracing: a root SERVER span covers the whole invocation; a child DATABASE
 * span covers the RPC. The root span's spanId doubles as the request ID.
 */
const handler = async (req: Request) => {
  // Handle CORS preflight requests
  if (req.method === 'OPTIONS') {
    return new Response(null, {
      status: 204,
      headers: corsHeaders
    });
  }
  // Extract parent span context from headers (if present)
  const parentSpanContext = extractSpanContextFromHeaders(req.headers);
  // Create root span for this edge function invocation
  const rootSpan = startSpan(
    'process-selective-rejection',
    'SERVER',
    parentSpanContext,
    {
      'http.method': 'POST',
      'function.name': 'process-selective-rejection',
    }
  );
  // The span ID is reused as the correlation/request ID in all log lines below.
  const requestId = rootSpan.spanId;
  try {
    // STEP 1: Authentication
    addSpanEvent(rootSpan, 'authentication_start');
    const authHeader = req.headers.get('Authorization');
    if (!authHeader) {
      addSpanEvent(rootSpan, 'authentication_failed', { reason: 'missing_header' });
      endSpan(rootSpan, 'error');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Missing Authorization header' }),
        {
          status: 401,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    // Client is created with the caller's JWT so RLS policies apply to every
    // query below (the anon key alone grants no elevated access).
    const supabase = createClient(SUPABASE_URL, SUPABASE_ANON_KEY, {
      global: { headers: { Authorization: authHeader } }
    });
    const { data: { user }, error: authError } = await supabase.auth.getUser();
    if (authError || !user) {
      addSpanEvent(rootSpan, 'authentication_failed', { error: authError?.message });
      edgeLogger.warn('Authentication failed', {
        requestId,
        error: authError?.message,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error', authError || new Error('Unauthorized'));
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Unauthorized' }),
        {
          status: 401,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    setSpanAttributes(rootSpan, { 'user.id': user.id });
    addSpanEvent(rootSpan, 'authentication_success');
    edgeLogger.info('Rejection request received', {
      requestId,
      moderatorId: user.id,
      action: 'process_rejection'
    });
    // STEP 2: Parse request
    addSpanEvent(rootSpan, 'validation_start');
    const body: RejectionRequest = await req.json();
    const { submissionId, itemIds, rejectionReason, idempotencyKey } = body;
    // NOTE(review): idempotencyKey is not validated here — a missing key is
    // only caught implicitly by the .eq() lookups below; confirm intended.
    if (!submissionId || !itemIds || itemIds.length === 0 || !rejectionReason) {
      addSpanEvent(rootSpan, 'validation_failed', {
        hasSubmissionId: !!submissionId,
        hasItemIds: !!itemIds,
        itemCount: itemIds?.length || 0,
        hasReason: !!rejectionReason,
      });
      edgeLogger.warn('Invalid request payload', {
        requestId,
        hasSubmissionId: !!submissionId,
        hasItemIds: !!itemIds,
        itemCount: itemIds?.length || 0,
        hasReason: !!rejectionReason,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Missing required fields: submissionId, itemIds, rejectionReason' }),
        {
          status: 400,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    setSpanAttributes(rootSpan, {
      'submission.id': submissionId,
      'submission.item_count': itemIds.length,
      'idempotency.key': idempotencyKey,
    });
    addSpanEvent(rootSpan, 'validation_complete');
    edgeLogger.info('Request validated', {
      requestId,
      submissionId,
      itemCount: itemIds.length,
      action: 'process_rejection'
    });
    // STEP 3: Idempotency check
    // A completed key means an earlier identical request already succeeded;
    // its stored result is replayed verbatim with X-Cache-Status: HIT.
    addSpanEvent(rootSpan, 'idempotency_check_start');
    const { data: existingKey } = await supabase
      .from('submission_idempotency_keys')
      .select('*')
      .eq('idempotency_key', idempotencyKey)
      .single();
    if (existingKey?.status === 'completed') {
      addSpanEvent(rootSpan, 'idempotency_cache_hit');
      setSpanAttributes(rootSpan, { 'cache.hit': true });
      edgeLogger.info('Idempotency cache hit', {
        requestId,
        idempotencyKey,
        cached: true,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'ok');
      logSpan(rootSpan);
      return new Response(
        JSON.stringify(existingKey.result_data),
        {
          status: 200,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json',
            'X-Cache-Status': 'HIT'
          }
        }
      );
    }
    // STEP 4: Fetch submission to get submitter_id
    const { data: submission, error: submissionError } = await supabase
      .from('content_submissions')
      .select('user_id, status, assigned_to')
      .eq('id', submissionId)
      .single();
    if (submissionError || !submission) {
      addSpanEvent(rootSpan, 'submission_fetch_failed', { error: submissionError?.message });
      edgeLogger.error('Submission not found', {
        requestId,
        submissionId,
        error: submissionError?.message,
        action: 'process_rejection'
      });
      endSpan(rootSpan, 'error', submissionError || new Error('Submission not found'));
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({ error: 'Submission not found' }),
        {
          status: 404,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    // STEP 5: Verify moderator can reject this submission
    // A submission assigned to a different moderator is treated as locked.
    if (submission.assigned_to && submission.assigned_to !== user.id) {
      edgeLogger.warn('Lock conflict', {
        requestId,
        submissionId,
        lockedBy: submission.assigned_to,
        attemptedBy: user.id,
        action: 'process_rejection'
      });
      return new Response(
        JSON.stringify({ error: 'Submission is locked by another moderator' }),
        {
          status: 409,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    if (!['pending', 'partially_approved'].includes(submission.status)) {
      edgeLogger.warn('Invalid submission status', {
        requestId,
        submissionId,
        currentStatus: submission.status,
        expectedStatuses: ['pending', 'partially_approved'],
        action: 'process_rejection'
      });
      return new Response(
        JSON.stringify({ error: 'Submission already processed' }),
        {
          status: 400,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    // STEP 6: Register idempotency key as processing (atomic upsert)
    // ✅ CRITICAL FIX: Use ON CONFLICT to prevent race conditions
    if (!existingKey) {
      // `insertedKey` is intentionally unused — only the error matters here.
      const { data: insertedKey, error: idempotencyError } = await supabase
        .from('submission_idempotency_keys')
        .insert({
          idempotency_key: idempotencyKey,
          submission_id: submissionId,
          moderator_id: user.id,
          item_ids: itemIds,
          status: 'processing'
        })
        .select()
        .single();
      // If conflict occurred, another moderator is processing
      // (23505 = Postgres unique_violation on the idempotency_key constraint).
      if (idempotencyError && idempotencyError.code === '23505') {
        edgeLogger.warn('Idempotency key conflict - another request processing', {
          requestId,
          idempotencyKey,
          moderatorId: user.id
        });
        return new Response(
          JSON.stringify({ error: 'Another moderator is processing this submission' }),
          { status: 409, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
        );
      }
      if (idempotencyError) {
        throw toError(idempotencyError);
      }
    }
    // Create child span for RPC transaction
    const rpcSpan = startSpan(
      'process_rejection_transaction',
      'DATABASE',
      getSpanContext(rootSpan),
      {
        'db.operation': 'rpc',
        'db.function': 'process_rejection_transaction',
        'submission.id': submissionId,
        'submission.item_count': itemIds.length,
      }
    );
    addSpanEvent(rpcSpan, 'rpc_call_start');
    edgeLogger.info('Calling rejection transaction RPC', {
      requestId,
      submissionId,
      itemCount: itemIds.length,
      action: 'process_rejection'
    });
    // ============================================================================
    // STEP 7: Call RPC function with deadlock retry logic
    // ============================================================================
    // Retries use exponential backoff: 200ms, 400ms, 800ms (100 * 2^attempt).
    let retryCount = 0;
    const MAX_DEADLOCK_RETRIES = 3;
    let result: any = null;
    let rpcError: any = null;
    while (retryCount <= MAX_DEADLOCK_RETRIES) {
      const { data, error } = await supabase.rpc(
        'process_rejection_transaction',
        {
          p_submission_id: submissionId,
          p_item_ids: itemIds,
          p_moderator_id: user.id,
          p_rejection_reason: rejectionReason,
          p_request_id: requestId,
          p_trace_id: rootSpan.traceId,
          p_parent_span_id: rpcSpan.spanId
        }
      );
      result = data;
      rpcError = error;
      if (!rpcError) {
        // Success!
        addSpanEvent(rpcSpan, 'rpc_call_success', {
          'result.status': data?.status,
          'items.processed': itemIds.length,
        });
        break;
      }
      // Check for deadlock (40P01) or serialization failure (40001)
      if (rpcError.code === '40P01' || rpcError.code === '40001') {
        retryCount++;
        if (retryCount > MAX_DEADLOCK_RETRIES) {
          addSpanEvent(rpcSpan, 'max_retries_exceeded', { attempt: retryCount });
          edgeLogger.error('Max deadlock retries exceeded', {
            requestId,
            submissionId,
            attempt: retryCount,
            action: 'process_rejection'
          });
          break;
        }
        const backoffMs = 100 * Math.pow(2, retryCount);
        addSpanEvent(rpcSpan, 'deadlock_retry', { attempt: retryCount, backoffMs });
        edgeLogger.warn('Deadlock detected, retrying', {
          requestId,
          attempt: retryCount,
          maxAttempts: MAX_DEADLOCK_RETRIES,
          backoffMs,
          action: 'process_rejection'
        });
        await new Promise(r => setTimeout(r, backoffMs));
        continue;
      }
      // Non-retryable error, break immediately
      addSpanEvent(rpcSpan, 'rpc_call_failed', {
        error: rpcError.message,
        errorCode: rpcError.code
      });
      break;
    }
    if (rpcError) {
      // Transaction failed - EVERYTHING rolled back automatically by PostgreSQL
      endSpan(rpcSpan, 'error', rpcError);
      logSpan(rpcSpan);
      edgeLogger.error('Transaction failed', {
        requestId,
        duration: rpcSpan.duration,
        submissionId,
        error: rpcError.message,
        errorCode: rpcError.code,
        retries: retryCount,
        action: 'process_rejection'
      });
      // Update idempotency key to failed
      try {
        await supabase
          .from('submission_idempotency_keys')
          .update({
            status: 'failed',
            error_message: rpcError.message,
            completed_at: new Date().toISOString()
          })
          .eq('idempotency_key', idempotencyKey);
      } catch (updateError) {
        edgeLogger.warn('Failed to update idempotency key', {
          requestId,
          idempotencyKey,
          status: 'failed',
          error: formatEdgeError(updateError),
          action: 'process_rejection'
        });
        // Non-blocking - continue with error response even if idempotency update fails
      }
      endSpan(rootSpan, 'error', rpcError);
      logSpan(rootSpan);
      return new Response(
        JSON.stringify({
          error: 'Rejection transaction failed',
          message: rpcError.message,
          details: rpcError.details,
          retries: retryCount
        }),
        {
          status: 500,
          headers: {
            ...corsHeaders,
            'Content-Type': 'application/json'
          }
        }
      );
    }
    // RPC succeeded
    endSpan(rpcSpan, 'ok');
    logSpan(rpcSpan);
    setSpanAttributes(rootSpan, {
      'result.status': result?.status,
      'result.final_status': result?.status,
      'retries': retryCount,
    });
    edgeLogger.info('Transaction completed successfully', {
      requestId,
      duration: rpcSpan.duration,
      submissionId,
      itemCount: itemIds.length,
      retries: retryCount,
      newStatus: result?.status,
      action: 'process_rejection'
    });
    // STEP 8: Success - update idempotency key
    try {
      await supabase
        .from('submission_idempotency_keys')
        .update({
          status: 'completed',
          result_data: result,
          completed_at: new Date().toISOString()
        })
        .eq('idempotency_key', idempotencyKey);
    } catch (updateError) {
      edgeLogger.warn('Failed to update idempotency key', {
        requestId,
        idempotencyKey,
        status: 'completed',
        error: formatEdgeError(updateError),
        action: 'process_rejection'
      });
      // Non-blocking - transaction succeeded, so continue with success response
    }
    endSpan(rootSpan, 'ok');
    logSpan(rootSpan);
    return new Response(
      JSON.stringify(result),
      {
        status: 200,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json',
          'X-Request-Id': requestId
        }
      }
    );
  } catch (error) {
    // Catch-all: any unhandled throw (JSON parse failure, rethrown insert
    // error, etc.) ends the root span as an error and returns a sanitized 500.
    endSpan(rootSpan, 'error', error instanceof Error ? error : toError(error));
    logSpan(rootSpan);
    edgeLogger.error('Unexpected error', {
      requestId,
      duration: rootSpan.duration,
      error: formatEdgeError(error),
      stack: error instanceof Error ? error.stack : undefined,
      action: 'process_rejection'
    });
    return new Response(
      JSON.stringify({
        error: 'Internal server error',
        message: error instanceof Error ? error.message : 'Unknown error'
      }),
      {
        status: 500,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json'
        }
      }
    );
  }
};
// Apply rate limiting: 10 requests per minute per IP (moderate tier for moderation actions)
// The wrapper returns rate-limit errors with the same CORS headers as the handler.
serve(withRateLimit(handler, rateLimiters.moderate, corsHeaders));

View File

@@ -1,200 +0,0 @@
/**
* Rate Limit Metrics API
*
* Exposes rate limiting metrics for monitoring and analysis.
* Requires admin/moderator authentication.
*/
import { createClient } from 'jsr:@supabase/supabase-js@2';
import { withRateLimit, rateLimiters } from '../_shared/rateLimiter.ts';
import {
getRecentMetrics,
getMetricsStats,
getFunctionMetrics,
getUserMetrics,
getIPMetrics,
clearMetrics,
} from '../_shared/rateLimitMetrics.ts';
// CORS headers attached to every response (including errors and preflight).
const corsHeaders = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
// Documents the query-string parameters the endpoint accepts.
// NOTE(review): this interface is not referenced anywhere in this file —
// the handler reads url.searchParams directly; consider using or removing it.
interface QueryParams {
  action?: string;
  limit?: string;
  timeWindow?: string;
  functionName?: string;
  userId?: string;
  clientIP?: string;
}
/**
 * Rate-limit metrics API handler.
 *
 * Requires a valid Authorization header; the caller must hold the admin,
 * moderator, or superuser role (read from the `user_roles` table). Routes on
 * the `action` query parameter:
 *   - recent:   last N metric entries
 *   - stats:    aggregate stats over `timeWindow` ms (default 60000)
 *   - function: entries for one edge function (`functionName` required)
 *   - user:     entries for one user (`userId` required)
 *   - ip:       entries for one client IP (`clientIP` required)
 *   - clear:    wipe the in-memory store (superuser only)
 *
 * @param req Incoming HTTP request.
 * @returns JSON response with CORS headers; 4xx on auth/validation failure,
 *          500 on unexpected errors.
 */
async function handler(req: Request): Promise<Response> {
  // Handle CORS preflight
  if (req.method === 'OPTIONS') {
    return new Response(null, { headers: corsHeaders });
  }
  try {
    // Verify authentication
    const authHeader = req.headers.get('Authorization');
    if (!authHeader) {
      return new Response(
        JSON.stringify({ error: 'Authentication required' }),
        { status: 401, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
      );
    }
    const supabaseUrl = Deno.env.get('SUPABASE_URL')!;
    const supabaseServiceKey = Deno.env.get('SUPABASE_SERVICE_ROLE_KEY')!;
    // Service-role client, but with the caller's JWT forwarded so
    // auth.getUser() resolves the requesting user.
    const supabase = createClient(supabaseUrl, supabaseServiceKey, {
      global: {
        headers: { Authorization: authHeader },
      },
    });
    // Get authenticated user
    const { data: { user }, error: authError } = await supabase.auth.getUser();
    if (authError || !user) {
      return new Response(
        JSON.stringify({ error: 'Invalid authentication' }),
        { status: 401, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
      );
    }
    // Check if user has admin or moderator role
    const { data: roles } = await supabase
      .from('user_roles')
      .select('role')
      .eq('user_id', user.id);
    const userRoles = roles?.map(r => r.role) || [];
    const isAuthorized = userRoles.some(role =>
      ['admin', 'moderator', 'superuser'].includes(role)
    );
    if (!isAuthorized) {
      return new Response(
        JSON.stringify({ error: 'Insufficient permissions' }),
        { status: 403, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
      );
    }
    // Parse query parameters
    // NOTE(review): parseInt yields NaN for malformed limit/timeWindow and
    // NaN propagates into the metrics helpers — confirm that is acceptable.
    const url = new URL(req.url);
    const action = url.searchParams.get('action') || 'stats';
    const limit = parseInt(url.searchParams.get('limit') || '100', 10);
    const timeWindow = parseInt(url.searchParams.get('timeWindow') || '60000', 10);
    const functionName = url.searchParams.get('functionName');
    const userId = url.searchParams.get('userId');
    const clientIP = url.searchParams.get('clientIP');
    // `unknown` instead of `any`: the payload is only ever serialized below.
    let responseData: unknown;
    // Route to appropriate metrics handler.
    // Each case is braced so its `const` declarations stay case-scoped
    // (avoids no-case-declarations leakage across the switch), and each
    // metrics helper is called exactly once per request.
    switch (action) {
      case 'recent': {
        const metrics = getRecentMetrics(limit);
        responseData = {
          metrics,
          count: metrics.length,
        };
        break;
      }
      case 'stats': {
        responseData = getMetricsStats(timeWindow);
        break;
      }
      case 'function': {
        if (!functionName) {
          return new Response(
            JSON.stringify({ error: 'functionName parameter required for function action' }),
            { status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
          );
        }
        const metrics = getFunctionMetrics(functionName, limit);
        responseData = {
          functionName,
          metrics,
          count: metrics.length,
        };
        break;
      }
      case 'user': {
        if (!userId) {
          return new Response(
            JSON.stringify({ error: 'userId parameter required for user action' }),
            { status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
          );
        }
        const metrics = getUserMetrics(userId, limit);
        responseData = {
          userId,
          metrics,
          count: metrics.length,
        };
        break;
      }
      case 'ip': {
        if (!clientIP) {
          return new Response(
            JSON.stringify({ error: 'clientIP parameter required for ip action' }),
            { status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
          );
        }
        const metrics = getIPMetrics(clientIP, limit);
        responseData = {
          clientIP,
          metrics,
          count: metrics.length,
        };
        break;
      }
      case 'clear': {
        // Only superusers can clear metrics
        const isSuperuser = userRoles.includes('superuser');
        if (!isSuperuser) {
          return new Response(
            JSON.stringify({ error: 'Only superusers can clear metrics' }),
            { status: 403, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
          );
        }
        clearMetrics();
        responseData = { success: true, message: 'Metrics cleared' };
        break;
      }
      default:
        return new Response(
          JSON.stringify({
            error: 'Invalid action',
            validActions: ['recent', 'stats', 'function', 'user', 'ip', 'clear']
          }),
          { status: 400, headers: { ...corsHeaders, 'Content-Type': 'application/json' } }
        );
    }
    return new Response(
      JSON.stringify(responseData),
      {
        status: 200,
        headers: {
          ...corsHeaders,
          'Content-Type': 'application/json',
        }
      }
    );
  } catch (error) {
    // Catch-all: log with context and return a sanitized 500.
    console.error('Error in rate-limit-metrics function:', error);
    return new Response(
      JSON.stringify({
        error: 'Internal server error',
        message: error instanceof Error ? error.message : 'Unknown error'
      }),
      {
        status: 500,
        headers: { ...corsHeaders, 'Content-Type': 'application/json' }
      }
    );
  }
}
// Apply rate limiting (lenient tier for admin monitoring)
// The explicit 'rate-limit-metrics' name tags this function's own entries in the metrics store.
Deno.serve(withRateLimit(handler, rateLimiters.lenient, corsHeaders, 'rate-limit-metrics'));

View File

@@ -1,9 +1,12 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
import { createErrorResponse } from "../_shared/errorSanitizer.ts";
import { formatEdgeError } from "../_shared/errorFormatter.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface InboundEmailPayload {
from: string;
@@ -258,7 +261,7 @@ const handler = async (req: Request): Promise<Response> => {
} catch (error) {
edgeLogger.error('Unexpected error in receive-inbound-email', {
requestId: tracking.requestId,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
return createErrorResponse(error, 500, corsHeaders);
}

View File

@@ -1,8 +1,12 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { Novu } from "npm:@novu/api@1.6.0";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type, x-request-id',
};
serve(async (req) => {
if (req.method === 'OPTIONS') {
return new Response(null, { headers: corsHeaders });

View File

@@ -1,12 +1,13 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
// Apply standard rate limiting (20 req/min) for account deletion requests
// Balances user needs with protection against automated abuse
serve(withRateLimit(async (req) => {
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
serve(async (req) => {
const tracking = startRequest();
if (req.method === 'OPTIONS') {
@@ -221,4 +222,4 @@ serve(withRateLimit(async (req) => {
}
);
}
}, rateLimiters.standard, corsHeaders));
});

View File

@@ -1,12 +1,13 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
// Apply moderate rate limiting (10 req/min) to prevent deletion code spam
// Protects against abuse while allowing legitimate resend requests
serve(withRateLimit(async (req) => {
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
serve(async (req) => {
const tracking = startRequest();
if (req.method === 'OPTIONS') {
@@ -180,4 +181,4 @@ serve(withRateLimit(async (req) => {
}
);
}
}, rateLimiters.moderate, corsHeaders));
});

View File

@@ -11,9 +11,13 @@
*/
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface CleanupResult {
idempotency_keys?: {
deleted: number;

View File

@@ -1,8 +1,11 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
serve(async (req: Request) => {
if (req.method === 'OPTIONS') {
@@ -52,7 +55,7 @@ serve(async (req: Request) => {
} catch (error) {
edgeLogger.error('Maintenance exception', {
requestId,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
return new Response(

View File

@@ -1,7 +1,11 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface SeedOptions {
preset: 'small' | 'medium' | 'large' | 'stress';
entityTypes: string[];

View File

@@ -1,9 +1,12 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from "../_shared/logger.ts";
import { createErrorResponse } from "../_shared/errorSanitizer.ts";
import { formatEdgeError } from "../_shared/errorFormatter.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface AdminReplyRequest {
submissionId: string;
@@ -234,7 +237,7 @@ const handler = async (req: Request): Promise<Response> => {
} catch (error) {
edgeLogger.error('Unexpected error in send-admin-email-reply', {
requestId: tracking.requestId,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
return createErrorResponse(error, 500, corsHeaders);
}

View File

@@ -1,10 +1,12 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { rateLimiters, withRateLimit } from '../_shared/rateLimiter.ts';
import { edgeLogger } from "../_shared/logger.ts";
import { createErrorResponse } from "../_shared/errorSanitizer.ts";
import { formatEdgeError } from "../_shared/errorFormatter.ts";
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface ContactSubmission {
name: string;
@@ -333,12 +335,10 @@ The ThrillWiki Team`,
edgeLogger.error('Contact submission failed', {
requestId,
duration,
error: formatEdgeError(error)
error: error instanceof Error ? error.message : String(error)
});
return createErrorResponse(error, 500, corsHeaders);
}
};
// Apply standard rate limiting (20 req/min) for contact form submissions
// Balances legitimate user needs with spam prevention
serve(withRateLimit(handler, rateLimiters.standard, corsHeaders));
serve(handler);

View File

@@ -1,9 +1,13 @@
import { serve } from "https://deno.land/std@0.190.0/http/server.ts";
import { createClient } from "https://esm.sh/@supabase/supabase-js@2.57.4";
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
import { withEdgeRetry } from '../_shared/retryHelper.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface EscalationRequest {
submissionId: string;
escalationReason: string;

View File

@@ -1,8 +1,12 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts';
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2';
import { corsHeaders } from '../_shared/cors.ts';
import { edgeLogger, startRequest, endRequest } from '../_shared/logger.ts';
const corsHeaders = {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Headers': 'authorization, x-client-info, apikey, content-type',
};
interface EmailRequest {
email: string;
displayName?: string;

View File

@@ -1,6 +1,5 @@
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2.57.4';
import { edgeLogger } from '../_shared/logger.ts';
import { formatEdgeError } from '../_shared/errorFormatter.ts';
const BASE_URL = 'https://dev.thrillwiki.com';
@@ -348,7 +347,7 @@ Deno.serve(async (req) => {
edgeLogger.error('Sitemap generation failed', {
requestId,
error: formatEdgeError(error),
error: error instanceof Error ? error.message : String(error),
duration,
});

Some files were not shown because too many files have changed in this diff Show More