diff --git a/.gitignore b/.gitignore index 6990c7ee..2f4226b3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,198 +1,8 @@ -/.vscode -/dev.sh -/flake.nix -venv -/venv -./venv -venv/sour -.DS_Store -.DS_Store -.DS_Store -accounts/__pycache__/ -__pycache__ -thrillwiki/__pycache__ -reviews/__pycache__ -parks/__pycache__ -media/__pycache__ -email_service/__pycache__ -core/__pycache__ -companies/__pycache__ -accounts/__pycache__ -venv -accounts/__pycache__ -thrillwiki/__pycache__/settings.cpython-311.pyc -accounts/migrations/__pycache__/__init__.cpython-311.pyc -accounts/migrations/__pycache__/0001_initial.cpython-311.pyc -companies/migrations/__pycache__ -moderation/__pycache__ -rides/__pycache__ -ssh_tools.jsonc -thrillwiki/__pycache__/settings.cpython-312.pyc -parks/__pycache__/views.cpython-312.pyc -.venv/lib/python3.12/site-packages -thrillwiki/__pycache__/urls.cpython-312.pyc -thrillwiki/__pycache__/views.cpython-312.pyc -.pytest_cache.github -static/css/tailwind.css -static/css/tailwind.css -.venv -location/__pycache__ -analytics/__pycache__ -designers/__pycache__ -history_tracking/__pycache__ -media/migrations/__pycache__/0001_initial.cpython-312.pyc -accounts/__pycache__/__init__.cpython-312.pyc -accounts/__pycache__/adapters.cpython-312.pyc -accounts/__pycache__/admin.cpython-312.pyc -accounts/__pycache__/apps.cpython-312.pyc -accounts/__pycache__/models.cpython-312.pyc -accounts/__pycache__/signals.cpython-312.pyc -accounts/__pycache__/urls.cpython-312.pyc -accounts/__pycache__/views.cpython-312.pyc -accounts/migrations/__pycache__/__init__.cpython-312.pyc -accounts/migrations/__pycache__/0001_initial.cpython-312.pyc -companies/__pycache__/__init__.cpython-312.pyc -companies/__pycache__/admin.cpython-312.pyc -companies/__pycache__/apps.cpython-312.pyc -companies/__pycache__/models.cpython-312.pyc -companies/__pycache__/signals.cpython-312.pyc -companies/__pycache__/urls.cpython-312.pyc -companies/__pycache__/views.cpython-312.pyc -companies/migrations/__pycache__/__init__.cpython-312.pyc -companies/migrations/__pycache__/0001_initial.cpython-312.pyc -core/__pycache__/__init__.cpython-312.pyc -core/__pycache__/admin.cpython-312.pyc -core/__pycache__/apps.cpython-312.pyc -core/__pycache__/models.cpython-312.pyc -core/__pycache__/views.cpython-312.pyc -core/migrations/__pycache__/__init__.cpython-312.pyc -core/migrations/__pycache__/0001_initial.cpython-312.pyc -email_service/__pycache__/__init__.cpython-312.pyc -email_service/__pycache__/admin.cpython-312.pyc -email_service/__pycache__/apps.cpython-312.pyc -email_service/__pycache__/models.cpython-312.pyc -email_service/__pycache__/services.cpython-312.pyc -email_service/migrations/__pycache__/__init__.cpython-312.pyc -email_service/migrations/__pycache__/0001_initial.cpython-312.pyc -media/__pycache__/__init__.cpython-312.pyc -media/__pycache__/admin.cpython-312.pyc -media/__pycache__/apps.cpython-312.pyc -media/__pycache__/models.cpython-312.pyc -media/migrations/__pycache__/__init__.cpython-312.pyc -media/migrations/__pycache__/0001_initial.cpython-312.pyc -parks/__pycache__/__init__.cpython-312.pyc -parks/__pycache__/admin.cpython-312.pyc -parks/__pycache__/apps.cpython-312.pyc -parks/__pycache__/models.cpython-312.pyc -parks/__pycache__/signals.cpython-312.pyc -parks/__pycache__/urls.cpython-312.pyc -parks/__pycache__/views.cpython-312.pyc -parks/migrations/__pycache__/__init__.cpython-312.pyc -parks/migrations/__pycache__/0001_initial.cpython-312.pyc -reviews/__pycache__/__init__.cpython-312.pyc -reviews/__pycache__/admin.cpython-312.pyc -reviews/__pycache__/apps.cpython-312.pyc -reviews/__pycache__/models.cpython-312.pyc -reviews/__pycache__/signals.cpython-312.pyc -reviews/__pycache__/urls.cpython-312.pyc -reviews/__pycache__/views.cpython-312.pyc -reviews/migrations/__pycache__/__init__.cpython-312.pyc -reviews/migrations/__pycache__/0001_initial.cpython-312.pyc -rides/__pycache__/__init__.cpython-312.pyc -rides/__pycache__/admin.cpython-312.pyc -rides/__pycache__/apps.cpython-312.pyc -rides/__pycache__/models.cpython-312.pyc -rides/__pycache__/signals.cpython-312.pyc -rides/__pycache__/urls.cpython-312.pyc -rides/__pycache__/views.cpython-312.pyc -rides/migrations/__pycache__/__init__.cpython-312.pyc -rides/migrations/__pycache__/0001_initial.cpython-312.pyc -thrillwiki/__pycache__/__init__.cpython-312.pyc -thrillwiki/__pycache__/settings.cpython-312.pyc -thrillwiki/__pycache__/urls.cpython-312.pyc -thrillwiki/__pycache__/views.cpython-312.pyc -thrillwiki/__pycache__/wsgi.cpython-312.pyc -accounts/__pycache__/__init__.cpython-312.pyc -accounts/__pycache__/adapters.cpython-312.pyc -accounts/__pycache__/admin.cpython-312.pyc -accounts/__pycache__/apps.cpython-312.pyc -accounts/__pycache__/models.cpython-312.pyc -accounts/__pycache__/signals.cpython-312.pyc -accounts/__pycache__/urls.cpython-312.pyc -accounts/__pycache__/views.cpython-312.pyc -accounts/migrations/__pycache__/__init__.cpython-312.pyc -accounts/migrations/__pycache__/0001_initial.cpython-312.pyc -companies/__pycache__/__init__.cpython-312.pyc -companies/__pycache__/admin.cpython-312.pyc -companies/__pycache__/apps.cpython-312.pyc -companies/__pycache__/models.cpython-312.pyc -companies/__pycache__/signals.cpython-312.pyc -companies/__pycache__/urls.cpython-312.pyc -companies/__pycache__/views.cpython-312.pyc -companies/migrations/__pycache__/__init__.cpython-312.pyc -companies/migrations/__pycache__/0001_initial.cpython-312.pyc -core/__pycache__/__init__.cpython-312.pyc -core/__pycache__/admin.cpython-312.pyc -core/__pycache__/apps.cpython-312.pyc -core/__pycache__/models.cpython-312.pyc -core/__pycache__/views.cpython-312.pyc -core/migrations/__pycache__/__init__.cpython-312.pyc -core/migrations/__pycache__/0001_initial.cpython-312.pyc -email_service/__pycache__/__init__.cpython-312.pyc -email_service/__pycache__/admin.cpython-312.pyc -email_service/__pycache__/apps.cpython-312.pyc -email_service/__pycache__/models.cpython-312.pyc -email_service/__pycache__/services.cpython-312.pyc -email_service/migrations/__pycache__/__init__.cpython-312.pyc -email_service/migrations/__pycache__/0001_initial.cpython-312.pyc -media/__pycache__/__init__.cpython-312.pyc -media/__pycache__/admin.cpython-312.pyc -media/__pycache__/apps.cpython-312.pyc -media/__pycache__/models.cpython-312.pyc -media/migrations/__pycache__/__init__.cpython-312.pyc -media/migrations/__pycache__/0001_initial.cpython-312.pyc -parks/__pycache__/__init__.cpython-312.pyc -parks/__pycache__/admin.cpython-312.pyc -parks/__pycache__/apps.cpython-312.pyc -parks/__pycache__/models.cpython-312.pyc -parks/__pycache__/signals.cpython-312.pyc -parks/__pycache__/urls.cpython-312.pyc -parks/__pycache__/views.cpython-312.pyc -parks/migrations/__pycache__/__init__.cpython-312.pyc -parks/migrations/__pycache__/0001_initial.cpython-312.pyc -reviews/__pycache__/__init__.cpython-312.pyc -reviews/__pycache__/admin.cpython-312.pyc -reviews/__pycache__/apps.cpython-312.pyc -reviews/__pycache__/models.cpython-312.pyc -reviews/__pycache__/signals.cpython-312.pyc -reviews/__pycache__/urls.cpython-312.pyc -reviews/__pycache__/views.cpython-312.pyc -reviews/migrations/__pycache__/__init__.cpython-312.pyc -reviews/migrations/__pycache__/0001_initial.cpython-312.pyc -rides/__pycache__/__init__.cpython-312.pyc -rides/__pycache__/admin.cpython-312.pyc -rides/__pycache__/apps.cpython-312.pyc -rides/__pycache__/models.cpython-312.pyc -rides/__pycache__/signals.cpython-312.pyc -rides/__pycache__/urls.cpython-312.pyc -rides/__pycache__/views.cpython-312.pyc -rides/migrations/__pycache__/__init__.cpython-312.pyc -rides/migrations/__pycache__/0001_initial.cpython-312.pyc -thrillwiki/__pycache__/__init__.cpython-312.pyc -thrillwiki/__pycache__/settings.cpython-312.pyc -thrillwiki/__pycache__/urls.cpython-312.pyc -thrillwiki/__pycache__/views.cpython-312.pyc -thrillwiki/__pycache__/wsgi.cpython-312.pyc - -# Byte-compiled / optimized / DLL files +# Python __pycache__/ *.py[cod] *$py.class - -# C extensions *.so - -# Distribution / packaging .Python build/ develop-eggs/ @@ -212,189 +22,95 @@ share/python-wheels/ *.egg MANIFEST -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: +# Django *.log local_settings.py db.sqlite3 db.sqlite3-journal +/backend/staticfiles/ +/backend/media/ -# Flask stuff: -instance/ -.webassets-cache +# UV +.uv/ +backend/.uv/ -# Scrapy stuff: -.scrapy +# Node.js +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* +.pnpm-store/ -# Sphinx documentation -docs/_build/ +# Vue.js / Vite +/frontend/dist/ +/frontend/dist-ssr/ +*.local -# PyBuilder -.pybuilder/ -target/ +# Environment variables +.env +.env.local +.env.development.local +.env.test.local +.env.production.local +backend/.env +frontend/.env -# Jupyter Notebook -.ipynb_checkpoints +# IDEs +.vscode/ +.idea/ +*.swp +*.swo +*.sublime-project +*.sublime-workspace -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/latest/usage/project/#working-with-version-control -.pdm.toml -.pdm-python -.pdm-build/ - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -***REMOVED*** -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.[AWS-SECRET-REMOVED]tBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ - -# Pixi package manager -.pixi/ - -# Django Tailwind CLI -.django_tailwind_cli/ - -# General +# OS .DS_Store -.AppleDouble -.LSOverride +Thumbs.db +Desktop.ini -# Icon must end with two \r -Icon - -# Thumbnails -._* - -# Files that might appear in the root of a volume -.DocumentRevisions-V100 -.fseventsd -.Spotlight-V100 -.TemporaryItems -.Trashes -.VolumeIcon.icns -.com.apple.timemachine.donotpresent - -# Directories potentially created on remote AFP share -.AppleDB -.AppleDesktop -Network Trash Folder -Temporary Items -.apdisk - - -# ThrillWiki CI/CD Configuration -.thrillwiki-config -***REMOVED***.unraid -***REMOVED***.webhook -.github-token +# Logs logs/ -profiles -.thrillwiki-github-token -.thrillwiki-template-config +*.log -# Environment files with potential secrets -scripts/systemd/thrillwiki-automation***REMOVED*** -scripts/systemd/thrillwiki-deployment***REMOVED*** -scripts/systemd/****REMOVED*** -logs/ -profiles/ -uv.lock +# Coverage +coverage/ +*.lcov +.nyc_output +htmlcov/ +.coverage +.coverage.* + +# Testing +.pytest_cache/ +.cache + +# Temporary files +tmp/ +temp/ +*.tmp +*.temp + +# Build outputs +/dist/ +/build/ + +# Backup files +*.bak +*.orig +*.swp + +# Archive files +*.tar.gz +*.zip +*.rar + +# Security +*.pem +*.key +*.cert + +# Local development +/uploads/ +/backups/ diff --git a/README.md b/README.md index 57b8174a..462c7ddd 100644 --- a/README.md +++ b/README.md @@ -1,391 +1,150 @@ -# ThrillWiki Development Environment Setup +# ThrillWiki Django + Vue.js Monorepo -ThrillWiki is a modern Django web application for theme park and roller coaster enthusiasts, featuring a sophisticated dark theme design with purple-to-blue gradients, HTMX interactivity, and comprehensive park/ride information management. +A modern monorepo architecture for ThrillWiki, combining a Django REST API backend with a Vue.js frontend. -## 🏗️ Technology Stack +## 🏗️ Architecture -- **Backend**: Django 5.0+ with GeoDjango (PostGIS) -- **Frontend**: HTMX + Alpine.js + Tailwind CSS -- **Database**: PostgreSQL with PostGIS extension -- **Package Management**: UV (Python package manager) -- **Authentication**: Django Allauth with Google/Discord OAuth -- **Styling**: Tailwind CSS with custom dark theme -- **History Tracking**: django-pghistory for audit trails -- **Testing**: Pytest + Playwright for E2E testing +This project uses a monorepo structure that cleanly separates backend and frontend concerns: -## 📋 Prerequisites - -### Required Software - -1. **Python 3.11+** - ```bash - python --version # Should be 3.11 or higher - ``` - -2. **UV Package Manager** - ```bash - # Install UV if not already installed - curl -LsSf https://astral.sh/uv/install.sh | sh - # or - pip install uv - ``` - -3. **PostgreSQL with PostGIS** - ```bash - # macOS (Homebrew) - brew install postgresql postgis - - # Ubuntu/Debian - sudo apt-get install postgresql postgresql-contrib postgis - - # Start PostgreSQL service - brew services start postgresql # macOS - sudo systemctl start postgresql # Linux - ``` - -4. **GDAL/GEOS Libraries** (for GeoDjango) - ```bash - # macOS (Homebrew) - brew install gdal geos - - # Ubuntu/Debian - sudo apt-get install gdal-bin libgdal-dev libgeos-dev - ``` - -5. **Node.js** (for Tailwind CSS) - ```bash - # Install Node.js 18+ for Tailwind CSS compilation - node --version # Should be 18 or higher - ``` +``` +thrillwiki-monorepo/ +├── backend/ # Django REST API +├── frontend/ # Vue.js SPA +└── shared/ # Shared resources and documentation +``` ## 🚀 Quick Start -### 1. Clone and Setup Project +### Prerequisites -```bash -# Clone the repository -git clone -cd thrillwiki_django_no_react +- **Python 3.11+** with [uv](https://docs.astral.sh/uv/) for backend dependencies +- **Node.js 18+** with [pnpm](https://pnpm.io/) for frontend dependencies -# Install Python dependencies using UV -uv sync -``` +### Development Setup -### 2. Database Setup +1. **Clone the repository** + ```bash + git clone + cd thrillwiki-monorepo + ``` -```bash -# Create PostgreSQL database and user -createdb thrillwiki -createuser wiki +2. **Install dependencies** + ```bash + # Install frontend dependencies + pnpm install + + # Install backend dependencies + cd backend && uv sync + ``` -# Connect to PostgreSQL and setup -psql postgres -``` +3. **Start development servers** + ```bash + # Start both frontend and backend + pnpm run dev + + # Or start individually + pnpm run dev:frontend # Vue.js on :3000 + pnpm run dev:backend # Django on :8000 + ``` -In the PostgreSQL shell: -```sql --- Set password for wiki user -ALTER USER wiki WITH PASSWORD 'thrillwiki'; +## 📁 Project Structure --- Grant privileges -GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO wiki; +### Backend (`/backend`) +- **Django REST API** with modular app architecture +- **UV package management** for Python dependencies +- **PostgreSQL** database (configurable) +- **Redis** for caching and sessions --- Enable PostGIS extension -\c thrillwiki -CREATE EXTENSION postgis; -\q -``` +### Frontend (`/frontend`) +- **Vue 3** with Composition API +- **TypeScript** for type safety +- **Vite** for fast development and building +- **Tailwind CSS** for styling +- **Pinia** for state management -### 3. Environment Configuration - -The project uses these database settings (configured in [`thrillwiki/settings.py`](thrillwiki/settings.py)): -```python -DATABASES = { - "default": { - "ENGINE": "django.contrib.gis.db.backends.postgis", - "NAME": "thrillwiki", - "USER": "wiki", - "PASSWORD": "thrillwiki", - "HOST": "192.168.86.3", # Update to your PostgreSQL host - "PORT": "5432", - } -} -``` - -**Important**: Update the `HOST` setting in [`thrillwiki/settings.py`](thrillwiki/settings.py) to match your PostgreSQL server location: -- Use `"localhost"` or `"127.0.0.1"` for local development -- Current setting is `"192.168.86.3"` - update this to your PostgreSQL server IP -- For local development, change to `"localhost"` in settings.py - -### 4. Database Migration - -```bash -# Run database migrations -uv run manage.py migrate - -# Create a superuser account -uv run manage.py createsuperuser -``` - -**Note**: If you're setting up for local development, first update the database HOST in [`thrillwiki/settings.py`](thrillwiki/settings.py) from `"192.168.86.3"` to `"localhost"` before running migrations. - -### 5. Start Development Server - -**CRITICAL**: Always use this exact command sequence for starting the development server: - -```bash -lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver -``` - -This command: -- Kills any existing processes on port 8000 -- Cleans Python cache files -- Starts Tailwind CSS compilation -- Runs the Django development server - -The application will be available at: http://localhost:8000 +### Shared (`/shared`) +- Documentation and deployment guides +- Shared TypeScript types +- Build and deployment scripts +- Docker configurations ## 🛠️ Development Workflow -### Package Management - -**ALWAYS use UV for package management**: +### Available Scripts ```bash -# Add new Python packages -uv add +# Development +pnpm run dev # Start both servers +pnpm run dev:frontend # Frontend only +pnpm run dev:backend # Backend only -# Add development dependencies -uv add --dev +# Building +pnpm run build # Build for production +pnpm run build:frontend # Frontend build only -# Never use pip install - always use UV +# Testing +pnpm run test # Run all tests +pnpm run test:frontend # Frontend tests +pnpm run test:backend # Backend tests + +# Code Quality +pnpm run lint # Lint all code +pnpm run format # Format all code ``` -### Django Management Commands - -**ALWAYS use UV for Django commands**: +### Backend Commands ```bash -# Correct way to run Django commands -uv run manage.py +cd backend -# Examples: -uv run manage.py makemigrations +# Django management uv run manage.py migrate -uv run manage.py shell uv run manage.py createsuperuser uv run manage.py collectstatic -# NEVER use these patterns: -# python manage.py ❌ Wrong -# uv run python manage.py ❌ Wrong +# Testing +uv run manage.py test ``` -### CSS Development +## 🔧 Configuration -The project uses **Tailwind CSS v4** with a custom dark theme. CSS files are located in: -- Source: [`static/css/src/input.css`](static/css/src/input.css) -- Compiled: [`static/css/`](static/css/) (auto-generated) +### Environment Variables -Tailwind automatically compiles when using the `tailwind runserver` command. - -#### Tailwind CSS v4 Migration - -This project has been migrated from Tailwind CSS v3 to v4. For complete migration details: - -- **📖 Full Migration Documentation**: [`TAILWIND_V4_MIGRATION.md`](TAILWIND_V4_MIGRATION.md) -- **⚡ Quick Reference Guide**: [`TAILWIND_V4_QUICK_REFERENCE.md`](TAILWIND_V4_QUICK_REFERENCE.md) - -**Key v4 Changes**: -- New CSS-first approach with `@theme` blocks -- Updated utility class names (e.g., `outline-none` → `outline-hidden`) -- New opacity syntax (e.g., `bg-blue-500/50` instead of `bg-blue-500 bg-opacity-50`) -- Enhanced performance and smaller bundle sizes - -**Custom Theme Variables** (available in CSS): -```css -var(--color-primary) /* #4f46e5 - Indigo-600 */ -var(--color-secondary) /* #e11d48 - Rose-600 */ -var(--color-accent) /* #8b5cf6 - Violet-500 */ -var(--font-family-sans) /* Poppins, sans-serif */ -``` - -## 🏗️ Project Structure - -``` -thrillwiki_django_no_react/ -├── accounts/ # User account management -├── analytics/ # Analytics and tracking -├── companies/ # Theme park companies -├── core/ # Core application logic -├── designers/ # Ride designers -├── history/ # History timeline features -├── location/ # Geographic location handling -├── media/ # Media file management -├── moderation/ # Content moderation -├── parks/ # Theme park management -├── reviews/ # User reviews -├── rides/ # Roller coaster/ride management -├── search/ # Search functionality -├── static/ # Static assets (CSS, JS, images) -├── templates/ # Django templates -├── thrillwiki/ # Main Django project settings -├── memory-bank/ # Development documentation -└── .clinerules # Project development rules -``` - -## 🔧 Key Features - -### Authentication System -- Django Allauth integration -- Google OAuth authentication -- Discord OAuth authentication -- Custom user profiles with avatars - -### Geographic Features -- PostGIS integration for location data -- Interactive park maps -- Location-based search and filtering - -### Content Management -- Park and ride information management -- Photo galleries with upload capabilities -- User-generated reviews and ratings -- Content moderation system - -### Modern Frontend -- HTMX for dynamic interactions -- Alpine.js for client-side behavior -- Tailwind CSS with custom dark theme -- Responsive design (mobile-first) - -## 🧪 Testing - -### Running Tests +Create `.env` files for local development: ```bash -# Run Python tests -uv run pytest +# Root .env (shared settings) +DATABASE_URL=postgresql://user:pass@localhost/thrillwiki +REDIS_URL=redis://localhost:6379 +SECRET_KEY=your-secret-key -# Run with coverage -uv run coverage run -m pytest -uv run coverage report +# Backend .env +DJANGO_SETTINGS_MODULE=config.django.local +DEBUG=True -# Run E2E tests with Playwright -uv run pytest tests/e2e/ +# Frontend .env +VITE_API_BASE_URL=http://localhost:8000/api ``` -### Test Structure -- Unit tests: Located within each app's `tests/` directory -- E2E tests: [`tests/e2e/`](tests/e2e/) -- Test fixtures: [`tests/fixtures/`](tests/fixtures/) +## 📖 Documentation -## 📚 Documentation +- [Backend Documentation](./backend/README.md) +- [Frontend Documentation](./frontend/README.md) +- [Deployment Guide](./shared/docs/deployment/) +- [API Documentation](./shared/docs/api/) -### Memory Bank System -The project uses a comprehensive documentation system in [`memory-bank/`](memory-bank/): +## 🚀 Deployment -- [`memory-bank/activeContext.md`](memory-bank/activeContext.md) - Current development context -- [`memory-bank/documentation/design-system.md`](memory-bank/documentation/design-system.md) - Design system documentation -- [`memory-bank/features/`](memory-bank/features/) - Feature-specific documentation -- [`memory-bank/testing/`](memory-bank/testing/) - Testing documentation and results +See [Deployment Guide](./shared/docs/deployment/) for production setup instructions. -### Key Documentation Files -- [Design System](memory-bank/documentation/design-system.md) - UI/UX guidelines and patterns -- [Authentication System](memory-bank/features/auth/) - OAuth and user management -- [Layout Optimization](memory-bank/projects/) - Responsive design implementations +## 🤝 Contributing -## 🚨 Important Development Rules +1. Fork the repository +2. Create a feature branch +3. Make your changes +4. Run tests and linting +5. Submit a pull request -### Critical Commands -1. **Server Startup**: Always use the full command sequence: - ```bash - lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver - ``` +## 📄 License -2. **Package Management**: Only use UV: - ```bash - uv add # ✅ Correct - pip install # ❌ Wrong - ``` - -3. **Django Commands**: Always prefix with `uv run`: - ```bash - uv run manage.py # ✅ Correct - python manage.py # ❌ Wrong - ``` - -### Database Configuration -- Ensure PostgreSQL is running before starting development -- PostGIS extension must be enabled -- Update database host settings for your environment - -### GeoDjango Requirements -- GDAL and GEOS libraries must be properly installed -- Library paths are configured in [`thrillwiki/settings.py`](thrillwiki/settings.py) for macOS Homebrew -- Current paths: `/opt/homebrew/lib/libgdal.dylib` and `/opt/homebrew/lib/libgeos_c.dylib` -- May need adjustment based on your system's library locations (Linux users will need different paths) - -## 🔍 Troubleshooting - -### Common Issues - -1. **PostGIS Extension Error** - ```bash - # Connect to database and enable PostGIS - psql thrillwiki - CREATE EXTENSION postgis; - ``` - -2. **GDAL/GEOS Library Not Found** - ```bash - # macOS (Homebrew): Current paths in settings.py - GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib" - GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib" - - # Linux: Update paths in settings.py to something like: - # GDAL_LIBRARY_PATH = "/usr/lib/x86_64-linux-gnu/libgdal.so" - # GEOS_LIBRARY_PATH = "/usr/lib/x86_64-linux-gnu/libgeos_c.so" - - # Find your library locations - find /usr -name "libgdal*" 2>/dev/null - find /usr -name "libgeos*" 2>/dev/null - find /opt -name "libgdal*" 2>/dev/null - find /opt -name "libgeos*" 2>/dev/null - ``` - -3. **Port 8000 Already in Use** - ```bash - # Kill existing processes - lsof -ti :8000 | xargs kill -9 - ``` - -4. **Tailwind CSS Not Compiling** - ```bash - # Ensure Node.js is installed and use the full server command - node --version - uv run manage.py tailwind runserver - ``` - -### Getting Help - -1. Check the [`memory-bank/`](memory-bank/) documentation for detailed feature information -2. Review [`memory-bank/testing/`](memory-bank/testing/) for known issues and solutions -3. Ensure all prerequisites are properly installed -4. Verify database connection and PostGIS extension - -## 🎯 Next Steps - -After successful setup: - -1. **Explore the Admin Interface**: http://localhost:8000/admin/ -2. **Browse the Application**: http://localhost:8000/ -3. **Review Documentation**: Check [`memory-bank/`](memory-bank/) for detailed feature docs -4. **Run Tests**: Ensure everything works with `uv run pytest` -5. **Start Development**: Follow the development workflow guidelines above - ---- - -**Happy Coding!** 🎢✨ - -For detailed feature documentation and development context, see the [`memory-bank/`](memory-bank/) directory. +This project is licensed under the MIT License. diff --git a/accounts/__init__.py b/accounts/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/accounts/adapters.py b/accounts/adapters.py deleted file mode 100644 index 3b2a79b4..00000000 --- a/accounts/adapters.py +++ /dev/null @@ -1,64 +0,0 @@ -from django.conf import settings -from allauth.account.adapter import DefaultAccountAdapter -from allauth.socialaccount.adapter import DefaultSocialAccountAdapter -from django.contrib.auth import get_user_model -from django.contrib.sites.shortcuts import get_current_site - -User = get_user_model() - - -class CustomAccountAdapter(DefaultAccountAdapter): - def is_open_for_signup(self, request): - """ - Whether to allow sign ups. - """ - return True - - def get_email_confirmation_url(self, request, emailconfirmation): - """ - Constructs the email confirmation (activation) url. - """ - get_current_site(request) - return f"{settings.LOGIN_REDIRECT_URL}verify-email?key={emailconfirmation.key}" - - def send_confirmation_mail(self, request, emailconfirmation, signup): - """ - Sends the confirmation email. - """ - current_site = get_current_site(request) - activate_url = self.get_email_confirmation_url(request, emailconfirmation) - ctx = { - "user": emailconfirmation.email_address.user, - "activate_url": activate_url, - "current_site": current_site, - "key": emailconfirmation.key, - } - if signup: - email_template = "account/email/email_confirmation_signup" - else: - email_template = "account/email/email_confirmation" - self.send_mail(email_template, emailconfirmation.email_address.email, ctx) - - -class CustomSocialAccountAdapter(DefaultSocialAccountAdapter): - def is_open_for_signup(self, request, sociallogin): - """ - Whether to allow social account sign ups. - """ - return True - - def populate_user(self, request, sociallogin, data): - """ - Hook that can be used to further populate the user instance. - """ - user = super().populate_user(request, sociallogin, data) - if sociallogin.account.provider == "discord": - user.discord_id = sociallogin.account.uid - return user - - def save_user(self, request, sociallogin, form=None): - """ - Save the newly signed up social login. - """ - user = super().save_user(request, sociallogin, form) - return user diff --git a/accounts/admin.py b/accounts/admin.py deleted file mode 100644 index fbc76fea..00000000 --- a/accounts/admin.py +++ /dev/null @@ -1,282 +0,0 @@ -from django.contrib import admin -from django.contrib.auth.admin import UserAdmin -from django.utils.html import format_html -from django.contrib.auth.models import Group -from .models import User, UserProfile, EmailVerification, TopList, TopListItem - - -class UserProfileInline(admin.StackedInline): - model = UserProfile - can_delete = False - verbose_name_plural = "Profile" - fieldsets = ( - ( - "Personal Info", - {"fields": ("display_name", "avatar", "pronouns", "bio")}, - ), - ( - "Social Media", - {"fields": ("twitter", "instagram", "youtube", "discord")}, - ), - ( - "Ride Credits", - { - "fields": ( - "coaster_credits", - "dark_ride_credits", - "flat_ride_credits", - "water_ride_credits", - ) - }, - ), - ) - - -class TopListItemInline(admin.TabularInline): - model = TopListItem - extra = 1 - fields = ("content_type", "object_id", "rank", "notes") - ordering = ("rank",) - - -@admin.register(User) -class CustomUserAdmin(UserAdmin): - list_display = ( - "username", - "email", - "get_avatar", - "get_status", - "role", - "date_joined", - "last_login", - "get_credits", - ) - list_filter = ( - "is_active", - "is_staff", - "role", - "is_banned", - "groups", - "date_joined", - ) - search_fields = ("username", "email") - ordering = ("-date_joined",) - actions = [ - "activate_users", - "deactivate_users", - "ban_users", - "unban_users", - ] - inlines = [UserProfileInline] - - fieldsets = ( - (None, {"fields": ("username", "password")}), - ("Personal info", {"fields": ("email", "pending_email")}), - ( - "Roles and Permissions", - { - "fields": ("role", "groups", "user_permissions"), - "description": ( - "Role determines group membership. Groups determine permissions." - ), - }, - ), - ( - "Status", - { - "fields": ("is_active", "is_staff", "is_superuser"), - "description": "These are automatically managed based on role.", - }, - ), - ( - "Ban Status", - { - "fields": ("is_banned", "ban_reason", "ban_date"), - }, - ), - ( - "Preferences", - { - "fields": ("theme_preference",), - }, - ), - ("Important dates", {"fields": ("last_login", "date_joined")}), - ) - add_fieldsets = ( - ( - None, - { - "classes": ("wide",), - "fields": ( - "username", - "email", - "password1", - "password2", - "role", - ), - }, - ), - ) - - @admin.display(description="Avatar") - def get_avatar(self, obj): - if obj.profile.avatar: - return format_html( - '', - obj.profile.avatar.url, - ) - return format_html( - '
{}
', - obj.username[0].upper(), - ) - - @admin.display(description="Status") - def get_status(self, obj): - if obj.is_banned: - return format_html('Banned') - if not obj.is_active: - return format_html('Inactive') - if obj.is_superuser: - return format_html('Superuser') - if obj.is_staff: - return format_html('Staff') - return format_html('Active') - - @admin.display(description="Ride Credits") - def get_credits(self, obj): - try: - profile = obj.profile - return format_html( - "RC: {}
DR: {}
FR: {}
WR: {}", - profile.coaster_credits, - profile.dark_ride_credits, - profile.flat_ride_credits, - profile.water_ride_credits, - ) - except UserProfile.DoesNotExist: - return "-" - - @admin.action(description="Activate selected users") - def activate_users(self, request, queryset): - queryset.update(is_active=True) - - @admin.action(description="Deactivate selected users") - def deactivate_users(self, request, queryset): - queryset.update(is_active=False) - - @admin.action(description="Ban selected users") - def ban_users(self, request, queryset): - from django.utils import timezone - - queryset.update(is_banned=True, ban_date=timezone.now()) - - @admin.action(description="Unban selected users") - def unban_users(self, request, queryset): - queryset.update(is_banned=False, ban_date=None, ban_reason="") - - def save_model(self, request, obj, form, change): - creating = not obj.pk - super().save_model(request, obj, form, change) - if creating and obj.role != User.Roles.USER: - # Ensure new user with role gets added to appropriate group - group = Group.objects.filter(name=obj.role).first() - if group: - obj.groups.add(group) - - -@admin.register(UserProfile) -class UserProfileAdmin(admin.ModelAdmin): - list_display = ( - "user", - "display_name", - "coaster_credits", - "dark_ride_credits", - "flat_ride_credits", - "water_ride_credits", - ) - list_filter = ( - "coaster_credits", - "dark_ride_credits", - "flat_ride_credits", - "water_ride_credits", - ) - search_fields = ("user__username", "user__email", "display_name", "bio") - - fieldsets = ( - ( - "User Information", - {"fields": ("user", "display_name", "avatar", "pronouns", "bio")}, - ), - ( - "Social Media", - {"fields": ("twitter", "instagram", "youtube", "discord")}, - ), - ( - "Ride Credits", - { - "fields": ( - "coaster_credits", - "dark_ride_credits", - "flat_ride_credits", - "water_ride_credits", - ) - }, - ), - ) - - -@admin.register(EmailVerification) -class EmailVerificationAdmin(admin.ModelAdmin): - list_display = ("user", "created_at", "last_sent", "is_expired") - list_filter = ("created_at", "last_sent") - search_fields = ("user__username", "user__email", "token") - readonly_fields = ("created_at", "last_sent") - - fieldsets = ( - ("Verification Details", {"fields": ("user", "token")}), - ("Timing", {"fields": ("created_at", "last_sent")}), - ) - - @admin.display(description="Status") - def is_expired(self, obj): - from django.utils import timezone - from datetime import timedelta - - if timezone.now() - obj.last_sent > timedelta(days=1): - return format_html('Expired') - return format_html('Valid') - - -@admin.register(TopList) -class TopListAdmin(admin.ModelAdmin): - list_display = ("title", "user", "category", "created_at", "updated_at") - list_filter = ("category", "created_at", "updated_at") - search_fields = ("title", "user__username", "description") - inlines = [TopListItemInline] - - fieldsets = ( - ( - "Basic Information", - {"fields": ("user", "title", "category", "description")}, - ), - ( - "Timestamps", - {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}, - ), - ) - readonly_fields = ("created_at", "updated_at") - - -@admin.register(TopListItem) -class TopListItemAdmin(admin.ModelAdmin): - list_display = ("top_list", "content_type", "object_id", "rank") - list_filter = ("top_list__category", "rank") - search_fields = ("top_list__title", "notes") - ordering = ("top_list", "rank") - - fieldsets = ( - ("List Information", {"fields": ("top_list", "rank")}), - ("Item Details", {"fields": ("content_type", "object_id", "notes")}), - ) diff --git a/accounts/apps.py b/accounts/apps.py deleted file mode 100644 index e63dc433..00000000 --- a/accounts/apps.py +++ /dev/null @@ -1,9 +0,0 @@ -from django.apps import AppConfig - - -class AccountsConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "accounts" - - def ready(self): - import accounts.signals # noqa diff --git a/accounts/management/commands/check_all_social_tables.py b/accounts/management/commands/check_all_social_tables.py deleted file mode 100644 index fedeeaf8..00000000 --- a/accounts/management/commands/check_all_social_tables.py +++ /dev/null @@ -1,46 +0,0 @@ -from django.core.management.base import BaseCommand -from allauth.socialaccount.models import SocialApp, SocialAccount, SocialToken -from django.contrib.sites.models import Site - - -class Command(BaseCommand): - help = "Check all social auth related tables" - - def handle(self, *args, **options): - # Check SocialApp - self.stdout.write("\nChecking SocialApp table:") - for app in SocialApp.objects.all(): - self.stdout.write( - f"ID: { - app.pk}, Provider: { - app.provider}, Name: { - app.name}, Client ID: { - app.client_id}" - ) - self.stdout.write("Sites:") - for site in app.sites.all(): - self.stdout.write(f" - {site.domain}") - - # Check SocialAccount - self.stdout.write("\nChecking SocialAccount table:") - for account in SocialAccount.objects.all(): - self.stdout.write( - f"ID: { - account.pk}, Provider: { - account.provider}, UID: { - account.uid}" - ) - - # Check SocialToken - self.stdout.write("\nChecking SocialToken table:") - for token in SocialToken.objects.all(): - self.stdout.write( - f"ID: {token.pk}, Account: {token.account}, App: {token.app}" - ) - - # Check Site - self.stdout.write("\nChecking Site table:") - for site in Site.objects.all(): - self.stdout.write( - f"ID: {site.pk}, Domain: {site.domain}, Name: {site.name}" - ) diff --git a/accounts/management/commands/check_social_apps.py b/accounts/management/commands/check_social_apps.py deleted file mode 100644 index 33a66011..00000000 --- a/accounts/management/commands/check_social_apps.py +++ /dev/null @@ -1,27 +0,0 @@ -from django.core.management.base import BaseCommand -from allauth.socialaccount.models import SocialApp - - -class Command(BaseCommand): - help = "Check social app configurations" - - def handle(self, *args, **options): - social_apps = SocialApp.objects.all() - - if not social_apps: - self.stdout.write(self.style.ERROR("No social apps found")) - return - - for app in social_apps: - self.stdout.write( - self.style.SUCCESS( - f"\nProvider: { - app.provider}" - ) - ) - self.stdout.write(f"Name: {app.name}") - self.stdout.write(f"Client ID: {app.client_id}") - self.stdout.write(f"Secret: {app.secret}") - self.stdout.write( - f'Sites: {", ".join(str(site.domain) for site in app.sites.all())}' - ) diff --git a/accounts/management/commands/cleanup_social_auth.py b/accounts/management/commands/cleanup_social_auth.py deleted file mode 100644 index 56e7d8fb..00000000 --- a/accounts/management/commands/cleanup_social_auth.py +++ /dev/null @@ -1,28 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import connection - - -class Command(BaseCommand): - help = "Clean up social auth tables and migrations" - - def handle(self, *args, **options): - with connection.cursor() as cursor: - # Drop social auth tables - cursor.execute("DROP TABLE IF EXISTS socialaccount_socialapp") - cursor.execute("DROP TABLE IF EXISTS socialaccount_socialapp_sites") - cursor.execute("DROP TABLE IF EXISTS socialaccount_socialaccount") - cursor.execute("DROP TABLE IF EXISTS socialaccount_socialtoken") - - # Remove migration records - cursor.execute("DELETE FROM django_migrations WHERE app='socialaccount'") - cursor.execute( - "DELETE FROM django_migrations WHERE app='accounts' " - "AND name LIKE '%social%'" - ) - - # Reset sequences - cursor.execute("DELETE FROM sqlite_sequence WHERE name LIKE '%social%'") - - self.stdout.write( - self.style.SUCCESS("Successfully cleaned up social auth configuration") - ) diff --git a/accounts/management/commands/cleanup_test_data.py b/accounts/management/commands/cleanup_test_data.py deleted file mode 100644 index a3b7834d..00000000 --- a/accounts/management/commands/cleanup_test_data.py +++ /dev/null @@ -1,67 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.auth import get_user_model -from parks.models import ParkReview, Park -from rides.models import Ride -from media.models import Photo - -User = get_user_model() - - -class Command(BaseCommand): - help = "Cleans up test users and data created during e2e testing" - - def handle(self, *args, **kwargs): - # Delete test users - test_users = User.objects.filter(username__in=["testuser", "moderator"]) - count = test_users.count() - test_users.delete() - self.stdout.write(self.style.SUCCESS(f"Deleted {count} test users")) - - # Delete test reviews - reviews = ParkReview.objects.filter( - user__username__in=["testuser", "moderator"] - ) - count = reviews.count() - reviews.delete() - self.stdout.write(self.style.SUCCESS(f"Deleted {count} test reviews")) - - # Delete test photos - photos = Photo.objects.filter(uploader__username__in=["testuser", "moderator"]) - count = photos.count() - photos.delete() - self.stdout.write(self.style.SUCCESS(f"Deleted {count} test photos")) - - # Delete test parks - parks = Park.objects.filter(name__startswith="Test Park") - count = parks.count() - parks.delete() - self.stdout.write(self.style.SUCCESS(f"Deleted {count} test parks")) - - # Delete test rides - rides = Ride.objects.filter(name__startswith="Test Ride") - count = rides.count() - rides.delete() - self.stdout.write(self.style.SUCCESS(f"Deleted {count} test rides")) - - # Clean up test files - import os - import glob - - # Clean up test uploads - media_patterns = [ - "media/uploads/test_*", - "media/avatars/test_*", - "media/park/test_*", - "media/rides/test_*", - ] - - for pattern in media_patterns: - files = glob.glob(pattern) - for f in files: - try: - os.remove(f) - self.stdout.write(self.style.SUCCESS(f"Deleted {f}")) - except OSError as e: - self.stdout.write(self.style.WARNING(f"Error deleting {f}: {e}")) - - self.stdout.write(self.style.SUCCESS("Test data cleanup complete")) diff --git a/accounts/management/commands/create_social_apps.py b/accounts/management/commands/create_social_apps.py deleted file mode 100644 index b45e9e63..00000000 --- a/accounts/management/commands/create_social_apps.py +++ /dev/null @@ -1,55 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.sites.models import Site -from allauth.socialaccount.models import SocialApp - - -class Command(BaseCommand): - help = "Create social apps for authentication" - - def handle(self, *args, **options): - # Get the default site - site = Site.objects.get_or_create( - id=1, - defaults={ - "domain": "localhost:8000", - "name": "ThrillWiki Development", - }, - )[0] - - # Create Discord app - discord_app, created = SocialApp.objects.get_or_create( - provider="discord", - defaults={ - "name": "Discord", - "client_id": "1299112802274902047", - "secret": "ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11", - }, - ) - if not created: - discord_app.client_id = "1299112802274902047" - discord_app.secret = "ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11" - discord_app.save() - discord_app.sites.add(site) - self.stdout.write(f'{"Created" if created else "Updated"} Discord app') - - # Create Google app - google_app, created = SocialApp.objects.get_or_create( - provider="google", - defaults={ - "name": "Google", - "client_id": ( - "135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2." - "apps.googleusercontent.com" - ), - "secret": "GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue", - }, - ) - if not created: - google_app.client_id = ( - "135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2." - "apps.googleusercontent.com" - ) - google_app.secret = "GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue" - google_app.save() - google_app.sites.add(site) - self.stdout.write(f'{"Created" if created else "Updated"} Google app') diff --git a/accounts/management/commands/create_test_users.py b/accounts/management/commands/create_test_users.py deleted file mode 100644 index 8cf9de22..00000000 --- a/accounts/management/commands/create_test_users.py +++ /dev/null @@ -1,58 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.auth.models import Group, Permission, User - - -class Command(BaseCommand): - help = "Creates test users for e2e testing" - - def handle(self, *args, **kwargs): - # Create regular test user - if not User.objects.filter(username="testuser").exists(): - user = User.objects.create( - username="testuser", - email="testuser@example.com", - ) - user.set_password("testpass123") - user.save() - self.stdout.write( - self.style.SUCCESS(f"Created test user: {user.get_username()}") - ) - else: - self.stdout.write(self.style.WARNING("Test user already exists")) - - if not User.objects.filter(username="moderator").exists(): - moderator = User.objects.create( - username="moderator", - email="moderator@example.com", - ) - moderator.set_password("modpass123") - moderator.save() - - # Create moderator group if it doesn't exist - moderator_group, created = Group.objects.get_or_create(name="Moderators") - - # Add relevant permissions - permissions = Permission.objects.filter( - codename__in=[ - "change_review", - "delete_review", - "change_park", - "change_ride", - "moderate_photos", - "moderate_comments", - ] - ) - moderator_group.permissions.add(*permissions) - - # Add user to moderator group - moderator.groups.add(moderator_group) - - self.stdout.write( - self.style.SUCCESS( - f"Created moderator user: {moderator.get_username()}" - ) - ) - else: - self.stdout.write(self.style.WARNING("Moderator user already exists")) - - self.stdout.write(self.style.SUCCESS("Test users setup complete")) diff --git a/accounts/management/commands/fix_migration_history.py b/accounts/management/commands/fix_migration_history.py deleted file mode 100644 index 3a8eafe1..00000000 --- a/accounts/management/commands/fix_migration_history.py +++ /dev/null @@ -1,18 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import connection - - -class Command(BaseCommand): - help = "Fix migration history by removing rides.0001_initial" - - def handle(self, *args, **kwargs): - with connection.cursor() as cursor: - cursor.execute( - "DELETE FROM django_migrations WHERE app='rides' " - "AND name='0001_initial';" - ) - self.stdout.write( - self.style.SUCCESS( - "Successfully removed rides.0001_initial from migration history" - ) - ) diff --git a/accounts/management/commands/fix_social_apps.py b/accounts/management/commands/fix_social_apps.py deleted file mode 100644 index 8bbc4372..00000000 --- a/accounts/management/commands/fix_social_apps.py +++ /dev/null @@ -1,41 +0,0 @@ -from django.core.management.base import BaseCommand -from allauth.socialaccount.models import SocialApp -from django.contrib.sites.models import Site -import os - - -class Command(BaseCommand): - help = "Fix social app configurations" - - def handle(self, *args, **options): - # Delete all existing social apps - SocialApp.objects.all().delete() - self.stdout.write("Deleted all existing social apps") - - # Get the default site - site = Site.objects.get(id=1) - - # Create Google provider - google_app = SocialApp.objects.create( - provider="google", - name="Google", - client_id=os.getenv("GOOGLE_CLIENT_ID"), - secret=os.getenv("GOOGLE_CLIENT_SECRET"), - ) - google_app.sites.add(site) - self.stdout.write( - f"Created Google app with client_id: { - google_app.client_id}" - ) - - # Create Discord provider - discord_app = SocialApp.objects.create( - provider="discord", - name="Discord", - client_id=os.getenv("DISCORD_CLIENT_ID"), - secret=os.getenv("DISCORD_CLIENT_SECRET"), - ) - discord_app.sites.add(site) - self.stdout.write( - f"Created Discord app with client_id: {discord_app.client_id}" - ) diff --git a/accounts/management/commands/generate_letter_avatars.py b/accounts/management/commands/generate_letter_avatars.py deleted file mode 100644 index cdf6212e..00000000 --- a/accounts/management/commands/generate_letter_avatars.py +++ /dev/null @@ -1,54 +0,0 @@ -from django.core.management.base import BaseCommand -from PIL import Image, ImageDraw, ImageFont -import os - - -def generate_avatar(letter): - """Generate an avatar for a given letter or number""" - avatar_size = (100, 100) - background_color = (0, 123, 255) # Blue background - text_color = (255, 255, 255) # White text - font_size = 100 - - # Create a blank image with background color - image = Image.new("RGB", avatar_size, background_color) - draw = ImageDraw.Draw(image) - - # Load a font - font_path = "[AWS-SECRET-REMOVED]ans-Bold.ttf" - font = ImageFont.truetype(font_path, font_size) - - # Calculate text size and position using textbbox - text_bbox = draw.textbbox((0, 0), letter, font=font) - text_width, text_height = ( - text_bbox[2] - text_bbox[0], - text_bbox[3] - text_bbox[1], - ) - text_position = ( - (avatar_size[0] - text_width) / 2, - (avatar_size[1] - text_height) / 2, - ) - - # Draw the text on the image - draw.text(text_position, letter, font=font, fill=text_color) - - # Ensure the avatars directory exists - avatar_dir = "avatars/letters" - if not os.path.exists(avatar_dir): - os.makedirs(avatar_dir) - - # Save the image to the avatars directory - avatar_path = os.path.join(avatar_dir, f"{letter}_avatar.png") - image.save(avatar_path) - - -class Command(BaseCommand): - help = "Generate avatars for letters A-Z and numbers 0-9" - - def handle(self, *args, **kwargs): - characters = [chr(i) for i in range(65, 91)] + [ - str(i) for i in range(10) - ] # A-Z and 0-9 - for char in characters: - generate_avatar(char) - self.stdout.write(self.style.SUCCESS(f"Generated avatar for {char}")) diff --git a/accounts/management/commands/regenerate_avatars.py b/accounts/management/commands/regenerate_avatars.py deleted file mode 100644 index d71bd945..00000000 --- a/accounts/management/commands/regenerate_avatars.py +++ /dev/null @@ -1,18 +0,0 @@ -from django.core.management.base import BaseCommand -from accounts.models import UserProfile - - -class Command(BaseCommand): - help = "Regenerate default avatars for users without an uploaded avatar" - - def handle(self, *args, **kwargs): - profiles = UserProfile.objects.filter(avatar="") - for profile in profiles: - # This will trigger the avatar generation logic in the save method - profile.save() - self.stdout.write( - self.style.SUCCESS( - f"Regenerated avatar for { - profile.user.username}" - ) - ) diff --git a/accounts/management/commands/reset_db.py b/accounts/management/commands/reset_db.py deleted file mode 100644 index b08ffcdb..00000000 --- a/accounts/management/commands/reset_db.py +++ /dev/null @@ -1,113 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import connection -from django.contrib.auth.hashers import make_password -import uuid - - -class Command(BaseCommand): - help = "Reset database and create admin user" - - def handle(self, *args, **options): - self.stdout.write("Resetting database...") - - # Drop all tables - with connection.cursor() as cursor: - cursor.execute( - """ - DO $$ DECLARE - r RECORD; - BEGIN - FOR r IN ( - SELECT tablename FROM pg_tables - WHERE schemaname = current_schema() - ) LOOP - EXECUTE 'DROP TABLE IF EXISTS ' || \ - quote_ident(r.tablename) || ' CASCADE'; - END LOOP; - END $$; - """ - ) - - # Reset sequences - cursor.execute( - """ - DO $$ DECLARE - r RECORD; - BEGIN - FOR r IN ( - SELECT sequencename FROM pg_sequences - WHERE schemaname = current_schema() - ) LOOP - EXECUTE 'ALTER SEQUENCE ' || \ - quote_ident(r.sequencename) || ' RESTART WITH 1'; - END LOOP; - END $$; - """ - ) - - self.stdout.write("All tables dropped and sequences reset.") - - # Run migrations - from django.core.management import call_command - - call_command("migrate") - - self.stdout.write("Migrations applied.") - - # Create superuser using raw SQL - try: - with connection.cursor() as cursor: - # Create user - user_id = str(uuid.uuid4())[:10] - cursor.execute( - """ - INSERT INTO accounts_user ( - username, password, email, is_superuser, is_staff, - is_active, date_joined, user_id, first_name, - last_name, role, is_banned, ban_reason, - theme_preference - ) VALUES ( - 'admin', %s, 'admin@thrillwiki.com', true, true, - true, NOW(), %s, '', '', 'SUPERUSER', false, '', - 'light' - ) RETURNING id; - """, - [make_password("admin"), user_id], - ) - - result = cursor.fetchone() - if result is None: - raise Exception("Failed to create user - no ID returned") - user_db_id = result[0] - - # Create profile - profile_id = str(uuid.uuid4())[:10] - cursor.execute( - """ - INSERT INTO accounts_userprofile ( - profile_id, display_name, pronouns, bio, - twitter, instagram, youtube, discord, - coaster_credits, dark_ride_credits, - flat_ride_credits, water_ride_credits, - user_id, avatar - ) VALUES ( - %s, 'Admin', 'they/them', 'ThrillWiki Administrator', - '', '', '', '', - 0, 0, 0, 0, - %s, '' - ); - """, - [profile_id, user_db_id], - ) - - self.stdout.write("Superuser created.") - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"Error creating superuser: { - str(e)}" - ) - ) - raise - - self.stdout.write(self.style.SUCCESS("Database reset complete.")) diff --git a/accounts/management/commands/reset_social_apps.py b/accounts/management/commands/reset_social_apps.py deleted file mode 100644 index c4f0c35b..00000000 --- a/accounts/management/commands/reset_social_apps.py +++ /dev/null @@ -1,39 +0,0 @@ -from django.core.management.base import BaseCommand -from allauth.socialaccount.models import SocialApp -from django.contrib.sites.models import Site -from django.db import connection - - -class Command(BaseCommand): - help = "Reset social apps configuration" - - def handle(self, *args, **options): - # Delete all social apps using raw SQL to bypass Django's ORM - with connection.cursor() as cursor: - cursor.execute("DELETE FROM socialaccount_socialapp_sites") - cursor.execute("DELETE FROM socialaccount_socialapp") - - # Get the default site - site = Site.objects.get(id=1) - - # Create Discord app - discord_app = SocialApp.objects.create( - provider="discord", - name="Discord", - client_id="1299112802274902047", - secret="ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11", - ) - discord_app.sites.add(site) - self.stdout.write(f"Created Discord app with ID: {discord_app.pk}") - - # Create Google app - google_app = SocialApp.objects.create( - provider="google", - name="Google", - client_id=( - "135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com" - ), - secret="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", - ) - google_app.sites.add(site) - self.stdout.write(f"Created Google app with ID: {google_app.pk}") diff --git a/accounts/management/commands/reset_social_auth.py b/accounts/management/commands/reset_social_auth.py deleted file mode 100644 index 5dbc7707..00000000 --- a/accounts/management/commands/reset_social_auth.py +++ /dev/null @@ -1,24 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import connection - - -class Command(BaseCommand): - help = "Reset social auth configuration" - - def handle(self, *args, **options): - with connection.cursor() as cursor: - # Delete all social apps - cursor.execute("DELETE FROM socialaccount_socialapp") - cursor.execute("DELETE FROM socialaccount_socialapp_sites") - - # Reset sequences - cursor.execute( - "DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp'" - ) - cursor.execute( - "DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp_sites'" - ) - - self.stdout.write( - self.style.SUCCESS("Successfully reset social auth configuration") - ) diff --git a/accounts/management/commands/setup_groups.py b/accounts/management/commands/setup_groups.py deleted file mode 100644 index 70f84888..00000000 --- a/accounts/management/commands/setup_groups.py +++ /dev/null @@ -1,49 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.auth.models import Group -from accounts.models import User -from accounts.signals import create_default_groups - - -class Command(BaseCommand): - help = "Set up default groups and permissions for user roles" - - def handle(self, *args, **options): - self.stdout.write("Creating default groups and permissions...") - - try: - # Create default groups with permissions - create_default_groups() - - # Sync existing users with groups based on their roles - users = User.objects.exclude(role=User.Roles.USER) - for user in users: - group = Group.objects.filter(name=user.role).first() - if group: - user.groups.add(group) - - # Update staff/superuser status based on role - if user.role == User.Roles.SUPERUSER: - user.is_superuser = True - user.is_staff = True - elif user.role in [User.Roles.ADMIN, User.Roles.MODERATOR]: - user.is_staff = True - user.save() - - self.stdout.write( - self.style.SUCCESS("Successfully set up groups and permissions") - ) - - # Print summary - for group in Group.objects.all(): - self.stdout.write(f"\nGroup: {group.name}") - self.stdout.write("Permissions:") - for perm in group.permissions.all(): - self.stdout.write(f" - {perm.codename}") - - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"Error setting up groups: { - str(e)}" - ) - ) diff --git a/accounts/management/commands/setup_site.py b/accounts/management/commands/setup_site.py deleted file mode 100644 index 5adf6566..00000000 --- a/accounts/management/commands/setup_site.py +++ /dev/null @@ -1,16 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.sites.models import Site - - -class Command(BaseCommand): - help = "Set up default site" - - def handle(self, *args, **options): - # Delete any existing sites - Site.objects.all().delete() - - # Create default site - site = Site.objects.create( - id=1, domain="localhost:8000", name="ThrillWiki Development" - ) - self.stdout.write(self.style.SUCCESS(f"Created site: {site.domain}")) diff --git a/accounts/management/commands/setup_social_auth.py b/accounts/management/commands/setup_social_auth.py deleted file mode 100644 index a0e0fb90..00000000 --- a/accounts/management/commands/setup_social_auth.py +++ /dev/null @@ -1,126 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.sites.models import Site -from allauth.socialaccount.models import SocialApp -from dotenv import load_dotenv -import os - - -class Command(BaseCommand): - help = "Sets up social authentication apps" - - def handle(self, *args, **kwargs): - # Load environment variables - load_dotenv() - - # Get environment variables - google_client_id = os.getenv("GOOGLE_CLIENT_ID") - google_client_secret = os.getenv("GOOGLE_CLIENT_SECRET") - discord_client_id = os.getenv("DISCORD_CLIENT_ID") - discord_client_secret = os.getenv("DISCORD_CLIENT_SECRET") - - # DEBUG: Log environment variable values - self.stdout.write( - f"DEBUG: google_client_id type: { - type(google_client_id)}, value: {google_client_id}" - ) - self.stdout.write( - f"DEBUG: google_client_secret type: { - type(google_client_secret)}, value: {google_client_secret}" - ) - self.stdout.write( - f"DEBUG: discord_client_id type: { - type(discord_client_id)}, value: {discord_client_id}" - ) - self.stdout.write( - f"DEBUG: discord_client_secret type: { - type(discord_client_secret)}, value: {discord_client_secret}" - ) - - if not all( - [ - google_client_id, - google_client_secret, - discord_client_id, - discord_client_secret, - ] - ): - self.stdout.write( - self.style.ERROR("Missing required environment variables") - ) - self.stdout.write( - f"DEBUG: google_client_id is None: {google_client_id is None}" - ) - self.stdout.write( - f"DEBUG: google_client_secret is None: { - google_client_secret is None}" - ) - self.stdout.write( - f"DEBUG: discord_client_id is None: { - discord_client_id is None}" - ) - self.stdout.write( - f"DEBUG: discord_client_secret is None: { - discord_client_secret is None}" - ) - return - - # Get or create the default site - site, _ = Site.objects.get_or_create( - id=1, defaults={"domain": "localhost:8000", "name": "localhost"} - ) - - # Set up Google - google_app, created = SocialApp.objects.get_or_create( - provider="google", - defaults={ - "name": "Google", - "client_id": google_client_id, - "secret": google_client_secret, - }, - ) - if not created: - self.stdout.write( - f"DEBUG: About to assign google_client_id: {google_client_id} (type: { - type(google_client_id)})" - ) - if google_client_id is not None and google_client_secret is not None: - google_app.client_id = google_client_id - google_app.secret = google_client_secret - google_app.save() - self.stdout.write("DEBUG: Successfully updated Google app") - else: - self.stdout.write( - self.style.ERROR( - "Google client_id or secret is None, skipping update." - ) - ) - google_app.sites.add(site) - - # Set up Discord - discord_app, created = SocialApp.objects.get_or_create( - provider="discord", - defaults={ - "name": "Discord", - "client_id": discord_client_id, - "secret": discord_client_secret, - }, - ) - if not created: - self.stdout.write( - f"DEBUG: About to assign discord_client_id: {discord_client_id} (type: { - type(discord_client_id)})" - ) - if discord_client_id is not None and discord_client_secret is not None: - discord_app.client_id = discord_client_id - discord_app.secret = discord_client_secret - discord_app.save() - self.stdout.write("DEBUG: Successfully updated Discord app") - else: - self.stdout.write( - self.style.ERROR( - "Discord client_id or secret is None, skipping update." - ) - ) - discord_app.sites.add(site) - - self.stdout.write(self.style.SUCCESS("Successfully set up social auth apps")) diff --git a/accounts/management/commands/setup_social_auth_admin.py b/accounts/management/commands/setup_social_auth_admin.py deleted file mode 100644 index bb030798..00000000 --- a/accounts/management/commands/setup_social_auth_admin.py +++ /dev/null @@ -1,70 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.sites.models import Site -from django.contrib.auth import get_user_model - -User = get_user_model() - - -class Command(BaseCommand): - help = "Set up social authentication through admin interface" - - def handle(self, *args, **options): - # Get or create the default site - site, _ = Site.objects.get_or_create( - id=1, - defaults={ - "domain": "localhost:8000", - "name": "ThrillWiki Development", - }, - ) - if not _: - site.domain = "localhost:8000" - site.name = "ThrillWiki Development" - site.save() - self.stdout.write(f'{"Created" if _ else "Updated"} site: {site.domain}') - - # Create superuser if it doesn't exist - if not User.objects.filter(username="admin").exists(): - admin_user = User.objects.create( - username="admin", - email="admin@example.com", - is_staff=True, - is_superuser=True, - ) - admin_user.set_password("admin") - admin_user.save() - self.stdout.write("Created superuser: admin/admin") - - self.stdout.write( - self.style.SUCCESS( - """ -Social auth setup instructions: - -1. Run the development server: - python manage.py runserver - -2. Go to the admin interface: - http://localhost:8000/admin/ - -3. Log in with: - Username: admin - Password: admin - -4. Add social applications: - - Go to "Social applications" under "Social Accounts" - - Add Discord app: - Provider: discord - Name: Discord - Client id: 1299112802274902047 - Secret key: ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11 - Sites: Add "localhost:8000" - - - Add Google app: - Provider: google - Name: Google - Client id: 135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com - Secret key: GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue - Sites: Add "localhost:8000" -""" - ) - ) diff --git a/accounts/management/commands/test_discord_auth.py b/accounts/management/commands/test_discord_auth.py deleted file mode 100644 index 04586756..00000000 --- a/accounts/management/commands/test_discord_auth.py +++ /dev/null @@ -1,61 +0,0 @@ -from django.core.management.base import BaseCommand -from django.test import Client -from allauth.socialaccount.models import SocialApp - - -class Command(BaseCommand): - help = "Test Discord OAuth2 authentication flow" - - def handle(self, *args, **options): - client = Client(HTTP_HOST="localhost:8000") - - # Get Discord app - try: - discord_app = SocialApp.objects.get(provider="discord") - self.stdout.write("Found Discord app configuration:") - self.stdout.write(f"Client ID: {discord_app.client_id}") - - # Test login URL - login_url = "/accounts/discord/login/" - response = client.get(login_url, HTTP_HOST="localhost:8000") - self.stdout.write(f"\nTesting login URL: {login_url}") - self.stdout.write(f"Status code: {response.status_code}") - - if response.status_code == 302: - redirect_url = response["Location"] - self.stdout.write(f"Redirects to: {redirect_url}") - - # Parse OAuth2 parameters - self.stdout.write("\nOAuth2 Parameters:") - if "client_id=" in redirect_url: - self.stdout.write("✓ client_id parameter present") - if "redirect_uri=" in redirect_url: - self.stdout.write("✓ redirect_uri parameter present") - if "scope=" in redirect_url: - self.stdout.write("✓ scope parameter present") - if "response_type=" in redirect_url: - self.stdout.write("✓ response_type parameter present") - if "code_challenge=" in redirect_url: - self.stdout.write("✓ PKCE enabled (code_challenge present)") - - # Show callback URL - callback_url = "http://localhost:8000/accounts/discord/login/callback/" - self.stdout.write( - "\nCallback URL to configure in Discord Developer Portal:" - ) - self.stdout.write(callback_url) - - # Show frontend login URL - frontend_url = "http://localhost:5173" - self.stdout.write("\nFrontend configuration:") - self.stdout.write(f"Frontend URL: {frontend_url}") - self.stdout.write("Discord login button should use:") - self.stdout.write("/accounts/discord/login/?process=login") - - # Show allauth URLs - self.stdout.write("\nAllauth URLs:") - self.stdout.write("Login URL: /accounts/discord/login/?process=login") - self.stdout.write("Callback URL: /accounts/discord/login/callback/") - - except SocialApp.DoesNotExist: - self.stdout.write(self.style.ERROR("Discord app not found")) diff --git a/accounts/management/commands/update_social_apps_sites.py b/accounts/management/commands/update_social_apps_sites.py deleted file mode 100644 index 2e493170..00000000 --- a/accounts/management/commands/update_social_apps_sites.py +++ /dev/null @@ -1,23 +0,0 @@ -from django.core.management.base import BaseCommand -from allauth.socialaccount.models import SocialApp -from django.contrib.sites.models import Site - - -class Command(BaseCommand): - help = "Update social apps to be associated with all sites" - - def handle(self, *args, **options): - # Get all sites - sites = Site.objects.all() - - # Update each social app - for app in SocialApp.objects.all(): - self.stdout.write(f"Updating {app.provider} app...") - # Clear existing sites - app.sites.clear() - # Add all sites - for site in sites: - app.sites.add(site) - self.stdout.write( - f'Added sites: {", ".join(site.domain for site in sites)}' - ) diff --git a/accounts/management/commands/verify_discord_settings.py b/accounts/management/commands/verify_discord_settings.py deleted file mode 100644 index c83fe6da..00000000 --- a/accounts/management/commands/verify_discord_settings.py +++ /dev/null @@ -1,42 +0,0 @@ -from django.core.management.base import BaseCommand -from allauth.socialaccount.models import SocialApp -from django.conf import settings - - -class Command(BaseCommand): - help = "Verify Discord OAuth2 settings" - - def handle(self, *args, **options): - # Get Discord app - try: - discord_app = SocialApp.objects.get(provider="discord") - self.stdout.write("Found Discord app configuration:") - self.stdout.write(f"Client ID: {discord_app.client_id}") - self.stdout.write(f"Secret: {discord_app.secret}") - - # Get sites - sites = discord_app.sites.all() - self.stdout.write("\nAssociated sites:") - for site in sites: - self.stdout.write(f"- {site.domain} ({site.name})") - - # Show callback URL - callback_url = "http://localhost:8000/accounts/discord/login/callback/" - self.stdout.write( - "\nCallback URL to configure in Discord Developer Portal:" - ) - self.stdout.write(callback_url) - - # Show OAuth2 settings - self.stdout.write("\nOAuth2 settings in settings.py:") - discord_settings = settings.SOCIALACCOUNT_PROVIDERS.get("discord", {}) - self.stdout.write( - f'PKCE Enabled: { - discord_settings.get( - "OAUTH_PKCE_ENABLED", - False)}' - ) - self.stdout.write(f'Scopes: {discord_settings.get("SCOPE", [])}') - - except SocialApp.DoesNotExist: - self.stdout.write(self.style.ERROR("Discord app not found")) diff --git a/accounts/migrations/0001_initial.py b/accounts/migrations/0001_initial.py deleted file mode 100644 index 61ba3912..00000000 --- a/accounts/migrations/0001_initial.py +++ /dev/null @@ -1,552 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-13 21:35 - -import django.contrib.auth.models -import django.contrib.auth.validators -import django.db.models.deletion -import django.utils.timezone -import pgtrigger.compiler -import pgtrigger.migrations -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("auth", "0012_alter_user_first_name_max_length"), - ("contenttypes", "0002_remove_content_type_name"), - ("pghistory", "0006_delete_aggregateevent"), - ] - - operations = [ - migrations.CreateModel( - name="User", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "password", - models.CharField(max_length=128, verbose_name="password"), - ), - ( - "last_login", - models.DateTimeField( - blank=True, null=True, verbose_name="last login" - ), - ), - ( - "is_superuser", - models.BooleanField( - default=False, - help_text="Designates that this user has all permissions without explicitly assigning them.", - verbose_name="superuser status", - ), - ), - ( - "username", - models.CharField( - error_messages={ - "unique": "A user with that username already exists." - }, - help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.", - max_length=150, - unique=True, - validators=[ - django.contrib.auth.validators.UnicodeUsernameValidator() - ], - verbose_name="username", - ), - ), - ( - "first_name", - models.CharField( - blank=True, max_length=150, verbose_name="first name" - ), - ), - ( - "last_name", - models.CharField( - blank=True, max_length=150, verbose_name="last name" - ), - ), - ( - "email", - models.EmailField( - blank=True, - max_length=254, - verbose_name="email address", - ), - ), - ( - "is_staff", - models.BooleanField( - default=False, - help_text="Designates whether the user can log into this admin site.", - verbose_name="staff status", - ), - ), - ( - "is_active", - models.BooleanField( - default=True, - help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.", - verbose_name="active", - ), - ), - ( - "date_joined", - models.DateTimeField( - default=django.utils.timezone.now, - verbose_name="date joined", - ), - ), - ( - "user_id", - models.CharField( - editable=False, - help_text="Unique identifier for this user that remains constant even if the username changes", - max_length=10, - unique=True, - ), - ), - ( - "role", - models.CharField( - choices=[ - ("USER", "User"), - ("MODERATOR", "Moderator"), - ("ADMIN", "Admin"), - ("SUPERUSER", "Superuser"), - ], - default="USER", - max_length=10, - ), - ), - ("is_banned", models.BooleanField(default=False)), - ("ban_reason", models.TextField(blank=True)), - ("ban_date", models.DateTimeField(blank=True, null=True)), - ( - "pending_email", - models.EmailField(blank=True, max_length=254, null=True), - ), - ( - "theme_preference", - models.CharField( - choices=[("light", "Light"), ("dark", "Dark")], - default="light", - max_length=5, - ), - ), - ( - "groups", - models.ManyToManyField( - blank=True, - help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.", - related_name="user_set", - related_query_name="user", - to="auth.group", - verbose_name="groups", - ), - ), - ( - "user_permissions", - models.ManyToManyField( - blank=True, - help_text="Specific permissions for this user.", - related_name="user_set", - related_query_name="user", - to="auth.permission", - verbose_name="user permissions", - ), - ), - ], - options={ - "verbose_name": "user", - "verbose_name_plural": "users", - "abstract": False, - }, - managers=[ - ("objects", django.contrib.auth.models.UserManager()), - ], - ), - migrations.CreateModel( - name="EmailVerification", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("token", models.CharField(max_length=64, unique=True)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("last_sent", models.DateTimeField(auto_now_add=True)), - ( - "user", - models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "verbose_name": "Email Verification", - "verbose_name_plural": "Email Verifications", - }, - ), - migrations.CreateModel( - name="PasswordReset", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("token", models.CharField(max_length=64)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("expires_at", models.DateTimeField()), - ("used", models.BooleanField(default=False)), - ( - "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "verbose_name": "Password Reset", - "verbose_name_plural": "Password Resets", - }, - ), - migrations.CreateModel( - name="TopList", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("title", models.CharField(max_length=100)), - ( - "category", - models.CharField( - choices=[ - ("RC", "Roller Coaster"), - ("DR", "Dark Ride"), - ("FR", "Flat Ride"), - ("WR", "Water Ride"), - ("PK", "Park"), - ], - max_length=2, - ), - ), - ("description", models.TextField(blank=True)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="top_lists", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "ordering": ["-updated_at"], - }, - ), - migrations.CreateModel( - name="TopListEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("title", models.CharField(max_length=100)), - ( - "category", - models.CharField( - choices=[ - ("RC", "Roller Coaster"), - ("DR", "Dark Ride"), - ("FR", "Flat Ride"), - ("WR", "Water Ride"), - ("PK", "Park"), - ], - max_length=2, - ), - ), - ("description", models.TextField(blank=True)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="accounts.toplist", - ), - ), - ( - "user", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="TopListItem", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("object_id", models.PositiveIntegerField()), - ("rank", models.PositiveIntegerField()), - ("notes", models.TextField(blank=True)), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ( - "top_list", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="items", - to="accounts.toplist", - ), - ), - ], - options={ - "ordering": ["rank"], - }, - ), - migrations.CreateModel( - name="TopListItemEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("object_id", models.PositiveIntegerField()), - ("rank", models.PositiveIntegerField()), - ("notes", models.TextField(blank=True)), - ( - "content_type", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="contenttypes.contenttype", - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="accounts.toplistitem", - ), - ), - ( - "top_list", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="accounts.toplist", - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="UserProfile", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "profile_id", - models.CharField( - editable=False, - help_text="Unique identifier for this profile that remains constant", - max_length=10, - unique=True, - ), - ), - ( - "display_name", - models.CharField( - help_text="This is the name that will be displayed on the site", - max_length=50, - unique=True, - ), - ), - ( - "avatar", - models.ImageField(blank=True, upload_to="avatars/"), - ), - ("pronouns", models.CharField(blank=True, max_length=50)), - ("bio", models.TextField(blank=True, max_length=500)), - ("twitter", models.URLField(blank=True)), - ("instagram", models.URLField(blank=True)), - ("youtube", models.URLField(blank=True)), - ("discord", models.CharField(blank=True, max_length=100)), - ("coaster_credits", models.IntegerField(default=0)), - ("dark_ride_credits", models.IntegerField(default=0)), - ("flat_ride_credits", models.IntegerField(default=0)), - ("water_ride_credits", models.IntegerField(default=0)), - ( - "user", - models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - related_name="profile", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - ), - pgtrigger.migrations.AddTrigger( - model_name="toplist", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "accounts_toplistevent" ("category", "created_at", "description", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "title", "updated_at", "user_id") VALUES (NEW."category", NEW."created_at", NEW."description", NEW."id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."title", NEW."updated_at", NEW."user_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_26546", - table="accounts_toplist", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="toplist", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "accounts_toplistevent" ("category", "created_at", "description", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "title", "updated_at", "user_id") VALUES (NEW."category", NEW."created_at", NEW."description", NEW."id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."title", NEW."updated_at", NEW."user_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_84849", - table="accounts_toplist", - when="AFTER", - ), - ), - ), - migrations.AlterUniqueTogether( - name="toplistitem", - unique_together={("top_list", "rank")}, - ), - pgtrigger.migrations.AddTrigger( - model_name="toplistitem", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_56dfc", - table="accounts_toplistitem", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="toplistitem", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_2b6e3", - table="accounts_toplistitem", - when="AFTER", - ), - ), - ), - ] diff --git a/accounts/migrations/__init__.py b/accounts/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/accounts/mixins.py b/accounts/mixins.py deleted file mode 100644 index a5977950..00000000 --- a/accounts/mixins.py +++ /dev/null @@ -1,35 +0,0 @@ -import requests -from django.conf import settings -from django.core.exceptions import ValidationError - - -class TurnstileMixin: - """ - Mixin to handle Cloudflare Turnstile validation. - Bypasses validation when DEBUG is True. - """ - - def validate_turnstile(self, request): - """ - Validate the Turnstile response token. - Skips validation when DEBUG is True. - """ - if settings.DEBUG: - return - - token = request.POST.get("cf-turnstile-response") - if not token: - raise ValidationError("Please complete the Turnstile challenge.") - - # Verify the token with Cloudflare - data = { - "secret": settings.TURNSTILE_SECRET_KEY, - "response": token, - "remoteip": request.META.get("REMOTE_ADDR"), - } - - response = requests.post(settings.TURNSTILE_VERIFY_URL, data=data, timeout=60) - result = response.json() - - if not result.get("success"): - raise ValidationError("Turnstile validation failed. Please try again.") diff --git a/accounts/models.py b/accounts/models.py deleted file mode 100644 index a452870b..00000000 --- a/accounts/models.py +++ /dev/null @@ -1,219 +0,0 @@ -from django.contrib.auth.models import AbstractUser -from django.db import models -from django.urls import reverse -from django.utils.translation import gettext_lazy as _ -import os -import secrets -from core.history import TrackedModel - -# import pghistory - - -def generate_random_id(model_class, id_field): - """Generate a random ID starting at 4 digits, expanding to 5 if needed""" - while True: - # Try to get a 4-digit number first - new_id = str(secrets.SystemRandom().randint(1000, 9999)) - if not model_class.objects.filter(**{id_field: new_id}).exists(): - return new_id - - # If all 4-digit numbers are taken, try 5 digits - new_id = str(secrets.SystemRandom().randint(10000, 99999)) - if not model_class.objects.filter(**{id_field: new_id}).exists(): - return new_id - - -class User(AbstractUser): - class Roles(models.TextChoices): - USER = "USER", _("User") - MODERATOR = "MODERATOR", _("Moderator") - ADMIN = "ADMIN", _("Admin") - SUPERUSER = "SUPERUSER", _("Superuser") - - class ThemePreference(models.TextChoices): - LIGHT = "light", _("Light") - DARK = "dark", _("Dark") - - # Read-only ID - user_id = models.CharField( - max_length=10, - unique=True, - editable=False, - help_text=( - "Unique identifier for this user that remains constant even if the " - "username changes" - ), - ) - - role = models.CharField( - max_length=10, - choices=Roles.choices, - default=Roles.USER, - ) - is_banned = models.BooleanField(default=False) - ban_reason = models.TextField(blank=True) - ban_date = models.DateTimeField(null=True, blank=True) - pending_email = models.EmailField(blank=True, null=True) - theme_preference = models.CharField( - max_length=5, - choices=ThemePreference.choices, - default=ThemePreference.LIGHT, - ) - - def __str__(self): - return self.get_display_name() - - def get_absolute_url(self): - return reverse("profile", kwargs={"username": self.username}) - - def get_display_name(self): - """Get the user's display name, falling back to username if not set""" - profile = getattr(self, "profile", None) - if profile and profile.display_name: - return profile.display_name - return self.username - - def save(self, *args, **kwargs): - if not self.user_id: - self.user_id = generate_random_id(User, "user_id") - super().save(*args, **kwargs) - - -class UserProfile(models.Model): - # Read-only ID - profile_id = models.CharField( - max_length=10, - unique=True, - editable=False, - help_text="Unique identifier for this profile that remains constant", - ) - - user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="profile") - display_name = models.CharField( - max_length=50, - unique=True, - help_text="This is the name that will be displayed on the site", - ) - avatar = models.ImageField(upload_to="avatars/", blank=True) - pronouns = models.CharField(max_length=50, blank=True) - - bio = models.TextField(max_length=500, blank=True) - - # Social media links - twitter = models.URLField(blank=True) - instagram = models.URLField(blank=True) - youtube = models.URLField(blank=True) - discord = models.CharField(max_length=100, blank=True) - - # Ride statistics - coaster_credits = models.IntegerField(default=0) - dark_ride_credits = models.IntegerField(default=0) - flat_ride_credits = models.IntegerField(default=0) - water_ride_credits = models.IntegerField(default=0) - - def get_avatar(self): - """ - Return the avatar URL or serve a pre-generated avatar based on the - first letter of the username - """ - if self.avatar: - return self.avatar.url - first_letter = self.user.username.upper() - avatar_path = f"avatars/letters/{first_letter}_avatar.png" - if os.path.exists(avatar_path): - return f"/{avatar_path}" - return "/static/images/default-avatar.png" - - def save(self, *args, **kwargs): - # If no display name is set, use the username - if not self.display_name: - self.display_name = self.user.username - - if not self.profile_id: - self.profile_id = generate_random_id(UserProfile, "profile_id") - super().save(*args, **kwargs) - - def __str__(self): - return self.display_name - - -class EmailVerification(models.Model): - user = models.OneToOneField(User, on_delete=models.CASCADE) - token = models.CharField(max_length=64, unique=True) - created_at = models.DateTimeField(auto_now_add=True) - last_sent = models.DateTimeField(auto_now_add=True) - - def __str__(self): - return f"Email verification for {self.user.username}" - - class Meta: - verbose_name = "Email Verification" - verbose_name_plural = "Email Verifications" - - -class PasswordReset(models.Model): - user = models.ForeignKey(User, on_delete=models.CASCADE) - token = models.CharField(max_length=64) - created_at = models.DateTimeField(auto_now_add=True) - expires_at = models.DateTimeField() - used = models.BooleanField(default=False) - - def __str__(self): - return f"Password reset for {self.user.username}" - - class Meta: - verbose_name = "Password Reset" - verbose_name_plural = "Password Resets" - - -# @pghistory.track() - - -class TopList(TrackedModel): - class Categories(models.TextChoices): - ROLLER_COASTER = "RC", _("Roller Coaster") - DARK_RIDE = "DR", _("Dark Ride") - FLAT_RIDE = "FR", _("Flat Ride") - WATER_RIDE = "WR", _("Water Ride") - PARK = "PK", _("Park") - - user = models.ForeignKey( - User, - on_delete=models.CASCADE, - related_name="top_lists", # Added related_name for User model access - ) - title = models.CharField(max_length=100) - category = models.CharField(max_length=2, choices=Categories.choices) - description = models.TextField(blank=True) - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta(TrackedModel.Meta): - ordering = ["-updated_at"] - - def __str__(self): - return ( - f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}" - ) - - -# @pghistory.track() - - -class TopListItem(TrackedModel): - top_list = models.ForeignKey( - TopList, on_delete=models.CASCADE, related_name="items" - ) - content_type = models.ForeignKey( - "contenttypes.ContentType", on_delete=models.CASCADE - ) - object_id = models.PositiveIntegerField() - rank = models.PositiveIntegerField() - notes = models.TextField(blank=True) - - class Meta(TrackedModel.Meta): - ordering = ["rank"] - unique_together = [["top_list", "rank"]] - - def __str__(self): - return f"#{self.rank} in {self.top_list.title}" diff --git a/accounts/models_temp.py b/accounts/models_temp.py deleted file mode 100644 index b3dbb6bc..00000000 --- a/accounts/models_temp.py +++ /dev/null @@ -1,208 +0,0 @@ -from django.contrib.auth.models import AbstractUser -from django.db import models -from django.urls import reverse -from django.utils.translation import gettext_lazy as _ -import os -import secrets -from core.history import TrackedModel -import pghistory - - -def generate_random_id(model_class, id_field): - """Generate a random ID starting at 4 digits, expanding to 5 if needed""" - while True: - # Try to get a 4-digit number first - new_id = str(secrets.SystemRandom().randint(1000, 9999)) - if not model_class.objects.filter(**{id_field: new_id}).exists(): - return new_id - - # If all 4-digit numbers are taken, try 5 digits - new_id = str(secrets.SystemRandom().randint(10000, 99999)) - if not model_class.objects.filter(**{id_field: new_id}).exists(): - return new_id - - -class User(AbstractUser): - class Roles(models.TextChoices): - USER = "USER", _("User") - MODERATOR = "MODERATOR", _("Moderator") - ADMIN = "ADMIN", _("Admin") - SUPERUSER = "SUPERUSER", _("Superuser") - - class ThemePreference(models.TextChoices): - LIGHT = "light", _("Light") - DARK = "dark", _("Dark") - - # Read-only ID - user_id = models.CharField( - max_length=10, - unique=True, - editable=False, - help_text="Unique identifier for this user that remains constant even if the username changes", - ) - - role = models.CharField( - max_length=10, - choices=Roles.choices, - default=Roles.USER, - ) - is_banned = models.BooleanField(default=False) - ban_reason = models.TextField(blank=True) - ban_date = models.DateTimeField(null=True, blank=True) - pending_email = models.EmailField(blank=True, null=True) - theme_preference = models.CharField( - max_length=5, - choices=ThemePreference.choices, - default=ThemePreference.LIGHT, - ) - - def __str__(self): - return self.get_display_name() - - def get_absolute_url(self): - return reverse("profile", kwargs={"username": self.username}) - - def get_display_name(self): - """Get the user's display name, falling back to username if not set""" - profile = getattr(self, "profile", None) - if profile and profile.display_name: - return profile.display_name - return self.username - - def save(self, *args, **kwargs): - if not self.user_id: - self.user_id = generate_random_id(User, "user_id") - super().save(*args, **kwargs) - - -class UserProfile(models.Model): - # Read-only ID - profile_id = models.CharField( - max_length=10, - unique=True, - editable=False, - help_text="Unique identifier for this profile that remains constant", - ) - - user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="profile") - display_name = models.CharField( - max_length=50, - unique=True, - help_text="This is the name that will be displayed on the site", - ) - avatar = models.ImageField(upload_to="avatars/", blank=True) - pronouns = models.CharField(max_length=50, blank=True) - - bio = models.TextField(max_length=500, blank=True) - - # Social media links - twitter = models.URLField(blank=True) - instagram = models.URLField(blank=True) - youtube = models.URLField(blank=True) - discord = models.CharField(max_length=100, blank=True) - - # Ride statistics - coaster_credits = models.IntegerField(default=0) - dark_ride_credits = models.IntegerField(default=0) - flat_ride_credits = models.IntegerField(default=0) - water_ride_credits = models.IntegerField(default=0) - - def get_avatar(self): - """Return the avatar URL or serve a pre-generated avatar based on the first letter of the username""" - if self.avatar: - return self.avatar.url - first_letter = self.user.username[0].upper() - avatar_path = f"avatars/letters/{first_letter}_avatar.png" - if os.path.exists(avatar_path): - return f"/{avatar_path}" - return "/static/images/default-avatar.png" - - def save(self, *args, **kwargs): - # If no display name is set, use the username - if not self.display_name: - self.display_name = self.user.username - - if not self.profile_id: - self.profile_id = generate_random_id(UserProfile, "profile_id") - super().save(*args, **kwargs) - - def __str__(self): - return self.display_name - - -class EmailVerification(models.Model): - user = models.OneToOneField(User, on_delete=models.CASCADE) - token = models.CharField(max_length=64, unique=True) - created_at = models.DateTimeField(auto_now_add=True) - last_sent = models.DateTimeField(auto_now_add=True) - - def __str__(self): - return f"Email verification for {self.user.username}" - - class Meta: - verbose_name = "Email Verification" - verbose_name_plural = "Email Verifications" - - -class PasswordReset(models.Model): - user = models.ForeignKey(User, on_delete=models.CASCADE) - token = models.CharField(max_length=64) - created_at = models.DateTimeField(auto_now_add=True) - expires_at = models.DateTimeField() - used = models.BooleanField(default=False) - - def __str__(self): - return f"Password reset for {self.user.username}" - - class Meta: - verbose_name = "Password Reset" - verbose_name_plural = "Password Resets" - - -@pghistory.track() -class TopList(TrackedModel): - class Categories(models.TextChoices): - ROLLER_COASTER = "RC", _("Roller Coaster") - DARK_RIDE = "DR", _("Dark Ride") - FLAT_RIDE = "FR", _("Flat Ride") - WATER_RIDE = "WR", _("Water Ride") - PARK = "PK", _("Park") - - user = models.ForeignKey( - User, - on_delete=models.CASCADE, - related_name="top_lists", # Added related_name for User model access - ) - title = models.CharField(max_length=100) - category = models.CharField(max_length=2, choices=Categories.choices) - description = models.TextField(blank=True) - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta(TrackedModel.Meta): - ordering = ["-updated_at"] - - def __str__(self): - return ( - f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}" - ) - - -@pghistory.track() -class TopListItem(TrackedModel): - top_list = models.ForeignKey( - TopList, on_delete=models.CASCADE, related_name="items" - ) - content_type = models.ForeignKey( - "contenttypes.ContentType", on_delete=models.CASCADE - ) - object_id = models.PositiveIntegerField() - rank = models.PositiveIntegerField() - notes = models.TextField(blank=True) - - class Meta(TrackedModel.Meta): - ordering = ["rank"] - unique_together = [["top_list", "rank"]] - - def __str__(self): - return f"#{self.rank} in {self.top_list.title}" diff --git a/accounts/selectors.py b/accounts/selectors.py deleted file mode 100644 index fda8718f..00000000 --- a/accounts/selectors.py +++ /dev/null @@ -1,273 +0,0 @@ -""" -Selectors for user and account-related data retrieval. -Following Django styleguide pattern for separating data access from business logic. -""" - -from typing import Dict, Any -from django.db.models import QuerySet, Q, F, Count -from django.contrib.auth import get_user_model -from django.utils import timezone -from datetime import timedelta - -User = get_user_model() - - -def user_profile_optimized(*, user_id: int) -> Any: - """ - Get a user with optimized queries for profile display. - - Args: - user_id: User ID - - Returns: - User instance with prefetched related data - - Raises: - User.DoesNotExist: If user doesn't exist - """ - return ( - User.objects.prefetch_related( - "park_reviews", "ride_reviews", "socialaccount_set" - ) - .annotate( - park_review_count=Count( - "park_reviews", filter=Q(park_reviews__is_published=True) - ), - ride_review_count=Count( - "ride_reviews", filter=Q(ride_reviews__is_published=True) - ), - total_review_count=F("park_review_count") + F("ride_review_count"), - ) - .get(id=user_id) - ) - - -def active_users_with_stats() -> QuerySet: - """ - Get active users with review statistics. - - Returns: - QuerySet of active users with review counts - """ - return ( - User.objects.filter(is_active=True) - .annotate( - park_review_count=Count( - "park_reviews", filter=Q(park_reviews__is_published=True) - ), - ride_review_count=Count( - "ride_reviews", filter=Q(ride_reviews__is_published=True) - ), - total_review_count=F("park_review_count") + F("ride_review_count"), - ) - .order_by("-total_review_count") - ) - - -def users_with_recent_activity(*, days: int = 30) -> QuerySet: - """ - Get users who have been active in the last N days. - - Args: - days: Number of days to look back for activity - - Returns: - QuerySet of recently active users - """ - cutoff_date = timezone.now() - timedelta(days=days) - - return ( - User.objects.filter( - Q(last_login__gte=cutoff_date) - | Q(park_reviews__created_at__gte=cutoff_date) - | Q(ride_reviews__created_at__gte=cutoff_date) - ) - .annotate( - recent_park_reviews=Count( - "park_reviews", - filter=Q(park_reviews__created_at__gte=cutoff_date), - ), - recent_ride_reviews=Count( - "ride_reviews", - filter=Q(ride_reviews__created_at__gte=cutoff_date), - ), - recent_total_reviews=F("recent_park_reviews") + F("recent_ride_reviews"), - ) - .order_by("-last_login") - .distinct() - ) - - -def top_reviewers(*, limit: int = 10) -> QuerySet: - """ - Get top users by review count. - - Args: - limit: Maximum number of users to return - - Returns: - QuerySet of top reviewers - """ - return ( - User.objects.filter(is_active=True) - .annotate( - park_review_count=Count( - "park_reviews", filter=Q(park_reviews__is_published=True) - ), - ride_review_count=Count( - "ride_reviews", filter=Q(ride_reviews__is_published=True) - ), - total_review_count=F("park_review_count") + F("ride_review_count"), - ) - .filter(total_review_count__gt=0) - .order_by("-total_review_count")[:limit] - ) - - -def moderator_users() -> QuerySet: - """ - Get users with moderation permissions. - - Returns: - QuerySet of users who can moderate content - """ - return ( - User.objects.filter( - Q(is_staff=True) - | Q(groups__name="Moderators") - | Q( - user_permissions__codename__in=[ - "change_parkreview", - "change_ridereview", - ] - ) - ) - .distinct() - .order_by("username") - ) - - -def users_by_registration_date(*, start_date, end_date) -> QuerySet: - """ - Get users who registered within a date range. - - Args: - start_date: Start of date range - end_date: End of date range - - Returns: - QuerySet of users registered in the date range - """ - return User.objects.filter( - date_joined__date__gte=start_date, date_joined__date__lte=end_date - ).order_by("-date_joined") - - -def user_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet: - """ - Get users matching a search query for autocomplete functionality. - - Args: - query: Search string - limit: Maximum number of results - - Returns: - QuerySet of matching users for autocomplete - """ - return User.objects.filter( - Q(username__icontains=query) - | Q(first_name__icontains=query) - | Q(last_name__icontains=query), - is_active=True, - ).order_by("username")[:limit] - - -def users_with_social_accounts() -> QuerySet: - """ - Get users who have connected social accounts. - - Returns: - QuerySet of users with social account connections - """ - return ( - User.objects.filter(socialaccount__isnull=False) - .prefetch_related("socialaccount_set") - .distinct() - .order_by("username") - ) - - -def user_statistics_summary() -> Dict[str, Any]: - """ - Get overall user statistics for dashboard/analytics. - - Returns: - Dictionary containing user statistics - """ - total_users = User.objects.count() - active_users = User.objects.filter(is_active=True).count() - staff_users = User.objects.filter(is_staff=True).count() - - # Users with reviews - users_with_reviews = ( - User.objects.filter( - Q(park_reviews__isnull=False) | Q(ride_reviews__isnull=False) - ) - .distinct() - .count() - ) - - # Recent registrations (last 30 days) - cutoff_date = timezone.now() - timedelta(days=30) - recent_registrations = User.objects.filter(date_joined__gte=cutoff_date).count() - - return { - "total_users": total_users, - "active_users": active_users, - "inactive_users": total_users - active_users, - "staff_users": staff_users, - "users_with_reviews": users_with_reviews, - "recent_registrations": recent_registrations, - "review_participation_rate": ( - (users_with_reviews / total_users * 100) if total_users > 0 else 0 - ), - } - - -def users_needing_email_verification() -> QuerySet: - """ - Get users who haven't verified their email addresses. - - Returns: - QuerySet of users with unverified emails - """ - return ( - User.objects.filter(is_active=True, emailaddress__verified=False) - .distinct() - .order_by("date_joined") - ) - - -def users_by_review_activity(*, min_reviews: int = 1) -> QuerySet: - """ - Get users who have written at least a minimum number of reviews. - - Args: - min_reviews: Minimum number of reviews required - - Returns: - QuerySet of users with sufficient review activity - """ - return ( - User.objects.annotate( - park_review_count=Count( - "park_reviews", filter=Q(park_reviews__is_published=True) - ), - ride_review_count=Count( - "ride_reviews", filter=Q(ride_reviews__is_published=True) - ), - total_review_count=F("park_review_count") + F("ride_review_count"), - ) - .filter(total_review_count__gte=min_reviews) - .order_by("-total_review_count") - ) diff --git a/accounts/signals.py b/accounts/signals.py deleted file mode 100644 index e7f9dc7b..00000000 --- a/accounts/signals.py +++ /dev/null @@ -1,189 +0,0 @@ -from django.db.models.signals import post_save, pre_save -from django.dispatch import receiver -from django.contrib.auth.models import Group -from django.db import transaction -from django.core.files import File -from django.core.files.temp import NamedTemporaryFile -import requests -from .models import User, UserProfile - - -@receiver(post_save, sender=User) -def create_user_profile(sender, instance, created, **kwargs): - """Create UserProfile for new users""" - try: - if created: - # Create profile - profile = UserProfile.objects.create(user=instance) - - # If user has a social account with avatar, download it - social_account = instance.socialaccount_set.first() - if social_account: - extra_data = social_account.extra_data - avatar_url = None - - if social_account.provider == "google": - avatar_url = extra_data.get("picture") - elif social_account.provider == "discord": - avatar = extra_data.get("avatar") - discord_id = extra_data.get("id") - if avatar: - avatar_url = f"https://cdn.discordapp.com/avatars/{discord_id}/{avatar}.png" - - if avatar_url: - try: - response = requests.get(avatar_url, timeout=60) - if response.status_code == 200: - img_temp = NamedTemporaryFile(delete=True) - img_temp.write(response.content) - img_temp.flush() - - file_name = f"avatar_{instance.username}.png" - profile.avatar.save(file_name, File(img_temp), save=True) - except Exception as e: - print( - f"Error downloading avatar for user { - instance.username}: { - str(e)}" - ) - except Exception as e: - print(f"Error creating profile for user {instance.username}: {str(e)}") - - -@receiver(post_save, sender=User) -def save_user_profile(sender, instance, **kwargs): - """Ensure UserProfile exists and is saved""" - try: - # Try to get existing profile first - try: - profile = instance.profile - profile.save() - except UserProfile.DoesNotExist: - # Profile doesn't exist, create it - UserProfile.objects.create(user=instance) - except Exception as e: - print(f"Error saving profile for user {instance.username}: {str(e)}") - - -@receiver(pre_save, sender=User) -def sync_user_role_with_groups(sender, instance, **kwargs): - """Sync user role with Django groups""" - if instance.pk: # Only for existing users - try: - old_instance = User.objects.get(pk=instance.pk) - if old_instance.role != instance.role: - # Role has changed, update groups - with transaction.atomic(): - # Remove from old role group if exists - if old_instance.role != User.Roles.USER: - old_group = Group.objects.filter(name=old_instance.role).first() - if old_group: - instance.groups.remove(old_group) - - # Add to new role group - if instance.role != User.Roles.USER: - new_group, _ = Group.objects.get_or_create(name=instance.role) - instance.groups.add(new_group) - - # Special handling for superuser role - if instance.role == User.Roles.SUPERUSER: - instance.is_superuser = True - instance.is_staff = True - elif old_instance.role == User.Roles.SUPERUSER: - # If removing superuser role, remove superuser - # status - instance.is_superuser = False - if instance.role not in [ - User.Roles.ADMIN, - User.Roles.MODERATOR, - ]: - instance.is_staff = False - - # Handle staff status for admin and moderator roles - if instance.role in [ - User.Roles.ADMIN, - User.Roles.MODERATOR, - ]: - instance.is_staff = True - elif old_instance.role in [ - User.Roles.ADMIN, - User.Roles.MODERATOR, - ]: - # If removing admin/moderator role, remove staff - # status - if instance.role not in [User.Roles.SUPERUSER]: - instance.is_staff = False - except User.DoesNotExist: - pass - except Exception as e: - print( - f"Error syncing role with groups for user { - instance.username}: { - str(e)}" - ) - - -def create_default_groups(): - """ - Create default groups with appropriate permissions. - Call this in a migration or management command. - """ - try: - from django.contrib.auth.models import Permission - - # Create Moderator group - moderator_group, _ = Group.objects.get_or_create(name=User.Roles.MODERATOR) - moderator_permissions = [ - # Review moderation permissions - "change_review", - "delete_review", - "change_reviewreport", - "delete_reviewreport", - # Edit moderation permissions - "change_parkedit", - "delete_parkedit", - "change_rideedit", - "delete_rideedit", - "change_companyedit", - "delete_companyedit", - "change_manufactureredit", - "delete_manufactureredit", - ] - - # Create Admin group - admin_group, _ = Group.objects.get_or_create(name=User.Roles.ADMIN) - admin_permissions = moderator_permissions + [ - # User management permissions - "change_user", - "delete_user", - # Content management permissions - "add_park", - "change_park", - "delete_park", - "add_ride", - "change_ride", - "delete_ride", - "add_company", - "change_company", - "delete_company", - "add_manufacturer", - "change_manufacturer", - "delete_manufacturer", - ] - - # Assign permissions to groups - for codename in moderator_permissions: - try: - perm = Permission.objects.get(codename=codename) - moderator_group.permissions.add(perm) - except Permission.DoesNotExist: - print(f"Permission not found: {codename}") - - for codename in admin_permissions: - try: - perm = Permission.objects.get(codename=codename) - admin_group.permissions.add(perm) - except Permission.DoesNotExist: - print(f"Permission not found: {codename}") - except Exception as e: - print(f"Error creating default groups: {str(e)}") diff --git a/accounts/templatetags/__init__.py b/accounts/templatetags/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/accounts/templatetags/turnstile_tags.py b/accounts/templatetags/turnstile_tags.py deleted file mode 100644 index 0f7219ba..00000000 --- a/accounts/templatetags/turnstile_tags.py +++ /dev/null @@ -1,23 +0,0 @@ -from django import template -from django.conf import settings -from django.template.loader import render_to_string - -register = template.Library() - - -@register.simple_tag -def turnstile_widget(): - """ - Template tag to render the Cloudflare Turnstile widget. - When DEBUG is True, renders an empty template. - When DEBUG is False, renders the normal widget. - Usage: {% load turnstile_tags %}{% turnstile_widget %} - """ - if settings.DEBUG: - template_name = "accounts/turnstile_widget_empty.html" - context = {} - else: - template_name = "accounts/turnstile_widget.html" - context = {"site_key": settings.TURNSTILE_SITE_KEY} - - return render_to_string(template_name, context) diff --git a/accounts/tests.py b/accounts/tests.py deleted file mode 100644 index f7385e26..00000000 --- a/accounts/tests.py +++ /dev/null @@ -1,126 +0,0 @@ -from django.test import TestCase -from django.contrib.auth.models import Group, Permission -from django.contrib.contenttypes.models import ContentType -from unittest.mock import patch, MagicMock -from .models import User, UserProfile -from .signals import create_default_groups - - -class SignalsTestCase(TestCase): - def setUp(self): - self.user = User.objects.create_user( - username="testuser", - email="testuser@example.com", - password="password", - ) - - def test_create_user_profile(self): - # Refresh user from database to ensure signals have been processed - self.user.refresh_from_db() - - # Check if profile exists in database first - profile_exists = UserProfile.objects.filter(user=self.user).exists() - self.assertTrue(profile_exists, "UserProfile should be created by signals") - - # Now safely access the profile - profile = UserProfile.objects.get(user=self.user) - self.assertIsInstance(profile, UserProfile) - - # Test the reverse relationship - self.assertTrue(hasattr(self.user, "profile")) - # Test that we can access the profile through the user relationship - user_profile = getattr(self.user, "profile", None) - self.assertEqual(user_profile, profile) - - @patch("accounts.signals.requests.get") - def test_create_user_profile_with_social_avatar(self, mock_get): - # Mock the response from requests.get - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.content = b"fake-image-content" - mock_get.return_value = mock_response - - # Create a social account for the user (we'll skip this test since socialaccount_set requires allauth setup) - # This test would need proper allauth configuration to work - self.skipTest("Requires proper allauth socialaccount setup") - - def test_save_user_profile(self): - # Get the profile safely first - profile = UserProfile.objects.get(user=self.user) - profile.delete() - - # Refresh user to clear cached profile relationship - self.user.refresh_from_db() - - # Check that profile no longer exists - self.assertFalse(UserProfile.objects.filter(user=self.user).exists()) - - # Trigger save to recreate profile via signal - self.user.save() - - # Verify profile was recreated - self.assertTrue(UserProfile.objects.filter(user=self.user).exists()) - new_profile = UserProfile.objects.get(user=self.user) - self.assertIsInstance(new_profile, UserProfile) - - def test_sync_user_role_with_groups(self): - self.user.role = User.Roles.MODERATOR - self.user.save() - self.assertTrue(self.user.groups.filter(name=User.Roles.MODERATOR).exists()) - self.assertTrue(self.user.is_staff) - - self.user.role = User.Roles.ADMIN - self.user.save() - self.assertFalse(self.user.groups.filter(name=User.Roles.MODERATOR).exists()) - self.assertTrue(self.user.groups.filter(name=User.Roles.ADMIN).exists()) - self.assertTrue(self.user.is_staff) - - self.user.role = User.Roles.SUPERUSER - self.user.save() - self.assertFalse(self.user.groups.filter(name=User.Roles.ADMIN).exists()) - self.assertTrue(self.user.groups.filter(name=User.Roles.SUPERUSER).exists()) - self.assertTrue(self.user.is_superuser) - self.assertTrue(self.user.is_staff) - - self.user.role = User.Roles.USER - self.user.save() - self.assertFalse(self.user.groups.exists()) - self.assertFalse(self.user.is_superuser) - self.assertFalse(self.user.is_staff) - - def test_create_default_groups(self): - # Create some permissions for testing - content_type = ContentType.objects.get_for_model(User) - Permission.objects.create( - codename="change_review", - name="Can change review", - content_type=content_type, - ) - Permission.objects.create( - codename="delete_review", - name="Can delete review", - content_type=content_type, - ) - Permission.objects.create( - codename="change_user", - name="Can change user", - content_type=content_type, - ) - - create_default_groups() - - moderator_group = Group.objects.get(name=User.Roles.MODERATOR) - self.assertIsNotNone(moderator_group) - self.assertTrue( - moderator_group.permissions.filter(codename="change_review").exists() - ) - self.assertFalse( - moderator_group.permissions.filter(codename="change_user").exists() - ) - - admin_group = Group.objects.get(name=User.Roles.ADMIN) - self.assertIsNotNone(admin_group) - self.assertTrue( - admin_group.permissions.filter(codename="change_review").exists() - ) - self.assertTrue(admin_group.permissions.filter(codename="change_user").exists()) diff --git a/accounts/urls.py b/accounts/urls.py deleted file mode 100644 index 721d4026..00000000 --- a/accounts/urls.py +++ /dev/null @@ -1,48 +0,0 @@ -from django.urls import path -from django.contrib.auth import views as auth_views -from allauth.account.views import LogoutView -from . import views - -app_name = "accounts" - -urlpatterns = [ - # Override allauth's login and signup views with our Turnstile-enabled - # versions - path("login/", views.CustomLoginView.as_view(), name="account_login"), - path("signup/", views.CustomSignupView.as_view(), name="account_signup"), - # Authentication views - path("logout/", LogoutView.as_view(), name="logout"), - path( - "password_change/", - auth_views.PasswordChangeView.as_view(), - name="password_change", - ), - path( - "password_change/done/", - auth_views.PasswordChangeDoneView.as_view(), - name="password_change_done", - ), - path( - "password_reset/", - auth_views.PasswordResetView.as_view(), - name="password_reset", - ), - path( - "password_reset/done/", - auth_views.PasswordResetDoneView.as_view(), - name="password_reset_done", - ), - path( - "reset///", - auth_views.PasswordResetConfirmView.as_view(), - name="password_reset_confirm", - ), - path( - "reset/done/", - auth_views.PasswordResetCompleteView.as_view(), - name="password_reset_complete", - ), - # Profile views - path("profile/", views.user_redirect_view, name="profile_redirect"), - path("settings/", views.SettingsView.as_view(), name="settings"), -] diff --git a/accounts/views.py b/accounts/views.py deleted file mode 100644 index ed3720f6..00000000 --- a/accounts/views.py +++ /dev/null @@ -1,426 +0,0 @@ -from django.views.generic import DetailView, TemplateView -from django.contrib.auth import get_user_model -from django.shortcuts import get_object_or_404, redirect, render -from django.contrib.auth.decorators import login_required -from django.contrib.auth.mixins import LoginRequiredMixin -from django.contrib import messages -from django.core.exceptions import ValidationError -from django.template.loader import render_to_string -from django.utils.crypto import get_random_string -from django.utils import timezone -from datetime import timedelta -from django.contrib.sites.shortcuts import get_current_site -from django.contrib.sites.models import Site -from django.contrib.sites.requests import RequestSite -from django.db.models import QuerySet -from django.http import HttpResponseRedirect, HttpResponse, HttpRequest -from django.urls import reverse -from django.contrib.auth import login -from django.core.files.uploadedfile import UploadedFile -from accounts.models import ( - User, - PasswordReset, - TopList, - EmailVerification, - UserProfile, -) -from email_service.services import EmailService -from parks.models import ParkReview -from rides.models import RideReview -from allauth.account.views import LoginView, SignupView -from .mixins import TurnstileMixin -from typing import Dict, Any, Optional, Union, cast, TYPE_CHECKING -from django_htmx.http import HttpResponseClientRefresh -from contextlib import suppress -import re - -UserModel = get_user_model() - - -class CustomLoginView(TurnstileMixin, LoginView): - def form_valid(self, form): - try: - self.validate_turnstile(self.request) - except ValidationError as e: - form.add_error(None, str(e)) - return self.form_invalid(form) - - response = super().form_valid(form) - return ( - HttpResponseClientRefresh() - if getattr(self.request, "htmx", False) - else response - ) - - def form_invalid(self, form): - if getattr(self.request, "htmx", False): - return render( - self.request, - "account/partials/login_form.html", - self.get_context_data(form=form), - ) - return super().form_invalid(form) - - def get(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: - if getattr(request, "htmx", False): - return render( - request, - "account/partials/login_modal.html", - self.get_context_data(), - ) - return super().get(request, *args, **kwargs) - - -class CustomSignupView(TurnstileMixin, SignupView): - def form_valid(self, form): - try: - self.validate_turnstile(self.request) - except ValidationError as e: - form.add_error(None, str(e)) - return self.form_invalid(form) - - response = super().form_valid(form) - return ( - HttpResponseClientRefresh() - if getattr(self.request, "htmx", False) - else response - ) - - def form_invalid(self, form): - if getattr(self.request, "htmx", False): - return render( - self.request, - "account/partials/signup_modal.html", - self.get_context_data(form=form), - ) - return super().form_invalid(form) - - def get(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: - if getattr(request, "htmx", False): - return render( - request, - "account/partials/signup_modal.html", - self.get_context_data(), - ) - return super().get(request, *args, **kwargs) - - -@login_required -def user_redirect_view(request: HttpRequest) -> HttpResponse: - user = cast(User, request.user) - return redirect("profile", username=user.username) - - -def handle_social_login(request: HttpRequest, email: str) -> HttpResponse: - if sociallogin := request.session.get("socialaccount_sociallogin"): - sociallogin.user.email = email - sociallogin.save() - login(request, sociallogin.user) - del request.session["socialaccount_sociallogin"] - messages.success(request, "Successfully logged in") - return redirect("/") - - -def email_required(request: HttpRequest) -> HttpResponse: - if not request.session.get("socialaccount_sociallogin"): - messages.error(request, "No social login in progress") - return redirect("/") - - if request.method == "POST": - if email := request.POST.get("email"): - return handle_social_login(request, email) - messages.error(request, "Email is required") - return render( - request, - "accounts/email_required.html", - {"error": "Email is required"}, - ) - - return render(request, "accounts/email_required.html") - - -class ProfileView(DetailView): - model = User - template_name = "accounts/profile.html" - context_object_name = "profile_user" - slug_field = "username" - slug_url_kwarg = "username" - - def get_queryset(self) -> QuerySet[User]: - return User.objects.select_related("profile") - - def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: - context = super().get_context_data(**kwargs) - user = cast(User, self.get_object()) - - context["park_reviews"] = self._get_user_park_reviews(user) - context["ride_reviews"] = self._get_user_ride_reviews(user) - context["top_lists"] = self._get_user_top_lists(user) - - return context - - def _get_user_park_reviews(self, user: User) -> QuerySet[ParkReview]: - return ( - ParkReview.objects.filter(user=user, is_published=True) - .select_related("user", "user__profile", "park") - .order_by("-created_at")[:5] - ) - - def _get_user_ride_reviews(self, user: User) -> QuerySet[RideReview]: - return ( - RideReview.objects.filter(user=user, is_published=True) - .select_related("user", "user__profile", "ride") - .order_by("-created_at")[:5] - ) - - def _get_user_top_lists(self, user: User) -> QuerySet[TopList]: - return ( - TopList.objects.filter(user=user) - .select_related("user", "user__profile") - .prefetch_related("items") - .order_by("-created_at")[:5] - ) - - -class SettingsView(LoginRequiredMixin, TemplateView): - template_name = "accounts/settings.html" - - def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: - context = super().get_context_data(**kwargs) - context["user"] = self.request.user - return context - - def _handle_profile_update(self, request: HttpRequest) -> None: - user = cast(User, request.user) - profile = get_object_or_404(UserProfile, user=user) - - if display_name := request.POST.get("display_name"): - profile.display_name = display_name - - if "avatar" in request.FILES: - avatar_file = cast(UploadedFile, request.FILES["avatar"]) - profile.avatar.save(avatar_file.name, avatar_file, save=False) - profile.save() - - user.save() - messages.success(request, "Profile updated successfully") - - def _validate_password(self, password: str) -> bool: - """Validate password meets requirements.""" - return ( - len(password) >= 8 - and bool(re.search(r"[A-Z]", password)) - and bool(re.search(r"[a-z]", password)) - and bool(re.search(r"[0-9]", password)) - ) - - def _send_password_change_confirmation( - self, request: HttpRequest, user: User - ) -> None: - """Send password change confirmation email.""" - site = get_current_site(request) - context = { - "user": user, - "site_name": site.name, - } - - email_html = render_to_string( - "accounts/email/password_change_confirmation.html", context - ) - - EmailService.send_email( - to=user.email, - subject="Password Changed Successfully", - text="Your password has been changed successfully.", - site=site, - html=email_html, - ) - - def _handle_password_change( - self, request: HttpRequest - ) -> Optional[HttpResponseRedirect]: - user = cast(User, request.user) - old_password = request.POST.get("old_password", "") - new_password = request.POST.get("new_password", "") - confirm_password = request.POST.get("confirm_password", "") - - if not user.check_password(old_password): - messages.error(request, "Current password is incorrect") - return None - - if new_password != confirm_password: - messages.error(request, "New passwords do not match") - return None - - if not self._validate_password(new_password): - messages.error( - request, - "Password must be at least 8 characters and contain uppercase, lowercase, and numbers", - ) - return None - - user.set_password(new_password) - user.save() - - self._send_password_change_confirmation(request, user) - messages.success( - request, - "Password changed successfully. Please check your email for confirmation.", - ) - return HttpResponseRedirect(reverse("account_login")) - - def _handle_email_change(self, request: HttpRequest) -> None: - if new_email := request.POST.get("new_email"): - self._send_email_verification(request, new_email) - messages.success( - request, "Verification email sent to your new email address" - ) - else: - messages.error(request, "New email is required") - - def _send_email_verification(self, request: HttpRequest, new_email: str) -> None: - user = cast(User, request.user) - token = get_random_string(64) - EmailVerification.objects.update_or_create(user=user, defaults={"token": token}) - - site = cast(Site, get_current_site(request)) - verification_url = reverse("verify_email", kwargs={"token": token}) - - context = { - "user": user, - "verification_url": verification_url, - "site_name": site.name, - } - - email_html = render_to_string("accounts/email/verify_email.html", context) - EmailService.send_email( - to=new_email, - subject="Verify your new email address", - text="Click the link to verify your new email address", - site=site, - html=email_html, - ) - - user.pending_email = new_email - user.save() - - def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: - action = request.POST.get("action") - - if action == "update_profile": - self._handle_profile_update(request) - elif action == "change_password": - if response := self._handle_password_change(request): - return response - elif action == "change_email": - self._handle_email_change(request) - - return self.get(request, *args, **kwargs) - - -def create_password_reset_token(user: User) -> str: - token = get_random_string(64) - PasswordReset.objects.update_or_create( - user=user, - defaults={ - "token": token, - "expires_at": timezone.now() + timedelta(hours=24), - }, - ) - return token - - -def send_password_reset_email( - user: User, site: Union[Site, RequestSite], token: str -) -> None: - reset_url = reverse("password_reset_confirm", kwargs={"token": token}) - context = { - "user": user, - "reset_url": reset_url, - "site_name": site.name, - } - email_html = render_to_string("accounts/email/password_reset.html", context) - - EmailService.send_email( - to=user.email, - subject="Reset your password", - text="Click the link to reset your password", - site=site, - html=email_html, - ) - - -def request_password_reset(request: HttpRequest) -> HttpResponse: - if request.method != "POST": - return render(request, "accounts/password_reset.html") - - if not (email := request.POST.get("email")): - messages.error(request, "Email is required") - return redirect("account_reset_password") - - with suppress(User.DoesNotExist): - user = User.objects.get(email=email) - token = create_password_reset_token(user) - site = get_current_site(request) - send_password_reset_email(user, site, token) - - messages.success(request, "Password reset email sent") - return redirect("account_login") - - -def handle_password_reset( - request: HttpRequest, - user: User, - new_password: str, - reset: PasswordReset, - site: Union[Site, RequestSite], -) -> None: - user.set_password(new_password) - user.save() - - reset.used = True - reset.save() - - send_password_reset_confirmation(user, site) - messages.success(request, "Password reset successfully") - - -def send_password_reset_confirmation( - user: User, site: Union[Site, RequestSite] -) -> None: - context = { - "user": user, - "site_name": site.name, - } - email_html = render_to_string( - "accounts/email/password_reset_complete.html", context - ) - - EmailService.send_email( - to=user.email, - subject="Password Reset Complete", - text="Your password has been reset successfully.", - site=site, - html=email_html, - ) - - -def reset_password(request: HttpRequest, token: str) -> HttpResponse: - try: - reset = PasswordReset.objects.select_related("user").get( - token=token, expires_at__gt=timezone.now(), used=False - ) - - if request.method == "POST": - if new_password := request.POST.get("new_password"): - site = get_current_site(request) - handle_password_reset(request, reset.user, new_password, reset, site) - return redirect("account_login") - - messages.error(request, "New password is required") - - return render(request, "accounts/password_reset_confirm.html", {"token": token}) - - except PasswordReset.DoesNotExist: - messages.error(request, "Invalid or expired reset token") - return redirect("account_reset_password") diff --git a/avatars/letters/0_avatar.png b/avatars/letters/0_avatar.png deleted file mode 100644 index 386a9c5a..00000000 Binary files a/avatars/letters/0_avatar.png and /dev/null differ diff --git a/avatars/letters/1_avatar.png b/avatars/letters/1_avatar.png deleted file mode 100644 index bf69a3ed..00000000 Binary files a/avatars/letters/1_avatar.png and /dev/null differ diff --git a/avatars/letters/2_avatar.png b/avatars/letters/2_avatar.png deleted file mode 100644 index 992aba8e..00000000 Binary files a/avatars/letters/2_avatar.png and /dev/null differ diff --git a/avatars/letters/3_avatar.png b/avatars/letters/3_avatar.png deleted file mode 100644 index 8b4acc8e..00000000 Binary files a/avatars/letters/3_avatar.png and /dev/null differ diff --git a/avatars/letters/4_avatar.png b/avatars/letters/4_avatar.png deleted file mode 100644 index db8a5fdb..00000000 Binary files a/avatars/letters/4_avatar.png and /dev/null differ diff --git a/avatars/letters/5_avatar.png b/avatars/letters/5_avatar.png deleted file mode 100644 index 720bfb36..00000000 Binary files a/avatars/letters/5_avatar.png and /dev/null differ diff --git a/avatars/letters/6_avatar.png b/avatars/letters/6_avatar.png deleted file mode 100644 index ff823d82..00000000 Binary files a/avatars/letters/6_avatar.png and /dev/null differ diff --git a/avatars/letters/7_avatar.png b/avatars/letters/7_avatar.png deleted file mode 100644 index d70771f6..00000000 Binary files a/avatars/letters/7_avatar.png and /dev/null differ diff --git a/avatars/letters/8_avatar.png b/avatars/letters/8_avatar.png deleted file mode 100644 index cc7e1dbc..00000000 Binary files a/avatars/letters/8_avatar.png and /dev/null differ diff --git a/avatars/letters/9_avatar.png b/avatars/letters/9_avatar.png deleted file mode 100644 index 6fde44c8..00000000 Binary files a/avatars/letters/9_avatar.png and /dev/null differ diff --git a/avatars/letters/A_avatar.png b/avatars/letters/A_avatar.png deleted file mode 100644 index b18da02c..00000000 Binary files a/avatars/letters/A_avatar.png and /dev/null differ diff --git a/avatars/letters/B_avatar.png b/avatars/letters/B_avatar.png deleted file mode 100644 index d66ea1b7..00000000 Binary files a/avatars/letters/B_avatar.png and /dev/null differ diff --git a/avatars/letters/C_avatar.png b/avatars/letters/C_avatar.png deleted file mode 100644 index e0ccb580..00000000 Binary files a/avatars/letters/C_avatar.png and /dev/null differ diff --git a/avatars/letters/D_avatar.png b/avatars/letters/D_avatar.png deleted file mode 100644 index 7dd34961..00000000 Binary files a/avatars/letters/D_avatar.png and /dev/null differ diff --git a/avatars/letters/E_avatar.png b/avatars/letters/E_avatar.png deleted file mode 100644 index 46626957..00000000 Binary files a/avatars/letters/E_avatar.png and /dev/null differ diff --git a/avatars/letters/F_avatar.png b/avatars/letters/F_avatar.png deleted file mode 100644 index adfbfc0f..00000000 Binary files a/avatars/letters/F_avatar.png and /dev/null differ diff --git a/avatars/letters/G_avatar.png b/avatars/letters/G_avatar.png deleted file mode 100644 index 84fa26c5..00000000 Binary files a/avatars/letters/G_avatar.png and /dev/null differ diff --git a/avatars/letters/H_avatar.png b/avatars/letters/H_avatar.png deleted file mode 100644 index d0847b2d..00000000 Binary files a/avatars/letters/H_avatar.png and /dev/null differ diff --git a/avatars/letters/I_avatar.png b/avatars/letters/I_avatar.png deleted file mode 100644 index 0fb6102b..00000000 Binary files a/avatars/letters/I_avatar.png and /dev/null differ diff --git a/avatars/letters/J_avatar.png b/avatars/letters/J_avatar.png deleted file mode 100644 index f6bb2945..00000000 Binary files a/avatars/letters/J_avatar.png and /dev/null differ diff --git a/avatars/letters/K_avatar.png b/avatars/letters/K_avatar.png deleted file mode 100644 index 5a128787..00000000 Binary files a/avatars/letters/K_avatar.png and /dev/null differ diff --git a/avatars/letters/L_avatar.png b/avatars/letters/L_avatar.png deleted file mode 100644 index c32f6dac..00000000 Binary files a/avatars/letters/L_avatar.png and /dev/null differ diff --git a/avatars/letters/M_avatar.png b/avatars/letters/M_avatar.png deleted file mode 100644 index 3a41bfbf..00000000 Binary files a/avatars/letters/M_avatar.png and /dev/null differ diff --git a/avatars/letters/N_avatar.png b/avatars/letters/N_avatar.png deleted file mode 100644 index 63134b10..00000000 Binary files a/avatars/letters/N_avatar.png and /dev/null differ diff --git a/avatars/letters/O_avatar.png b/avatars/letters/O_avatar.png deleted file mode 100644 index 1327bf3d..00000000 Binary files a/avatars/letters/O_avatar.png and /dev/null differ diff --git a/avatars/letters/P_avatar.png b/avatars/letters/P_avatar.png deleted file mode 100644 index b0ba5843..00000000 Binary files a/avatars/letters/P_avatar.png and /dev/null differ diff --git a/avatars/letters/Q_avatar.png b/avatars/letters/Q_avatar.png deleted file mode 100644 index d15ca407..00000000 Binary files a/avatars/letters/Q_avatar.png and /dev/null differ diff --git a/avatars/letters/R_avatar.png b/avatars/letters/R_avatar.png deleted file mode 100644 index 9f68b774..00000000 Binary files a/avatars/letters/R_avatar.png and /dev/null differ diff --git a/avatars/letters/S_avatar.png b/avatars/letters/S_avatar.png deleted file mode 100644 index 9dc10694..00000000 Binary files a/avatars/letters/S_avatar.png and /dev/null differ diff --git a/avatars/letters/T_avatar.png b/avatars/letters/T_avatar.png deleted file mode 100644 index 53af421e..00000000 Binary files a/avatars/letters/T_avatar.png and /dev/null differ diff --git a/avatars/letters/U_avatar.png b/avatars/letters/U_avatar.png deleted file mode 100644 index 50914422..00000000 Binary files a/avatars/letters/U_avatar.png and /dev/null differ diff --git a/avatars/letters/V_avatar.png b/avatars/letters/V_avatar.png deleted file mode 100644 index eca19b33..00000000 Binary files a/avatars/letters/V_avatar.png and /dev/null differ diff --git a/avatars/letters/W_avatar.png b/avatars/letters/W_avatar.png deleted file mode 100644 index 1f8ab220..00000000 Binary files a/avatars/letters/W_avatar.png and /dev/null differ diff --git a/avatars/letters/X_avatar.png b/avatars/letters/X_avatar.png deleted file mode 100644 index ce50a79d..00000000 Binary files a/avatars/letters/X_avatar.png and /dev/null differ diff --git a/avatars/letters/Y_avatar.png b/avatars/letters/Y_avatar.png deleted file mode 100644 index 7bf3d7b6..00000000 Binary files a/avatars/letters/Y_avatar.png and /dev/null differ diff --git a/avatars/letters/Z_avatar.png b/avatars/letters/Z_avatar.png deleted file mode 100644 index cad23538..00000000 Binary files a/avatars/letters/Z_avatar.png and /dev/null differ diff --git a/backups/config/.github-pat.20250818_210101.backup b/backups/config/.github-pat.20250818_210101.backup deleted file mode 100644 index 630c5d5e..00000000 --- a/backups/config/.github-pat.20250818_210101.backup +++ /dev/null @@ -1 +0,0 @@ -[GITHUB-TOKEN-REMOVED] \ No newline at end of file diff --git a/backups/config/thrillwiki-automation.env.20250818_210101.backup b/backups/config/thrillwiki-automation.env.20250818_210101.backup deleted file mode 100644 index c06fa181..00000000 --- a/backups/config/thrillwiki-automation.env.20250818_210101.backup +++ /dev/null @@ -1,203 +0,0 @@ -# ThrillWiki Automation Service Environment Configuration -# Copy this file to thrillwiki-automation***REMOVED*** and customize for your environment -# -# Security Note: This file should have restricted permissions (600) as it may contain -# sensitive information like GitHub Personal Access Tokens - -# [AWS-SECRET-REMOVED]==================================== -# PROJECT CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Base project directory (usually auto-detected) -# PROJECT_DIR=/home/ubuntu/thrillwiki - -# Service name for systemd integration -# SERVICE_NAME=thrillwiki - -# [AWS-SECRET-REMOVED]==================================== -# GITHUB REPOSITORY CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# GitHub repository remote name -# GITHUB_REPO=origin - -# Branch to pull from -# GITHUB_BRANCH=main - -# GitHub Personal Access Token (PAT) - Required for private repositories -# Generate at: https://github.com/settings/tokens -# Required permissions: repo (Full control of private repositories) -# GITHUB_TOKEN=ghp_your_personal_access_token_here - -# GitHub token file location (alternative to GITHUB_TOKEN) -# GITHUB_TOKEN_FILE=/home/ubuntu/thrillwiki/.github-pat - -# [AWS-SECRET-REMOVED]==================================== -# AUTOMATION TIMING CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Repository pull interval in seconds (default: 300 = 5 minutes) -# PULL_INTERVAL=300 - -# Health check interval in seconds (default: 60 = 1 minute) -# HEALTH_CHECK_INTERVAL=60 - -# Server startup timeout in seconds (default: 120 = 2 minutes) -# STARTUP_TIMEOUT=120 - -# Restart delay after failure in seconds (default: 10) -# RESTART_DELAY=10 - -# [AWS-SECRET-REMOVED]==================================== -# LOGGING CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Log directory (default: project_dir/logs) -# LOG_DIR=/home/ubuntu/thrillwiki/logs - -# Log file path -# LOG_[AWS-SECRET-REMOVED]proof-automation.log - -# Maximum log file size in bytes (default: 10485760 = 10MB) -# MAX_LOG_SIZE=10485760 - -# Lock file location to prevent multiple instances -# LOCK_FILE=/tmp/thrillwiki-bulletproof.lock - -# [AWS-SECRET-REMOVED]==================================== -# DEVELOPMENT SERVER CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Server host address (default: 0.0.0.0 for all interfaces) -# SERVER_HOST=0.0.0.0 - -# Server port (default: 8000) -# SERVER_PORT=8000 - -# [AWS-SECRET-REMOVED]==================================== -# DJANGO CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Django settings module -# DJANGO_SETTINGS_MODULE=thrillwiki.settings - -# Python path -# PYTHONPATH=/home/ubuntu/thrillwiki - -# [AWS-SECRET-REMOVED]==================================== -# ADVANCED CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# GitHub authentication script location -# GITHUB_AUTH_[AWS-SECRET-REMOVED]ithub-auth.py - -# Enable verbose logging (true/false) -# VERBOSE_LOGGING=false - -# Enable debug mode for troubleshooting (true/false) -# DEBUG_MODE=false - -# Custom git remote URL (overrides GITHUB_REPO if set) -# CUSTOM_GIT_REMOTE=https://github.com/username/repository.git - -# Email notifications for critical failures (requires email configuration) -# NOTIFICATION_EMAIL=admin@example.com - -# Maximum consecutive failures before alerting (default: 5) -# MAX_CONSECUTIVE_FAILURES=5 - -# Enable automatic dependency updates (true/false, default: true) -# AUTO_UPDATE_DEPENDENCIES=true - -# Enable automatic migrations on code changes (true/false, default: true) -# AUTO_MIGRATE=true - -# Enable automatic static file collection (true/false, default: true) -# AUTO_COLLECTSTATIC=true - -# [AWS-SECRET-REMOVED]==================================== -# SECURITY CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# GitHub authentication method (token|ssh|https) -# Default: token (uses GITHUB_TOKEN or GITHUB_TOKEN_FILE) -# GITHUB_AUTH_METHOD=token - -# SSH key path for git operations (when using ssh auth method) -# SSH_KEY_PATH=/home/ubuntu/.ssh/***REMOVED*** - -# Git user configuration for commits -# GIT_USER_NAME="ThrillWiki Automation" -# GIT_USER_EMAIL="automation@thrillwiki.local" - -# [AWS-SECRET-REMOVED]==================================== -# MONITORING AND HEALTH CHECKS -# [AWS-SECRET-REMOVED]==================================== - -# Health check URL to verify server is running -# HEALTH_CHECK_URL=http://localhost:8000/health/ - -# Health check timeout in seconds -# HEALTH_CHECK_TIMEOUT=30 - -# Enable system resource monitoring (true/false) -# MONITOR_RESOURCES=true - -# Memory usage threshold for warnings (in MB) -# MEMORY_WARNING_THRESHOLD=1024 - -# CPU usage threshold for warnings (percentage) -# CPU_WARNING_THRESHOLD=80 - -# Disk usage threshold for warnings (percentage) -# DISK_WARNING_THRESHOLD=90 - -# [AWS-SECRET-REMOVED]==================================== -# INTEGRATION SETTINGS -# [AWS-SECRET-REMOVED]==================================== - -# Webhook integration (if using thrillwiki-webhook service) -# WEBHOOK_INTEGRATION=true - -# Slack webhook URL for notifications (optional) -# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook/url - -# Discord webhook URL for notifications (optional) -# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/your/webhook/url - -# [AWS-SECRET-REMOVED]==================================== -# USAGE EXAMPLES -# [AWS-SECRET-REMOVED]==================================== - -# Example 1: Basic setup with GitHub PAT -# GITHUB_TOKEN=ghp_your_token_here -# PULL_INTERVAL=300 -# AUTO_MIGRATE=true - -# Example 2: Enhanced monitoring setup -# HEALTH_CHECK_INTERVAL=30 -# MONITOR_RESOURCES=true -# NOTIFICATION_EMAIL=admin@thrillwiki.com -# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook - -# Example 3: Development environment with frequent pulls -# PULL_INTERVAL=60 -# DEBUG_MODE=true -# VERBOSE_LOGGING=true -# AUTO_UPDATE_DEPENDENCIES=true - -# [AWS-SECRET-REMOVED]==================================== -# INSTALLATION NOTES -# [AWS-SECRET-REMOVED]==================================== - -# 1. Copy this file: cp thrillwiki-automation***REMOVED***.example thrillwiki-automation***REMOVED*** -# 2. Set secure permissions: chmod 600 thrillwiki-automation***REMOVED*** -# 3. Customize the settings above for your environment -# 4. Enable the service: sudo systemctl enable thrillwiki-automation -# 5. Start the service: sudo systemctl start thrillwiki-automation -# 6. Check status: sudo systemctl status thrillwiki-automation -# 7. View logs: sudo journalctl -u thrillwiki-automation -f - -# For security, ensure only the ubuntu user can read this file: -# sudo chown ubuntu:ubuntu thrillwiki-automation***REMOVED*** -# sudo chmod 600 thrillwiki-automation***REMOVED*** \ No newline at end of file diff --git a/config/__init__.py b/config/__init__.py deleted file mode 100644 index 81456fa2..00000000 --- a/config/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Configuration package for thrillwiki project diff --git a/config/django/__init__.py b/config/django/__init__.py deleted file mode 100644 index 0b64ab72..00000000 --- a/config/django/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Django settings package diff --git a/config/django/base.py b/config/django/base.py deleted file mode 100644 index 3fa7a9ca..00000000 --- a/config/django/base.py +++ /dev/null @@ -1,377 +0,0 @@ -""" -Base Django settings for thrillwiki project. -Common settings shared across all environments. -""" - -import environ # type: ignore[import] -from pathlib import Path - -# Initialize environment variables -env = environ.Env( - DEBUG=(bool, False), - SECRET_KEY=(str, ""), - ALLOWED_HOSTS=(list, []), - DATABASE_URL=(str, ""), - CACHE_URL=(str, "locmem://"), - EMAIL_URL=(str, ""), - REDIS_URL=(str, ""), -) - -# Build paths inside the project like this: BASE_DIR / 'subdir'. -BASE_DIR = Path(__file__).resolve().parent.parent.parent - -# Read environment file if it exists -environ.Env.read_env(BASE_DIR / ".env") - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = env("SECRET_KEY") - -# SECURITY WARNING: don't run with debug turned on in production! -DEBUG = env("DEBUG") - -# Allowed hosts -ALLOWED_HOSTS = env("ALLOWED_HOSTS") - -# CSRF trusted origins -CSRF_TRUSTED_ORIGINS = env("CSRF_TRUSTED_ORIGINS", default=[]) # type: ignore[arg-type] - -# Application definition -DJANGO_APPS = [ - "django.contrib.admin", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "django.contrib.sites", - "django.contrib.gis", # GeoDjango -] - -THIRD_PARTY_APPS = [ - "rest_framework", # Django REST Framework - "drf_spectacular", # OpenAPI 3.0 documentation - "corsheaders", # CORS headers for API - "pghistory", # django-pghistory - "pgtrigger", # Required by django-pghistory - "allauth", - "allauth.account", - "allauth.socialaccount", - "allauth.socialaccount.providers.google", - "allauth.socialaccount.providers.discord", - "django_cleanup", - "django_filters", - "django_htmx", - "whitenoise", - "django_tailwind_cli", - "autocomplete", # Django HTMX Autocomplete - "health_check", # Health checks - "health_check.db", - "health_check.cache", - "health_check.storage", - "health_check.contrib.migrations", - "health_check.contrib.redis", -] - -LOCAL_APPS = [ - "core", - "accounts", - "parks", - "rides", - "email_service", - "media.apps.MediaConfig", - "moderation", - "location", -] - -INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS - -MIDDLEWARE = [ - "django.middleware.cache.UpdateCacheMiddleware", - "corsheaders.middleware.CorsMiddleware", # CORS middleware for API - "django.middleware.security.SecurityMiddleware", - "whitenoise.middleware.WhiteNoiseMiddleware", - "django.contrib.sessions.middleware.SessionMiddleware", - "django.middleware.common.CommonMiddleware", - "django.middleware.csrf.CsrfViewMiddleware", - "django.contrib.auth.middleware.AuthenticationMiddleware", - "django.contrib.messages.middleware.MessageMiddleware", - "django.middleware.clickjacking.XFrameOptionsMiddleware", - "core.middleware.PgHistoryContextMiddleware", # Add history context tracking - "allauth.account.middleware.AccountMiddleware", - "django.middleware.cache.FetchFromCacheMiddleware", - "django_htmx.middleware.HtmxMiddleware", - "core.middleware.PageViewMiddleware", # Add our page view tracking -] - -ROOT_URLCONF = "thrillwiki.urls" - -TEMPLATES = [ - { - "BACKEND": "django.template.backends.django.DjangoTemplates", - "DIRS": [BASE_DIR / "templates"], - "APP_DIRS": True, - "OPTIONS": { - "context_processors": [ - "django.template.context_processors.debug", - "django.template.context_processors.request", - "django.contrib.auth.context_processors.auth", - "django.contrib.messages.context_processors.messages", - "moderation.context_processors.moderation_access", - ] - }, - } -] - -WSGI_APPLICATION = "thrillwiki.wsgi.application" - -# Password validation -AUTH_PASSWORD_VALIDATORS = [ - { - "NAME": ( - "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" - ), - }, - { - "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", - }, - { - "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", - }, -] - -# Internationalization -LANGUAGE_CODE = "en-us" -TIME_ZONE = "America/New_York" -USE_I18N = True -USE_TZ = True - -# Static files (CSS, JavaScript, Images) -STATIC_URL = "static/" -STATICFILES_DIRS = [BASE_DIR / "static"] -STATIC_ROOT = BASE_DIR / "staticfiles" - -# Media files -MEDIA_URL = "/media/" -MEDIA_ROOT = BASE_DIR / "media" - -# Default primary key field type -DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" - -# Authentication settings -AUTHENTICATION_BACKENDS = [ - "django.contrib.auth.backends.ModelBackend", - "allauth.account.auth_backends.AuthenticationBackend", -] - -# django-allauth settings -SITE_ID = 1 -ACCOUNT_SIGNUP_FIELDS = ["email*", "username*", "password1*", "password2*"] -ACCOUNT_LOGIN_METHODS = {"email", "username"} -ACCOUNT_EMAIL_VERIFICATION = "optional" -LOGIN_REDIRECT_URL = "/" -ACCOUNT_LOGOUT_REDIRECT_URL = "/" - -# Custom adapters -ACCOUNT_ADAPTER = "accounts.adapters.CustomAccountAdapter" -SOCIALACCOUNT_ADAPTER = "accounts.adapters.CustomSocialAccountAdapter" - -# Social account settings -SOCIALACCOUNT_PROVIDERS = { - "google": { - "SCOPE": [ - "profile", - "email", - ], - "AUTH_PARAMS": {"access_type": "online"}, - }, - "discord": { - "SCOPE": ["identify", "email"], - "OAUTH_PKCE_ENABLED": True, - }, -} - -# Additional social account settings -SOCIALACCOUNT_LOGIN_ON_GET = True -SOCIALACCOUNT_AUTO_SIGNUP = False -SOCIALACCOUNT_STORE_TOKENS = True - -# Custom User Model -AUTH_USER_MODEL = "accounts.User" - -# Autocomplete configuration -AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False - -# Tailwind configuration -TAILWIND_CLI_CONFIG_FILE = BASE_DIR / "tailwind.config.js" -TAILWIND_CLI_SRC_CSS = BASE_DIR / "static/css/src/input.css" -TAILWIND_CLI_DIST_CSS = BASE_DIR / "static/css/tailwind.css" - -# Test runner -TEST_RUNNER = "django.test.runner.DiscoverRunner" - -# Road Trip Service Settings -ROADTRIP_CACHE_TIMEOUT = 3600 * 24 # 24 hours for geocoding -ROADTRIP_ROUTE_CACHE_TIMEOUT = 3600 * 6 # 6 hours for routes -ROADTRIP_MAX_REQUESTS_PER_SECOND = 1 # Respect OSM rate limits -ROADTRIP_USER_AGENT = "ThrillWiki Road Trip Planner (https://thrillwiki.com)" -ROADTRIP_REQUEST_TIMEOUT = 10 # seconds -ROADTRIP_MAX_RETRIES = 3 -ROADTRIP_BACKOFF_FACTOR = 2 - -# Django REST Framework Settings -REST_FRAMEWORK = { - "DEFAULT_AUTHENTICATION_CLASSES": [ - "rest_framework.authentication.SessionAuthentication", - "rest_framework.authentication.TokenAuthentication", - ], - "DEFAULT_PERMISSION_CLASSES": [ - "rest_framework.permissions.IsAuthenticated", - ], - "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination", - "PAGE_SIZE": 20, - "DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.AcceptHeaderVersioning", - "DEFAULT_VERSION": "v1", - "ALLOWED_VERSIONS": ["v1"], - "DEFAULT_RENDERER_CLASSES": [ - "rest_framework.renderers.JSONRenderer", - "rest_framework.renderers.BrowsableAPIRenderer", - ], - "DEFAULT_PARSER_CLASSES": [ - "rest_framework.parsers.JSONParser", - "rest_framework.parsers.FormParser", - "rest_framework.parsers.MultiPartParser", - ], - "EXCEPTION_HANDLER": "core.api.exceptions.custom_exception_handler", - "DEFAULT_FILTER_BACKENDS": [ - "django_filters.rest_framework.DjangoFilterBackend", - "rest_framework.filters.SearchFilter", - "rest_framework.filters.OrderingFilter", - ], - "TEST_REQUEST_DEFAULT_FORMAT": "json", - "NON_FIELD_ERRORS_KEY": "non_field_errors", - "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema", -} - -# CORS Settings for API -CORS_ALLOWED_ORIGINS = env("CORS_ALLOWED_ORIGINS", default=[]) # type: ignore[arg-type] -CORS_ALLOW_CREDENTIALS = True -CORS_ALLOW_ALL_ORIGINS = env( - "CORS_ALLOW_ALL_ORIGINS", default=False -) # type: ignore[arg-type] - -# API-specific settings -API_RATE_LIMIT_PER_MINUTE = env.int( - "API_RATE_LIMIT_PER_MINUTE", default=60 -) # type: ignore[arg-type] -API_RATE_LIMIT_PER_HOUR = env.int( - "API_RATE_LIMIT_PER_HOUR", default=1000 -) # type: ignore[arg-type] - -# drf-spectacular settings -SPECTACULAR_SETTINGS = { - "TITLE": "ThrillWiki API", - "DESCRIPTION": "Comprehensive theme park and ride information API", - "VERSION": "1.0.0", - "SERVE_INCLUDE_SCHEMA": False, - "COMPONENT_SPLIT_REQUEST": True, - "TAGS": [ - {"name": "parks", "description": "Theme park operations"}, - {"name": "rides", "description": "Ride information and management"}, - {"name": "locations", "description": "Geographic location services"}, - {"name": "accounts", "description": "User account management"}, - {"name": "media", "description": "Media and image management"}, - {"name": "moderation", "description": "Content moderation"}, - ], - "SCHEMA_PATH_PREFIX": "/api/", - "DEFAULT_GENERATOR_CLASS": "drf_spectacular.generators.SchemaGenerator", - "SERVE_PERMISSIONS": ["rest_framework.permissions.AllowAny"], - "SWAGGER_UI_SETTINGS": { - "deepLinking": True, - "persistAuthorization": True, - "displayOperationId": False, - "displayRequestDuration": True, - }, - "REDOC_UI_SETTINGS": { - "hideDownloadButton": False, - "hideHostname": False, - "hideLoading": False, - "hideSchemaPattern": True, - "scrollYOffset": 0, - "theme": {"colors": {"primary": {"main": "#1976d2"}}}, - }, -} - -# Health Check Configuration -HEALTH_CHECK = { - "DISK_USAGE_MAX": 90, # Fail if disk usage is over 90% - "MEMORY_MIN": 100, # Fail if less than 100MB available memory -} - -# Custom health check backends -HEALTH_CHECK_BACKENDS = [ - "health_check.db", - "health_check.cache", - "health_check.storage", - "core.health_checks.custom_checks.CacheHealthCheck", - "core.health_checks.custom_checks.DatabasePerformanceCheck", - "core.health_checks.custom_checks.ApplicationHealthCheck", - "core.health_checks.custom_checks.ExternalServiceHealthCheck", - "core.health_checks.custom_checks.DiskSpaceHealthCheck", -] - -# Enhanced Cache Configuration -DJANGO_REDIS_CACHE_BACKEND = "django_redis.cache.RedisCache" -DJANGO_REDIS_CLIENT_CLASS = "django_redis.client.DefaultClient" - -CACHES = { - "default": { - "BACKEND": DJANGO_REDIS_CACHE_BACKEND, - # type: ignore[arg-type] - # pyright: ignore[reportArgumentType] - # pyright: ignore[reportArgumentType] - # type: ignore - "LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/1"), - "OPTIONS": { - "CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS, - "PARSER_CLASS": "redis.connection.HiredisParser", - "CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool", - "CONNECTION_POOL_CLASS_KWARGS": { - "max_connections": 50, - "timeout": 20, - }, - "COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor", - "IGNORE_EXCEPTIONS": True, - }, - "KEY_PREFIX": "thrillwiki", - "VERSION": 1, - }, - "sessions": { - "BACKEND": DJANGO_REDIS_CACHE_BACKEND, - # type: ignore[arg-type] - # type: ignore - "LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/2"), - "OPTIONS": { - "CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS, - }, - }, - "api": { - "BACKEND": DJANGO_REDIS_CACHE_BACKEND, - # type: ignore[arg-type] - "LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/3"), - "OPTIONS": { - "CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS, - }, - }, -} - -# Use Redis for sessions -SESSION_ENGINE = "django.contrib.sessions.backends.cache" -SESSION_CACHE_ALIAS = "sessions" -SESSION_COOKIE_AGE = 86400 # 24 hours - -# Cache middleware settings -CACHE_MIDDLEWARE_SECONDS = 300 # 5 minutes -CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki" diff --git a/config/django/local.py b/config/django/local.py deleted file mode 100644 index 6f33014f..00000000 --- a/config/django/local.py +++ /dev/null @@ -1,189 +0,0 @@ -""" -Local development settings for thrillwiki project. -""" - -import logging -from .base import * -from ..settings import database - -# Import the module and use its members, e.g., email.EMAIL_HOST - -# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS - -# Import database configuration -DATABASES = database.DATABASES - -# Development-specific settings -DEBUG = True - -# For local development, allow all hosts -ALLOWED_HOSTS = ["*"] - -# CSRF trusted origins for local development -CSRF_TRUSTED_ORIGINS = [ - "http://localhost:8000", - "http://127.0.0.1:8000", - "https://beta.thrillwiki.com", -] - -GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib" -GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib" - -# Local cache configuration -LOC_MEM_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache" - -CACHES = { - "default": { - "BACKEND": LOC_MEM_CACHE_BACKEND, - "LOCATION": "unique-snowflake", - "TIMEOUT": 300, # 5 minutes - "OPTIONS": {"MAX_ENTRIES": 1000}, - }, - "sessions": { - "BACKEND": LOC_MEM_CACHE_BACKEND, - "LOCATION": "sessions-cache", - "TIMEOUT": 86400, # 24 hours (same as SESSION_COOKIE_AGE) - "OPTIONS": {"MAX_ENTRIES": 5000}, - }, - "api": { - "BACKEND": LOC_MEM_CACHE_BACKEND, - "LOCATION": "api-cache", - "TIMEOUT": 300, # 5 minutes - "OPTIONS": {"MAX_ENTRIES": 2000}, - }, -} - -# Development-friendly cache settings -CACHE_MIDDLEWARE_SECONDS = 1 # Very short cache for development -CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki_dev" - -# Development email backend -EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" - -# Security settings for development -SECURE_SSL_REDIRECT = False -SESSION_COOKIE_SECURE = False -CSRF_COOKIE_SECURE = False - -# Development monitoring tools -DEVELOPMENT_APPS = [ - "silk", - "debug_toolbar", - "nplusone.ext.django", - "django_extensions", -] - -# Add development apps if available -for app in DEVELOPMENT_APPS: - if app not in INSTALLED_APPS: - INSTALLED_APPS.append(app) - -# Development middleware -DEVELOPMENT_MIDDLEWARE = [ - "silk.middleware.SilkyMiddleware", - "debug_toolbar.middleware.DebugToolbarMiddleware", - "nplusone.ext.django.NPlusOneMiddleware", - "core.middleware.performance_middleware.PerformanceMiddleware", - "core.middleware.performance_middleware.QueryCountMiddleware", -] - -# Add development middleware -for middleware in DEVELOPMENT_MIDDLEWARE: - if middleware not in MIDDLEWARE: - MIDDLEWARE.insert(1, middleware) # Insert after security middleware - -# Debug toolbar configuration -INTERNAL_IPS = ["127.0.0.1", "::1"] - -# Silk configuration for development -# Disable profiler to avoid silk_profile installation issues -SILKY_PYTHON_PROFILER = False -SILKY_PYTHON_PROFILER_BINARY = False # Disable binary profiler -SILKY_PYTHON_PROFILER_RESULT_PATH = ( - BASE_DIR / "profiles" -) # Not needed when profiler is disabled -SILKY_AUTHENTICATION = True # Require login to access Silk -SILKY_AUTHORISATION = True # Enable authorization -SILKY_MAX_REQUEST_BODY_SIZE = -1 # Don't limit request body size -# Limit response body size to 1KB for performance -SILKY_MAX_RESPONSE_BODY_SIZE = 1024 -SILKY_META = True # Record metadata about requests - -# NPlusOne configuration -NPLUSONE_LOGGER = logging.getLogger("nplusone") -NPLUSONE_LOG_LEVEL = logging.WARN - -# Enhanced development logging -LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "verbose": { - "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", - "style": "{", - }, - "json": { - "()": "pythonjsonlogger.jsonlogger.JsonFormatter", - "format": ( - "%(levelname)s %(asctime)s %(module)s %(process)d " - "%(thread)d %(message)s" - ), - }, - }, - "handlers": { - "console": { - "class": "logging.StreamHandler", - "formatter": "verbose", - }, - "file": { - "class": "logging.handlers.RotatingFileHandler", - "filename": BASE_DIR / "logs" / "thrillwiki.log", - "maxBytes": 1024 * 1024 * 10, # 10MB - "backupCount": 5, - "formatter": "json", - }, - "performance": { - "class": "logging.handlers.RotatingFileHandler", - "filename": BASE_DIR / "logs" / "performance.log", - "maxBytes": 1024 * 1024 * 10, # 10MB - "backupCount": 5, - "formatter": "json", - }, - }, - "root": { - "level": "INFO", - "handlers": ["console"], - }, - "loggers": { - "django": { - "handlers": ["file"], - "level": "INFO", - "propagate": False, - }, - "django.db.backends": { - "handlers": ["console"], - "level": "DEBUG", - "propagate": False, - }, - "thrillwiki": { - "handlers": ["console", "file"], - "level": "DEBUG", - "propagate": False, - }, - "performance": { - "handlers": ["performance"], - "level": "INFO", - "propagate": False, - }, - "query_optimization": { - "handlers": ["console", "file"], - "level": "WARNING", - "propagate": False, - }, - "nplusone": { - "handlers": ["console"], - "level": "WARNING", - "propagate": False, - }, - }, -} diff --git a/config/django/production.py b/config/django/production.py deleted file mode 100644 index 9d4ef4e8..00000000 --- a/config/django/production.py +++ /dev/null @@ -1,103 +0,0 @@ -""" -Production settings for thrillwiki project. -""" - -# Import the module and use its members, e.g., base.BASE_DIR, base***REMOVED*** -from . import base - -# Import the module and use its members, e.g., database.DATABASES - -# Import the module and use its members, e.g., email.EMAIL_HOST - -# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS - -# Import the module and use its members, e.g., email.EMAIL_HOST - -# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS - -# Production settings -DEBUG = False - -# Allowed hosts must be explicitly set in production -ALLOWED_HOSTS = base.env.list("ALLOWED_HOSTS") - -# CSRF trusted origins for production -CSRF_TRUSTED_ORIGINS = base.env.list("CSRF_TRUSTED_ORIGINS") - -# Security settings for production -SECURE_SSL_REDIRECT = True -SESSION_COOKIE_SECURE = True -CSRF_COOKIE_SECURE = True -SECURE_HSTS_SECONDS = 31536000 # 1 year -SECURE_HSTS_INCLUDE_SUBDOMAINS = True -SECURE_HSTS_PRELOAD = True - -# Production logging -LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "formatters": { - "verbose": { - "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", - "style": "{", - }, - "simple": { - "format": "{levelname} {message}", - "style": "{", - }, - }, - "handlers": { - "file": { - "level": "INFO", - "class": "logging.handlers.RotatingFileHandler", - "filename": base.BASE_DIR / "logs" / "django.log", - "maxBytes": 1024 * 1024 * 15, # 15MB - "backupCount": 10, - "formatter": "verbose", - }, - "error_file": { - "level": "ERROR", - "class": "logging.handlers.RotatingFileHandler", - "filename": base.BASE_DIR / "logs" / "django_error.log", - "maxBytes": 1024 * 1024 * 15, # 15MB - "backupCount": 10, - "formatter": "verbose", - }, - }, - "root": { - "handlers": ["file"], - "level": "INFO", - }, - "loggers": { - "django": { - "handlers": ["file", "error_file"], - "level": "INFO", - "propagate": False, - }, - "thrillwiki": { - "handlers": ["file", "error_file"], - "level": "INFO", - "propagate": False, - }, - }, -} - -# Static files collection for production -STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage" - -# Cache settings for production (Redis recommended) -redis_url = base.env.str("REDIS_URL", default=None) -if redis_url: - CACHES = { - "default": { - "BACKEND": "django_redis.cache.RedisCache", - "LOCATION": redis_url, - "OPTIONS": { - "CLIENT_CLASS": "django_redis.client.DefaultClient", - }, - } - } - - # Use Redis for sessions in production - SESSION_ENGINE = "django.contrib.sessions.backends.cache" - SESSION_CACHE_ALIAS = "default" diff --git a/config/django/test.py b/config/django/test.py deleted file mode 100644 index cf00f9b9..00000000 --- a/config/django/test.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -Test settings for thrillwiki project. -""" - -from .base import * - -# Test-specific settings -DEBUG = False - -# Use in-memory database for faster tests -DATABASES = { - "default": { - "ENGINE": "django.contrib.gis.db.backends.spatialite", - "NAME": ":memory:", - } -} - -# Use in-memory cache for tests -CACHES = { - "default": { - "BACKEND": "django.core.cache.backends.locmem.LocMemCache", - "LOCATION": "test-cache", - } -} - -# Disable migrations for faster tests - - -class DisableMigrations: - def __contains__(self, item): - return True - - def __getitem__(self, item): - return None - - -MIGRATION_MODULES = DisableMigrations() - -# Email backend for tests -EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" - -# Password hashers for faster tests -PASSWORD_HASHERS = [ - "django.contrib.auth.hashers.MD5PasswordHasher", -] - -# Disable logging during tests -LOGGING_CONFIG = None - -# Media files for tests -MEDIA_ROOT = BASE_DIR / "test_media" - -# Static files for tests -STATIC_ROOT = BASE_DIR / "test_static" - -# Disable Turnstile for tests -TURNSTILE_SITE_KEY = "test-key" -TURNSTILE_SECRET_KEY = "test-secret" - -# Test-specific middleware (remove caching middleware) -MIDDLEWARE = [m for m in MIDDLEWARE if "cache" not in m.lower()] - -# Celery settings for tests (if Celery is used) -CELERY_TASK_ALWAYS_EAGER = True -CELERY_TASK_EAGER_PROPAGATES = True diff --git a/config/django/test_accounts.py b/config/django/test_accounts.py deleted file mode 100644 index a3e0aa1c..00000000 --- a/config/django/test_accounts.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -Test Django settings for thrillwiki accounts app. -""" - -# Use in-memory database for tests -DATABASES = { - "default": { - "ENGINE": "django.contrib.gis.db.backends.postgis", - "NAME": "test_db", - } -} - -# Use a faster password hasher for tests -PASSWORD_HASHERS = [ - "django.contrib.auth.hashers.MD5PasswordHasher", -] - -# Disable whitenoise for tests -WHITENOISE_AUTOREFRESH = True -STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage" - -INSTALLED_APPS = [ - "django.contrib.admin", - "django.contrib.auth", - "django.contrib.contenttypes", - "django.contrib.sessions", - "django.contrib.messages", - "django.contrib.staticfiles", - "django.contrib.sites", - "allauth", - "allauth.account", - "allauth.socialaccount", - "accounts", - "core", - "pghistory", - "pgtrigger", - "email_service", - "parks", - "rides", - "media.apps.MediaConfig", -] - -GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib" -GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib" diff --git a/config/settings/__init__.py b/config/settings/__init__.py deleted file mode 100644 index da62675a..00000000 --- a/config/settings/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Settings modules package diff --git a/config/settings/database.py b/config/settings/database.py deleted file mode 100644 index 0da947e5..00000000 --- a/config/settings/database.py +++ /dev/null @@ -1,28 +0,0 @@ -""" -Database configuration for thrillwiki project. -""" - -import environ - -env = environ.Env() - -# Database configuration -db_config = env.db("DATABASE_URL") - - -# Force PostGIS backend for spatial data support -db_config["ENGINE"] = "django.contrib.gis.db.backends.postgis" - -DATABASES = { - "default": db_config, -} - -# GeoDjango Settings - Environment specific -GDAL_LIBRARY_PATH = env("GDAL_LIBRARY_PATH", default=None) -GEOS_LIBRARY_PATH = env("GEOS_LIBRARY_PATH", default=None) - -# Cache settings -CACHES = {"default": env.cache("CACHE_URL", default="locmemcache://")} - -CACHE_MIDDLEWARE_SECONDS = env.int("CACHE_MIDDLEWARE_SECONDS", default=300) # 5 minutes -CACHE_MIDDLEWARE_KEY_PREFIX = env("CACHE_MIDDLEWARE_KEY_PREFIX", default="thrillwiki") diff --git a/config/settings/email.py b/config/settings/email.py deleted file mode 100644 index 259ee1e9..00000000 --- a/config/settings/email.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Email configuration for thrillwiki project. -""" - -import environ - -env = environ.Env() - -# Email settings -EMAIL_BACKEND = env( - "EMAIL_BACKEND", default="email_service.backends.ForwardEmailBackend" -) -FORWARD_EMAIL_BASE_URL = env( - "FORWARD_EMAIL_BASE_URL", default="https://api.forwardemail.net" -) -SERVER_EMAIL = env("SERVER_EMAIL", default="django_webmaster@thrillwiki.com") - -# Email URLs can be configured using EMAIL_URL environment variable -# Example: EMAIL_URL=smtp://user:pass@localhost:587 -EMAIL_URL = env("EMAIL_URL", default=None) - -if EMAIL_URL: - email_config = env.email(EMAIL_URL) - vars().update(email_config) diff --git a/config/settings/security.py b/config/settings/security.py deleted file mode 100644 index 32586aa2..00000000 --- a/config/settings/security.py +++ /dev/null @@ -1,36 +0,0 @@ -""" -Security configuration for thrillwiki project. -""" - -import environ - -env = environ.Env() - -# Cloudflare Turnstile settings -TURNSTILE_SITE_KEY = env("TURNSTILE_SITE_KEY", default="") -TURNSTILE_SECRET_KEY = env("TURNSTILE_SECRET_KEY", default="") -TURNSTILE_VERIFY_URL = env( - "TURNSTILE_VERIFY_URL", - default="https://challenges.cloudflare.com/turnstile/v0/siteverify", -) - -# Security headers and settings (for production) -SECURE_BROWSER_XSS_FILTER = env.bool("SECURE_BROWSER_XSS_FILTER", default=True) -SECURE_CONTENT_TYPE_NOSNIFF = env.bool("SECURE_CONTENT_TYPE_NOSNIFF", default=True) -SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool( - "SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True -) -SECURE_HSTS_SECONDS = env.int("SECURE_HSTS_SECONDS", default=31536000) # 1 year -SECURE_REDIRECT_EXEMPT = env.list("SECURE_REDIRECT_EXEMPT", default=[]) -SECURE_SSL_REDIRECT = env.bool("SECURE_SSL_REDIRECT", default=False) -SECURE_PROXY_SSL_HEADER = env.tuple("SECURE_PROXY_SSL_HEADER", default=None) - -# Session security -SESSION_COOKIE_SECURE = env.bool("SESSION_COOKIE_SECURE", default=False) -SESSION_COOKIE_HTTPONLY = env.bool("SESSION_COOKIE_HTTPONLY", default=True) -SESSION_COOKIE_SAMESITE = env("SESSION_COOKIE_SAMESITE", default="Lax") - -# CSRF security -CSRF_COOKIE_SECURE = env.bool("CSRF_COOKIE_SECURE", default=False) -CSRF_COOKIE_HTTPONLY = env.bool("CSRF_COOKIE_HTTPONLY", default=True) -CSRF_COOKIE_SAMESITE = env("CSRF_COOKIE_SAMESITE", default="Lax") diff --git a/core/__init__.py b/core/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/core/admin.py b/core/admin.py deleted file mode 100644 index 0fe9d1f3..00000000 --- a/core/admin.py +++ /dev/null @@ -1,31 +0,0 @@ -from django.contrib import admin -from django.utils.html import format_html -from .models import SlugHistory - - -@admin.register(SlugHistory) -class SlugHistoryAdmin(admin.ModelAdmin): - list_display = ["content_object_link", "old_slug", "created_at"] - list_filter = ["content_type", "created_at"] - search_fields = ["old_slug", "object_id"] - readonly_fields = ["content_type", "object_id", "old_slug", "created_at"] - date_hierarchy = "created_at" - ordering = ["-created_at"] - - def content_object_link(self, obj): - """Create a link to the related object's admin page""" - try: - url = obj.content_object.get_absolute_url() - return format_html('{}', url, str(obj.content_object)) - except (AttributeError, ValueError): - return str(obj.content_object) - - content_object_link.short_description = "Object" - - def has_add_permission(self, request): - """Disable manual creation of slug history records""" - return False - - def has_change_permission(self, request, obj=None): - """Disable editing of slug history records""" - return False diff --git a/core/analytics.py b/core/analytics.py deleted file mode 100644 index 9daae44f..00000000 --- a/core/analytics.py +++ /dev/null @@ -1,60 +0,0 @@ -from django.db import models -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.contenttypes.models import ContentType -from django.utils import timezone -from django.db.models import Count - - -class PageView(models.Model): - content_type = models.ForeignKey( - ContentType, on_delete=models.CASCADE, related_name="page_views" - ) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey("content_type", "object_id") - - timestamp = models.DateTimeField(auto_now_add=True, db_index=True) - ip_address = models.GenericIPAddressField() - user_agent = models.CharField(max_length=512, blank=True) - - class Meta: - indexes = [ - models.Index(fields=["timestamp"]), - models.Index(fields=["content_type", "object_id"]), - ] - - @classmethod - def get_trending_items(cls, model_class, hours=24, limit=10): - """Get trending items of a specific model class based on views in last X hours. - - Args: - model_class: The model class to get trending items for (e.g., Park, Ride) - hours (int): Number of hours to look back for views (default: 24) - limit (int): Maximum number of items to return (default: 10) - - Returns: - QuerySet: The trending items ordered by view count - """ - content_type = ContentType.objects.get_for_model(model_class) - cutoff = timezone.now() - timezone.timedelta(hours=hours) - - # Query through the ContentType relationship - item_ids = ( - cls.objects.filter(content_type=content_type, timestamp__gte=cutoff) - .values("object_id") - .annotate(view_count=Count("id")) - .filter(view_count__gt=0) - .order_by("-view_count") - .values_list("object_id", flat=True)[:limit] - ) - - # Get the actual items in the correct order - if item_ids: - # Convert the list to a string of comma-separated values - id_list = list(item_ids) - # Use Case/When to preserve the ordering - from django.db.models import Case, When - - preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(id_list)]) - return model_class.objects.filter(pk__in=id_list).order_by(preserved) - - return model_class.objects.none() diff --git a/core/api/__init__.py b/core/api/__init__.py deleted file mode 100644 index 64ba41d6..00000000 --- a/core/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Core API infrastructure for ThrillWiki diff --git a/core/api/exceptions.py b/core/api/exceptions.py deleted file mode 100644 index 96629256..00000000 --- a/core/api/exceptions.py +++ /dev/null @@ -1,205 +0,0 @@ -""" -Custom exception handling for ThrillWiki API. -Provides standardized error responses following Django styleguide patterns. -""" - -from typing import Any, Dict, Optional - -from django.http import Http404 -from django.core.exceptions import ( - PermissionDenied, - ValidationError as DjangoValidationError, -) -from rest_framework import status -from rest_framework.response import Response -from rest_framework.views import exception_handler -from rest_framework.exceptions import ( - ValidationError as DRFValidationError, - NotFound, - PermissionDenied as DRFPermissionDenied, -) - -from ..exceptions import ThrillWikiException -from ..logging import get_logger, log_exception - -logger = get_logger(__name__) - - -def custom_exception_handler( - exc: Exception, context: Dict[str, Any] -) -> Optional[Response]: - """ - Custom exception handler for DRF that provides standardized error responses. - - Returns: - Response with standardized error format or None to fallback to default handler - """ - # Call REST framework's default exception handler first - response = exception_handler(exc, context) - - if response is not None: - # Standardize the error response format - custom_response_data = { - "status": "error", - "error": { - "code": _get_error_code(exc), - "message": _get_error_message(exc, response.data), - "details": _get_error_details(exc, response.data), - }, - "data": None, - } - - # Add request context for debugging - if hasattr(context.get("request"), "user"): - custom_response_data["error"]["request_user"] = str(context["request"].user) - - # Log the error for monitoring - log_exception( - logger, - exc, - context={"response_status": response.status_code}, - request=context.get("request"), - ) - - response.data = custom_response_data - - # Handle ThrillWiki custom exceptions - elif isinstance(exc, ThrillWikiException): - custom_response_data = { - "status": "error", - "error": exc.to_dict(), - "data": None, - } - - log_exception( - logger, - exc, - context={"response_status": exc.status_code}, - request=context.get("request"), - ) - response = Response(custom_response_data, status=exc.status_code) - - # Handle specific Django exceptions that DRF doesn't catch - elif isinstance(exc, DjangoValidationError): - custom_response_data = { - "status": "error", - "error": { - "code": "VALIDATION_ERROR", - "message": "Validation failed", - "details": _format_django_validation_errors(exc), - }, - "data": None, - } - - log_exception( - logger, - exc, - context={"response_status": status.HTTP_400_BAD_REQUEST}, - request=context.get("request"), - ) - response = Response(custom_response_data, status=status.HTTP_400_BAD_REQUEST) - - elif isinstance(exc, Http404): - custom_response_data = { - "status": "error", - "error": { - "code": "NOT_FOUND", - "message": "Resource not found", - "details": str(exc) if str(exc) else None, - }, - "data": None, - } - - log_exception( - logger, - exc, - context={"response_status": status.HTTP_404_NOT_FOUND}, - request=context.get("request"), - ) - response = Response(custom_response_data, status=status.HTTP_404_NOT_FOUND) - - elif isinstance(exc, PermissionDenied): - custom_response_data = { - "status": "error", - "error": { - "code": "PERMISSION_DENIED", - "message": "Permission denied", - "details": str(exc) if str(exc) else None, - }, - "data": None, - } - - log_exception( - logger, - exc, - context={"response_status": status.HTTP_403_FORBIDDEN}, - request=context.get("request"), - ) - response = Response(custom_response_data, status=status.HTTP_403_FORBIDDEN) - - return response - - -def _get_error_code(exc: Exception) -> str: - """Extract or determine error code from exception.""" - if hasattr(exc, "default_code"): - return exc.default_code.upper() - - if isinstance(exc, DRFValidationError): - return "VALIDATION_ERROR" - elif isinstance(exc, NotFound): - return "NOT_FOUND" - elif isinstance(exc, DRFPermissionDenied): - return "PERMISSION_DENIED" - - return exc.__class__.__name__.upper() - - -def _get_error_message(exc: Exception, response_data: Any) -> str: - """Extract user-friendly error message.""" - if isinstance(response_data, dict): - # Handle DRF validation errors - if "detail" in response_data: - return str(response_data["detail"]) - elif "non_field_errors" in response_data: - errors = response_data["non_field_errors"] - return errors[0] if isinstance(errors, list) and errors else str(errors) - elif isinstance(response_data, dict) and len(response_data) == 1: - key, value = next(iter(response_data.items())) - if isinstance(value, list) and value: - return f"{key}: {value[0]}" - return f"{key}: {value}" - - # Fallback to exception message - return str(exc) if str(exc) else "An error occurred" - - -def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str, Any]]: - """Extract detailed error information for debugging.""" - if isinstance(response_data, dict) and len(response_data) > 1: - return response_data - - if hasattr(exc, "detail") and isinstance(exc.detail, dict): - return exc.detail - - return None - - -def _format_django_validation_errors( - exc: DjangoValidationError, -) -> Dict[str, Any]: - """Format Django ValidationError for API response.""" - if hasattr(exc, "error_dict"): - # Field-specific errors - return { - field: [str(error) for error in errors] - for field, errors in exc.error_dict.items() - } - elif hasattr(exc, "error_list"): - # Non-field errors - return {"non_field_errors": [str(error) for error in exc.error_list]} - - return {"non_field_errors": [str(exc)]} - - -# Removed _log_api_error - using centralized logging instead diff --git a/core/api/mixins.py b/core/api/mixins.py deleted file mode 100644 index 299c8087..00000000 --- a/core/api/mixins.py +++ /dev/null @@ -1,260 +0,0 @@ -""" -Common mixins for API views following Django styleguide patterns. -""" - -from typing import Dict, Any, Optional -from rest_framework.request import Request -from rest_framework.response import Response -from rest_framework import status - - -class ApiMixin: - """ - Base mixin for API views providing standardized response formatting. - """ - - def create_response( - self, - *, - data: Any = None, - message: Optional[str] = None, - status_code: int = status.HTTP_200_OK, - pagination: Optional[Dict[str, Any]] = None, - metadata: Optional[Dict[str, Any]] = None, - ) -> Response: - """ - Create standardized API response. - - Args: - data: Response data - message: Optional success message - status_code: HTTP status code - pagination: Pagination information - metadata: Additional metadata - - Returns: - Standardized Response object - """ - response_data = { - "status": "success" if status_code < 400 else "error", - "data": data, - } - - if message: - response_data["message"] = message - - if pagination: - response_data["pagination"] = pagination - - if metadata: - response_data["metadata"] = metadata - - return Response(response_data, status=status_code) - - def create_error_response( - self, - *, - message: str, - status_code: int = status.HTTP_400_BAD_REQUEST, - error_code: Optional[str] = None, - details: Optional[Dict[str, Any]] = None, - ) -> Response: - """ - Create standardized error response. - - Args: - message: Error message - status_code: HTTP status code - error_code: Optional error code - details: Additional error details - - Returns: - Standardized error Response object - """ - error_data = { - "code": error_code or "GENERIC_ERROR", - "message": message, - } - - if details: - error_data["details"] = details - - response_data = { - "status": "error", - "error": error_data, - "data": None, - } - - return Response(response_data, status=status_code) - - -class CreateApiMixin(ApiMixin): - """ - Mixin for create API endpoints with standardized input/output handling. - """ - - def create(self, request: Request, *args, **kwargs) -> Response: - """Handle POST requests for creating resources.""" - serializer = self.get_input_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - - # Create the object using the service layer - obj = self.perform_create(**serializer.validated_data) - - # Serialize the output - output_serializer = self.get_output_serializer(obj) - - return self.create_response( - data=output_serializer.data, - status_code=status.HTTP_201_CREATED, - message="Resource created successfully", - ) - - def perform_create(self, **validated_data): - """ - Override this method to implement object creation logic. - Should use service layer methods. - """ - raise NotImplementedError("Subclasses must implement perform_create") - - def get_input_serializer(self, *args, **kwargs): - """Get the input serializer for validation.""" - return self.InputSerializer(*args, **kwargs) - - def get_output_serializer(self, *args, **kwargs): - """Get the output serializer for response.""" - return self.OutputSerializer(*args, **kwargs) - - -class UpdateApiMixin(ApiMixin): - """ - Mixin for update API endpoints with standardized input/output handling. - """ - - def update(self, request: Request, *args, **kwargs) -> Response: - """Handle PUT/PATCH requests for updating resources.""" - instance = self.get_object() - serializer = self.get_input_serializer( - data=request.data, partial=kwargs.get("partial", False) - ) - serializer.is_valid(raise_exception=True) - - # Update the object using the service layer - updated_obj = self.perform_update(instance, **serializer.validated_data) - - # Serialize the output - output_serializer = self.get_output_serializer(updated_obj) - - return self.create_response( - data=output_serializer.data, - message="Resource updated successfully", - ) - - def perform_update(self, instance, **validated_data): - """ - Override this method to implement object update logic. - Should use service layer methods. - """ - raise NotImplementedError("Subclasses must implement perform_update") - - def get_input_serializer(self, *args, **kwargs): - """Get the input serializer for validation.""" - return self.InputSerializer(*args, **kwargs) - - def get_output_serializer(self, *args, **kwargs): - """Get the output serializer for response.""" - return self.OutputSerializer(*args, **kwargs) - - -class ListApiMixin(ApiMixin): - """ - Mixin for list API endpoints with pagination and filtering. - """ - - def list(self, request: Request, *args, **kwargs) -> Response: - """Handle GET requests for listing resources.""" - # Use selector to get filtered queryset - queryset = self.get_queryset() - - # Apply pagination - page = self.paginate_queryset(queryset) - if page is not None: - serializer = self.get_output_serializer(page, many=True) - return self.get_paginated_response(serializer.data) - - # No pagination - serializer = self.get_output_serializer(queryset, many=True) - return self.create_response(data=serializer.data) - - def get_queryset(self): - """ - Override this method to use selector patterns. - Should call selector functions, not access model managers directly. - """ - raise NotImplementedError( - "Subclasses must implement get_queryset using selectors" - ) - - def get_output_serializer(self, *args, **kwargs): - """Get the output serializer for response.""" - return self.OutputSerializer(*args, **kwargs) - - -class RetrieveApiMixin(ApiMixin): - """ - Mixin for retrieve API endpoints. - """ - - def retrieve(self, request: Request, *args, **kwargs) -> Response: - """Handle GET requests for retrieving a single resource.""" - instance = self.get_object() - serializer = self.get_output_serializer(instance) - - return self.create_response(data=serializer.data) - - def get_object(self): - """ - Override this method to use selector patterns. - Should call selector functions for optimized queries. - """ - raise NotImplementedError( - "Subclasses must implement get_object using selectors" - ) - - def get_output_serializer(self, *args, **kwargs): - """Get the output serializer for response.""" - return self.OutputSerializer(*args, **kwargs) - - -class DestroyApiMixin(ApiMixin): - """ - Mixin for delete API endpoints. - """ - - def destroy(self, request: Request, *args, **kwargs) -> Response: - """Handle DELETE requests for destroying resources.""" - instance = self.get_object() - - # Delete using service layer - self.perform_destroy(instance) - - return self.create_response( - status_code=status.HTTP_204_NO_CONTENT, - message="Resource deleted successfully", - ) - - def perform_destroy(self, instance): - """ - Override this method to implement object deletion logic. - Should use service layer methods. - """ - raise NotImplementedError("Subclasses must implement perform_destroy") - - def get_object(self): - """ - Override this method to use selector patterns. - Should call selector functions for optimized queries. - """ - raise NotImplementedError( - "Subclasses must implement get_object using selectors" - ) diff --git a/core/apps.py b/core/apps.py deleted file mode 100644 index c0ce093b..00000000 --- a/core/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class CoreConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "core" diff --git a/core/decorators/__init__.py b/core/decorators/__init__.py deleted file mode 100644 index 37146aa4..00000000 --- a/core/decorators/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Decorators module diff --git a/core/decorators/cache_decorators.py b/core/decorators/cache_decorators.py deleted file mode 100644 index 8e874754..00000000 --- a/core/decorators/cache_decorators.py +++ /dev/null @@ -1,409 +0,0 @@ -""" -Advanced caching decorators for API views and functions. -""" - -import hashlib -import json -import time -from functools import wraps -from typing import Optional, List, Callable -from django.utils.decorators import method_decorator -from django.views.decorators.vary import vary_on_headers -from core.services.enhanced_cache_service import EnhancedCacheService -import logging - -logger = logging.getLogger(__name__) - - -def cache_api_response( - timeout=1800, vary_on=None, key_prefix="api", cache_backend="api" -): - """ - Advanced decorator for caching API responses with flexible configuration - - Args: - timeout: Cache timeout in seconds - vary_on: List of request attributes to vary cache on - key_prefix: Prefix for cache keys - cache_backend: Cache backend to use - """ - - def decorator(view_func): - @wraps(view_func) - def wrapper(self, request, *args, **kwargs): - # Only cache GET requests - if request.method != "GET": - return view_func(self, request, *args, **kwargs) - - # Generate cache key based on view, user, and parameters - cache_key_parts = [ - key_prefix, - view_func.__name__, - ( - str(request.user.id) - if request.user.is_authenticated - else "anonymous" - ), - str(hash(frozenset(request.GET.items()))), - ] - - # Add URL parameters to cache key - if args: - cache_key_parts.append(str(hash(args))) - if kwargs: - cache_key_parts.append(str(hash(frozenset(kwargs.items())))) - - # Add custom vary_on fields - if vary_on: - for field in vary_on: - value = getattr(request, field, "") - cache_key_parts.append(str(value)) - - cache_key = ":".join(cache_key_parts) - - # Try to get from cache - cache_service = EnhancedCacheService() - cached_response = getattr(cache_service, cache_backend + "_cache").get( - cache_key - ) - - if cached_response: - logger.debug( - f"Cache hit for API view {view_func.__name__}", - extra={ - "cache_key": cache_key, - "view": view_func.__name__, - "cache_hit": True, - }, - ) - return cached_response - - # Execute view and cache result - start_time = time.time() - response = view_func(self, request, *args, **kwargs) - execution_time = time.time() - start_time - - # Only cache successful responses - if hasattr(response, "status_code") and response.status_code == 200: - getattr(cache_service, cache_backend + "_cache").set( - cache_key, response, timeout - ) - logger.debug( - f"Cached API response for view {view_func.__name__}", - extra={ - "cache_key": cache_key, - "view": view_func.__name__, - "execution_time": execution_time, - "cache_timeout": timeout, - "cache_miss": True, - }, - ) - else: - logger.debug( - f"Not caching response for view { - view_func.__name__} (status: { - getattr( - response, - 'status_code', - 'unknown')})" - ) - - return response - - return wrapper - - return decorator - - -def cache_queryset_result( - cache_key_template: str, timeout: int = 3600, cache_backend="default" -): - """ - Decorator for caching expensive queryset operations - - Args: - cache_key_template: Template for cache key (can use format placeholders) - timeout: Cache timeout in seconds - cache_backend: Cache backend to use - """ - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - # Generate cache key from template and arguments - try: - cache_key = cache_key_template.format(*args, **kwargs) - except (KeyError, IndexError): - # Fallback to simpler key generation - cache_key = f"{cache_key_template}:{ - hash( - str(args) + - str(kwargs))}" - - cache_service = EnhancedCacheService() - cached_result = getattr(cache_service, cache_backend + "_cache").get( - cache_key - ) - - if cached_result is not None: - logger.debug( - f"Cache hit for queryset operation: { - func.__name__}" - ) - return cached_result - - # Execute function and cache result - start_time = time.time() - result = func(*args, **kwargs) - execution_time = time.time() - start_time - - getattr(cache_service, cache_backend + "_cache").set( - cache_key, result, timeout - ) - logger.debug( - f"Cached queryset result for {func.__name__}", - extra={ - "cache_key": cache_key, - "function": func.__name__, - "execution_time": execution_time, - "cache_timeout": timeout, - }, - ) - - return result - - return wrapper - - return decorator - - -def invalidate_cache_on_save(model_name: str, cache_patterns: List[str] = None): - """ - Decorator to invalidate cache when model instances are saved - - Args: - model_name: Name of the model - cache_patterns: List of cache key patterns to invalidate - """ - - def decorator(func): - @wraps(func) - def wrapper(self, *args, **kwargs): - result = func(self, *args, **kwargs) - - # Invalidate related cache entries - cache_service = EnhancedCacheService() - - # Standard model cache invalidation - instance_id = getattr(self, "id", None) - cache_service.invalidate_model_cache(model_name, instance_id) - - # Custom pattern invalidation - if cache_patterns: - for pattern in cache_patterns: - if instance_id: - pattern = pattern.format(model=model_name, id=instance_id) - cache_service.invalidate_pattern(pattern) - - logger.info( - f"Invalidated cache for {model_name} after save", - extra={ - "model": model_name, - "instance_id": instance_id, - "patterns": cache_patterns, - }, - ) - - return result - - return wrapper - - return decorator - - -class CachedAPIViewMixin: - """Mixin to add caching capabilities to API views""" - - cache_timeout = 1800 # 30 minutes default - cache_vary_on = ["version"] - cache_key_prefix = "api" - cache_backend = "api" - - @method_decorator(vary_on_headers("User-Agent", "Accept-Language")) - def dispatch(self, request, *args, **kwargs): - """Add caching to the dispatch method""" - if request.method == "GET" and getattr(self, "enable_caching", True): - return self._cached_dispatch(request, *args, **kwargs) - return super().dispatch(request, *args, **kwargs) - - def _cached_dispatch(self, request, *args, **kwargs): - """Handle cached dispatch for GET requests""" - cache_key = self._generate_cache_key(request, *args, **kwargs) - - cache_service = EnhancedCacheService() - cached_response = getattr(cache_service, self.cache_backend + "_cache").get( - cache_key - ) - - if cached_response: - logger.debug(f"Cache hit for view {self.__class__.__name__}") - return cached_response - - # Execute view - response = super().dispatch(request, *args, **kwargs) - - # Cache successful responses - if hasattr(response, "status_code") and response.status_code == 200: - getattr(cache_service, self.cache_backend + "_cache").set( - cache_key, response, self.cache_timeout - ) - logger.debug(f"Cached response for view {self.__class__.__name__}") - - return response - - def _generate_cache_key(self, request, *args, **kwargs): - """Generate cache key for the request""" - key_parts = [ - self.cache_key_prefix, - self.__class__.__name__, - request.method, - (str(request.user.id) if request.user.is_authenticated else "anonymous"), - str(hash(frozenset(request.GET.items()))), - ] - - if args: - key_parts.append(str(hash(args))) - if kwargs: - key_parts.append(str(hash(frozenset(kwargs.items())))) - - # Add vary_on fields - for field in self.cache_vary_on: - value = getattr(request, field, "") - key_parts.append(str(value)) - - return ":".join(key_parts) - - -def smart_cache( - timeout: int = 3600, - key_func: Optional[Callable] = None, - invalidate_on: Optional[List[str]] = None, - cache_backend: str = "default", -): - """ - Smart caching decorator that adapts to function arguments - - Args: - timeout: Cache timeout in seconds - key_func: Custom function to generate cache key - invalidate_on: List of signals to invalidate cache on - cache_backend: Cache backend to use - """ - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - # Generate cache key - if key_func: - cache_key = key_func(*args, **kwargs) - else: - # Default key generation - key_data = { - "func": f"{func.__module__}.{func.__name__}", - "args": str(args), - "kwargs": json.dumps(kwargs, sort_keys=True, default=str), - } - key_string = json.dumps(key_data, sort_keys=True) - cache_key = f"smart_cache:{ - hashlib.md5( - key_string.encode()).hexdigest()}" - - # Try to get from cache - cache_service = EnhancedCacheService() - cached_result = getattr(cache_service, cache_backend + "_cache").get( - cache_key - ) - - if cached_result is not None: - logger.debug(f"Smart cache hit for {func.__name__}") - return cached_result - - # Execute function - start_time = time.time() - result = func(*args, **kwargs) - execution_time = time.time() - start_time - - # Cache result - getattr(cache_service, cache_backend + "_cache").set( - cache_key, result, timeout - ) - - logger.debug( - f"Smart cached result for {func.__name__}", - extra={ - "cache_key": cache_key, - "execution_time": execution_time, - "function": func.__name__, - }, - ) - - return result - - # Add cache invalidation if specified - if invalidate_on: - wrapper._cache_invalidate_on = invalidate_on - wrapper._cache_backend = cache_backend - - return wrapper - - return decorator - - -def conditional_cache(condition_func: Callable, **cache_kwargs): - """ - Cache decorator that only caches when condition is met - - Args: - condition_func: Function that returns True if caching should be applied - **cache_kwargs: Arguments passed to smart_cache - """ - - def decorator(func): - cached_func = smart_cache(**cache_kwargs)(func) - - @wraps(func) - def wrapper(*args, **kwargs): - if condition_func(*args, **kwargs): - return cached_func(*args, **kwargs) - else: - return func(*args, **kwargs) - - return wrapper - - return decorator - - -# Utility functions for cache key generation -def generate_user_cache_key(user, suffix: str = ""): - """Generate cache key based on user""" - user_id = user.id if user.is_authenticated else "anonymous" - return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}" - - -def generate_model_cache_key(model_instance, suffix: str = ""): - """Generate cache key based on model instance""" - model_name = model_instance._meta.model_name - instance_id = model_instance.id - return ( - f"{model_name}:{instance_id}:{suffix}" - if suffix - else f"{model_name}:{instance_id}" - ) - - -def generate_queryset_cache_key(queryset, params: dict = None): - """Generate cache key for queryset with parameters""" - model_name = queryset.model._meta.model_name - params_str = json.dumps(params or {}, sort_keys=True, default=str) - params_hash = hashlib.md5(params_str.encode()).hexdigest() - return f"queryset:{model_name}:{params_hash}" diff --git a/core/exceptions.py b/core/exceptions.py deleted file mode 100644 index 2a8c9e9e..00000000 --- a/core/exceptions.py +++ /dev/null @@ -1,224 +0,0 @@ -""" -Custom exception classes for ThrillWiki. -Provides domain-specific exceptions with proper error codes and messages. -""" - -from typing import Optional, Dict, Any - - -class ThrillWikiException(Exception): - """Base exception for all ThrillWiki-specific errors.""" - - default_message = "An error occurred" - error_code = "THRILLWIKI_ERROR" - status_code = 500 - - def __init__( - self, - message: Optional[str] = None, - error_code: Optional[str] = None, - details: Optional[Dict[str, Any]] = None, - ): - self.message = message or self.default_message - self.error_code = error_code or self.error_code - self.details = details or {} - super().__init__(self.message) - - def to_dict(self) -> Dict[str, Any]: - """Convert exception to dictionary for API responses.""" - return { - "error_code": self.error_code, - "message": self.message, - "details": self.details, - } - - -class ValidationException(ThrillWikiException): - """Raised when data validation fails.""" - - default_message = "Validation failed" - error_code = "VALIDATION_ERROR" - status_code = 400 - - -class NotFoundError(ThrillWikiException): - """Raised when a requested resource is not found.""" - - default_message = "Resource not found" - error_code = "NOT_FOUND" - status_code = 404 - - -class PermissionDeniedError(ThrillWikiException): - """Raised when user lacks permission for an operation.""" - - default_message = "Permission denied" - error_code = "PERMISSION_DENIED" - status_code = 403 - - -class BusinessLogicError(ThrillWikiException): - """Raised when business logic constraints are violated.""" - - default_message = "Business logic violation" - error_code = "BUSINESS_LOGIC_ERROR" - status_code = 400 - - -class ExternalServiceError(ThrillWikiException): - """Raised when external service calls fail.""" - - default_message = "External service error" - error_code = "EXTERNAL_SERVICE_ERROR" - status_code = 502 - - -# Domain-specific exceptions - - -class ParkError(ThrillWikiException): - """Base exception for park-related errors.""" - - error_code = "PARK_ERROR" - - -class ParkNotFoundError(NotFoundError): - """Raised when a park is not found.""" - - default_message = "Park not found" - error_code = "PARK_NOT_FOUND" - - def __init__(self, park_slug: Optional[str] = None, **kwargs): - if park_slug: - kwargs["details"] = {"park_slug": park_slug} - kwargs["message"] = f"Park with slug '{park_slug}' not found" - super().__init__(**kwargs) - - -class ParkOperationError(BusinessLogicError): - """Raised when park operation constraints are violated.""" - - default_message = "Invalid park operation" - error_code = "PARK_OPERATION_ERROR" - - -class RideError(ThrillWikiException): - """Base exception for ride-related errors.""" - - error_code = "RIDE_ERROR" - - -class RideNotFoundError(NotFoundError): - """Raised when a ride is not found.""" - - default_message = "Ride not found" - error_code = "RIDE_NOT_FOUND" - - def __init__(self, ride_slug: Optional[str] = None, **kwargs): - if ride_slug: - kwargs["details"] = {"ride_slug": ride_slug} - kwargs["message"] = f"Ride with slug '{ride_slug}' not found" - super().__init__(**kwargs) - - -class RideOperationError(BusinessLogicError): - """Raised when ride operation constraints are violated.""" - - default_message = "Invalid ride operation" - error_code = "RIDE_OPERATION_ERROR" - - -class LocationError(ThrillWikiException): - """Base exception for location-related errors.""" - - error_code = "LOCATION_ERROR" - - -class InvalidCoordinatesError(ValidationException): - """Raised when geographic coordinates are invalid.""" - - default_message = "Invalid geographic coordinates" - error_code = "INVALID_COORDINATES" - - def __init__( - self, - latitude: Optional[float] = None, - longitude: Optional[float] = None, - **kwargs, - ): - if latitude is not None or longitude is not None: - kwargs["details"] = {"latitude": latitude, "longitude": longitude} - super().__init__(**kwargs) - - -class GeolocationError(ExternalServiceError): - """Raised when geolocation services fail.""" - - default_message = "Geolocation service unavailable" - error_code = "GEOLOCATION_ERROR" - - -class ReviewError(ThrillWikiException): - """Base exception for review-related errors.""" - - error_code = "REVIEW_ERROR" - - -class ReviewModerationError(BusinessLogicError): - """Raised when review moderation constraints are violated.""" - - default_message = "Review moderation error" - error_code = "REVIEW_MODERATION_ERROR" - - -class DuplicateReviewError(BusinessLogicError): - """Raised when user tries to create duplicate reviews.""" - - default_message = "User has already reviewed this item" - error_code = "DUPLICATE_REVIEW" - - -class AccountError(ThrillWikiException): - """Base exception for account-related errors.""" - - error_code = "ACCOUNT_ERROR" - - -class InsufficientPermissionsError(PermissionDeniedError): - """Raised when user lacks required permissions.""" - - default_message = "Insufficient permissions" - error_code = "INSUFFICIENT_PERMISSIONS" - - def __init__(self, required_permission: Optional[str] = None, **kwargs): - if required_permission: - kwargs["details"] = {"required_permission": required_permission} - kwargs["message"] = f"Permission '{required_permission}' required" - super().__init__(**kwargs) - - -class EmailError(ExternalServiceError): - """Raised when email operations fail.""" - - default_message = "Email service error" - error_code = "EMAIL_ERROR" - - -class CacheError(ThrillWikiException): - """Raised when cache operations fail.""" - - default_message = "Cache operation failed" - error_code = "CACHE_ERROR" - status_code = 500 - - -class RoadTripError(ExternalServiceError): - """Raised when road trip planning fails.""" - - default_message = "Road trip planning error" - error_code = "ROADTRIP_ERROR" - - def __init__(self, service_name: Optional[str] = None, **kwargs): - if service_name: - kwargs["details"] = {"service": service_name} - super().__init__(**kwargs) diff --git a/core/forms.py b/core/forms.py deleted file mode 100644 index 2ffb7af4..00000000 --- a/core/forms.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Core forms and form components.""" - -from django.conf import settings -from django.core.exceptions import PermissionDenied -from django.utils.translation import gettext_lazy as _ - -from autocomplete import Autocomplete - - -class BaseAutocomplete(Autocomplete): - """Base autocomplete class for consistent autocomplete behavior across the project. - - This class extends django-htmx-autocomplete's base Autocomplete class to provide: - - Project-wide defaults for autocomplete behavior - - Translation strings - - Authentication enforcement - - Sensible search configuration - """ - - # Search configuration - minimum_search_length = 2 # More responsive than default 3 - max_results = 10 # Reasonable limit for performance - - # UI text configuration using gettext for i18n - no_result_text = _("No matches found") - narrow_search_text = _( - "Showing %(page_size)s of %(total)s matches. Please refine your search." - ) - type_at_least_n_characters = _("Type at least %(n)s characters...") - - # Project-wide component settings - placeholder = _("Search...") - - @staticmethod - def auth_check(request): - """Enforce authentication by default. - - This can be overridden in subclasses if public access is needed. - Configure AUTOCOMPLETE_BLOCK_UNAUTHENTICATED in settings to disable. - """ - block_unauth = getattr(settings, "AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", True) - if block_unauth and not request.user.is_authenticated: - raise PermissionDenied(_("Authentication required")) diff --git a/core/forms/__init__.py b/core/forms/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/core/forms/search.py b/core/forms/search.py deleted file mode 100644 index 6008e7f3..00000000 --- a/core/forms/search.py +++ /dev/null @@ -1,168 +0,0 @@ -from django import forms -from django.utils.translation import gettext_lazy as _ - - -class LocationSearchForm(forms.Form): - """ - A comprehensive search form that includes text search, location-based - search, and content type filtering for a unified search experience. - """ - - # Text search query - q = forms.CharField( - required=False, - label=_("Search Query"), - widget=forms.TextInput( - attrs={ - "placeholder": _("Search parks, rides, companies..."), - "class": ( - "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm " - "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 " - "dark:border-gray-600 dark:text-white" - ), - } - ), - ) - - # Location-based search - location = forms.CharField( - required=False, - label=_("Near Location"), - widget=forms.TextInput( - attrs={ - "placeholder": _("City, address, or coordinates..."), - "id": "location-input", - "class": ( - "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm " - "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 " - "dark:border-gray-600 dark:text-white" - ), - } - ), - ) - - # Hidden fields for coordinates - lat = forms.FloatField( - required=False, widget=forms.HiddenInput(attrs={"id": "lat-input"}) - ) - lng = forms.FloatField( - required=False, widget=forms.HiddenInput(attrs={"id": "lng-input"}) - ) - - # Search radius - radius_km = forms.ChoiceField( - required=False, - label=_("Search Radius"), - choices=[ - ("", _("Any distance")), - ("5", _("5 km")), - ("10", _("10 km")), - ("25", _("25 km")), - ("50", _("50 km")), - ("100", _("100 km")), - ("200", _("200 km")), - ], - widget=forms.Select( - attrs={ - "class": ( - "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm " - "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 " - "dark:border-gray-600 dark:text-white" - ) - } - ), - ) - - # Content type filters - search_parks = forms.BooleanField( - required=False, - initial=True, - label=_("Search Parks"), - widget=forms.CheckboxInput( - attrs={ - "class": ( - "rounded border-gray-300 text-blue-600 focus:ring-blue-500 " - "dark:border-gray-600 dark:bg-gray-700" - ) - } - ), - ) - search_rides = forms.BooleanField( - required=False, - label=_("Search Rides"), - widget=forms.CheckboxInput( - attrs={ - "class": ( - "rounded border-gray-300 text-blue-600 focus:ring-blue-500 " - "dark:border-gray-600 dark:bg-gray-700" - ) - } - ), - ) - search_companies = forms.BooleanField( - required=False, - label=_("Search Companies"), - widget=forms.CheckboxInput( - attrs={ - "class": ( - "rounded border-gray-300 text-blue-600 focus:ring-blue-500 " - "dark:border-gray-600 dark:bg-gray-700" - ) - } - ), - ) - - # Geographic filters - country = forms.CharField( - required=False, - widget=forms.TextInput( - attrs={ - "placeholder": _("Country"), - "class": ( - "w-full px-3 py-2 text-sm border border-gray-300 rounded-md " - "shadow-sm focus:ring-blue-500 focus:border-blue-500 " - "dark:bg-gray-700 dark:border-gray-600 dark:text-white" - ), - } - ), - ) - state = forms.CharField( - required=False, - widget=forms.TextInput( - attrs={ - "placeholder": _("State/Region"), - "class": ( - "w-full px-3 py-2 text-sm border border-gray-300 rounded-md " - "shadow-sm focus:ring-blue-500 focus:border-blue-500 " - "dark:bg-gray-700 dark:border-gray-600 dark:text-white" - ), - } - ), - ) - city = forms.CharField( - required=False, - widget=forms.TextInput( - attrs={ - "placeholder": _("City"), - "class": ( - "w-full px-3 py-2 text-sm border border-gray-300 rounded-md " - "shadow-sm focus:ring-blue-500 focus:border-blue-500 " - "dark:bg-gray-700 dark:border-gray-600 dark:text-white" - ), - } - ), - ) - - def clean(self): - cleaned_data = super().clean() - - # If lat/lng are provided, ensure location field is populated for - # display - lat = cleaned_data.get("lat") - lng = cleaned_data.get("lng") - location = cleaned_data.get("location") - - if lat and lng and not location: - cleaned_data["location"] = f"{lat}, {lng}" - - return cleaned_data diff --git a/core/health_checks/__init__.py b/core/health_checks/__init__.py deleted file mode 100644 index 229204aa..00000000 --- a/core/health_checks/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Health checks module diff --git a/core/health_checks/custom_checks.py b/core/health_checks/custom_checks.py deleted file mode 100644 index 7f0f93d4..00000000 --- a/core/health_checks/custom_checks.py +++ /dev/null @@ -1,325 +0,0 @@ -""" -Custom health checks for ThrillWiki application. -""" - -import time -import logging -from django.core.cache import cache -from django.db import connection -from health_check.backends import BaseHealthCheckBackend - -logger = logging.getLogger(__name__) - - -class CacheHealthCheck(BaseHealthCheckBackend): - """Check Redis cache connectivity and performance""" - - critical_service = True - - def check_status(self): - try: - # Test cache write/read performance - test_key = "health_check_test" - test_value = "test_value_" + str(int(time.time())) - - start_time = time.time() - cache.set(test_key, test_value, timeout=30) - cached_value = cache.get(test_key) - cache_time = time.time() - start_time - - if cached_value != test_value: - self.add_error("Cache read/write test failed - values don't match") - return - - # Check cache performance - if cache_time > 0.1: # Warn if cache operations take more than 100ms - self.add_error( - f"Cache performance degraded: { - cache_time:.3f}s for read/write operation" - ) - return - - # Clean up test key - cache.delete(test_key) - - # Additional Redis-specific checks if using django-redis - try: - from django_redis import get_redis_connection - - redis_client = get_redis_connection("default") - info = redis_client.info() - - # Check memory usage - used_memory = info.get("used_memory", 0) - max_memory = info.get("maxmemory", 0) - - if max_memory > 0: - memory_usage_percent = (used_memory / max_memory) * 100 - if memory_usage_percent > 90: - self.add_error( - f"Redis memory usage critical: { - memory_usage_percent:.1f}%" - ) - elif memory_usage_percent > 80: - logger.warning( - f"Redis memory usage high: { - memory_usage_percent:.1f}%" - ) - - except ImportError: - # django-redis not available, skip additional checks - pass - except Exception as e: - logger.warning(f"Could not get Redis info: {e}") - - except Exception as e: - self.add_error(f"Cache service unavailable: {e}") - - -class DatabasePerformanceCheck(BaseHealthCheckBackend): - """Check database performance and connectivity""" - - critical_service = False - - def check_status(self): - try: - start_time = time.time() - - # Test basic connectivity - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - result = cursor.fetchone() - - if result[0] != 1: - self.add_error("Database connectivity test failed") - return - - basic_query_time = time.time() - start_time - - # Test a more complex query (if it takes too long, there might be - # performance issues) - start_time = time.time() - with connection.cursor() as cursor: - cursor.execute("SELECT COUNT(*) FROM django_content_type") - cursor.fetchone() - - complex_query_time = time.time() - start_time - - # Performance thresholds - if basic_query_time > 1.0: - self.add_error( - f"Database responding slowly: basic query took { - basic_query_time:.2f}s" - ) - elif basic_query_time > 0.5: - logger.warning( - f"Database performance degraded: basic query took { - basic_query_time:.2f}s" - ) - - if complex_query_time > 2.0: - self.add_error( - f"Database performance critical: complex query took { - complex_query_time:.2f}s" - ) - elif complex_query_time > 1.0: - logger.warning( - f"Database performance slow: complex query took { - complex_query_time:.2f}s" - ) - - # Check database version and settings if possible - try: - with connection.cursor() as cursor: - cursor.execute("SELECT version()") - version = cursor.fetchone()[0] - logger.debug(f"Database version: {version}") - except Exception as e: - logger.debug(f"Could not get database version: {e}") - - except Exception as e: - self.add_error(f"Database performance check failed: {e}") - - -class ApplicationHealthCheck(BaseHealthCheckBackend): - """Check application-specific health indicators""" - - critical_service = False - - def check_status(self): - try: - # Check if we can import critical modules - critical_modules = [ - "parks.models", - "rides.models", - "accounts.models", - "core.services", - ] - - for module_name in critical_modules: - try: - __import__(module_name) - except ImportError as e: - self.add_error( - f"Critical module import failed: {module_name} - {e}" - ) - - # Check if we can access critical models - try: - from parks.models import Park - from rides.models import Ride - from django.contrib.auth import get_user_model - - User = get_user_model() - - # Test that we can query these models (just count, don't load - # data) - park_count = Park.objects.count() - ride_count = Ride.objects.count() - user_count = User.objects.count() - - logger.debug( - f"Model counts - Parks: {park_count}, Rides: {ride_count}, Users: {user_count}" - ) - - except Exception as e: - self.add_error(f"Model access check failed: {e}") - - # Check media and static file configuration - from django.conf import settings - import os - - if not os.path.exists(settings.MEDIA_ROOT): - self.add_error( - f"Media directory does not exist: { - settings.MEDIA_ROOT}" - ) - - if not os.path.exists(settings.STATIC_ROOT) and not settings.DEBUG: - self.add_error( - f"Static directory does not exist: {settings.STATIC_ROOT}" - ) - - except Exception as e: - self.add_error(f"Application health check failed: {e}") - - -class ExternalServiceHealthCheck(BaseHealthCheckBackend): - """Check external services and dependencies""" - - critical_service = False - - def check_status(self): - # Check email service if configured - try: - from django.core.mail import get_connection - from django.conf import settings - - if ( - hasattr(settings, "EMAIL_BACKEND") - and "console" not in settings.EMAIL_BACKEND - ): - # Only check if not using console backend - connection = get_connection() - if hasattr(connection, "open"): - try: - connection.open() - connection.close() - except Exception as e: - logger.warning(f"Email service check failed: {e}") - # Don't fail the health check for email issues in - # development - - except Exception as e: - logger.debug(f"Email service check error: {e}") - - # Check if Sentry is configured and working - try: - import sentry_sdk - - if sentry_sdk.Hub.current.client: - # Sentry is configured - try: - # Test that we can capture a test message (this won't - # actually send to Sentry) - with sentry_sdk.push_scope() as scope: - scope.set_tag("health_check", True) - # Don't actually send a message, just verify the SDK is - # working - logger.debug("Sentry SDK is operational") - except Exception as e: - logger.warning(f"Sentry SDK check failed: {e}") - - except ImportError: - logger.debug("Sentry SDK not installed") - except Exception as e: - logger.debug(f"Sentry check error: {e}") - - # Check Redis connection if configured - try: - from django.core.cache import caches - from django.conf import settings - - cache_config = settings.CACHES.get("default", {}) - if "redis" in cache_config.get("BACKEND", "").lower(): - # Redis is configured, test basic connectivity - redis_cache = caches["default"] - redis_cache.set("health_check_redis", "test", 10) - value = redis_cache.get("health_check_redis") - if value != "test": - self.add_error("Redis cache connectivity test failed") - else: - redis_cache.delete("health_check_redis") - - except Exception as e: - logger.warning(f"Redis connectivity check failed: {e}") - - -class DiskSpaceHealthCheck(BaseHealthCheckBackend): - """Check available disk space""" - - critical_service = False - - def check_status(self): - try: - import shutil - from django.conf import settings - - # Check disk space for media directory - media_usage = shutil.disk_usage(settings.MEDIA_ROOT) - media_free_percent = (media_usage.free / media_usage.total) * 100 - - # Check disk space for logs directory if it exists - logs_dir = getattr(settings, "BASE_DIR", "/tmp") / "logs" - if logs_dir.exists(): - logs_usage = shutil.disk_usage(logs_dir) - logs_free_percent = (logs_usage.free / logs_usage.total) * 100 - else: - logs_free_percent = media_free_percent # Use same as media - - # Alert thresholds - if media_free_percent < 10: - self.add_error( - f"Critical disk space: { - media_free_percent:.1f}% free in media directory" - ) - elif media_free_percent < 20: - logger.warning( - f"Low disk space: { - media_free_percent:.1f}% free in media directory" - ) - - if logs_free_percent < 10: - self.add_error( - f"Critical disk space: { - logs_free_percent:.1f}% free in logs directory" - ) - elif logs_free_percent < 20: - logger.warning( - f"Low disk space: { - logs_free_percent:.1f}% free in logs directory" - ) - - except Exception as e: - logger.warning(f"Disk space check failed: {e}") - # Don't fail health check for disk space issues in development diff --git a/core/history.py b/core/history.py deleted file mode 100644 index 44c8cd73..00000000 --- a/core/history.py +++ /dev/null @@ -1,107 +0,0 @@ -from django.db import models -from django.contrib.contenttypes.models import ContentType -from django.contrib.contenttypes.fields import GenericForeignKey -from django.conf import settings -from typing import Any, Dict, Optional -from django.db.models import QuerySet - - -class DiffMixin: - """Mixin to add diffing capabilities to models""" - - def get_prev_record(self) -> Optional[Any]: - """Get the previous record for this instance""" - try: - return ( - type(self) - .objects.filter( - pgh_created_at__lt=self.pgh_created_at, - pgh_obj_id=self.pgh_obj_id, - ) - .order_by("-pgh_created_at") - .first() - ) - except (AttributeError, TypeError): - return None - - def diff_against_previous(self) -> Dict: - """Compare this record against the previous one""" - prev_record = self.get_prev_record() - if not prev_record: - return {} - - skip_fields = { - "pgh_id", - "pgh_created_at", - "pgh_label", - "pgh_obj_id", - "pgh_context_id", - "_state", - "created_at", - "updated_at", - } - - changes = {} - for field, value in self.__dict__.items(): - # Skip internal fields and those we don't want to track - if field.startswith("_") or field in skip_fields or field.endswith("_id"): - continue - - try: - old_value = getattr(prev_record, field) - new_value = value - if old_value != new_value: - changes[field] = { - "old": (str(old_value) if old_value is not None else "None"), - "new": (str(new_value) if new_value is not None else "None"), - } - except AttributeError: - continue - - return changes - - -class TrackedModel(models.Model): - """Abstract base class for models that need history tracking""" - - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - abstract = True - - def get_history(self) -> QuerySet: - """Get all history records for this instance in chronological order""" - event_model = self.events.model # pghistory provides this automatically - if event_model: - return event_model.objects.filter(pgh_obj_id=self.pk).order_by( - "-pgh_created_at" - ) - return self.__class__.objects.none() - - -class HistoricalSlug(models.Model): - """Track historical slugs for models""" - - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey("content_type", "object_id") - slug = models.SlugField(max_length=255) - created_at = models.DateTimeField(auto_now_add=True) - user = models.ForeignKey( - settings.AUTH_USER_MODEL, - null=True, - blank=True, - on_delete=models.SET_NULL, - related_name="historical_slugs", - ) - - class Meta: - unique_together = ("content_type", "slug") - indexes = [ - models.Index(fields=["content_type", "object_id"]), - models.Index(fields=["slug"]), - ] - - def __str__(self) -> str: - return f"{self.content_type} - {self.object_id} - {self.slug}" diff --git a/core/logging.py b/core/logging.py deleted file mode 100644 index 5d513b25..00000000 --- a/core/logging.py +++ /dev/null @@ -1,261 +0,0 @@ -""" -Centralized logging configuration for ThrillWiki. -Provides structured logging with proper formatting and context. -""" - -import logging -import sys -from typing import Dict, Any, Optional -from django.conf import settings -from django.utils import timezone - - -class ThrillWikiFormatter(logging.Formatter): - """Custom formatter for ThrillWiki logs with structured output.""" - - def format(self, record): - # Add timestamp if not present - if not hasattr(record, "timestamp"): - record.timestamp = timezone.now().isoformat() - - # Add request context if available - if hasattr(record, "request"): - record.request_id = getattr(record.request, "id", "unknown") - record.user_id = ( - getattr(record.request.user, "id", "anonymous") - if hasattr(record.request, "user") - else "unknown" - ) - record.path = getattr(record.request, "path", "unknown") - record.method = getattr(record.request, "method", "unknown") - - # Structure the log message - if hasattr(record, "extra_data"): - record.structured_data = record.extra_data - - return super().format(record) - - -def get_logger(name: str) -> logging.Logger: - """ - Get a configured logger for ThrillWiki components. - - Args: - name: Logger name (usually __name__) - - Returns: - Configured logger instance - """ - logger = logging.getLogger(name) - - # Only configure if not already configured - if not logger.handlers: - handler = logging.StreamHandler(sys.stdout) - formatter = ThrillWikiFormatter( - fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s" - ) - handler.setFormatter(formatter) - logger.addHandler(handler) - logger.setLevel(logging.INFO if settings.DEBUG else logging.WARNING) - - return logger - - -def log_exception( - logger: logging.Logger, - exception: Exception, - *, - context: Optional[Dict[str, Any]] = None, - request=None, - level: int = logging.ERROR, -) -> None: - """ - Log an exception with structured context. - - Args: - logger: Logger instance - exception: Exception to log - context: Additional context data - request: Django request object - level: Log level - """ - log_data = { - "exception_type": exception.__class__.__name__, - "exception_message": str(exception), - "context": context or {}, - } - - if request: - log_data.update( - { - "request_path": getattr(request, "path", "unknown"), - "request_method": getattr(request, "method", "unknown"), - "user_id": ( - getattr(request.user, "id", "anonymous") - if hasattr(request, "user") - else "unknown" - ), - } - ) - - logger.log( - level, - f"Exception occurred: {exception}", - extra={"extra_data": log_data}, - exc_info=True, - ) - - -def log_business_event( - logger: logging.Logger, - event_type: str, - *, - message: str, - context: Optional[Dict[str, Any]] = None, - request=None, - level: int = logging.INFO, -) -> None: - """ - Log a business event with structured context. - - Args: - logger: Logger instance - event_type: Type of business event - message: Event message - context: Additional context data - request: Django request object - level: Log level - """ - log_data = {"event_type": event_type, "context": context or {}} - - if request: - log_data.update( - { - "request_path": getattr(request, "path", "unknown"), - "request_method": getattr(request, "method", "unknown"), - "user_id": ( - getattr(request.user, "id", "anonymous") - if hasattr(request, "user") - else "unknown" - ), - } - ) - - logger.log(level, message, extra={"extra_data": log_data}) - - -def log_performance_metric( - logger: logging.Logger, - operation: str, - *, - duration_ms: float, - context: Optional[Dict[str, Any]] = None, - level: int = logging.INFO, -) -> None: - """ - Log a performance metric. - - Args: - logger: Logger instance - operation: Operation name - duration_ms: Duration in milliseconds - context: Additional context data - level: Log level - """ - log_data = { - "metric_type": "performance", - "operation": operation, - "duration_ms": duration_ms, - "context": context or {}, - } - - message = f"Performance: {operation} took {duration_ms:.2f}ms" - logger.log(level, message, extra={"extra_data": log_data}) - - -def log_api_request( - logger: logging.Logger, - request, - *, - response_status: Optional[int] = None, - duration_ms: Optional[float] = None, - level: int = logging.INFO, -) -> None: - """ - Log an API request with context. - - Args: - logger: Logger instance - request: Django request object - response_status: HTTP response status code - duration_ms: Request duration in milliseconds - level: Log level - """ - log_data = { - "request_type": "api", - "path": getattr(request, "path", "unknown"), - "method": getattr(request, "method", "unknown"), - "user_id": ( - getattr(request.user, "id", "anonymous") - if hasattr(request, "user") - else "unknown" - ), - "response_status": response_status, - "duration_ms": duration_ms, - } - - message = f"API Request: {request.method} {request.path}" - if response_status: - message += f" -> {response_status}" - if duration_ms: - message += f" ({duration_ms:.2f}ms)" - - logger.log(level, message, extra={"extra_data": log_data}) - - -def log_security_event( - logger: logging.Logger, - event_type: str, - *, - message: str, - severity: str = "medium", - context: Optional[Dict[str, Any]] = None, - request=None, -) -> None: - """ - Log a security-related event. - - Args: - logger: Logger instance - event_type: Type of security event - message: Event message - severity: Event severity (low, medium, high, critical) - context: Additional context data - request: Django request object - """ - log_data = { - "security_event": True, - "event_type": event_type, - "severity": severity, - "context": context or {}, - } - - if request: - log_data.update( - { - "request_path": getattr(request, "path", "unknown"), - "request_method": getattr(request, "method", "unknown"), - "user_id": ( - getattr(request.user, "id", "anonymous") - if hasattr(request, "user") - else "unknown" - ), - "remote_addr": request.META.get("REMOTE_ADDR", "unknown"), - "user_agent": request.META.get("HTTP_USER_AGENT", "unknown"), - } - ) - - # Use WARNING for medium/high, ERROR for critical - level = logging.ERROR if severity in ["high", "critical"] else logging.WARNING - - logger.log(level, f"SECURITY: {message}", extra={"extra_data": log_data}) diff --git a/core/management/commands/update_trending.py b/core/management/commands/update_trending.py deleted file mode 100644 index cc440341..00000000 --- a/core/management/commands/update_trending.py +++ /dev/null @@ -1,35 +0,0 @@ -from django.core.management.base import BaseCommand -from django.core.cache import cache -from parks.models import Park -from rides.models import Ride -from core.analytics import PageView - - -class Command(BaseCommand): - help = "Updates trending parks and rides cache based on views in the last 24 hours" - - def handle(self, *args, **kwargs): - """ - Updates the trending parks and rides in the cache. - - This command is designed to be run every hour via cron to keep the trending - items up to date. It looks at page views from the last 24 hours and caches - the top 10 most viewed parks and rides. - - The cached data is used by the home page to display trending items without - having to query the database on every request. - """ - # Get top 10 trending parks and rides from the last 24 hours - trending_parks = PageView.get_trending_items(Park, hours=24, limit=10) - trending_rides = PageView.get_trending_items(Ride, hours=24, limit=10) - - # Cache the results for 1 hour - cache.set("trending_parks", trending_parks, 3600) # 3600 seconds = 1 hour - cache.set("trending_rides", trending_rides, 3600) - - self.stdout.write( - self.style.SUCCESS( - "Successfully updated trending parks and rides. " - "Cached 10 items each for parks and rides based on views in the last 24 hours." - ) - ) diff --git a/core/managers.py b/core/managers.py deleted file mode 100644 index 027c3091..00000000 --- a/core/managers.py +++ /dev/null @@ -1,273 +0,0 @@ -""" -Custom managers and QuerySets for optimized database patterns. -Following Django styleguide best practices for database access. -""" - -from typing import Optional, List, Union -from django.db import models -from django.db.models import Q, Count, Avg, Max -from django.contrib.gis.geos import Point -from django.contrib.gis.measure import Distance -from django.utils import timezone -from datetime import timedelta - - -class BaseQuerySet(models.QuerySet): - """Base QuerySet with common optimizations and patterns.""" - - def active(self): - """Filter for active/enabled records.""" - if hasattr(self.model, "is_active"): - return self.filter(is_active=True) - return self - - def published(self): - """Filter for published records.""" - if hasattr(self.model, "is_published"): - return self.filter(is_published=True) - return self - - def recent(self, *, days: int = 30): - """Filter for recently created records.""" - cutoff_date = timezone.now() - timedelta(days=days) - return self.filter(created_at__gte=cutoff_date) - - def search(self, *, query: str, fields: Optional[List[str]] = None): - """ - Full-text search across specified fields. - - Args: - query: Search query string - fields: List of field names to search (defaults to name, description) - """ - if not query: - return self - - if fields is None: - fields = ["name", "description"] if hasattr(self.model, "name") else [] - - q_objects = Q() - for field in fields: - if hasattr(self.model, field): - q_objects |= Q(**{f"{field}__icontains": query}) - - return self.filter(q_objects) if q_objects else self - - def with_stats(self): - """Add basic statistics annotations.""" - return self - - def optimized_for_list(self): - """Optimize queryset for list display.""" - return self.select_related().prefetch_related() - - def optimized_for_detail(self): - """Optimize queryset for detail display.""" - return self.select_related().prefetch_related() - - -class BaseManager(models.Manager): - """Base manager with common patterns.""" - - def get_queryset(self): - return BaseQuerySet(self.model, using=self._db) - - def active(self): - return self.get_queryset().active() - - def published(self): - return self.get_queryset().published() - - def recent(self, *, days: int = 30): - return self.get_queryset().recent(days=days) - - def search(self, *, query: str, fields: Optional[List[str]] = None): - return self.get_queryset().search(query=query, fields=fields) - - -class LocationQuerySet(BaseQuerySet): - """QuerySet for location-based models with geographic functionality.""" - - def near_point(self, *, point: Point, distance_km: float = 50): - """Filter locations near a geographic point.""" - if hasattr(self.model, "point"): - return ( - self.filter(point__distance_lte=(point, Distance(km=distance_km))) - .distance(point) - .order_by("distance") - ) - return self - - def within_bounds(self, *, north: float, south: float, east: float, west: float): - """Filter locations within geographic bounds.""" - if hasattr(self.model, "point"): - return self.filter( - point__latitude__gte=south, - point__latitude__lte=north, - point__longitude__gte=west, - point__longitude__lte=east, - ) - return self - - def by_country(self, *, country: str): - """Filter by country.""" - if hasattr(self.model, "country"): - return self.filter(country__iexact=country) - return self - - def by_region(self, *, state: str): - """Filter by state/region.""" - if hasattr(self.model, "state"): - return self.filter(state__iexact=state) - return self - - def by_city(self, *, city: str): - """Filter by city.""" - if hasattr(self.model, "city"): - return self.filter(city__iexact=city) - return self - - -class LocationManager(BaseManager): - """Manager for location-based models.""" - - def get_queryset(self): - return LocationQuerySet(self.model, using=self._db) - - def near_point(self, *, point: Point, distance_km: float = 50): - return self.get_queryset().near_point(point=point, distance_km=distance_km) - - def within_bounds(self, *, north: float, south: float, east: float, west: float): - return self.get_queryset().within_bounds( - north=north, south=south, east=east, west=west - ) - - -class ReviewableQuerySet(BaseQuerySet): - """QuerySet for models that can be reviewed.""" - - def with_review_stats(self): - """Add review statistics annotations.""" - return self.annotate( - review_count=Count("reviews", filter=Q(reviews__is_published=True)), - average_rating=Avg("reviews__rating", filter=Q(reviews__is_published=True)), - latest_review_date=Max( - "reviews__created_at", filter=Q(reviews__is_published=True) - ), - ) - - def highly_rated(self, *, min_rating: float = 8.0): - """Filter for highly rated items.""" - return self.with_review_stats().filter(average_rating__gte=min_rating) - - def recently_reviewed(self, *, days: int = 30): - """Filter for items with recent reviews.""" - cutoff_date = timezone.now() - timedelta(days=days) - return self.filter( - reviews__created_at__gte=cutoff_date, reviews__is_published=True - ).distinct() - - -class ReviewableManager(BaseManager): - """Manager for reviewable models.""" - - def get_queryset(self): - return ReviewableQuerySet(self.model, using=self._db) - - def with_review_stats(self): - return self.get_queryset().with_review_stats() - - def highly_rated(self, *, min_rating: float = 8.0): - return self.get_queryset().highly_rated(min_rating=min_rating) - - -class HierarchicalQuerySet(BaseQuerySet): - """QuerySet for hierarchical models (with parent/child relationships).""" - - def root_level(self): - """Filter for root-level items (no parent).""" - if hasattr(self.model, "parent"): - return self.filter(parent__isnull=True) - return self - - def children_of(self, *, parent_id: int): - """Get children of a specific parent.""" - if hasattr(self.model, "parent"): - return self.filter(parent_id=parent_id) - return self - - def with_children_count(self): - """Add count of children.""" - if hasattr(self.model, "children"): - return self.annotate(children_count=Count("children")) - return self - - -class HierarchicalManager(BaseManager): - """Manager for hierarchical models.""" - - def get_queryset(self): - return HierarchicalQuerySet(self.model, using=self._db) - - def root_level(self): - return self.get_queryset().root_level() - - -class TimestampedQuerySet(BaseQuerySet): - """QuerySet for models with created_at/updated_at timestamps.""" - - def created_between(self, *, start_date, end_date): - """Filter by creation date range.""" - return self.filter(created_at__date__range=[start_date, end_date]) - - def updated_since(self, *, since_date): - """Filter for records updated since a date.""" - return self.filter(updated_at__gte=since_date) - - def by_creation_date(self, *, descending: bool = True): - """Order by creation date.""" - order = "-created_at" if descending else "created_at" - return self.order_by(order) - - -class TimestampedManager(BaseManager): - """Manager for timestamped models.""" - - def get_queryset(self): - return TimestampedQuerySet(self.model, using=self._db) - - def created_between(self, *, start_date, end_date): - return self.get_queryset().created_between( - start_date=start_date, end_date=end_date - ) - - -class StatusQuerySet(BaseQuerySet): - """QuerySet for models with status fields.""" - - def with_status(self, *, status: Union[str, List[str]]): - """Filter by status.""" - if isinstance(status, list): - return self.filter(status__in=status) - return self.filter(status=status) - - def operating(self): - """Filter for operating/active status.""" - return self.filter(status="OPERATING") - - def closed(self): - """Filter for closed status.""" - return self.filter(status__in=["CLOSED_TEMP", "CLOSED_PERM"]) - - -class StatusManager(BaseManager): - """Manager for status-based models.""" - - def get_queryset(self): - return StatusQuerySet(self.model, using=self._db) - - def operating(self): - return self.get_queryset().operating() - - def closed(self): - return self.get_queryset().closed() diff --git a/core/middleware/__init__.py b/core/middleware/__init__.py deleted file mode 100644 index fac8a392..00000000 --- a/core/middleware/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Core middleware modules - -# Import middleware classes from the analytics module -from .analytics import PageViewMiddleware, PgHistoryContextMiddleware - -# Import middleware classes from the performance_middleware.py module -from .performance_middleware import ( - PerformanceMiddleware, - QueryCountMiddleware, - DatabaseConnectionMiddleware, - CachePerformanceMiddleware, -) - -# Make all middleware classes available at the package level -__all__ = [ - "PageViewMiddleware", - "PgHistoryContextMiddleware", - "PerformanceMiddleware", - "QueryCountMiddleware", - "DatabaseConnectionMiddleware", - "CachePerformanceMiddleware", -] diff --git a/core/middleware/analytics.py b/core/middleware/analytics.py deleted file mode 100644 index 2c3eb2ea..00000000 --- a/core/middleware/analytics.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -Analytics and tracking middleware for Django application. -""" - -import pghistory -from django.contrib.auth.models import AnonymousUser -from django.core.handlers.wsgi import WSGIRequest -from django.utils.deprecation import MiddlewareMixin -from django.contrib.contenttypes.models import ContentType -from django.views.generic.detail import DetailView -from core.analytics import PageView - - -class RequestContextProvider(pghistory.context): - """Custom context provider for pghistory that extracts information from the request.""" - - def __call__(self, request: WSGIRequest) -> dict: - return { - "user": ( - str(request.user) - if request.user and not isinstance(request.user, AnonymousUser) - else None - ), - "ip": request.META.get("REMOTE_ADDR"), - "user_agent": request.META.get("HTTP_USER_AGENT"), - "session_key": ( - request.session.session_key if hasattr(request, "session") else None - ), - } - - -# Initialize the context provider -request_context = RequestContextProvider() - - -class PgHistoryContextMiddleware: - """ - Middleware that ensures request object is available to pghistory context. - """ - - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - response = self.get_response(request) - return response - - -class PageViewMiddleware(MiddlewareMixin): - """Middleware to track page views for DetailView-based pages.""" - - def process_view(self, request, view_func, view_args, view_kwargs): - # Only track GET requests - if request.method != "GET": - return None - - # Get view class if it exists - view_class = getattr(view_func, "view_class", None) - if not view_class or not issubclass(view_class, DetailView): - return None - - # Get the object if it's a detail view - try: - view_instance = view_class() - view_instance.request = request - view_instance.args = view_args - view_instance.kwargs = view_kwargs - obj = view_instance.get_object() - except (AttributeError, Exception): - return None - - # Record the page view - try: - PageView.objects.create( - content_type=ContentType.objects.get_for_model(obj.__class__), - object_id=obj.pk, - ip_address=request.META.get("REMOTE_ADDR", ""), - user_agent=request.META.get("HTTP_USER_AGENT", "")[:512], - ) - except Exception: - # Fail silently to not interrupt the request - pass - - return None diff --git a/core/middleware/performance_middleware.py b/core/middleware/performance_middleware.py deleted file mode 100644 index 09d8bccd..00000000 --- a/core/middleware/performance_middleware.py +++ /dev/null @@ -1,317 +0,0 @@ -""" -Performance monitoring middleware for tracking request metrics. -""" - -import time -import logging -from django.db import connection -from django.utils.deprecation import MiddlewareMixin -from django.conf import settings - -performance_logger = logging.getLogger("performance") -logger = logging.getLogger(__name__) - - -class PerformanceMiddleware(MiddlewareMixin): - """Middleware to collect performance metrics for each request""" - - def process_request(self, request): - """Initialize performance tracking for the request""" - request._performance_start_time = time.time() - request._performance_initial_queries = ( - len(connection.queries) if hasattr(connection, "queries") else 0 - ) - return None - - def process_response(self, request, response): - """Log performance metrics after response is ready""" - # Skip performance tracking for certain paths - skip_paths = [ - "/health/", - "/admin/jsi18n/", - "/static/", - "/media/", - "/__debug__/", - ] - if any(request.path.startswith(path) for path in skip_paths): - return response - - # Calculate metrics - end_time = time.time() - start_time = getattr(request, "_performance_start_time", end_time) - duration = end_time - start_time - - initial_queries = getattr(request, "_performance_initial_queries", 0) - total_queries = ( - len(connection.queries) - initial_queries - if hasattr(connection, "queries") - else 0 - ) - - # Get content length - content_length = 0 - if hasattr(response, "content"): - content_length = len(response.content) - elif hasattr(response, "streaming_content"): - # For streaming responses, we can't easily measure content length - content_length = -1 - - # Build performance data - performance_data = { - "path": request.path, - "method": request.method, - "status_code": response.status_code, - "duration_ms": round(duration * 1000, 2), - "duration_seconds": round(duration, 3), - "query_count": total_queries, - "content_length_bytes": content_length, - "user_id": ( - getattr(request.user, "id", None) - if hasattr(request, "user") and request.user.is_authenticated - else None - ), - "user_agent": request.META.get("HTTP_USER_AGENT", "")[ - :100 - ], # Truncate user agent - "remote_addr": self._get_client_ip(request), - } - - # Add query details in debug mode - if settings.DEBUG and hasattr(connection, "queries") and total_queries > 0: - recent_queries = connection.queries[-total_queries:] - performance_data["queries"] = [ - { - "sql": ( - query["sql"][:200] + "..." - if len(query["sql"]) > 200 - else query["sql"] - ), - "time": float(query["time"]), - } - for query in recent_queries[-10:] # Last 10 queries only - ] - - # Identify slow queries - slow_queries = [q for q in recent_queries if float(q["time"]) > 0.1] - if slow_queries: - performance_data["slow_query_count"] = len(slow_queries) - performance_data["slowest_query_time"] = max( - float(q["time"]) for q in slow_queries - ) - - # Determine log level based on performance - log_level = self._get_log_level(duration, total_queries, response.status_code) - - # Log the performance data - performance_logger.log( - log_level, - f"Request performance: {request.method} {request.path} - " - f"{duration:.3f}s, {total_queries} queries, {response.status_code}", - extra=performance_data, - ) - - # Add performance headers for debugging (only in debug mode) - if settings.DEBUG: - response["X-Response-Time"] = f"{duration * 1000:.2f}ms" - response["X-Query-Count"] = str(total_queries) - if total_queries > 0 and hasattr(connection, "queries"): - total_query_time = sum( - float(q["time"]) for q in connection.queries[-total_queries:] - ) - response["X-Query-Time"] = f"{total_query_time * 1000:.2f}ms" - - return response - - def process_exception(self, request, exception): - """Log performance data even when an exception occurs""" - end_time = time.time() - start_time = getattr(request, "_performance_start_time", end_time) - duration = end_time - start_time - - initial_queries = getattr(request, "_performance_initial_queries", 0) - total_queries = ( - len(connection.queries) - initial_queries - if hasattr(connection, "queries") - else 0 - ) - - performance_data = { - "path": request.path, - "method": request.method, - "status_code": 500, # Exception occurred - "duration_ms": round(duration * 1000, 2), - "query_count": total_queries, - "exception": str(exception), - "exception_type": type(exception).__name__, - "user_id": ( - getattr(request.user, "id", None) - if hasattr(request, "user") and request.user.is_authenticated - else None - ), - } - - performance_logger.error( - f"Request exception: { - request.method} { - request.path} - " - f"{ - duration:.3f}s, {total_queries} queries, { - type(exception).__name__}: {exception}", - extra=performance_data, - ) - - return None # Don't handle the exception, just log it - - def _get_client_ip(self, request): - """Extract client IP address from request""" - x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR") - if x_forwarded_for: - ip = x_forwarded_for.split(",")[0].strip() - else: - ip = request.META.get("REMOTE_ADDR", "") - return ip - - def _get_log_level(self, duration, query_count, status_code): - """Determine appropriate log level based on performance metrics""" - # Error responses - if status_code >= 500: - return logging.ERROR - elif status_code >= 400: - return logging.WARNING - - # Performance-based log levels - if duration > 5.0: # Very slow requests - return logging.ERROR - elif duration > 2.0 or query_count > 20: # Slow requests or high query count - return logging.WARNING - elif duration > 1.0 or query_count > 10: # Moderately slow - return logging.INFO - else: - return logging.DEBUG - - -class QueryCountMiddleware(MiddlewareMixin): - """Middleware to track and limit query counts per request""" - - def __init__(self, get_response): - self.get_response = get_response - self.query_limit = getattr(settings, "MAX_QUERIES_PER_REQUEST", 50) - super().__init__(get_response) - - def process_request(self, request): - """Initialize query tracking""" - request._query_count_start = ( - len(connection.queries) if hasattr(connection, "queries") else 0 - ) - return None - - def process_response(self, request, response): - """Check query count and warn if excessive""" - if not hasattr(connection, "queries"): - return response - - start_count = getattr(request, "_query_count_start", 0) - current_count = len(connection.queries) - request_query_count = current_count - start_count - - if request_query_count > self.query_limit: - logger.warning( - f"Excessive query count: { - request.path} executed {request_query_count} queries " - f"(limit: { - self.query_limit})", - extra={ - "path": request.path, - "method": request.method, - "query_count": request_query_count, - "query_limit": self.query_limit, - "excessive_queries": True, - }, - ) - - return response - - -class DatabaseConnectionMiddleware(MiddlewareMixin): - """Middleware to monitor database connection health""" - - def process_request(self, request): - """Check database connection at start of request""" - try: - # Simple connection test - from django.db import connection - - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - cursor.fetchone() - except Exception as e: - logger.error( - f"Database connection failed at request start: {e}", - extra={ - "path": request.path, - "method": request.method, - "database_error": str(e), - }, - ) - # Don't block the request, let Django handle the database error - - return None - - def process_response(self, request, response): - """Close database connections properly""" - try: - from django.db import connection - - connection.close() - except Exception as e: - logger.warning(f"Error closing database connection: {e}") - - return response - - -class CachePerformanceMiddleware(MiddlewareMixin): - """Middleware to monitor cache performance""" - - def process_request(self, request): - """Initialize cache performance tracking""" - request._cache_hits = 0 - request._cache_misses = 0 - request._cache_start_time = time.time() - return None - - def process_response(self, request, response): - """Log cache performance metrics""" - cache_duration = time.time() - getattr( - request, "_cache_start_time", time.time() - ) - cache_hits = getattr(request, "_cache_hits", 0) - cache_misses = getattr(request, "_cache_misses", 0) - - if cache_hits + cache_misses > 0: - hit_rate = (cache_hits / (cache_hits + cache_misses)) * 100 - - cache_data = { - "path": request.path, - "cache_hits": cache_hits, - "cache_misses": cache_misses, - "cache_hit_rate": round(hit_rate, 2), - "cache_operations": cache_hits + cache_misses, - # milliseconds - "cache_duration": round(cache_duration * 1000, 2), - } - - # Log cache performance - if hit_rate < 50 and cache_hits + cache_misses > 5: - logger.warning( - f"Low cache hit rate for {request.path}: {hit_rate:.1f}%", - extra=cache_data, - ) - else: - logger.debug( - f"Cache performance for { - request.path}: { - hit_rate:.1f}% hit rate", - extra=cache_data, - ) - - return response diff --git a/core/migrations/0001_initial.py b/core/migrations/0001_initial.py deleted file mode 100644 index 8ac879ad..00000000 --- a/core/migrations/0001_initial.py +++ /dev/null @@ -1,54 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-13 21:35 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("contenttypes", "0002_remove_content_type_name"), - ] - - operations = [ - migrations.CreateModel( - name="SlugHistory", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("object_id", models.CharField(max_length=50)), - ("old_slug", models.SlugField(max_length=200)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ], - options={ - "verbose_name_plural": "Slug histories", - "ordering": ["-created_at"], - "indexes": [ - models.Index( - fields=["content_type", "object_id"], - name="core_slughi_content_8bbf56_idx", - ), - models.Index( - fields=["old_slug"], - name="core_slughi_old_slu_aaef7f_idx", - ), - ], - }, - ), - ] diff --git a/core/migrations/0002_historicalslug_pageview.py b/core/migrations/0002_historicalslug_pageview.py deleted file mode 100644 index 7e882ddd..00000000 --- a/core/migrations/0002_historicalslug_pageview.py +++ /dev/null @@ -1,102 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-14 14:50 - -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("contenttypes", "0002_remove_content_type_name"), - ("core", "0001_initial"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="HistoricalSlug", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("object_id", models.PositiveIntegerField()), - ("slug", models.SlugField(max_length=255)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ( - "user", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="historical_slugs", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "indexes": [ - models.Index( - fields=["content_type", "object_id"], - name="core_histor_content_b4c470_idx", - ), - models.Index(fields=["slug"], name="core_histor_slug_8fd7b3_idx"), - ], - "unique_together": {("content_type", "slug")}, - }, - ), - migrations.CreateModel( - name="PageView", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("object_id", models.PositiveIntegerField()), - ( - "timestamp", - models.DateTimeField(auto_now_add=True, db_index=True), - ), - ("ip_address", models.GenericIPAddressField()), - ("user_agent", models.CharField(blank=True, max_length=512)), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="page_views", - to="contenttypes.contenttype", - ), - ), - ], - options={ - "indexes": [ - models.Index( - fields=["timestamp"], - name="core_pagevi_timesta_757ebb_idx", - ), - models.Index( - fields=["content_type", "object_id"], - name="core_pagevi_content_eda7ad_idx", - ), - ], - }, - ), - ] diff --git a/core/migrations/__init__.py b/core/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/core/mixins/__init__.py b/core/mixins/__init__.py deleted file mode 100644 index a5e72f20..00000000 --- a/core/mixins/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from django.views.generic.list import MultipleObjectMixin - - -class HTMXFilterableMixin(MultipleObjectMixin): - """ - A mixin that provides filtering capabilities for HTMX requests. - """ - - filter_class = None - - def get_queryset(self): - queryset = super().get_queryset() - self.filterset = self.filter_class(self.request.GET, queryset=queryset) - return self.filterset.qs - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context["filter"] = self.filterset - return context diff --git a/core/models.py b/core/models.py deleted file mode 100644 index 2ea5db02..00000000 --- a/core/models.py +++ /dev/null @@ -1,113 +0,0 @@ -from django.db import models -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.contenttypes.models import ContentType -from django.utils.text import slugify -from core.history import TrackedModel - - -class SlugHistory(models.Model): - """ - Model for tracking slug changes across all models that use slugs. - Uses generic relations to work with any model. - """ - - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.CharField( - max_length=50 - ) # Using CharField to work with our custom IDs - content_object = GenericForeignKey("content_type", "object_id") - - old_slug = models.SlugField(max_length=200) - created_at = models.DateTimeField(auto_now_add=True) - - class Meta: - indexes = [ - models.Index(fields=["content_type", "object_id"]), - models.Index(fields=["old_slug"]), - ] - verbose_name_plural = "Slug histories" - ordering = ["-created_at"] - - def __str__(self): - return f"Old slug '{self.old_slug}' for {self.content_object}" - - -class SluggedModel(TrackedModel): - """ - Abstract base model that provides slug functionality with history tracking. - """ - - name = models.CharField(max_length=200) - slug = models.SlugField(max_length=200, unique=True) - - class Meta: - abstract = True - - def save(self, *args, **kwargs): - # Get the current instance from DB if it exists - if self.pk: - try: - old_instance = self.__class__.objects.get(pk=self.pk) - # If slug has changed, save the old one to history - if old_instance.slug != self.slug: - SlugHistory.objects.create( - content_type=ContentType.objects.get_for_model(self), - object_id=getattr(self, self.get_id_field_name()), - old_slug=old_instance.slug, - ) - except self.__class__.DoesNotExist: - pass - - # Generate slug if not set - if not self.slug: - self.slug = slugify(self.name) - - super().save(*args, **kwargs) - - def get_id_field_name(self): - """ - Returns the name of the read-only ID field for this model. - Should be overridden by subclasses. - """ - raise NotImplementedError( - "Subclasses of SluggedModel must implement get_id_field_name()" - ) - - @classmethod - def get_by_slug(cls, slug): - """ - Get an object by its current or historical slug. - Returns (object, is_old_slug) tuple. - """ - try: - # Try to get by current slug first - return cls.objects.get(slug=slug), False - except cls.DoesNotExist: - # Check pghistory first - history_model = cls.get_history_model() - history_entry = ( - history_model.objects.filter(slug=slug) - .order_by("-pgh_created_at") - .first() - ) - - if history_entry: - return cls.objects.get(id=history_entry.pgh_obj_id), True - - # Try to find in manual slug history as fallback - history = ( - SlugHistory.objects.filter( - content_type=ContentType.objects.get_for_model(cls), - old_slug=slug, - ) - .order_by("-created_at") - .first() - ) - - if history: - return ( - cls.objects.get(**{cls.get_id_field_name(): history.object_id}), - True, - ) - - raise cls.DoesNotExist(f"{cls.__name__} with slug '{slug}' does not exist") diff --git a/core/selectors.py b/core/selectors.py deleted file mode 100644 index 6336084b..00000000 --- a/core/selectors.py +++ /dev/null @@ -1,322 +0,0 @@ -""" -Selectors for core functionality including map services and analytics. -Following Django styleguide pattern for separating data access from business logic. -""" - -from typing import Optional, Dict, Any, List -from django.db.models import QuerySet, Q, Count -from django.contrib.gis.geos import Point, Polygon -from django.contrib.gis.measure import Distance -from django.utils import timezone -from datetime import timedelta - -from .analytics import PageView -from parks.models import Park -from rides.models import Ride - - -def unified_locations_for_map( - *, - bounds: Optional[Polygon] = None, - location_types: Optional[List[str]] = None, - filters: Optional[Dict[str, Any]] = None, -) -> Dict[str, QuerySet]: - """ - Get unified location data for map display across all location types. - - Args: - bounds: Geographic boundary polygon - location_types: List of location types to include ('park', 'ride') - filters: Additional filter parameters - - Returns: - Dictionary containing querysets for each location type - """ - results = {} - - # Default to all location types if none specified - if not location_types: - location_types = ["park", "ride"] - - # Parks - if "park" in location_types: - park_queryset = ( - Park.objects.select_related("operator") - .prefetch_related("location") - .annotate(ride_count_calculated=Count("rides")) - ) - - if bounds: - park_queryset = park_queryset.filter(location__coordinates__within=bounds) - - if filters: - if "status" in filters: - park_queryset = park_queryset.filter(status=filters["status"]) - if "operator" in filters: - park_queryset = park_queryset.filter(operator=filters["operator"]) - - results["parks"] = park_queryset.order_by("name") - - # Rides - if "ride" in location_types: - ride_queryset = Ride.objects.select_related( - "park", "manufacturer" - ).prefetch_related("park__location", "location") - - if bounds: - ride_queryset = ride_queryset.filter( - Q(location__coordinates__within=bounds) - | Q(park__location__coordinates__within=bounds) - ) - - if filters: - if "category" in filters: - ride_queryset = ride_queryset.filter(category=filters["category"]) - if "manufacturer" in filters: - ride_queryset = ride_queryset.filter( - manufacturer=filters["manufacturer"] - ) - if "park" in filters: - ride_queryset = ride_queryset.filter(park=filters["park"]) - - results["rides"] = ride_queryset.order_by("park__name", "name") - - return results - - -def locations_near_point( - *, - point: Point, - distance_km: float = 50, - location_types: Optional[List[str]] = None, - limit: int = 20, -) -> Dict[str, QuerySet]: - """ - Get locations near a specific geographic point across all types. - - Args: - point: Geographic point (longitude, latitude) - distance_km: Maximum distance in kilometers - location_types: List of location types to include - limit: Maximum number of results per type - - Returns: - Dictionary containing nearby locations by type - """ - results = {} - - if not location_types: - location_types = ["park", "ride"] - - # Parks near point - if "park" in location_types: - results["parks"] = ( - Park.objects.filter( - location__coordinates__distance_lte=( - point, - Distance(km=distance_km), - ) - ) - .select_related("operator") - .prefetch_related("location") - .distance(point) - .order_by("distance")[:limit] - ) - - # Rides near point - if "ride" in location_types: - results["rides"] = ( - Ride.objects.filter( - Q( - location__coordinates__distance_lte=( - point, - Distance(km=distance_km), - ) - ) - | Q( - park__location__coordinates__distance_lte=( - point, - Distance(km=distance_km), - ) - ) - ) - .select_related("park", "manufacturer") - .prefetch_related("park__location") - .distance(point) - .order_by("distance")[:limit] - ) - - return results - - -def search_all_locations(*, query: str, limit: int = 20) -> Dict[str, QuerySet]: - """ - Search across all location types for a query string. - - Args: - query: Search string - limit: Maximum results per type - - Returns: - Dictionary containing search results by type - """ - results = {} - - # Search parks - results["parks"] = ( - Park.objects.filter( - Q(name__icontains=query) - | Q(description__icontains=query) - | Q(location__city__icontains=query) - | Q(location__region__icontains=query) - ) - .select_related("operator") - .prefetch_related("location") - .order_by("name")[:limit] - ) - - # Search rides - results["rides"] = ( - Ride.objects.filter( - Q(name__icontains=query) - | Q(description__icontains=query) - | Q(park__name__icontains=query) - | Q(manufacturer__name__icontains=query) - ) - .select_related("park", "manufacturer") - .prefetch_related("park__location") - .order_by("park__name", "name")[:limit] - ) - - return results - - -def page_views_for_analytics( - *, - start_date: Optional[timezone.datetime] = None, - end_date: Optional[timezone.datetime] = None, - path_pattern: Optional[str] = None, -) -> QuerySet[PageView]: - """ - Get page views for analytics with optional filtering. - - Args: - start_date: Start date for filtering - end_date: End date for filtering - path_pattern: URL path pattern to filter by - - Returns: - QuerySet of page views - """ - queryset = PageView.objects.all() - - if start_date: - queryset = queryset.filter(timestamp__gte=start_date) - - if end_date: - queryset = queryset.filter(timestamp__lte=end_date) - - if path_pattern: - queryset = queryset.filter(path__icontains=path_pattern) - - return queryset.order_by("-timestamp") - - -def popular_pages_summary(*, days: int = 30) -> Dict[str, Any]: - """ - Get summary of most popular pages in the last N days. - - Args: - days: Number of days to analyze - - Returns: - Dictionary containing popular pages statistics - """ - cutoff_date = timezone.now() - timedelta(days=days) - - # Most viewed pages - popular_pages = ( - PageView.objects.filter(timestamp__gte=cutoff_date) - .values("path") - .annotate(view_count=Count("id")) - .order_by("-view_count")[:10] - ) - - # Total page views - total_views = PageView.objects.filter(timestamp__gte=cutoff_date).count() - - # Unique visitors (based on IP) - unique_visitors = ( - PageView.objects.filter(timestamp__gte=cutoff_date) - .values("ip_address") - .distinct() - .count() - ) - - return { - "popular_pages": list(popular_pages), - "total_views": total_views, - "unique_visitors": unique_visitors, - "period_days": days, - } - - -def geographic_distribution_summary() -> Dict[str, Any]: - """ - Get geographic distribution statistics for all locations. - - Returns: - Dictionary containing geographic statistics - """ - # Parks by country - parks_by_country = ( - Park.objects.filter(location__country__isnull=False) - .values("location__country") - .annotate(count=Count("id")) - .order_by("-count") - ) - - # Rides by country (through park location) - rides_by_country = ( - Ride.objects.filter(park__location__country__isnull=False) - .values("park__location__country") - .annotate(count=Count("id")) - .order_by("-count") - ) - - return { - "parks_by_country": list(parks_by_country), - "rides_by_country": list(rides_by_country), - } - - -def system_health_metrics() -> Dict[str, Any]: - """ - Get system health and activity metrics. - - Returns: - Dictionary containing system health statistics - """ - now = timezone.now() - last_24h = now - timedelta(hours=24) - last_7d = now - timedelta(days=7) - - return { - "total_parks": Park.objects.count(), - "operating_parks": Park.objects.filter(status="OPERATING").count(), - "total_rides": Ride.objects.count(), - "page_views_24h": PageView.objects.filter(timestamp__gte=last_24h).count(), - "page_views_7d": PageView.objects.filter(timestamp__gte=last_7d).count(), - "data_freshness": { - "latest_park_update": ( - Park.objects.order_by("-updated_at").first().updated_at - if Park.objects.exists() - else None - ), - "latest_ride_update": ( - Ride.objects.order_by("-updated_at").first().updated_at - if Ride.objects.exists() - else None - ), - }, - } diff --git a/core/services/__init__.py b/core/services/__init__.py deleted file mode 100644 index 92207b6e..00000000 --- a/core/services/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Core services for ThrillWiki unified map functionality. -""" - -from .map_service import UnifiedMapService -from .clustering_service import ClusteringService -from .map_cache_service import MapCacheService -from .data_structures import ( - UnifiedLocation, - LocationType, - GeoBounds, - MapFilters, - MapResponse, - ClusterData, -) - -__all__ = [ - "UnifiedMapService", - "ClusteringService", - "MapCacheService", - "UnifiedLocation", - "LocationType", - "GeoBounds", - "MapFilters", - "MapResponse", - "ClusterData", -] diff --git a/core/services/clustering_service.py b/core/services/clustering_service.py deleted file mode 100644 index 03dc59c0..00000000 --- a/core/services/clustering_service.py +++ /dev/null @@ -1,365 +0,0 @@ -""" -Clustering service for map locations to improve performance and user experience. -""" - -import math -from typing import List, Tuple, Dict, Any, Optional -from dataclasses import dataclass -from collections import defaultdict - -from .data_structures import ( - UnifiedLocation, - ClusterData, - GeoBounds, - LocationType, -) - - -@dataclass -class ClusterPoint: - """Internal representation of a point for clustering.""" - - location: UnifiedLocation - x: float # Projected x coordinate - y: float # Projected y coordinate - - -class ClusteringService: - """ - Handles location clustering for map display using a simple grid-based approach - with zoom-level dependent clustering radius. - """ - - # Clustering configuration - DEFAULT_RADIUS = 40 # pixels - MIN_POINTS_TO_CLUSTER = 2 - MAX_ZOOM_FOR_CLUSTERING = 15 - MIN_ZOOM_FOR_CLUSTERING = 3 - - # Zoom level configurations - ZOOM_CONFIGS = { - 3: {"radius": 80, "min_points": 5}, # World level - 4: {"radius": 70, "min_points": 4}, # Continent level - 5: {"radius": 60, "min_points": 3}, # Country level - 6: {"radius": 50, "min_points": 3}, # Large region level - 7: {"radius": 45, "min_points": 2}, # Region level - 8: {"radius": 40, "min_points": 2}, # State level - 9: {"radius": 35, "min_points": 2}, # Metro area level - 10: {"radius": 30, "min_points": 2}, # City level - 11: {"radius": 25, "min_points": 2}, # District level - 12: {"radius": 20, "min_points": 2}, # Neighborhood level - 13: {"radius": 15, "min_points": 2}, # Block level - 14: {"radius": 10, "min_points": 2}, # Street level - 15: {"radius": 5, "min_points": 2}, # Building level - } - - def __init__(self): - self.cluster_id_counter = 0 - - def should_cluster(self, zoom_level: int, point_count: int) -> bool: - """Determine if clustering should be applied based on zoom level and point count.""" - if zoom_level > self.MAX_ZOOM_FOR_CLUSTERING: - return False - if zoom_level < self.MIN_ZOOM_FOR_CLUSTERING: - return True - - config = self.ZOOM_CONFIGS.get( - zoom_level, {"min_points": self.MIN_POINTS_TO_CLUSTER} - ) - return point_count >= config["min_points"] - - def cluster_locations( - self, - locations: List[UnifiedLocation], - zoom_level: int, - bounds: Optional[GeoBounds] = None, - ) -> Tuple[List[UnifiedLocation], List[ClusterData]]: - """ - Cluster locations based on zoom level and density. - Returns (unclustered_locations, clusters). - """ - if not locations or not self.should_cluster(zoom_level, len(locations)): - return locations, [] - - # Convert locations to projected coordinates for clustering - cluster_points = self._project_locations(locations, bounds) - - # Get clustering configuration for zoom level - config = self.ZOOM_CONFIGS.get( - zoom_level, - { - "radius": self.DEFAULT_RADIUS, - "min_points": self.MIN_POINTS_TO_CLUSTER, - }, - ) - - # Perform clustering - clustered_groups = self._cluster_points( - cluster_points, config["radius"], config["min_points"] - ) - - # Separate individual locations from clusters - unclustered_locations = [] - clusters = [] - - for group in clustered_groups: - if len(group) < config["min_points"]: - # Add individual locations - unclustered_locations.extend([cp.location for cp in group]) - else: - # Create cluster - cluster = self._create_cluster(group) - clusters.append(cluster) - - return unclustered_locations, clusters - - def _project_locations( - self, - locations: List[UnifiedLocation], - bounds: Optional[GeoBounds] = None, - ) -> List[ClusterPoint]: - """Convert lat/lng coordinates to projected x/y for clustering calculations.""" - cluster_points = [] - - # Use bounds or calculate from locations - if not bounds: - lats = [loc.latitude for loc in locations] - lngs = [loc.longitude for loc in locations] - bounds = GeoBounds( - north=max(lats), - south=min(lats), - east=max(lngs), - west=min(lngs), - ) - - # Simple equirectangular projection (good enough for clustering) - center_lat = (bounds.north + bounds.south) / 2 - lat_scale = 111320 # meters per degree latitude - lng_scale = 111320 * math.cos( - math.radians(center_lat) - ) # meters per degree longitude - - for location in locations: - # Convert to meters relative to bounds center - x = (location.longitude - (bounds.west + bounds.east) / 2) * lng_scale - y = (location.latitude - (bounds.north + bounds.south) / 2) * lat_scale - - cluster_points.append(ClusterPoint(location=location, x=x, y=y)) - - return cluster_points - - def _cluster_points( - self, points: List[ClusterPoint], radius_pixels: int, min_points: int - ) -> List[List[ClusterPoint]]: - """ - Cluster points using a simple distance-based approach. - Radius is in pixels, converted to meters based on zoom level. - """ - # Convert pixel radius to meters (rough approximation) - # At zoom level 10, 1 pixel ≈ 150 meters - radius_meters = radius_pixels * 150 - - clustered = [False] * len(points) - clusters = [] - - for i, point in enumerate(points): - if clustered[i]: - continue - - # Find all points within radius - cluster_group = [point] - clustered[i] = True - - for j, other_point in enumerate(points): - if i == j or clustered[j]: - continue - - distance = self._calculate_distance(point, other_point) - if distance <= radius_meters: - cluster_group.append(other_point) - clustered[j] = True - - clusters.append(cluster_group) - - return clusters - - def _calculate_distance(self, point1: ClusterPoint, point2: ClusterPoint) -> float: - """Calculate Euclidean distance between two projected points in meters.""" - dx = point1.x - point2.x - dy = point1.y - point2.y - return math.sqrt(dx * dx + dy * dy) - - def _create_cluster(self, cluster_points: List[ClusterPoint]) -> ClusterData: - """Create a ClusterData object from a group of points.""" - locations = [cp.location for cp in cluster_points] - - # Calculate cluster center (average position) - avg_lat = sum(loc.latitude for loc in locations) / len(locations) - avg_lng = sum(loc.longitude for loc in locations) / len(locations) - - # Calculate cluster bounds - lats = [loc.latitude for loc in locations] - lngs = [loc.longitude for loc in locations] - cluster_bounds = GeoBounds( - north=max(lats), south=min(lats), east=max(lngs), west=min(lngs) - ) - - # Collect location types in cluster - types = set(loc.type for loc in locations) - - # Select representative location (highest weight) - representative = self._select_representative_location(locations) - - # Generate cluster ID - self.cluster_id_counter += 1 - cluster_id = f"cluster_{self.cluster_id_counter}" - - return ClusterData( - id=cluster_id, - coordinates=(avg_lat, avg_lng), - count=len(locations), - types=types, - bounds=cluster_bounds, - representative_location=representative, - ) - - def _select_representative_location( - self, locations: List[UnifiedLocation] - ) -> Optional[UnifiedLocation]: - """Select the most representative location for a cluster.""" - if not locations: - return None - - # Prioritize by: 1) Parks over rides/companies, 2) Higher weight, 3) - # Better rating - parks = [loc for loc in locations if loc.type == LocationType.PARK] - if parks: - return max( - parks, - key=lambda x: ( - x.cluster_weight, - x.metadata.get("rating", 0) or 0, - ), - ) - - rides = [loc for loc in locations if loc.type == LocationType.RIDE] - if rides: - return max( - rides, - key=lambda x: ( - x.cluster_weight, - x.metadata.get("rating", 0) or 0, - ), - ) - - companies = [loc for loc in locations if loc.type == LocationType.COMPANY] - if companies: - return max(companies, key=lambda x: x.cluster_weight) - - # Fall back to highest weight location - return max(locations, key=lambda x: x.cluster_weight) - - def get_cluster_breakdown(self, clusters: List[ClusterData]) -> Dict[str, Any]: - """Get statistics about clustering results.""" - if not clusters: - return { - "total_clusters": 0, - "total_points_clustered": 0, - "average_cluster_size": 0, - "type_distribution": {}, - "category_distribution": {}, - } - - total_points = sum(cluster.count for cluster in clusters) - type_counts = defaultdict(int) - category_counts = defaultdict(int) - - for cluster in clusters: - for location_type in cluster.types: - type_counts[location_type.value] += cluster.count - - if cluster.representative_location: - category_counts[cluster.representative_location.cluster_category] += 1 - - return { - "total_clusters": len(clusters), - "total_points_clustered": total_points, - "average_cluster_size": total_points / len(clusters), - "largest_cluster_size": max(cluster.count for cluster in clusters), - "smallest_cluster_size": min(cluster.count for cluster in clusters), - "type_distribution": dict(type_counts), - "category_distribution": dict(category_counts), - } - - def expand_cluster( - self, cluster: ClusterData, zoom_level: int - ) -> List[UnifiedLocation]: - """ - Expand a cluster to show individual locations (for drill-down functionality). - This would typically require re-querying the database with the cluster bounds. - """ - # This is a placeholder - in practice, this would re-query the database - # with the cluster bounds and higher detail level - return [] - - -class SmartClusteringRules: - """ - Advanced clustering rules that consider location types and importance. - """ - - @staticmethod - def should_cluster_together(loc1: UnifiedLocation, loc2: UnifiedLocation) -> bool: - """Determine if two locations should be clustered together.""" - - # Same park rides should cluster together more readily - if loc1.type == LocationType.RIDE and loc2.type == LocationType.RIDE: - park1_id = loc1.metadata.get("park_id") - park2_id = loc2.metadata.get("park_id") - if park1_id and park2_id and park1_id == park2_id: - return True - - # Major parks should resist clustering unless very close - if ( - loc1.cluster_category == "major_park" - or loc2.cluster_category == "major_park" - ): - return False - - # Similar types cluster more readily - if loc1.type == loc2.type: - return True - - # Different types can cluster but with higher threshold - return False - - @staticmethod - def calculate_cluster_priority( - locations: List[UnifiedLocation], - ) -> UnifiedLocation: - """Select the representative location for a cluster based on priority rules.""" - # Prioritize by: 1) Parks over rides, 2) Higher weight, 3) Better - # rating - parks = [loc for loc in locations if loc.type == LocationType.PARK] - if parks: - return max( - parks, - key=lambda x: ( - x.cluster_weight, - x.metadata.get("rating", 0) or 0, - x.metadata.get("ride_count", 0) or 0, - ), - ) - - rides = [loc for loc in locations if loc.type == LocationType.RIDE] - if rides: - return max( - rides, - key=lambda x: ( - x.cluster_weight, - x.metadata.get("rating", 0) or 0, - ), - ) - - # Fall back to highest weight - return max(locations, key=lambda x: x.cluster_weight) diff --git a/core/services/data_structures.py b/core/services/data_structures.py deleted file mode 100644 index 9b60710c..00000000 --- a/core/services/data_structures.py +++ /dev/null @@ -1,253 +0,0 @@ -""" -Data structures for the unified map service. -""" - -from dataclasses import dataclass, field -from enum import Enum -from typing import Dict, List, Optional, Set, Tuple, Any -from django.contrib.gis.geos import Polygon - - -class LocationType(Enum): - """Types of locations supported by the map service.""" - - PARK = "park" - RIDE = "ride" - COMPANY = "company" - GENERIC = "generic" - - -@dataclass -class GeoBounds: - """Geographic boundary box for spatial queries.""" - - north: float - south: float - east: float - west: float - - def __post_init__(self): - """Validate bounds after initialization.""" - if self.north < self.south: - raise ValueError("North bound must be greater than south bound") - if self.east < self.west: - raise ValueError("East bound must be greater than west bound") - if not (-90 <= self.south <= 90 and -90 <= self.north <= 90): - raise ValueError("Latitude bounds must be between -90 and 90") - if not (-180 <= self.west <= 180 and -180 <= self.east <= 180): - raise ValueError("Longitude bounds must be between -180 and 180") - - def to_polygon(self) -> Polygon: - """Convert bounds to PostGIS Polygon for database queries.""" - return Polygon.from_bbox((self.west, self.south, self.east, self.north)) - - def expand(self, factor: float = 1.1) -> "GeoBounds": - """Expand bounds by factor for buffer queries.""" - center_lat = (self.north + self.south) / 2 - center_lng = (self.east + self.west) / 2 - - lat_range = (self.north - self.south) * factor / 2 - lng_range = (self.east - self.west) * factor / 2 - - return GeoBounds( - north=min(90, center_lat + lat_range), - south=max(-90, center_lat - lat_range), - east=min(180, center_lng + lng_range), - west=max(-180, center_lng - lng_range), - ) - - def contains_point(self, lat: float, lng: float) -> bool: - """Check if a point is within these bounds.""" - return self.south <= lat <= self.north and self.west <= lng <= self.east - - def to_dict(self) -> Dict[str, float]: - """Convert to dictionary for JSON serialization.""" - return { - "north": self.north, - "south": self.south, - "east": self.east, - "west": self.west, - } - - -@dataclass -class MapFilters: - """Filtering options for map queries.""" - - location_types: Optional[Set[LocationType]] = None - park_status: Optional[Set[str]] = None # OPERATING, CLOSED_TEMP, etc. - ride_types: Optional[Set[str]] = None - company_roles: Optional[Set[str]] = None # OPERATOR, MANUFACTURER, etc. - search_query: Optional[str] = None - min_rating: Optional[float] = None - has_coordinates: bool = True - country: Optional[str] = None - state: Optional[str] = None - city: Optional[str] = None - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for caching and serialization.""" - return { - "location_types": ( - [t.value for t in self.location_types] if self.location_types else None - ), - "park_status": (list(self.park_status) if self.park_status else None), - "ride_types": list(self.ride_types) if self.ride_types else None, - "company_roles": (list(self.company_roles) if self.company_roles else None), - "search_query": self.search_query, - "min_rating": self.min_rating, - "has_coordinates": self.has_coordinates, - "country": self.country, - "state": self.state, - "city": self.city, - } - - -@dataclass -class UnifiedLocation: - """Unified location interface for all location types.""" - - id: str # Composite: f"{type}_{id}" - type: LocationType - name: str - coordinates: Tuple[float, float] # (lat, lng) - address: Optional[str] = None - metadata: Dict[str, Any] = field(default_factory=dict) - type_data: Dict[str, Any] = field(default_factory=dict) - cluster_weight: int = 1 - cluster_category: str = "default" - - @property - def latitude(self) -> float: - """Get latitude from coordinates.""" - return self.coordinates[0] - - @property - def longitude(self) -> float: - """Get longitude from coordinates.""" - return self.coordinates[1] - - def to_geojson_feature(self) -> Dict[str, Any]: - """Convert to GeoJSON feature for mapping libraries.""" - return { - "type": "Feature", - "properties": { - "id": self.id, - "type": self.type.value, - "name": self.name, - "address": self.address, - "metadata": self.metadata, - "type_data": self.type_data, - "cluster_weight": self.cluster_weight, - "cluster_category": self.cluster_category, - }, - "geometry": { - "type": "Point", - # GeoJSON uses lng, lat - "coordinates": [self.longitude, self.latitude], - }, - } - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON responses.""" - return { - "id": self.id, - "type": self.type.value, - "name": self.name, - "coordinates": list(self.coordinates), - "address": self.address, - "metadata": self.metadata, - "type_data": self.type_data, - "cluster_weight": self.cluster_weight, - "cluster_category": self.cluster_category, - } - - -@dataclass -class ClusterData: - """Represents a cluster of locations for map display.""" - - id: str - coordinates: Tuple[float, float] # (lat, lng) - count: int - types: Set[LocationType] - bounds: GeoBounds - representative_location: Optional[UnifiedLocation] = None - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON responses.""" - return { - "id": self.id, - "coordinates": list(self.coordinates), - "count": self.count, - "types": [t.value for t in self.types], - "bounds": self.bounds.to_dict(), - "representative": ( - self.representative_location.to_dict() - if self.representative_location - else None - ), - } - - -@dataclass -class MapResponse: - """Response structure for map API calls.""" - - locations: List[UnifiedLocation] = field(default_factory=list) - clusters: List[ClusterData] = field(default_factory=list) - bounds: Optional[GeoBounds] = None - total_count: int = 0 - filtered_count: int = 0 - zoom_level: Optional[int] = None - clustered: bool = False - cache_hit: bool = False - query_time_ms: Optional[int] = None - filters_applied: List[str] = field(default_factory=list) - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON responses.""" - return { - "status": "success", - "data": { - "locations": [loc.to_dict() for loc in self.locations], - "clusters": [cluster.to_dict() for cluster in self.clusters], - "bounds": self.bounds.to_dict() if self.bounds else None, - "total_count": self.total_count, - "filtered_count": self.filtered_count, - "zoom_level": self.zoom_level, - "clustered": self.clustered, - }, - "meta": { - "cache_hit": self.cache_hit, - "query_time_ms": self.query_time_ms, - "filters_applied": self.filters_applied, - "pagination": { - "has_more": False, # TODO: Implement pagination - "total_pages": 1, - }, - }, - } - - -@dataclass -class QueryPerformanceMetrics: - """Performance metrics for query optimization.""" - - query_time_ms: int - db_query_count: int - cache_hit: bool - result_count: int - bounds_used: bool - clustering_used: bool - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for logging.""" - return { - "query_time_ms": self.query_time_ms, - "db_query_count": self.db_query_count, - "cache_hit": self.cache_hit, - "result_count": self.result_count, - "bounds_used": self.bounds_used, - "clustering_used": self.clustering_used, - } diff --git a/core/services/enhanced_cache_service.py b/core/services/enhanced_cache_service.py deleted file mode 100644 index 874e4bac..00000000 --- a/core/services/enhanced_cache_service.py +++ /dev/null @@ -1,320 +0,0 @@ -""" -Enhanced caching service with multiple cache backends and strategies. -""" - -from typing import Optional, Any, Dict, Callable -from django.core.cache import caches -import hashlib -import json -import logging -import time -from functools import wraps - -logger = logging.getLogger(__name__) - - -# Define GeoBounds for type hinting -class GeoBounds: - def __init__(self, min_lat: float, min_lng: float, max_lat: float, max_lng: float): - self.min_lat = min_lat - self.min_lng = min_lng - self.max_lat = max_lat - self.max_lng = max_lng - - -class EnhancedCacheService: - """Comprehensive caching service with multiple cache backends""" - - def __init__(self): - self.default_cache = caches["default"] - try: - self.api_cache = caches["api"] - except Exception: - # Fallback to default cache if api cache not configured - self.api_cache = self.default_cache - - # L1: Query-level caching - def cache_queryset( - self, - cache_key: str, - queryset_func: Callable, - timeout: int = 3600, - **kwargs, - ) -> Any: - """Cache expensive querysets""" - cached_result = self.default_cache.get(cache_key) - if cached_result is None: - start_time = time.time() - result = queryset_func(**kwargs) - duration = time.time() - start_time - - # Log cache miss and function execution time - logger.info( - f"Cache miss for key '{cache_key}', executed in { - duration:.3f}s", - extra={"cache_key": cache_key, "execution_time": duration}, - ) - - self.default_cache.set(cache_key, result, timeout) - return result - - logger.debug(f"Cache hit for key '{cache_key}'") - return cached_result - - # L2: API response caching - def cache_api_response( - self, - view_name: str, - params: Dict, - response_data: Any, - timeout: int = 1800, - ): - """Cache API responses based on view and parameters""" - cache_key = self._generate_api_cache_key(view_name, params) - self.api_cache.set(cache_key, response_data, timeout) - logger.debug(f"Cached API response for view '{view_name}'") - - def get_cached_api_response(self, view_name: str, params: Dict) -> Optional[Any]: - """Retrieve cached API response""" - cache_key = self._generate_api_cache_key(view_name, params) - result = self.api_cache.get(cache_key) - - if result: - logger.debug(f"Cache hit for API view '{view_name}'") - else: - logger.debug(f"Cache miss for API view '{view_name}'") - - return result - - # L3: Geographic caching (building on existing MapCacheService) - def cache_geographic_data( - self, - bounds: "GeoBounds", - data: Any, - zoom_level: int, - timeout: int = 1800, - ): - """Cache geographic data with spatial keys""" - # Generate spatial cache key based on bounds and zoom level - cache_key = f"geo:{ - bounds.min_lat}:{ - bounds.min_lng}:{ - bounds.max_lat}:{ - bounds.max_lng}:z{zoom_level}" - self.default_cache.set(cache_key, data, timeout) - logger.debug(f"Cached geographic data for bounds {bounds}") - - def get_cached_geographic_data( - self, bounds: "GeoBounds", zoom_level: int - ) -> Optional[Any]: - """Retrieve cached geographic data""" - cache_key = f"geo:{ - bounds.min_lat}:{ - bounds.min_lng}:{ - bounds.max_lat}:{ - bounds.max_lng}:z{zoom_level}" - return self.default_cache.get(cache_key) - - # Cache invalidation utilities - def invalidate_pattern(self, pattern: str): - """Invalidate cache keys matching a pattern (if backend supports it)""" - try: - # For Redis cache backends - if hasattr(self.default_cache, "delete_pattern"): - deleted_count = self.default_cache.delete_pattern(pattern) - logger.info( - f"Invalidated {deleted_count} cache keys matching pattern '{pattern}'" - ) - return deleted_count - else: - logger.warning( - f"Cache backend does not support pattern deletion for pattern '{pattern}'" - ) - except Exception as e: - logger.error(f"Error invalidating cache pattern '{pattern}': {e}") - - def invalidate_model_cache( - self, model_name: str, instance_id: Optional[int] = None - ): - """Invalidate cache keys related to a specific model""" - if instance_id: - pattern = f"*{model_name}:{instance_id}*" - else: - pattern = f"*{model_name}*" - - self.invalidate_pattern(pattern) - - # Cache warming utilities - def warm_cache( - self, - cache_key: str, - warm_func: Callable, - timeout: int = 3600, - **kwargs, - ): - """Proactively warm cache with data""" - try: - data = warm_func(**kwargs) - self.default_cache.set(cache_key, data, timeout) - logger.info(f"Warmed cache for key '{cache_key}'") - except Exception as e: - logger.error(f"Error warming cache for key '{cache_key}': {e}") - - def _generate_api_cache_key(self, view_name: str, params: Dict) -> str: - """Generate consistent cache keys for API responses""" - # Sort params to ensure consistent key generation - params_str = json.dumps(params, sort_keys=True, default=str) - params_hash = hashlib.md5(params_str.encode()).hexdigest() - return f"api:{view_name}:{params_hash}" - - -# Cache decorators -def cache_api_response(timeout=1800, vary_on=None, key_prefix=""): - """Decorator for caching API responses""" - - def decorator(view_func): - @wraps(view_func) - def wrapper(self, request, *args, **kwargs): - if request.method != "GET": - return view_func(self, request, *args, **kwargs) - - # Generate cache key based on view, user, and parameters - cache_key_parts = [ - key_prefix or view_func.__name__, - ( - str(request.user.id) - if request.user.is_authenticated - else "anonymous" - ), - str(hash(frozenset(request.GET.items()))), - ] - - if vary_on: - for field in vary_on: - cache_key_parts.append(str(getattr(request, field, ""))) - - cache_key = ":".join(cache_key_parts) - - # Try to get from cache - cache_service = EnhancedCacheService() - cached_response = cache_service.api_cache.get(cache_key) - if cached_response: - logger.debug(f"Cache hit for API view {view_func.__name__}") - return cached_response - - # Execute view and cache result - response = view_func(self, request, *args, **kwargs) - if hasattr(response, "status_code") and response.status_code == 200: - cache_service.api_cache.set(cache_key, response, timeout) - logger.debug( - f"Cached API response for view { - view_func.__name__}" - ) - - return response - - return wrapper - - return decorator - - -def cache_queryset_result(cache_key_template: str, timeout: int = 3600): - """Decorator for caching queryset results""" - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - # Generate cache key from template and arguments - cache_key = cache_key_template.format(*args, **kwargs) - - cache_service = EnhancedCacheService() - return cache_service.cache_queryset( - cache_key, func, timeout, *args, **kwargs - ) - - return wrapper - - return decorator - - -# Context manager for cache warming -class CacheWarmer: - """Context manager for batch cache warming operations""" - - def __init__(self): - self.cache_service = EnhancedCacheService() - self.warm_operations = [] - - def add( - self, - cache_key: str, - warm_func: Callable, - timeout: int = 3600, - **kwargs, - ): - """Add a cache warming operation to the batch""" - self.warm_operations.append( - { - "cache_key": cache_key, - "warm_func": warm_func, - "timeout": timeout, - "kwargs": kwargs, - } - ) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """Execute all cache warming operations""" - logger.info(f"Warming {len(self.warm_operations)} cache entries") - - for operation in self.warm_operations: - try: - self.cache_service.warm_cache(**operation) - except Exception as e: - logger.error( - f"Error warming cache for { - operation['cache_key']}: {e}" - ) - - -# Cache statistics and monitoring -class CacheMonitor: - """Monitor cache performance and statistics""" - - def __init__(self): - self.cache_service = EnhancedCacheService() - - def get_cache_stats(self) -> Dict[str, Any]: - """Get cache statistics if available""" - stats = {} - - try: - # Redis cache stats - if hasattr(self.cache_service.default_cache, "_cache"): - redis_client = self.cache_service.default_cache._cache.get_client() - info = redis_client.info() - stats["redis"] = { - "used_memory": info.get("used_memory_human"), - "connected_clients": info.get("connected_clients"), - "total_commands_processed": info.get("total_commands_processed"), - "keyspace_hits": info.get("keyspace_hits"), - "keyspace_misses": info.get("keyspace_misses"), - } - - # Calculate hit rate - hits = info.get("keyspace_hits", 0) - misses = info.get("keyspace_misses", 0) - if hits + misses > 0: - stats["redis"]["hit_rate"] = hits / (hits + misses) * 100 - except Exception as e: - logger.error(f"Error getting cache stats: {e}") - - return stats - - def log_cache_performance(self): - """Log cache performance metrics""" - stats = self.get_cache_stats() - if stats: - logger.info("Cache performance statistics", extra=stats) diff --git a/core/services/location_adapters.py b/core/services/location_adapters.py deleted file mode 100644 index 64c84eed..00000000 --- a/core/services/location_adapters.py +++ /dev/null @@ -1,479 +0,0 @@ -""" -Location adapters for converting between domain-specific models and UnifiedLocation. -""" - -from django.db import models -from typing import List, Optional -from django.db.models import QuerySet -from django.urls import reverse - -from .data_structures import ( - UnifiedLocation, - LocationType, - GeoBounds, - MapFilters, -) -from parks.models import ParkLocation, CompanyHeadquarters -from rides.models import RideLocation -from location.models import Location - - -class BaseLocationAdapter: - """Base adapter class for location conversions.""" - - def to_unified_location(self, location_obj) -> Optional[UnifiedLocation]: - """Convert model instance to UnifiedLocation.""" - raise NotImplementedError - - def get_queryset( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> QuerySet: - """Get optimized queryset for this location type.""" - raise NotImplementedError - - def bulk_convert(self, queryset: QuerySet) -> List[UnifiedLocation]: - """Convert multiple location objects efficiently.""" - unified_locations = [] - for obj in queryset: - unified_loc = self.to_unified_location(obj) - if unified_loc: - unified_locations.append(unified_loc) - return unified_locations - - -class ParkLocationAdapter(BaseLocationAdapter): - """Converts Park/ParkLocation to UnifiedLocation.""" - - def to_unified_location( - self, park_location: ParkLocation - ) -> Optional[UnifiedLocation]: - """Convert ParkLocation to UnifiedLocation.""" - if not park_location.point: - return None - - park = park_location.park - - return UnifiedLocation( - id=f"park_{park.id}", - type=LocationType.PARK, - name=park.name, - coordinates=(park_location.latitude, park_location.longitude), - address=park_location.formatted_address, - metadata={ - "status": getattr(park, "status", "UNKNOWN"), - "rating": ( - float(park.average_rating) - if hasattr(park, "average_rating") and park.average_rating - else None - ), - "ride_count": getattr(park, "ride_count", 0), - "coaster_count": getattr(park, "coaster_count", 0), - "operator": ( - park.operator.name - if hasattr(park, "operator") and park.operator - else None - ), - "city": park_location.city, - "state": park_location.state, - "country": park_location.country, - }, - type_data={ - "slug": park.slug, - "opening_date": ( - park.opening_date.isoformat() - if hasattr(park, "opening_date") and park.opening_date - else None - ), - "website": getattr(park, "website", ""), - "operating_season": getattr(park, "operating_season", ""), - "highway_exit": park_location.highway_exit, - "parking_notes": park_location.parking_notes, - "best_arrival_time": ( - park_location.best_arrival_time.strftime("%H:%M") - if park_location.best_arrival_time - else None - ), - "seasonal_notes": park_location.seasonal_notes, - "url": self._get_park_url(park), - }, - cluster_weight=self._calculate_park_weight(park), - cluster_category=self._get_park_category(park), - ) - - def get_queryset( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> QuerySet: - """Get optimized queryset for park locations.""" - queryset = ParkLocation.objects.select_related("park", "park__operator").filter( - point__isnull=False - ) - - # Spatial filtering - if bounds: - queryset = queryset.filter(point__within=bounds.to_polygon()) - - # Park-specific filters - if filters: - if filters.park_status: - queryset = queryset.filter(park__status__in=filters.park_status) - if filters.search_query: - queryset = queryset.filter(park__name__icontains=filters.search_query) - if filters.country: - queryset = queryset.filter(country=filters.country) - if filters.state: - queryset = queryset.filter(state=filters.state) - if filters.city: - queryset = queryset.filter(city=filters.city) - - return queryset.order_by("park__name") - - def _calculate_park_weight(self, park) -> int: - """Calculate clustering weight based on park importance.""" - weight = 1 - if hasattr(park, "ride_count") and park.ride_count and park.ride_count > 20: - weight += 2 - if ( - hasattr(park, "coaster_count") - and park.coaster_count - and park.coaster_count > 5 - ): - weight += 1 - if ( - hasattr(park, "average_rating") - and park.average_rating - and park.average_rating > 4.0 - ): - weight += 1 - return min(weight, 5) # Cap at 5 - - def _get_park_category(self, park) -> str: - """Determine park category for clustering.""" - coaster_count = getattr(park, "coaster_count", 0) or 0 - ride_count = getattr(park, "ride_count", 0) or 0 - - if coaster_count >= 10: - return "major_park" - elif ride_count >= 15: - return "theme_park" - else: - return "small_park" - - def _get_park_url(self, park) -> str: - """Get URL for park detail page.""" - try: - return reverse("parks:detail", kwargs={"slug": park.slug}) - except BaseException: - return f"/parks/{park.slug}/" - - -class RideLocationAdapter(BaseLocationAdapter): - """Converts Ride/RideLocation to UnifiedLocation.""" - - def to_unified_location( - self, ride_location: RideLocation - ) -> Optional[UnifiedLocation]: - """Convert RideLocation to UnifiedLocation.""" - if not ride_location.point: - return None - - ride = ride_location.ride - - return UnifiedLocation( - id=f"ride_{ride.id}", - type=LocationType.RIDE, - name=ride.name, - coordinates=(ride_location.latitude, ride_location.longitude), - address=( - f"{ride_location.park_area}, {ride.park.name}" - if ride_location.park_area - else ride.park.name - ), - metadata={ - "park_id": ride.park.id, - "park_name": ride.park.name, - "park_area": ride_location.park_area, - "ride_type": getattr(ride, "ride_type", "Unknown"), - "status": getattr(ride, "status", "UNKNOWN"), - "rating": ( - float(ride.average_rating) - if hasattr(ride, "average_rating") and ride.average_rating - else None - ), - "manufacturer": ( - getattr(ride, "manufacturer", {}).get("name") - if hasattr(ride, "manufacturer") - else None - ), - }, - type_data={ - "slug": ride.slug, - "opening_date": ( - ride.opening_date.isoformat() - if hasattr(ride, "opening_date") and ride.opening_date - else None - ), - "height_requirement": getattr(ride, "height_requirement", ""), - "duration_minutes": getattr(ride, "duration_minutes", None), - "max_speed_mph": getattr(ride, "max_speed_mph", None), - "entrance_notes": ride_location.entrance_notes, - "accessibility_notes": ride_location.accessibility_notes, - "url": self._get_ride_url(ride), - }, - cluster_weight=self._calculate_ride_weight(ride), - cluster_category=self._get_ride_category(ride), - ) - - def get_queryset( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> QuerySet: - """Get optimized queryset for ride locations.""" - queryset = RideLocation.objects.select_related( - "ride", "ride__park", "ride__park__operator" - ).filter(point__isnull=False) - - # Spatial filtering - if bounds: - queryset = queryset.filter(point__within=bounds.to_polygon()) - - # Ride-specific filters - if filters: - if filters.ride_types: - queryset = queryset.filter(ride__ride_type__in=filters.ride_types) - if filters.search_query: - queryset = queryset.filter(ride__name__icontains=filters.search_query) - - return queryset.order_by("ride__name") - - def _calculate_ride_weight(self, ride) -> int: - """Calculate clustering weight based on ride importance.""" - weight = 1 - ride_type = getattr(ride, "ride_type", "").lower() - if "coaster" in ride_type or "roller" in ride_type: - weight += 1 - if ( - hasattr(ride, "average_rating") - and ride.average_rating - and ride.average_rating > 4.0 - ): - weight += 1 - return min(weight, 3) # Cap at 3 for rides - - def _get_ride_category(self, ride) -> str: - """Determine ride category for clustering.""" - ride_type = getattr(ride, "ride_type", "").lower() - if "coaster" in ride_type or "roller" in ride_type: - return "coaster" - elif "water" in ride_type or "splash" in ride_type: - return "water_ride" - else: - return "other_ride" - - def _get_ride_url(self, ride) -> str: - """Get URL for ride detail page.""" - try: - return reverse("rides:detail", kwargs={"slug": ride.slug}) - except BaseException: - return f"/rides/{ride.slug}/" - - -class CompanyLocationAdapter(BaseLocationAdapter): - """Converts Company/CompanyHeadquarters to UnifiedLocation.""" - - def to_unified_location( - self, company_headquarters: CompanyHeadquarters - ) -> Optional[UnifiedLocation]: - """Convert CompanyHeadquarters to UnifiedLocation.""" - # Note: CompanyHeadquarters doesn't have coordinates, so we need to geocode - # For now, we'll skip companies without coordinates - # TODO: Implement geocoding service integration - return None - - def get_queryset( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> QuerySet: - """Get optimized queryset for company locations.""" - queryset = CompanyHeadquarters.objects.select_related("company") - - # Company-specific filters - if filters: - if filters.company_roles: - queryset = queryset.filter( - company__roles__overlap=filters.company_roles - ) - if filters.search_query: - queryset = queryset.filter( - company__name__icontains=filters.search_query - ) - if filters.country: - queryset = queryset.filter(country=filters.country) - if filters.city: - queryset = queryset.filter(city=filters.city) - - return queryset.order_by("company__name") - - -class GenericLocationAdapter(BaseLocationAdapter): - """Converts generic Location model to UnifiedLocation.""" - - def to_unified_location(self, location: Location) -> Optional[UnifiedLocation]: - """Convert generic Location to UnifiedLocation.""" - if not location.point and not (location.latitude and location.longitude): - return None - - # Use point coordinates if available, fall back to lat/lng fields - if location.point: - coordinates = (location.point.y, location.point.x) - else: - coordinates = (float(location.latitude), float(location.longitude)) - - return UnifiedLocation( - id=f"generic_{location.id}", - type=LocationType.GENERIC, - name=location.name, - coordinates=coordinates, - address=location.get_formatted_address(), - metadata={ - "location_type": location.location_type, - "content_type": ( - location.content_type.model if location.content_type else None - ), - "object_id": location.object_id, - "city": location.city, - "state": location.state, - "country": location.country, - }, - type_data={ - "created_at": ( - location.created_at.isoformat() if location.created_at else None - ), - "updated_at": ( - location.updated_at.isoformat() if location.updated_at else None - ), - }, - cluster_weight=1, - cluster_category="generic", - ) - - def get_queryset( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> QuerySet: - """Get optimized queryset for generic locations.""" - queryset = Location.objects.select_related("content_type").filter( - models.Q(point__isnull=False) - | models.Q(latitude__isnull=False, longitude__isnull=False) - ) - - # Spatial filtering - if bounds: - queryset = queryset.filter( - models.Q(point__within=bounds.to_polygon()) - | models.Q( - latitude__gte=bounds.south, - latitude__lte=bounds.north, - longitude__gte=bounds.west, - longitude__lte=bounds.east, - ) - ) - - # Generic filters - if filters: - if filters.search_query: - queryset = queryset.filter(name__icontains=filters.search_query) - if filters.country: - queryset = queryset.filter(country=filters.country) - if filters.city: - queryset = queryset.filter(city=filters.city) - - return queryset.order_by("name") - - -class LocationAbstractionLayer: - """ - Abstraction layer handling different location model types. - Implements the adapter pattern to provide unified access to all location types. - """ - - def __init__(self): - self.adapters = { - LocationType.PARK: ParkLocationAdapter(), - LocationType.RIDE: RideLocationAdapter(), - LocationType.COMPANY: CompanyLocationAdapter(), - LocationType.GENERIC: GenericLocationAdapter(), - } - - def get_all_locations( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> List[UnifiedLocation]: - """Get locations from all sources within bounds.""" - all_locations = [] - - # Determine which location types to include - location_types = ( - filters.location_types - if filters and filters.location_types - else set(LocationType) - ) - - for location_type in location_types: - adapter = self.adapters[location_type] - queryset = adapter.get_queryset(bounds, filters) - locations = adapter.bulk_convert(queryset) - all_locations.extend(locations) - - return all_locations - - def get_locations_by_type( - self, - location_type: LocationType, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> List[UnifiedLocation]: - """Get locations of specific type.""" - adapter = self.adapters[location_type] - queryset = adapter.get_queryset(bounds, filters) - return adapter.bulk_convert(queryset) - - def get_location_by_id( - self, location_type: LocationType, location_id: int - ) -> Optional[UnifiedLocation]: - """Get single location with full details.""" - adapter = self.adapters[location_type] - - try: - if location_type == LocationType.PARK: - obj = ParkLocation.objects.select_related("park", "park__operator").get( - park_id=location_id - ) - elif location_type == LocationType.RIDE: - obj = RideLocation.objects.select_related("ride", "ride__park").get( - ride_id=location_id - ) - elif location_type == LocationType.COMPANY: - obj = CompanyHeadquarters.objects.select_related("company").get( - company_id=location_id - ) - elif location_type == LocationType.GENERIC: - obj = Location.objects.select_related("content_type").get( - id=location_id - ) - else: - return None - - return adapter.to_unified_location(obj) - except Exception: - return None - - -# Import models after defining adapters to avoid circular imports diff --git a/core/services/location_search.py b/core/services/location_search.py deleted file mode 100644 index 329d5c6d..00000000 --- a/core/services/location_search.py +++ /dev/null @@ -1,465 +0,0 @@ -""" -Location-aware search service for ThrillWiki. - -Integrates PostGIS location data with existing search functionality -to provide proximity-based search, location filtering, and geographic -search capabilities. -""" - -from django.contrib.gis.geos import Point -from django.contrib.gis.measure import Distance -from django.db.models import Q -from typing import Optional, List, Dict, Any, Set -from dataclasses import dataclass - -from parks.models import Park, Company, ParkLocation -from rides.models import Ride - - -@dataclass -class LocationSearchFilters: - """Filters for location-aware search queries.""" - - # Text search - search_query: Optional[str] = None - - # Location-based filters - location_point: Optional[Point] = None - radius_km: Optional[float] = None - location_types: Optional[Set[str]] = None # 'park', 'ride', 'company' - - # Geographic filters - country: Optional[str] = None - state: Optional[str] = None - city: Optional[str] = None - - # Content-specific filters - park_status: Optional[List[str]] = None - ride_types: Optional[List[str]] = None - company_roles: Optional[List[str]] = None - - # Result options - include_distance: bool = True - max_results: int = 100 - - -@dataclass -class LocationSearchResult: - """Single search result with location data.""" - - # Core data - content_type: str # 'park', 'ride', 'company' - object_id: int - name: str - description: Optional[str] = None - url: Optional[str] = None - - # Location data - latitude: Optional[float] = None - longitude: Optional[float] = None - address: Optional[str] = None - city: Optional[str] = None - state: Optional[str] = None - country: Optional[str] = None - - # Distance data (if proximity search) - distance_km: Optional[float] = None - - # Additional metadata - status: Optional[str] = None - tags: Optional[List[str]] = None - rating: Optional[float] = None - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON serialization.""" - return { - "content_type": self.content_type, - "object_id": self.object_id, - "name": self.name, - "description": self.description, - "url": self.url, - "location": { - "latitude": self.latitude, - "longitude": self.longitude, - "address": self.address, - "city": self.city, - "state": self.state, - "country": self.country, - }, - "distance_km": self.distance_km, - "status": self.status, - "tags": self.tags or [], - "rating": self.rating, - } - - -class LocationSearchService: - """Service for performing location-aware searches across ThrillWiki content.""" - - def search(self, filters: LocationSearchFilters) -> List[LocationSearchResult]: - """ - Perform a comprehensive location-aware search. - - Args: - filters: Search filters and options - - Returns: - List of search results with location data - """ - results = [] - - # Search each content type based on filters - if not filters.location_types or "park" in filters.location_types: - results.extend(self._search_parks(filters)) - - if not filters.location_types or "ride" in filters.location_types: - results.extend(self._search_rides(filters)) - - if not filters.location_types or "company" in filters.location_types: - results.extend(self._search_companies(filters)) - - # Sort by distance if proximity search, otherwise by relevance - if filters.location_point and filters.include_distance: - results.sort(key=lambda x: x.distance_km or float("inf")) - else: - results.sort(key=lambda x: x.name.lower()) - - # Apply max results limit - return results[: filters.max_results] - - def _search_parks( - self, filters: LocationSearchFilters - ) -> List[LocationSearchResult]: - """Search parks with location data.""" - queryset = Park.objects.select_related("location", "operator").all() - - # Apply location filters - queryset = self._apply_location_filters(queryset, filters, "location__point") - - # Apply text search - if filters.search_query: - query = ( - Q(name__icontains=filters.search_query) - | Q(description__icontains=filters.search_query) - | Q(location__city__icontains=filters.search_query) - | Q(location__state__icontains=filters.search_query) - | Q(location__country__icontains=filters.search_query) - ) - queryset = queryset.filter(query) - - # Apply park-specific filters - if filters.park_status: - queryset = queryset.filter(status__in=filters.park_status) - - # Add distance annotation if proximity search - if filters.location_point and filters.include_distance: - queryset = queryset.annotate( - distance=Distance("location__point", filters.location_point) - ).order_by("distance") - - # Convert to search results - results = [] - for park in queryset: - result = LocationSearchResult( - content_type="park", - object_id=park.id, - name=park.name, - description=park.description, - url=( - park.get_absolute_url() - if hasattr(park, "get_absolute_url") - else None - ), - status=park.get_status_display(), - rating=(float(park.average_rating) if park.average_rating else None), - tags=["park", park.status.lower()], - ) - - # Add location data - if hasattr(park, "location") and park.location: - location = park.location - result.latitude = location.latitude - result.longitude = location.longitude - result.address = location.formatted_address - result.city = location.city - result.state = location.state - result.country = location.country - - # Add distance if proximity search - if ( - filters.location_point - and filters.include_distance - and hasattr(park, "distance") - ): - result.distance_km = float(park.distance.km) - - results.append(result) - - return results - - def _search_rides( - self, filters: LocationSearchFilters - ) -> List[LocationSearchResult]: - """Search rides with location data.""" - queryset = Ride.objects.select_related("park", "location").all() - - # Apply location filters - queryset = self._apply_location_filters(queryset, filters, "location__point") - - # Apply text search - if filters.search_query: - query = ( - Q(name__icontains=filters.search_query) - | Q(description__icontains=filters.search_query) - | Q(park__name__icontains=filters.search_query) - | Q(location__park_area__icontains=filters.search_query) - ) - queryset = queryset.filter(query) - - # Apply ride-specific filters - if filters.ride_types: - queryset = queryset.filter(ride_type__in=filters.ride_types) - - # Add distance annotation if proximity search - if filters.location_point and filters.include_distance: - queryset = queryset.annotate( - distance=Distance("location__point", filters.location_point) - ).order_by("distance") - - # Convert to search results - results = [] - for ride in queryset: - result = LocationSearchResult( - content_type="ride", - object_id=ride.id, - name=ride.name, - description=ride.description, - url=( - ride.get_absolute_url() - if hasattr(ride, "get_absolute_url") - else None - ), - status=ride.status, - tags=[ - "ride", - ride.ride_type.lower() if ride.ride_type else "attraction", - ], - ) - - # Add location data from ride location or park location - location = None - if hasattr(ride, "location") and ride.location: - location = ride.location - result.latitude = location.latitude - result.longitude = location.longitude - result.address = ( - f"{ride.park.name} - {location.park_area}" - if location.park_area - else ride.park.name - ) - - # Add distance if proximity search - if ( - filters.location_point - and filters.include_distance - and hasattr(ride, "distance") - ): - result.distance_km = float(ride.distance.km) - - # Fall back to park location if no specific ride location - elif ride.park and hasattr(ride.park, "location") and ride.park.location: - park_location = ride.park.location - result.latitude = park_location.latitude - result.longitude = park_location.longitude - result.address = park_location.formatted_address - result.city = park_location.city - result.state = park_location.state - result.country = park_location.country - - results.append(result) - - return results - - def _search_companies( - self, filters: LocationSearchFilters - ) -> List[LocationSearchResult]: - """Search companies with headquarters location data.""" - queryset = Company.objects.select_related("headquarters").all() - - # Apply location filters - queryset = self._apply_location_filters( - queryset, filters, "headquarters__point" - ) - - # Apply text search - if filters.search_query: - query = ( - Q(name__icontains=filters.search_query) - | Q(description__icontains=filters.search_query) - | Q(headquarters__city__icontains=filters.search_query) - | Q(headquarters__state_province__icontains=filters.search_query) - | Q(headquarters__country__icontains=filters.search_query) - ) - queryset = queryset.filter(query) - - # Apply company-specific filters - if filters.company_roles: - queryset = queryset.filter(roles__overlap=filters.company_roles) - - # Add distance annotation if proximity search - if filters.location_point and filters.include_distance: - queryset = queryset.annotate( - distance=Distance("headquarters__point", filters.location_point) - ).order_by("distance") - - # Convert to search results - results = [] - for company in queryset: - result = LocationSearchResult( - content_type="company", - object_id=company.id, - name=company.name, - description=company.description, - url=( - company.get_absolute_url() - if hasattr(company, "get_absolute_url") - else None - ), - tags=["company"] + (company.roles or []), - ) - - # Add location data - if hasattr(company, "headquarters") and company.headquarters: - hq = company.headquarters - result.latitude = hq.latitude - result.longitude = hq.longitude - result.address = hq.formatted_address - result.city = hq.city - result.state = hq.state_province - result.country = hq.country - - # Add distance if proximity search - if ( - filters.location_point - and filters.include_distance - and hasattr(company, "distance") - ): - result.distance_km = float(company.distance.km) - - results.append(result) - - return results - - def _apply_location_filters( - self, queryset, filters: LocationSearchFilters, point_field: str - ): - """Apply common location filters to a queryset.""" - - # Proximity filter - if filters.location_point and filters.radius_km: - distance = Distance(km=filters.radius_km) - queryset = queryset.filter( - **{ - f"{point_field}__distance_lte": ( - filters.location_point, - distance, - ) - } - ) - - # Geographic filters - adjust field names based on model - if filters.country: - if "headquarters" in point_field: - queryset = queryset.filter( - headquarters__country__icontains=filters.country - ) - else: - location_field = point_field.split("__")[0] - queryset = queryset.filter( - **{f"{location_field}__country__icontains": filters.country} - ) - - if filters.state: - if "headquarters" in point_field: - queryset = queryset.filter( - headquarters__state_province__icontains=filters.state - ) - else: - location_field = point_field.split("__")[0] - queryset = queryset.filter( - **{f"{location_field}__state__icontains": filters.state} - ) - - if filters.city: - location_field = point_field.split("__")[0] - queryset = queryset.filter( - **{f"{location_field}__city__icontains": filters.city} - ) - - return queryset - - def suggest_locations(self, query: str, limit: int = 10) -> List[Dict[str, Any]]: - """ - Get location suggestions for autocomplete. - - Args: - query: Search query string - limit: Maximum number of suggestions - - Returns: - List of location suggestions - """ - suggestions = [] - - if len(query) < 2: - return suggestions - - # Get park location suggestions - park_locations = ParkLocation.objects.filter( - Q(park__name__icontains=query) - | Q(city__icontains=query) - | Q(state__icontains=query) - ).select_related("park")[: limit // 3] - - for location in park_locations: - suggestions.append( - { - "type": "park", - "name": location.park.name, - "address": location.formatted_address, - "coordinates": location.coordinates, - "url": ( - location.park.get_absolute_url() - if hasattr(location.park, "get_absolute_url") - else None - ), - } - ) - - # Get city suggestions - cities = ( - ParkLocation.objects.filter(city__icontains=query) - .values("city", "state", "country") - .distinct()[: limit // 3] - ) - - for city_data in cities: - suggestions.append( - { - "type": "city", - "name": f"{ - city_data['city']}, { - city_data['state']}", - "address": f"{ - city_data['city']}, { - city_data['state']}, { - city_data['country']}", - "coordinates": None, - } - ) - - return suggestions[:limit] - - -# Global instance -location_search_service = LocationSearchService() diff --git a/core/services/map_cache_service.py b/core/services/map_cache_service.py deleted file mode 100644 index 5bebd9c8..00000000 --- a/core/services/map_cache_service.py +++ /dev/null @@ -1,438 +0,0 @@ -""" -Caching service for map data to improve performance and reduce database load. -""" - -import hashlib -import json -import time -from typing import Dict, List, Optional, Any - -from django.core.cache import cache -from django.utils import timezone - -from .data_structures import ( - UnifiedLocation, - ClusterData, - GeoBounds, - MapFilters, - MapResponse, - QueryPerformanceMetrics, -) - - -class MapCacheService: - """ - Handles caching of map data with geographic partitioning and intelligent invalidation. - """ - - # Cache configuration - DEFAULT_TTL = 3600 # 1 hour - CLUSTER_TTL = 7200 # 2 hours (clusters change less frequently) - LOCATION_DETAIL_TTL = 1800 # 30 minutes - BOUNDS_CACHE_TTL = 1800 # 30 minutes - - # Cache key prefixes - CACHE_PREFIX = "thrillwiki_map" - LOCATIONS_PREFIX = f"{CACHE_PREFIX}:locations" - CLUSTERS_PREFIX = f"{CACHE_PREFIX}:clusters" - BOUNDS_PREFIX = f"{CACHE_PREFIX}:bounds" - DETAIL_PREFIX = f"{CACHE_PREFIX}:detail" - STATS_PREFIX = f"{CACHE_PREFIX}:stats" - - # Geographic partitioning settings - GEOHASH_PRECISION = 6 # ~1.2km precision for cache partitioning - - def __init__(self): - self.cache_stats = { - "hits": 0, - "misses": 0, - "invalidations": 0, - "geohash_partitions": 0, - } - - def get_locations_cache_key( - self, - bounds: Optional[GeoBounds], - filters: Optional[MapFilters], - zoom_level: Optional[int] = None, - ) -> str: - """Generate cache key for location queries.""" - key_parts = [self.LOCATIONS_PREFIX] - - if bounds: - # Use geohash for spatial locality - geohash = self._bounds_to_geohash(bounds) - key_parts.append(f"geo:{geohash}") - - if filters: - # Create deterministic hash of filters - filter_hash = self._hash_filters(filters) - key_parts.append(f"filters:{filter_hash}") - - if zoom_level is not None: - key_parts.append(f"zoom:{zoom_level}") - - return ":".join(key_parts) - - def get_clusters_cache_key( - self, - bounds: Optional[GeoBounds], - filters: Optional[MapFilters], - zoom_level: int, - ) -> str: - """Generate cache key for cluster queries.""" - key_parts = [self.CLUSTERS_PREFIX, f"zoom:{zoom_level}"] - - if bounds: - geohash = self._bounds_to_geohash(bounds) - key_parts.append(f"geo:{geohash}") - - if filters: - filter_hash = self._hash_filters(filters) - key_parts.append(f"filters:{filter_hash}") - - return ":".join(key_parts) - - def get_location_detail_cache_key( - self, location_type: str, location_id: int - ) -> str: - """Generate cache key for individual location details.""" - return f"{self.DETAIL_PREFIX}:{location_type}:{location_id}" - - def cache_locations( - self, - cache_key: str, - locations: List[UnifiedLocation], - ttl: Optional[int] = None, - ) -> None: - """Cache location data.""" - try: - # Convert locations to serializable format - cache_data = { - "locations": [loc.to_dict() for loc in locations], - "cached_at": timezone.now().isoformat(), - "count": len(locations), - } - - cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL) - except Exception as e: - # Log error but don't fail the request - print(f"Cache write error for key {cache_key}: {e}") - - def cache_clusters( - self, - cache_key: str, - clusters: List[ClusterData], - ttl: Optional[int] = None, - ) -> None: - """Cache cluster data.""" - try: - cache_data = { - "clusters": [cluster.to_dict() for cluster in clusters], - "cached_at": timezone.now().isoformat(), - "count": len(clusters), - } - - cache.set(cache_key, cache_data, ttl or self.CLUSTER_TTL) - except Exception as e: - print(f"Cache write error for clusters {cache_key}: {e}") - - def cache_map_response( - self, cache_key: str, response: MapResponse, ttl: Optional[int] = None - ) -> None: - """Cache complete map response.""" - try: - cache_data = response.to_dict() - cache_data["cached_at"] = timezone.now().isoformat() - - cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL) - except Exception as e: - print(f"Cache write error for response {cache_key}: {e}") - - def get_cached_locations(self, cache_key: str) -> Optional[List[UnifiedLocation]]: - """Retrieve cached location data.""" - try: - cache_data = cache.get(cache_key) - if not cache_data: - self.cache_stats["misses"] += 1 - return None - - self.cache_stats["hits"] += 1 - - # Convert back to UnifiedLocation objects - locations = [] - for loc_data in cache_data["locations"]: - # Reconstruct UnifiedLocation from dictionary - locations.append(self._dict_to_unified_location(loc_data)) - - return locations - - except Exception as e: - print(f"Cache read error for key {cache_key}: {e}") - self.cache_stats["misses"] += 1 - return None - - def get_cached_clusters(self, cache_key: str) -> Optional[List[ClusterData]]: - """Retrieve cached cluster data.""" - try: - cache_data = cache.get(cache_key) - if not cache_data: - self.cache_stats["misses"] += 1 - return None - - self.cache_stats["hits"] += 1 - - # Convert back to ClusterData objects - clusters = [] - for cluster_data in cache_data["clusters"]: - clusters.append(self._dict_to_cluster_data(cluster_data)) - - return clusters - - except Exception as e: - print(f"Cache read error for clusters {cache_key}: {e}") - self.cache_stats["misses"] += 1 - return None - - def get_cached_map_response(self, cache_key: str) -> Optional[MapResponse]: - """Retrieve cached map response.""" - try: - cache_data = cache.get(cache_key) - if not cache_data: - self.cache_stats["misses"] += 1 - return None - - self.cache_stats["hits"] += 1 - - # Convert back to MapResponse object - return self._dict_to_map_response(cache_data["data"]) - - except Exception as e: - print(f"Cache read error for response {cache_key}: {e}") - self.cache_stats["misses"] += 1 - return None - - def invalidate_location_cache( - self, location_type: str, location_id: Optional[int] = None - ) -> None: - """Invalidate cache for specific location or all locations of a type.""" - try: - if location_id: - # Invalidate specific location detail - detail_key = self.get_location_detail_cache_key( - location_type, location_id - ) - cache.delete(detail_key) - - # Invalidate related location and cluster caches - # In a production system, you'd want more sophisticated cache - # tagging - cache.delete_many( - [f"{self.LOCATIONS_PREFIX}:*", f"{self.CLUSTERS_PREFIX}:*"] - ) - - self.cache_stats["invalidations"] += 1 - - except Exception as e: - print(f"Cache invalidation error: {e}") - - def invalidate_bounds_cache(self, bounds: GeoBounds) -> None: - """Invalidate cache for specific geographic bounds.""" - try: - geohash = self._bounds_to_geohash(bounds) - pattern = f"{self.LOCATIONS_PREFIX}:geo:{geohash}*" - - # In production, you'd use cache tagging or Redis SCAN - # For now, we'll invalidate broader patterns - cache.delete_many([pattern]) - - self.cache_stats["invalidations"] += 1 - - except Exception as e: - print(f"Bounds cache invalidation error: {e}") - - def clear_all_map_cache(self) -> None: - """Clear all map-related cache data.""" - try: - cache.delete_many( - [ - f"{self.LOCATIONS_PREFIX}:*", - f"{self.CLUSTERS_PREFIX}:*", - f"{self.BOUNDS_PREFIX}:*", - f"{self.DETAIL_PREFIX}:*", - ] - ) - - self.cache_stats["invalidations"] += 1 - - except Exception as e: - print(f"Cache clear error: {e}") - - def get_cache_stats(self) -> Dict[str, Any]: - """Get cache performance statistics.""" - total_requests = self.cache_stats["hits"] + self.cache_stats["misses"] - hit_rate = ( - (self.cache_stats["hits"] / total_requests * 100) - if total_requests > 0 - else 0 - ) - - return { - "hits": self.cache_stats["hits"], - "misses": self.cache_stats["misses"], - "hit_rate_percent": round(hit_rate, 2), - "invalidations": self.cache_stats["invalidations"], - "geohash_partitions": self.cache_stats["geohash_partitions"], - } - - def record_performance_metrics(self, metrics: QueryPerformanceMetrics) -> None: - """Record query performance metrics for analysis.""" - try: - # 5-minute buckets - stats_key = f"{ - self.STATS_PREFIX}:performance:{ - int( - time.time() // - 300)}" - - current_stats = cache.get( - stats_key, - { - "query_count": 0, - "total_time_ms": 0, - "cache_hits": 0, - "db_queries": 0, - }, - ) - - current_stats["query_count"] += 1 - current_stats["total_time_ms"] += metrics.query_time_ms - current_stats["cache_hits"] += 1 if metrics.cache_hit else 0 - current_stats["db_queries"] += metrics.db_query_count - - cache.set(stats_key, current_stats, 3600) # Keep for 1 hour - - except Exception as e: - print(f"Performance metrics recording error: {e}") - - def _bounds_to_geohash(self, bounds: GeoBounds) -> str: - """Convert geographic bounds to geohash for cache partitioning.""" - # Use center point of bounds for geohash - center_lat = (bounds.north + bounds.south) / 2 - center_lng = (bounds.east + bounds.west) / 2 - - # Simple geohash implementation (in production, use a library) - return self._encode_geohash(center_lat, center_lng, self.GEOHASH_PRECISION) - - def _encode_geohash(self, lat: float, lng: float, precision: int) -> str: - """Simple geohash encoding implementation.""" - # This is a simplified implementation - # In production, use the `geohash` library - lat_range = [-90.0, 90.0] - lng_range = [-180.0, 180.0] - - geohash = "" - bits = 0 - bit_count = 0 - even_bit = True - - while len(geohash) < precision: - if even_bit: - # longitude - mid = (lng_range[0] + lng_range[1]) / 2 - if lng >= mid: - bits = (bits << 1) + 1 - lng_range[0] = mid - else: - bits = bits << 1 - lng_range[1] = mid - else: - # latitude - mid = (lat_range[0] + lat_range[1]) / 2 - if lat >= mid: - bits = (bits << 1) + 1 - lat_range[0] = mid - else: - bits = bits << 1 - lat_range[1] = mid - - even_bit = not even_bit - bit_count += 1 - - if bit_count == 5: - # Convert 5 bits to base32 character - geohash += "0123456789bcdefghjkmnpqrstuvwxyz"[bits] - bits = 0 - bit_count = 0 - - return geohash - - def _hash_filters(self, filters: MapFilters) -> str: - """Create deterministic hash of filters for cache keys.""" - filter_dict = filters.to_dict() - # Sort to ensure consistent ordering - filter_str = json.dumps(filter_dict, sort_keys=True) - return hashlib.md5(filter_str.encode()).hexdigest()[:8] - - def _dict_to_unified_location(self, data: Dict[str, Any]) -> UnifiedLocation: - """Convert dictionary back to UnifiedLocation object.""" - from .data_structures import LocationType - - return UnifiedLocation( - id=data["id"], - type=LocationType(data["type"]), - name=data["name"], - coordinates=tuple(data["coordinates"]), - address=data.get("address"), - metadata=data.get("metadata", {}), - type_data=data.get("type_data", {}), - cluster_weight=data.get("cluster_weight", 1), - cluster_category=data.get("cluster_category", "default"), - ) - - def _dict_to_cluster_data(self, data: Dict[str, Any]) -> ClusterData: - """Convert dictionary back to ClusterData object.""" - from .data_structures import LocationType - - bounds = GeoBounds(**data["bounds"]) - types = {LocationType(t) for t in data["types"]} - - representative = None - if data.get("representative"): - representative = self._dict_to_unified_location(data["representative"]) - - return ClusterData( - id=data["id"], - coordinates=tuple(data["coordinates"]), - count=data["count"], - types=types, - bounds=bounds, - representative_location=representative, - ) - - def _dict_to_map_response(self, data: Dict[str, Any]) -> MapResponse: - """Convert dictionary back to MapResponse object.""" - locations = [ - self._dict_to_unified_location(loc) for loc in data.get("locations", []) - ] - clusters = [ - self._dict_to_cluster_data(cluster) for cluster in data.get("clusters", []) - ] - - bounds = None - if data.get("bounds"): - bounds = GeoBounds(**data["bounds"]) - - return MapResponse( - locations=locations, - clusters=clusters, - bounds=bounds, - total_count=data.get("total_count", 0), - filtered_count=data.get("filtered_count", 0), - zoom_level=data.get("zoom_level"), - clustered=data.get("clustered", False), - ) - - -# Global cache service instance -map_cache = MapCacheService() diff --git a/core/services/map_service.py b/core/services/map_service.py deleted file mode 100644 index 6e07678a..00000000 --- a/core/services/map_service.py +++ /dev/null @@ -1,474 +0,0 @@ -""" -Unified Map Service - Main orchestrating service for all map functionality. -""" - -import time -from typing import List, Optional, Dict, Any, Set -from django.db import connection - -from .data_structures import ( - UnifiedLocation, - ClusterData, - GeoBounds, - MapFilters, - MapResponse, - LocationType, - QueryPerformanceMetrics, -) -from .location_adapters import LocationAbstractionLayer -from .clustering_service import ClusteringService -from .map_cache_service import MapCacheService - - -class UnifiedMapService: - """ - Main service orchestrating map data retrieval, filtering, clustering, and caching. - Provides a unified interface for all location types with performance optimization. - """ - - # Performance thresholds - MAX_UNCLUSTERED_POINTS = 500 - MAX_CLUSTERED_POINTS = 2000 - DEFAULT_ZOOM_LEVEL = 10 - - def __init__(self): - self.location_layer = LocationAbstractionLayer() - self.clustering_service = ClusteringService() - self.cache_service = MapCacheService() - - def get_map_data( - self, - *, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL, - cluster: bool = True, - use_cache: bool = True, - ) -> MapResponse: - """ - Primary method for retrieving unified map data. - - Args: - bounds: Geographic bounds to query within - filters: Filtering criteria for locations - zoom_level: Map zoom level for clustering decisions - cluster: Whether to apply clustering - use_cache: Whether to use cached data - - Returns: - MapResponse with locations, clusters, and metadata - """ - start_time = time.time() - initial_query_count = len(connection.queries) - cache_hit = False - - try: - # Generate cache key - cache_key = None - if use_cache: - cache_key = self._generate_cache_key( - bounds, filters, zoom_level, cluster - ) - - # Try to get from cache first - cached_response = self.cache_service.get_cached_map_response(cache_key) - if cached_response: - cached_response.cache_hit = True - cached_response.query_time_ms = int( - (time.time() - start_time) * 1000 - ) - return cached_response - - # Get locations from database - locations = self._get_locations_from_db(bounds, filters) - - # Apply smart limiting based on zoom level and density - locations = self._apply_smart_limiting(locations, bounds, zoom_level) - - # Determine if clustering should be applied - should_cluster = cluster and self.clustering_service.should_cluster( - zoom_level, len(locations) - ) - - # Apply clustering if needed - clusters = [] - if should_cluster: - locations, clusters = self.clustering_service.cluster_locations( - locations, zoom_level, bounds - ) - - # Calculate response bounds - response_bounds = self._calculate_response_bounds( - locations, clusters, bounds - ) - - # Create response - response = MapResponse( - locations=locations, - clusters=clusters, - bounds=response_bounds, - total_count=len(locations) + sum(cluster.count for cluster in clusters), - filtered_count=len(locations), - zoom_level=zoom_level, - clustered=should_cluster, - cache_hit=cache_hit, - query_time_ms=int((time.time() - start_time) * 1000), - filters_applied=self._get_applied_filters_list(filters), - ) - - # Cache the response - if use_cache and cache_key: - self.cache_service.cache_map_response(cache_key, response) - - # Record performance metrics - self._record_performance_metrics( - start_time, - initial_query_count, - cache_hit, - len(locations) + len(clusters), - bounds is not None, - should_cluster, - ) - - return response - - except Exception: - # Return error response - return MapResponse( - locations=[], - clusters=[], - total_count=0, - filtered_count=0, - query_time_ms=int((time.time() - start_time) * 1000), - cache_hit=False, - ) - - def get_location_details( - self, location_type: str, location_id: int - ) -> Optional[UnifiedLocation]: - """ - Get detailed information for a specific location. - - Args: - location_type: Type of location (park, ride, company, generic) - location_id: ID of the location - - Returns: - UnifiedLocation with full details or None if not found - """ - try: - # Check cache first - cache_key = self.cache_service.get_location_detail_cache_key( - location_type, location_id - ) - cached_locations = self.cache_service.get_cached_locations(cache_key) - if cached_locations: - return cached_locations[0] if cached_locations else None - - # Get from database - location_type_enum = LocationType(location_type.lower()) - location = self.location_layer.get_location_by_id( - location_type_enum, location_id - ) - - # Cache the result - if location: - self.cache_service.cache_locations( - cache_key, - [location], - self.cache_service.LOCATION_DETAIL_TTL, - ) - - return location - - except Exception as e: - print(f"Error getting location details: {e}") - return None - - def search_locations( - self, - query: str, - bounds: Optional[GeoBounds] = None, - location_types: Optional[Set[LocationType]] = None, - limit: int = 50, - ) -> List[UnifiedLocation]: - """ - Search locations with text query. - - Args: - query: Search query string - bounds: Optional geographic bounds to search within - location_types: Optional set of location types to search - limit: Maximum number of results - - Returns: - List of matching UnifiedLocation objects - """ - try: - # Create search filters - filters = MapFilters( - search_query=query, - location_types=location_types or {LocationType.PARK, LocationType.RIDE}, - has_coordinates=True, - ) - - # Get locations - locations = self.location_layer.get_all_locations(bounds, filters) - - # Apply limit - return locations[:limit] - - except Exception as e: - print(f"Error searching locations: {e}") - return [] - - def get_locations_by_bounds( - self, - north: float, - south: float, - east: float, - west: float, - location_types: Optional[Set[LocationType]] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL, - ) -> MapResponse: - """ - Get locations within specific geographic bounds. - - Args: - north, south, east, west: Bounding box coordinates - location_types: Optional filter for location types - zoom_level: Map zoom level for optimization - - Returns: - MapResponse with locations in bounds - """ - try: - bounds = GeoBounds(north=north, south=south, east=east, west=west) - filters = ( - MapFilters(location_types=location_types) if location_types else None - ) - - return self.get_map_data( - bounds=bounds, filters=filters, zoom_level=zoom_level - ) - - except ValueError: - # Invalid bounds - return MapResponse( - locations=[], clusters=[], total_count=0, filtered_count=0 - ) - - def get_clustered_locations( - self, - zoom_level: int, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - ) -> MapResponse: - """ - Get clustered location data for map display. - - Args: - zoom_level: Map zoom level for clustering configuration - bounds: Optional geographic bounds - filters: Optional filtering criteria - - Returns: - MapResponse with clustered data - """ - return self.get_map_data( - bounds=bounds, filters=filters, zoom_level=zoom_level, cluster=True - ) - - def get_locations_by_type( - self, - location_type: LocationType, - bounds: Optional[GeoBounds] = None, - limit: Optional[int] = None, - ) -> List[UnifiedLocation]: - """ - Get locations of a specific type. - - Args: - location_type: Type of locations to retrieve - bounds: Optional geographic bounds - limit: Optional limit on results - - Returns: - List of UnifiedLocation objects - """ - try: - filters = MapFilters(location_types={location_type}) - locations = self.location_layer.get_locations_by_type( - location_type, bounds, filters - ) - - if limit: - locations = locations[:limit] - - return locations - - except Exception as e: - print(f"Error getting locations by type: {e}") - return [] - - def invalidate_cache( - self, - location_type: Optional[str] = None, - location_id: Optional[int] = None, - bounds: Optional[GeoBounds] = None, - ) -> None: - """ - Invalidate cached map data. - - Args: - location_type: Optional specific location type to invalidate - location_id: Optional specific location ID to invalidate - bounds: Optional specific bounds to invalidate - """ - if location_type and location_id: - self.cache_service.invalidate_location_cache(location_type, location_id) - elif bounds: - self.cache_service.invalidate_bounds_cache(bounds) - else: - self.cache_service.clear_all_map_cache() - - def get_service_stats(self) -> Dict[str, Any]: - """Get service performance and usage statistics.""" - cache_stats = self.cache_service.get_cache_stats() - - return { - "cache_performance": cache_stats, - "clustering_available": True, - "supported_location_types": [t.value for t in LocationType], - "max_unclustered_points": self.MAX_UNCLUSTERED_POINTS, - "max_clustered_points": self.MAX_CLUSTERED_POINTS, - "service_version": "1.0.0", - } - - def _get_locations_from_db( - self, bounds: Optional[GeoBounds], filters: Optional[MapFilters] - ) -> List[UnifiedLocation]: - """Get locations from database using the abstraction layer.""" - return self.location_layer.get_all_locations(bounds, filters) - - def _apply_smart_limiting( - self, - locations: List[UnifiedLocation], - bounds: Optional[GeoBounds], - zoom_level: int, - ) -> List[UnifiedLocation]: - """Apply intelligent limiting based on zoom level and density.""" - if zoom_level < 6: # Very zoomed out - show only major parks - major_parks = [ - loc - for loc in locations - if ( - loc.type == LocationType.PARK - and loc.cluster_category in ["major_park", "theme_park"] - ) - ] - return major_parks[:200] - elif zoom_level < 10: # Regional level - return locations[:1000] - else: # City level and closer - return locations[: self.MAX_CLUSTERED_POINTS] - - def _calculate_response_bounds( - self, - locations: List[UnifiedLocation], - clusters: List[ClusterData], - request_bounds: Optional[GeoBounds], - ) -> Optional[GeoBounds]: - """Calculate the actual bounds of the response data.""" - if request_bounds: - return request_bounds - - all_coords = [] - - # Add location coordinates - for loc in locations: - all_coords.append((loc.latitude, loc.longitude)) - - # Add cluster coordinates - for cluster in clusters: - all_coords.append(cluster.coordinates) - - if not all_coords: - return None - - lats, lngs = zip(*all_coords) - return GeoBounds( - north=max(lats), south=min(lats), east=max(lngs), west=min(lngs) - ) - - def _get_applied_filters_list(self, filters: Optional[MapFilters]) -> List[str]: - """Get list of applied filter types for metadata.""" - if not filters: - return [] - - applied = [] - if filters.location_types: - applied.append("location_types") - if filters.search_query: - applied.append("search_query") - if filters.park_status: - applied.append("park_status") - if filters.ride_types: - applied.append("ride_types") - if filters.company_roles: - applied.append("company_roles") - if filters.min_rating: - applied.append("min_rating") - if filters.country: - applied.append("country") - if filters.state: - applied.append("state") - if filters.city: - applied.append("city") - - return applied - - def _generate_cache_key( - self, - bounds: Optional[GeoBounds], - filters: Optional[MapFilters], - zoom_level: int, - cluster: bool, - ) -> str: - """Generate cache key for the request.""" - if cluster: - return self.cache_service.get_clusters_cache_key( - bounds, filters, zoom_level - ) - else: - return self.cache_service.get_locations_cache_key( - bounds, filters, zoom_level - ) - - def _record_performance_metrics( - self, - start_time: float, - initial_query_count: int, - cache_hit: bool, - result_count: int, - bounds_used: bool, - clustering_used: bool, - ) -> None: - """Record performance metrics for monitoring.""" - query_time_ms = int((time.time() - start_time) * 1000) - db_query_count = len(connection.queries) - initial_query_count - - metrics = QueryPerformanceMetrics( - query_time_ms=query_time_ms, - db_query_count=db_query_count, - cache_hit=cache_hit, - result_count=result_count, - bounds_used=bounds_used, - clustering_used=clustering_used, - ) - - self.cache_service.record_performance_metrics(metrics) - - -# Global service instance -unified_map_service = UnifiedMapService() diff --git a/core/services/performance_monitoring.py b/core/services/performance_monitoring.py deleted file mode 100644 index e07a1524..00000000 --- a/core/services/performance_monitoring.py +++ /dev/null @@ -1,407 +0,0 @@ -""" -Performance monitoring utilities and context managers. -""" - -import time -import logging -from contextlib import contextmanager -from functools import wraps -from typing import Optional, Dict, Any, List -from django.db import connection -from django.conf import settings -from django.utils import timezone - -logger = logging.getLogger("performance") - - -@contextmanager -def monitor_performance(operation_name: str, **tags): - """Context manager for monitoring operation performance""" - start_time = time.time() - initial_queries = len(connection.queries) - - # Create performance context - performance_context = { - "operation": operation_name, - "start_time": start_time, - "timestamp": timezone.now().isoformat(), - **tags, - } - - try: - yield performance_context - except Exception as e: - performance_context["error"] = str(e) - performance_context["status"] = "error" - raise - else: - performance_context["status"] = "success" - finally: - end_time = time.time() - duration = end_time - start_time - total_queries = len(connection.queries) - initial_queries - - # Update performance context with final metrics - performance_context.update( - { - "duration_seconds": duration, - "duration_ms": round(duration * 1000, 2), - "query_count": total_queries, - "end_time": end_time, - } - ) - - # Log performance data - log_level = ( - logging.WARNING if duration > 2.0 or total_queries > 10 else logging.INFO - ) - logger.log( - log_level, - f"Performance: {operation_name} completed in { - duration:.3f}s with {total_queries} queries", - extra=performance_context, - ) - - # Log slow operations with additional detail - if duration > 2.0: - logger.warning( - f"Slow operation detected: {operation_name} took { - duration:.3f}s", - extra={ - "slow_operation": True, - "threshold_exceeded": "duration", - **performance_context, - }, - ) - - if total_queries > 10: - logger.warning( - f"High query count: {operation_name} executed {total_queries} queries", - extra={ - "high_query_count": True, - "threshold_exceeded": "query_count", - **performance_context, - }, - ) - - -@contextmanager -def track_queries(operation_name: str, warn_threshold: int = 10): - """Context manager to track database queries for specific operations""" - if not settings.DEBUG: - yield - return - - initial_queries = len(connection.queries) - start_time = time.time() - - try: - yield - finally: - end_time = time.time() - total_queries = len(connection.queries) - initial_queries - execution_time = end_time - start_time - - query_details = [] - if hasattr(connection, "queries") and total_queries > 0: - recent_queries = connection.queries[-total_queries:] - query_details = [ - { - "sql": ( - query["sql"][:200] + "..." - if len(query["sql"]) > 200 - else query["sql"] - ), - "time": float(query["time"]), - } - for query in recent_queries - ] - - performance_data = { - "operation": operation_name, - "query_count": total_queries, - "execution_time": execution_time, - "queries": query_details if settings.DEBUG else [], - } - - if total_queries > warn_threshold or execution_time > 1.0: - logger.warning( - f"Performance concern in {operation_name}: " - f"{total_queries} queries, {execution_time:.2f}s", - extra=performance_data, - ) - else: - logger.debug( - f"Query tracking for {operation_name}: " - f"{total_queries} queries, {execution_time:.2f}s", - extra=performance_data, - ) - - -class PerformanceProfiler: - """Advanced performance profiling with detailed metrics""" - - def __init__(self, name: str): - self.name = name - self.start_time = None - self.end_time = None - self.checkpoints = [] - self.initial_queries = 0 - self.memory_usage = {} - - def start(self): - """Start profiling""" - self.start_time = time.time() - self.initial_queries = len(connection.queries) - - # Track memory usage if psutil is available - try: - import psutil - - process = psutil.Process() - self.memory_usage["start"] = process.memory_info().rss - except ImportError: - pass - - logger.debug(f"Started profiling: {self.name}") - - def checkpoint(self, name: str): - """Add a checkpoint""" - if self.start_time is None: - logger.warning(f"Checkpoint '{name}' called before profiling started") - return - - current_time = time.time() - elapsed = current_time - self.start_time - queries_since_start = len(connection.queries) - self.initial_queries - - checkpoint = { - "name": name, - "timestamp": current_time, - "elapsed_seconds": elapsed, - "queries_since_start": queries_since_start, - } - - # Memory usage if available - try: - import psutil - - process = psutil.Process() - checkpoint["memory_rss"] = process.memory_info().rss - except ImportError: - pass - - self.checkpoints.append(checkpoint) - logger.debug(f"Checkpoint '{name}' at {elapsed:.3f}s") - - def stop(self): - """Stop profiling and log results""" - if self.start_time is None: - logger.warning("Profiling stopped before it was started") - return - - self.end_time = time.time() - total_duration = self.end_time - self.start_time - total_queries = len(connection.queries) - self.initial_queries - - # Final memory usage - try: - import psutil - - process = psutil.Process() - self.memory_usage["end"] = process.memory_info().rss - except ImportError: - pass - - # Create detailed profiling report - report = { - "profiler_name": self.name, - "total_duration": total_duration, - "total_queries": total_queries, - "checkpoints": self.checkpoints, - "memory_usage": self.memory_usage, - "queries_per_second": ( - total_queries / total_duration if total_duration > 0 else 0 - ), - } - - # Calculate checkpoint intervals - if len(self.checkpoints) > 1: - intervals = [] - for i in range(1, len(self.checkpoints)): - prev = self.checkpoints[i - 1] - curr = self.checkpoints[i] - intervals.append( - { - "from": prev["name"], - "to": curr["name"], - "duration": curr["elapsed_seconds"] - prev["elapsed_seconds"], - "queries": curr["queries_since_start"] - - prev["queries_since_start"], - } - ) - report["checkpoint_intervals"] = intervals - - # Log the complete report - log_level = logging.WARNING if total_duration > 1.0 else logging.INFO - logger.log( - log_level, - f"Profiling complete: { - self.name} took { - total_duration:.3f}s with {total_queries} queries", - extra=report, - ) - - return report - - -@contextmanager -def profile_operation(name: str): - """Context manager for detailed operation profiling""" - profiler = PerformanceProfiler(name) - profiler.start() - - try: - yield profiler - finally: - profiler.stop() - - -class DatabaseQueryAnalyzer: - """Analyze database query patterns and performance""" - - @staticmethod - def analyze_queries(queries: List[Dict]) -> Dict[str, Any]: - """Analyze a list of queries for patterns and issues""" - if not queries: - return {} - - total_time = sum(float(q.get("time", 0)) for q in queries) - query_count = len(queries) - - # Group queries by type - query_types = {} - for query in queries: - sql = query.get("sql", "").strip().upper() - query_type = sql.split()[0] if sql else "UNKNOWN" - query_types[query_type] = query_types.get(query_type, 0) + 1 - - # Find slow queries (top 10% by time) - sorted_queries = sorted( - queries, key=lambda q: float(q.get("time", 0)), reverse=True - ) - slow_query_count = max(1, query_count // 10) - slow_queries = sorted_queries[:slow_query_count] - - # Detect duplicate queries - query_signatures = {} - for query in queries: - # Simplified signature - remove literals and normalize whitespace - sql = query.get("sql", "") - signature = " ".join(sql.split()) # Normalize whitespace - query_signatures[signature] = query_signatures.get(signature, 0) + 1 - - duplicates = { - sig: count for sig, count in query_signatures.items() if count > 1 - } - - analysis = { - "total_queries": query_count, - "total_time": total_time, - "average_time": total_time / query_count if query_count > 0 else 0, - "query_types": query_types, - "slow_queries": [ - { - "sql": ( - q.get("sql", "")[:200] + "..." - if len(q.get("sql", "")) > 200 - else q.get("sql", "") - ), - "time": float(q.get("time", 0)), - } - for q in slow_queries - ], - "duplicate_query_count": len(duplicates), - "duplicate_queries": ( - duplicates - if len(duplicates) <= 10 - else dict(list(duplicates.items())[:10]) - ), - } - - return analysis - - @classmethod - def analyze_current_queries(cls) -> Dict[str, Any]: - """Analyze the current request's queries""" - if hasattr(connection, "queries"): - return cls.analyze_queries(connection.queries) - return {} - - -# Performance monitoring decorators -def monitor_function_performance(operation_name: Optional[str] = None): - """Decorator to monitor function performance""" - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - name = operation_name or f"{func.__module__}.{func.__name__}" - with monitor_performance( - name, function=func.__name__, module=func.__module__ - ): - return func(*args, **kwargs) - - return wrapper - - return decorator - - -def track_database_queries(warn_threshold: int = 10): - """Decorator to track database queries for a function""" - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - operation_name = f"{func.__module__}.{func.__name__}" - with track_queries(operation_name, warn_threshold): - return func(*args, **kwargs) - - return wrapper - - return decorator - - -# Performance metrics collection -class PerformanceMetrics: - """Collect and aggregate performance metrics""" - - def __init__(self): - self.metrics = [] - - def record_metric(self, name: str, value: float, tags: Optional[Dict] = None): - """Record a performance metric""" - metric = { - "name": name, - "value": value, - "timestamp": timezone.now().isoformat(), - "tags": tags or {}, - } - self.metrics.append(metric) - - # Log the metric - logger.info(f"Performance metric: {name} = {value}", extra=metric) - - def get_metrics(self, name: Optional[str] = None) -> List[Dict]: - """Get recorded metrics, optionally filtered by name""" - if name: - return [m for m in self.metrics if m["name"] == name] - return self.metrics.copy() - - def clear_metrics(self): - """Clear all recorded metrics""" - self.metrics.clear() - - -# Global performance metrics instance -performance_metrics = PerformanceMetrics() diff --git a/core/tests.py b/core/tests.py deleted file mode 100644 index a39b155a..00000000 --- a/core/tests.py +++ /dev/null @@ -1 +0,0 @@ -# Create your tests here. diff --git a/core/urls/map_urls.py b/core/urls/map_urls.py deleted file mode 100644 index b9c34fc0..00000000 --- a/core/urls/map_urls.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -URL patterns for the unified map service API. -""" - -from django.urls import path -from ..views.map_views import ( - MapLocationsView, - MapLocationDetailView, - MapSearchView, - MapBoundsView, - MapStatsView, - MapCacheView, -) - -app_name = "map_api" - -urlpatterns = [ - # Main map data endpoint - path("locations/", MapLocationsView.as_view(), name="locations"), - # Location detail endpoint - path( - "locations///", - MapLocationDetailView.as_view(), - name="location_detail", - ), - # Search endpoint - path("search/", MapSearchView.as_view(), name="search"), - # Bounds-based query endpoint - path("bounds/", MapBoundsView.as_view(), name="bounds"), - # Service statistics endpoint - path("stats/", MapStatsView.as_view(), name="stats"), - # Cache management endpoints - path("cache/", MapCacheView.as_view(), name="cache"), - path("cache/invalidate/", MapCacheView.as_view(), name="cache_invalidate"), -] diff --git a/core/urls/maps.py b/core/urls/maps.py deleted file mode 100644 index 71f980b1..00000000 --- a/core/urls/maps.py +++ /dev/null @@ -1,39 +0,0 @@ -""" -URL patterns for map views. -Includes both HTML views and HTMX endpoints. -""" - -from django.urls import path -from ..views.maps import ( - UniversalMapView, - ParkMapView, - NearbyLocationsView, - LocationFilterView, - LocationSearchView, - MapBoundsUpdateView, - LocationDetailModalView, - LocationListView, -) - -app_name = "maps" - -urlpatterns = [ - # Main map views - path("", UniversalMapView.as_view(), name="universal_map"), - path("parks/", ParkMapView.as_view(), name="park_map"), - path("nearby/", NearbyLocationsView.as_view(), name="nearby_locations"), - path("list/", LocationListView.as_view(), name="location_list"), - # HTMX endpoints for dynamic updates - path("htmx/filter/", LocationFilterView.as_view(), name="htmx_filter"), - path("htmx/search/", LocationSearchView.as_view(), name="htmx_search"), - path( - "htmx/bounds/", - MapBoundsUpdateView.as_view(), - name="htmx_bounds_update", - ), - path( - "htmx/location///", - LocationDetailModalView.as_view(), - name="htmx_location_detail", - ), -] diff --git a/core/urls/search.py b/core/urls/search.py deleted file mode 100644 index 0d725e3e..00000000 --- a/core/urls/search.py +++ /dev/null @@ -1,24 +0,0 @@ -from django.urls import path -from core.views.search import ( - AdaptiveSearchView, - FilterFormView, - LocationSearchView, - LocationSuggestionsView, -) -from rides.views import RideSearchView - -app_name = "search" - -urlpatterns = [ - path("parks/", AdaptiveSearchView.as_view(), name="search"), - path("parks/filters/", FilterFormView.as_view(), name="filter_form"), - path("rides/", RideSearchView.as_view(), name="ride_search"), - path("rides/results/", RideSearchView.as_view(), name="ride_search_results"), - # Location-aware search - path("location/", LocationSearchView.as_view(), name="location_search"), - path( - "location/suggestions/", - LocationSuggestionsView.as_view(), - name="location_suggestions", - ), -] diff --git a/core/utils/__init__.py b/core/utils/__init__.py deleted file mode 100644 index 8729095b..00000000 --- a/core/utils/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Core utilities diff --git a/core/utils/query_optimization.py b/core/utils/query_optimization.py deleted file mode 100644 index 292c7dbc..00000000 --- a/core/utils/query_optimization.py +++ /dev/null @@ -1,432 +0,0 @@ -""" -Database query optimization utilities and helpers. -""" - -import time -import logging -from contextlib import contextmanager -from typing import Optional, Dict, Any, List, Type -from django.db import connection, models -from django.db.models import QuerySet, Prefetch, Count, Avg, Max -from django.conf import settings -from django.core.cache import cache - -logger = logging.getLogger("query_optimization") - - -@contextmanager -def track_queries( - operation_name: str, warn_threshold: int = 10, time_threshold: float = 1.0 -): - """ - Context manager to track database queries for specific operations - - Args: - operation_name: Name of the operation being tracked - warn_threshold: Number of queries that triggers a warning - time_threshold: Execution time in seconds that triggers a warning - """ - if not settings.DEBUG: - yield - return - - initial_queries = len(connection.queries) - start_time = time.time() - - try: - yield - finally: - end_time = time.time() - total_queries = len(connection.queries) - initial_queries - execution_time = end_time - start_time - - # Collect query details - query_details = [] - if hasattr(connection, "queries") and total_queries > 0: - recent_queries = connection.queries[-total_queries:] - query_details = [ - { - "sql": ( - query["sql"][:500] + "..." - if len(query["sql"]) > 500 - else query["sql"] - ), - "time": float(query["time"]), - "duplicate_count": sum( - 1 for q in recent_queries if q["sql"] == query["sql"] - ), - } - for query in recent_queries - ] - - performance_data = { - "operation": operation_name, - "query_count": total_queries, - "execution_time": execution_time, - "queries": query_details if settings.DEBUG else [], - "slow_queries": [ - q for q in query_details if q["time"] > 0.1 - ], # Queries slower than 100ms - } - - # Log warnings for performance issues - if total_queries > warn_threshold or execution_time > time_threshold: - logger.warning( - f"Performance concern in {operation_name}: " - f"{total_queries} queries, {execution_time:.2f}s", - extra=performance_data, - ) - else: - logger.debug( - f"Query tracking for {operation_name}: " - f"{total_queries} queries, {execution_time:.2f}s", - extra=performance_data, - ) - - -class QueryOptimizer: - """Utility class for common query optimization patterns""" - - @staticmethod - def optimize_park_queryset(queryset: QuerySet) -> QuerySet: - """ - Optimize Park queryset with proper select_related and prefetch_related - """ - return ( - queryset.select_related("location", "operator", "created_by") - .prefetch_related("areas", "rides__manufacturer", "reviews__user") - .annotate( - ride_count=Count("rides"), - average_rating=Avg("reviews__rating"), - latest_review_date=Max("reviews__created_at"), - ) - ) - - @staticmethod - def optimize_ride_queryset(queryset: QuerySet) -> QuerySet: - """ - Optimize Ride queryset with proper relationships - """ - return ( - queryset.select_related( - "park", "park__location", "manufacturer", "created_by" - ) - .prefetch_related("reviews__user", "media_items") - .annotate( - review_count=Count("reviews"), - average_rating=Avg("reviews__rating"), - latest_review_date=Max("reviews__created_at"), - ) - ) - - @staticmethod - def optimize_user_queryset(queryset: QuerySet) -> QuerySet: - """ - Optimize User queryset for profile views - """ - return queryset.prefetch_related( - Prefetch("park_reviews", to_attr="cached_park_reviews"), - Prefetch("ride_reviews", to_attr="cached_ride_reviews"), - "authored_parks", - "authored_rides", - ).annotate( - total_reviews=Count("park_reviews") + Count("ride_reviews"), - parks_authored=Count("authored_parks"), - rides_authored=Count("authored_rides"), - ) - - @staticmethod - def create_bulk_queryset(model: Type[models.Model], ids: List[int]) -> QuerySet: - """ - Create an optimized queryset for bulk operations - """ - queryset = model.objects.filter(id__in=ids) - - # Apply model-specific optimizations - if hasattr(model, "_meta") and model._meta.model_name == "park": - return QueryOptimizer.optimize_park_queryset(queryset) - elif hasattr(model, "_meta") and model._meta.model_name == "ride": - return QueryOptimizer.optimize_ride_queryset(queryset) - elif hasattr(model, "_meta") and model._meta.model_name == "user": - return QueryOptimizer.optimize_user_queryset(queryset) - - return queryset - - -class QueryCache: - """Caching utilities for expensive queries""" - - @staticmethod - def cache_queryset_result( - cache_key: str, queryset_func, timeout: int = 3600, **kwargs - ): - """ - Cache the result of an expensive queryset operation - - Args: - cache_key: Unique key for caching - queryset_func: Function that returns the queryset result - timeout: Cache timeout in seconds - **kwargs: Arguments to pass to queryset_func - """ - # Try to get from cache first - cached_result = cache.get(cache_key) - if cached_result is not None: - logger.debug(f"Cache hit for queryset: {cache_key}") - return cached_result - - # Execute the expensive operation - with track_queries(f"cache_miss_{cache_key}"): - result = queryset_func(**kwargs) - - # Cache the result - cache.set(cache_key, result, timeout) - logger.debug(f"Cached queryset result: {cache_key}") - - return result - - @staticmethod - def invalidate_model_cache(model_name: str, instance_id: Optional[int] = None): - """ - Invalidate cache keys related to a specific model - - Args: - model_name: Name of the model (e.g., 'park', 'ride') - instance_id: Specific instance ID, if applicable - """ - # Pattern-based cache invalidation (works with Redis) - if instance_id: - pattern = f"*{model_name}_{instance_id}*" - else: - pattern = f"*{model_name}*" - - try: - # For Redis cache backends that support pattern deletion - if hasattr(cache, "delete_pattern"): - deleted_count = cache.delete_pattern(pattern) - logger.info( - f"Invalidated {deleted_count} cache keys for pattern: {pattern}" - ) - else: - logger.warning( - f"Cache backend does not support pattern deletion: {pattern}" - ) - except Exception as e: - logger.error(f"Error invalidating cache pattern {pattern}: {e}") - - -class IndexAnalyzer: - """Analyze and suggest database indexes""" - - @staticmethod - def analyze_slow_queries(min_time: float = 0.1) -> List[Dict[str, Any]]: - """ - Analyze slow queries from the current request - - Args: - min_time: Minimum query time in seconds to consider "slow" - """ - if not hasattr(connection, "queries"): - return [] - - slow_queries = [] - for query in connection.queries: - query_time = float(query.get("time", 0)) - if query_time >= min_time: - slow_queries.append( - { - "sql": query["sql"], - "time": query_time, - "analysis": IndexAnalyzer._analyze_query_sql(query["sql"]), - } - ) - - return slow_queries - - @staticmethod - def _analyze_query_sql(sql: str) -> Dict[str, Any]: - """ - Analyze SQL to suggest potential optimizations - """ - sql_upper = sql.upper() - analysis = { - "has_where_clause": "WHERE" in sql_upper, - "has_join": any( - join in sql_upper - for join in ["JOIN", "INNER JOIN", "LEFT JOIN", "RIGHT JOIN"] - ), - "has_order_by": "ORDER BY" in sql_upper, - "has_group_by": "GROUP BY" in sql_upper, - "has_like": "LIKE" in sql_upper, - "table_scans": [], - "suggestions": [], - } - - # Detect potential table scans - if "WHERE" not in sql_upper and "SELECT COUNT(*) FROM" not in sql_upper: - analysis["table_scans"].append("Query may be doing a full table scan") - - # Suggest indexes based on patterns - if analysis["has_where_clause"] and not analysis["has_join"]: - analysis["suggestions"].append( - "Consider adding indexes on WHERE clause columns" - ) - - if analysis["has_order_by"]: - analysis["suggestions"].append( - "Consider adding indexes on ORDER BY columns" - ) - - if analysis["has_like"] and "%" not in sql[: sql.find("LIKE") + 10]: - analysis["suggestions"].append( - "LIKE queries with leading wildcards cannot use indexes efficiently" - ) - - return analysis - - @staticmethod - def suggest_model_indexes(model: Type[models.Model]) -> List[str]: - """ - Suggest database indexes for a Django model based on its fields - """ - suggestions = [] - opts = model._meta - - # Foreign key fields should have indexes (Django adds these - # automatically) - for field in opts.fields: - if isinstance(field, models.ForeignKey): - suggestions.append( - f"Index on {field.name} (automatically created by Django)" - ) - - # Suggest composite indexes for common query patterns - date_fields = [ - f.name - for f in opts.fields - if isinstance(f, (models.DateField, models.DateTimeField)) - ] - status_fields = [ - f.name - for f in opts.fields - if f.name in ["status", "is_active", "is_published"] - ] - - if date_fields and status_fields: - for date_field in date_fields: - for status_field in status_fields: - suggestions.append( - f"Composite index on ({status_field}, {date_field}) for filtered date queries" - ) - - # Suggest indexes for fields commonly used in WHERE clauses - common_filter_fields = ["slug", "name", "created_at", "updated_at"] - for field in opts.fields: - if field.name in common_filter_fields and not field.db_index: - suggestions.append( - f"Consider adding db_index=True to { - field.name}" - ) - - return suggestions - - -def log_query_performance(): - """Decorator to log query performance for a function""" - - def decorator(func): - def wrapper(*args, **kwargs): - operation_name = f"{func.__module__}.{func.__name__}" - with track_queries(operation_name): - return func(*args, **kwargs) - - return wrapper - - return decorator - - -def optimize_queryset_for_serialization( - queryset: QuerySet, fields: List[str] -) -> QuerySet: - """ - Optimize a queryset for API serialization by only selecting needed fields - - Args: - queryset: The queryset to optimize - fields: List of field names that will be serialized - """ - # Extract foreign key fields that need select_related - model = queryset.model - opts = model._meta - - select_related_fields = [] - prefetch_related_fields = [] - - for field_name in fields: - try: - field = opts.get_field(field_name) - if isinstance(field, models.ForeignKey): - select_related_fields.append(field_name) - elif isinstance( - field, (models.ManyToManyField, models.reverse.ManyToManyRel) - ): - prefetch_related_fields.append(field_name) - except models.FieldDoesNotExist: - # Field might be a property or method, skip optimization - continue - - # Apply optimizations - if select_related_fields: - queryset = queryset.select_related(*select_related_fields) - - if prefetch_related_fields: - queryset = queryset.prefetch_related(*prefetch_related_fields) - - return queryset - - -# Query performance monitoring context manager -@contextmanager -def monitor_db_performance(operation_name: str): - """ - Context manager that monitors database performance for an operation - """ - initial_queries = len(connection.queries) if hasattr(connection, "queries") else 0 - start_time = time.time() - - try: - yield - finally: - end_time = time.time() - duration = end_time - start_time - - if hasattr(connection, "queries"): - total_queries = len(connection.queries) - initial_queries - - # Analyze queries for performance issues - slow_queries = IndexAnalyzer.analyze_slow_queries(0.05) # 50ms threshold - - performance_data = { - "operation": operation_name, - "duration": duration, - "query_count": total_queries, - "slow_query_count": len(slow_queries), - # Limit to top 5 slow queries - "slow_queries": slow_queries[:5], - } - - # Log performance data - if duration > 1.0 or total_queries > 15 or slow_queries: - logger.warning( - f"Performance issue in {operation_name}: " - f"{ - duration:.3f}s, {total_queries} queries, { - len(slow_queries)} slow", - extra=performance_data, - ) - else: - logger.debug( - f"DB performance for {operation_name}: " - f"{duration:.3f}s, {total_queries} queries", - extra=performance_data, - ) diff --git a/core/views/__init__.py b/core/views/__init__.py deleted file mode 100644 index a45c242e..00000000 --- a/core/views/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Core views diff --git a/core/views/health_views.py b/core/views/health_views.py deleted file mode 100644 index 0af168b5..00000000 --- a/core/views/health_views.py +++ /dev/null @@ -1,273 +0,0 @@ -""" -Enhanced health check views for API monitoring. -""" - -import time -from django.http import JsonResponse -from django.utils import timezone -from django.views import View -from django.conf import settings -from rest_framework.views import APIView -from rest_framework.response import Response -from rest_framework.permissions import AllowAny -from health_check.views import MainView -from core.services.enhanced_cache_service import CacheMonitor -from core.utils.query_optimization import IndexAnalyzer - - -class HealthCheckAPIView(APIView): - """ - Enhanced API endpoint for health checks with detailed JSON response - """ - - permission_classes = [AllowAny] # Public endpoint - - def get(self, request): - """Return comprehensive health check information""" - start_time = time.time() - - # Get basic health check results - main_view = MainView() - main_view.request = request - - plugins = main_view.plugins - errors = main_view.errors - - # Collect additional performance metrics - cache_monitor = CacheMonitor() - cache_stats = cache_monitor.get_cache_stats() - - # Build comprehensive health data - health_data = { - "status": "healthy" if not errors else "unhealthy", - "timestamp": timezone.now().isoformat(), - "version": getattr(settings, "VERSION", "1.0.0"), - "environment": getattr(settings, "ENVIRONMENT", "development"), - "response_time_ms": 0, # Will be calculated at the end - "checks": {}, - "metrics": { - "cache": cache_stats, - "database": self._get_database_metrics(), - "system": self._get_system_metrics(), - }, - } - - # Process individual health checks - for plugin in plugins: - plugin_name = plugin.identifier() - plugin_errors = errors.get(plugin.__class__.__name__, []) - - health_data["checks"][plugin_name] = { - "status": "healthy" if not plugin_errors else "unhealthy", - "critical": getattr(plugin, "critical_service", False), - "errors": [str(error) for error in plugin_errors], - "response_time_ms": getattr(plugin, "_response_time", None), - } - - # Calculate total response time - health_data["response_time_ms"] = round((time.time() - start_time) * 1000, 2) - - # Determine HTTP status code - status_code = 200 - if errors: - # Check if any critical services are failing - critical_errors = any( - getattr(plugin, "critical_service", False) - for plugin in plugins - if errors.get(plugin.__class__.__name__) - ) - status_code = 503 if critical_errors else 200 - - return Response(health_data, status=status_code) - - def _get_database_metrics(self): - """Get database performance metrics""" - try: - from django.db import connection - - # Get basic connection info - metrics = { - "vendor": connection.vendor, - "connection_status": "connected", - } - - # Test query performance - start_time = time.time() - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - cursor.fetchone() - query_time = (time.time() - start_time) * 1000 - - metrics["test_query_time_ms"] = round(query_time, 2) - - # PostgreSQL specific metrics - if connection.vendor == "postgresql": - try: - with connection.cursor() as cursor: - cursor.execute( - """ - SELECT - numbackends as active_connections, - xact_commit as transactions_committed, - xact_rollback as transactions_rolled_back, - blks_read as blocks_read, - blks_hit as blocks_hit - FROM pg_stat_database - WHERE datname = current_database() - """ - ) - row = cursor.fetchone() - if row: - metrics.update( - { - "active_connections": row[0], - "transactions_committed": row[1], - "transactions_rolled_back": row[2], - "cache_hit_ratio": ( - round( - (row[4] / (row[3] + row[4])) * 100, - 2, - ) - if (row[3] + row[4]) > 0 - else 0 - ), - } - ) - except Exception: - pass # Skip advanced metrics if not available - - return metrics - - except Exception as e: - return {"connection_status": "error", "error": str(e)} - - def _get_system_metrics(self): - """Get system performance metrics""" - metrics = { - "debug_mode": settings.DEBUG, - "allowed_hosts": (settings.ALLOWED_HOSTS if settings.DEBUG else ["hidden"]), - } - - try: - import psutil - - # Memory metrics - memory = psutil.virtual_memory() - metrics["memory"] = { - "total_mb": round(memory.total / 1024 / 1024, 2), - "available_mb": round(memory.available / 1024 / 1024, 2), - "percent_used": memory.percent, - } - - # CPU metrics - metrics["cpu"] = { - "percent_used": psutil.cpu_percent(interval=0.1), - "core_count": psutil.cpu_count(), - } - - # Disk metrics - disk = psutil.disk_usage("/") - metrics["disk"] = { - "total_gb": round(disk.total / 1024 / 1024 / 1024, 2), - "free_gb": round(disk.free / 1024 / 1024 / 1024, 2), - "percent_used": round((disk.used / disk.total) * 100, 2), - } - - except ImportError: - metrics["system_monitoring"] = "psutil not available" - except Exception as e: - metrics["system_error"] = str(e) - - return metrics - - -class PerformanceMetricsView(APIView): - """ - API view for performance metrics and database analysis - """ - - permission_classes = [AllowAny] if settings.DEBUG else [] - - def get(self, request): - """Return performance metrics and analysis""" - if not settings.DEBUG: - return Response({"error": "Only available in debug mode"}, status=403) - - metrics = { - "timestamp": timezone.now().isoformat(), - "database_analysis": self._get_database_analysis(), - "cache_performance": self._get_cache_performance(), - "recent_slow_queries": self._get_slow_queries(), - } - - return Response(metrics) - - def _get_database_analysis(self): - """Analyze database performance""" - try: - from django.db import connection - - analysis = { - "total_queries": len(connection.queries), - "query_analysis": IndexAnalyzer.analyze_slow_queries(0.05), - } - - if connection.queries: - query_times = [float(q.get("time", 0)) for q in connection.queries] - analysis.update( - { - "total_query_time": sum(query_times), - "average_query_time": sum(query_times) / len(query_times), - "slowest_query_time": max(query_times), - "fastest_query_time": min(query_times), - } - ) - - return analysis - - except Exception as e: - return {"error": str(e)} - - def _get_cache_performance(self): - """Get cache performance metrics""" - try: - cache_monitor = CacheMonitor() - return cache_monitor.get_cache_stats() - except Exception as e: - return {"error": str(e)} - - def _get_slow_queries(self): - """Get recent slow queries""" - try: - return IndexAnalyzer.analyze_slow_queries(0.1) # 100ms threshold - except Exception as e: - return {"error": str(e)} - - -class SimpleHealthView(View): - """ - Simple health check endpoint for load balancers - """ - - def get(self, request): - """Return simple OK status""" - try: - # Basic database connectivity test - from django.db import connection - - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - cursor.fetchone() - - return JsonResponse( - {"status": "ok", "timestamp": timezone.now().isoformat()} - ) - except Exception as e: - return JsonResponse( - { - "status": "error", - "error": str(e), - "timestamp": timezone.now().isoformat(), - }, - status=503, - ) diff --git a/core/views/map_views.py b/core/views/map_views.py deleted file mode 100644 index b46dffc2..00000000 --- a/core/views/map_views.py +++ /dev/null @@ -1,699 +0,0 @@ -""" -API views for the unified map service. -Enhanced with proper error handling, pagination, and performance optimizations. -""" - -import json -import logging -from typing import Dict, Any, Optional -from django.http import JsonResponse, HttpRequest -from django.views.decorators.cache import cache_page -from django.views.decorators.gzip import gzip_page -from django.utils.decorators import method_decorator -from django.views import View -from django.core.exceptions import ValidationError -from django.conf import settings -import time - -from ..services.map_service import unified_map_service -from ..services.data_structures import GeoBounds, MapFilters, LocationType - -logger = logging.getLogger(__name__) - - -class MapAPIView(View): - """Base view for map API endpoints with common functionality.""" - - # Pagination settings - DEFAULT_PAGE_SIZE = 50 - MAX_PAGE_SIZE = 200 - - def dispatch(self, request, *args, **kwargs): - """Add CORS headers, compression, and handle preflight requests.""" - start_time = time.time() - - try: - response = super().dispatch(request, *args, **kwargs) - - # Add CORS headers for API access - response["Access-Control-Allow-Origin"] = "*" - response["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS" - response["Access-Control-Allow-Headers"] = "Content-Type, Authorization" - - # Add performance headers - response["X-Response-Time"] = ( - f"{(time.time() - - start_time) * - 1000:.2f}ms" - ) - - # Add compression hint for large responses - if hasattr(response, "content") and len(response.content) > 1024: - response["Content-Encoding"] = "gzip" - - return response - - except Exception as e: - logger.error( - f"API error in { - request.path}: { - str(e)}", - exc_info=True, - ) - return self._error_response("An internal server error occurred", status=500) - - def options(self, request, *args, **kwargs): - """Handle preflight CORS requests.""" - return JsonResponse({}, status=200) - - def _parse_bounds(self, request: HttpRequest) -> Optional[GeoBounds]: - """Parse geographic bounds from request parameters.""" - try: - north = request.GET.get("north") - south = request.GET.get("south") - east = request.GET.get("east") - west = request.GET.get("west") - - if all(param is not None for param in [north, south, east, west]): - bounds = GeoBounds( - north=float(north), - south=float(south), - east=float(east), - west=float(west), - ) - - # Validate bounds - if not (-90 <= bounds.south <= bounds.north <= 90): - raise ValidationError("Invalid latitude bounds") - if not (-180 <= bounds.west <= bounds.east <= 180): - raise ValidationError("Invalid longitude bounds") - - return bounds - return None - except (ValueError, TypeError) as e: - raise ValidationError(f"Invalid bounds parameters: {e}") - - def _parse_pagination(self, request: HttpRequest) -> Dict[str, int]: - """Parse pagination parameters from request.""" - try: - page = max(1, int(request.GET.get("page", 1))) - page_size = min( - self.MAX_PAGE_SIZE, - max( - 1, - int(request.GET.get("page_size", self.DEFAULT_PAGE_SIZE)), - ), - ) - offset = (page - 1) * page_size - - return { - "page": page, - "page_size": page_size, - "offset": offset, - "limit": page_size, - } - except (ValueError, TypeError): - return { - "page": 1, - "page_size": self.DEFAULT_PAGE_SIZE, - "offset": 0, - "limit": self.DEFAULT_PAGE_SIZE, - } - - def _parse_filters(self, request: HttpRequest) -> Optional[MapFilters]: - """Parse filtering parameters from request.""" - try: - filters = MapFilters() - - # Location types - location_types_param = request.GET.get("types") - if location_types_param: - type_strings = location_types_param.split(",") - valid_types = {lt.value for lt in LocationType} - filters.location_types = { - LocationType(t.strip()) - for t in type_strings - if t.strip() in valid_types - } - - # Park status - park_status_param = request.GET.get("park_status") - if park_status_param: - filters.park_status = set(park_status_param.split(",")) - - # Ride types - ride_types_param = request.GET.get("ride_types") - if ride_types_param: - filters.ride_types = set(ride_types_param.split(",")) - - # Company roles - company_roles_param = request.GET.get("company_roles") - if company_roles_param: - filters.company_roles = set(company_roles_param.split(",")) - - # Search query with length validation - search_query = request.GET.get("q") or request.GET.get("search") - if search_query and len(search_query.strip()) >= 2: - filters.search_query = search_query.strip() - - # Rating filter with validation - min_rating_param = request.GET.get("min_rating") - if min_rating_param: - min_rating = float(min_rating_param) - if 0 <= min_rating <= 10: - filters.min_rating = min_rating - - # Geographic filters with validation - country = request.GET.get("country", "").strip() - if country and len(country) >= 2: - filters.country = country - - state = request.GET.get("state", "").strip() - if state and len(state) >= 2: - filters.state = state - - city = request.GET.get("city", "").strip() - if city and len(city) >= 2: - filters.city = city - - # Coordinates requirement - has_coordinates_param = request.GET.get("has_coordinates") - if has_coordinates_param is not None: - filters.has_coordinates = has_coordinates_param.lower() in [ - "true", - "1", - "yes", - ] - - return ( - filters - if any( - [ - filters.location_types, - filters.park_status, - filters.ride_types, - filters.company_roles, - filters.search_query, - filters.min_rating, - filters.country, - filters.state, - filters.city, - ] - ) - else None - ) - - except (ValueError, TypeError) as e: - raise ValidationError(f"Invalid filter parameters: {e}") - - def _parse_zoom_level(self, request: HttpRequest) -> int: - """Parse zoom level from request with default.""" - try: - zoom_param = request.GET.get("zoom", "10") - zoom_level = int(zoom_param) - return max(1, min(20, zoom_level)) # Clamp between 1 and 20 - except (ValueError, TypeError): - return 10 # Default zoom level - - def _create_paginated_response( - self, - data: list, - total_count: int, - pagination: Dict[str, int], - request: HttpRequest, - ) -> Dict[str, Any]: - """Create paginated response with metadata.""" - total_pages = (total_count + pagination["page_size"] - 1) // pagination[ - "page_size" - ] - - # Build pagination URLs - base_url = request.build_absolute_uri(request.path) - query_params = request.GET.copy() - - next_url = None - if pagination["page"] < total_pages: - query_params["page"] = pagination["page"] + 1 - next_url = f"{base_url}?{query_params.urlencode()}" - - prev_url = None - if pagination["page"] > 1: - query_params["page"] = pagination["page"] - 1 - prev_url = f"{base_url}?{query_params.urlencode()}" - - return { - "status": "success", - "data": data, - "pagination": { - "page": pagination["page"], - "page_size": pagination["page_size"], - "total_pages": total_pages, - "total_count": total_count, - "has_next": pagination["page"] < total_pages, - "has_previous": pagination["page"] > 1, - "next_url": next_url, - "previous_url": prev_url, - }, - } - - def _error_response( - self, - message: str, - status: int = 400, - error_code: str = None, - details: Dict[str, Any] = None, - ) -> JsonResponse: - """Return standardized error response with enhanced information.""" - response_data = { - "status": "error", - "message": message, - "timestamp": time.time(), - "data": None, - } - - if error_code: - response_data["error_code"] = error_code - - if details: - response_data["details"] = details - - # Add request ID for debugging in production - if hasattr(settings, "DEBUG") and not settings.DEBUG: - response_data["request_id"] = getattr(self.request, "id", None) - - return JsonResponse(response_data, status=status) - - def _success_response( - self, data: Any, message: str = None, metadata: Dict[str, Any] = None - ) -> JsonResponse: - """Return standardized success response.""" - response_data = { - "status": "success", - "data": data, - "timestamp": time.time(), - } - - if message: - response_data["message"] = message - - if metadata: - response_data["metadata"] = metadata - - return JsonResponse(response_data) - - -class MapLocationsView(MapAPIView): - """ - API endpoint for getting map locations with optional clustering. - - GET /api/map/locations/ - Parameters: - - north, south, east, west: Bounding box coordinates - - zoom: Zoom level (1-20) - - types: Comma-separated location types (park,ride,company,generic) - - cluster: Whether to enable clustering (true/false) - - q: Search query - - park_status: Park status filter - - ride_types: Ride type filter - - min_rating: Minimum rating filter - - country, state, city: Geographic filters - """ - - @method_decorator(cache_page(300)) # Cache for 5 minutes - @method_decorator(gzip_page) # Compress large responses - def get(self, request: HttpRequest) -> JsonResponse: - """Get map locations with optional clustering and filtering.""" - try: - # Parse parameters - bounds = self._parse_bounds(request) - filters = self._parse_filters(request) - zoom_level = self._parse_zoom_level(request) - pagination = self._parse_pagination(request) - - # Clustering preference - cluster_param = request.GET.get("cluster", "true") - enable_clustering = cluster_param.lower() in ["true", "1", "yes"] - - # Cache preference - use_cache_param = request.GET.get("cache", "true") - use_cache = use_cache_param.lower() in ["true", "1", "yes"] - - # Validate request - if not enable_clustering and not bounds and not filters: - return self._error_response( - "Either bounds, filters, or clustering must be specified for non-clustered requests", - error_code="MISSING_PARAMETERS", - ) - - # Get map data - response = unified_map_service.get_map_data( - bounds=bounds, - filters=filters, - zoom_level=zoom_level, - cluster=enable_clustering, - use_cache=use_cache, - ) - - # Handle pagination for non-clustered results - if not enable_clustering and response.locations: - start_idx = pagination["offset"] - end_idx = start_idx + pagination["limit"] - paginated_locations = response.locations[start_idx:end_idx] - - return JsonResponse( - self._create_paginated_response( - [loc.to_dict() for loc in paginated_locations], - len(response.locations), - pagination, - request, - ) - ) - - # For clustered results, return as-is with metadata - response_dict = response.to_dict() - - return self._success_response( - response_dict, - metadata={ - "clustered": response.clustered, - "cache_hit": response.cache_hit, - "query_time_ms": response.query_time_ms, - "filters_applied": response.filters_applied, - }, - ) - - except ValidationError as e: - logger.warning(f"Validation error in MapLocationsView: {str(e)}") - return self._error_response(str(e), 400, error_code="VALIDATION_ERROR") - except Exception as e: - logger.error(f"Error in MapLocationsView: {str(e)}", exc_info=True) - return self._error_response( - "Failed to retrieve map locations", - 500, - error_code="INTERNAL_ERROR", - ) - - -class MapLocationDetailView(MapAPIView): - """ - API endpoint for getting detailed information about a specific location. - - GET /api/map/locations/// - """ - - @method_decorator(cache_page(600)) # Cache for 10 minutes - def get( - self, request: HttpRequest, location_type: str, location_id: int - ) -> JsonResponse: - """Get detailed information for a specific location.""" - try: - # Validate location type - valid_types = [lt.value for lt in LocationType] - if location_type not in valid_types: - return self._error_response( - f"Invalid location type: {location_type}. Valid types: { - ', '.join(valid_types)}", - 400, - error_code="INVALID_LOCATION_TYPE", - ) - - # Validate location ID - if location_id <= 0: - return self._error_response( - "Location ID must be a positive integer", - 400, - error_code="INVALID_LOCATION_ID", - ) - - # Get location details - location = unified_map_service.get_location_details( - location_type, location_id - ) - - if not location: - return self._error_response( - f"Location not found: {location_type}/{location_id}", - 404, - error_code="LOCATION_NOT_FOUND", - ) - - return self._success_response( - location.to_dict(), - metadata={ - "location_type": location_type, - "location_id": location_id, - }, - ) - - except ValueError as e: - logger.warning(f"Value error in MapLocationDetailView: {str(e)}") - return self._error_response(str(e), 400, error_code="INVALID_PARAMETER") - except Exception as e: - logger.error( - f"Error in MapLocationDetailView: { - str(e)}", - exc_info=True, - ) - return self._error_response( - "Failed to retrieve location details", - 500, - error_code="INTERNAL_ERROR", - ) - - -class MapSearchView(MapAPIView): - """ - API endpoint for searching locations by text query. - - GET /api/map/search/ - Parameters: - - q: Search query (required) - - north, south, east, west: Optional bounding box - - types: Comma-separated location types - - limit: Maximum results (default 50) - """ - - @method_decorator(gzip_page) # Compress responses - def get(self, request: HttpRequest) -> JsonResponse: - """Search locations by text query with pagination.""" - try: - # Get and validate search query - query = request.GET.get("q", "").strip() - if not query: - return self._error_response( - "Search query 'q' parameter is required", - 400, - error_code="MISSING_QUERY", - ) - - if len(query) < 2: - return self._error_response( - "Search query must be at least 2 characters long", - 400, - error_code="QUERY_TOO_SHORT", - ) - - # Parse parameters - bounds = self._parse_bounds(request) - pagination = self._parse_pagination(request) - - # Parse location types - location_types = None - types_param = request.GET.get("types") - if types_param: - try: - valid_types = {lt.value for lt in LocationType} - location_types = { - LocationType(t.strip()) - for t in types_param.split(",") - if t.strip() in valid_types - } - except ValueError: - return self._error_response( - "Invalid location types", - 400, - error_code="INVALID_TYPES", - ) - - # Set reasonable search limit (higher for search than general - # listings) - search_limit = min(500, pagination["page"] * pagination["page_size"]) - - # Perform search - locations = unified_map_service.search_locations( - query=query, - bounds=bounds, - location_types=location_types, - limit=search_limit, - ) - - # Apply pagination - start_idx = pagination["offset"] - end_idx = start_idx + pagination["limit"] - paginated_locations = locations[start_idx:end_idx] - - return JsonResponse( - self._create_paginated_response( - [loc.to_dict() for loc in paginated_locations], - len(locations), - pagination, - request, - ) - ) - - except ValidationError as e: - logger.warning(f"Validation error in MapSearchView: {str(e)}") - return self._error_response(str(e), 400, error_code="VALIDATION_ERROR") - except ValueError as e: - logger.warning(f"Value error in MapSearchView: {str(e)}") - return self._error_response(str(e), 400, error_code="INVALID_PARAMETER") - except Exception as e: - logger.error(f"Error in MapSearchView: {str(e)}", exc_info=True) - return self._error_response( - "Search failed due to internal error", - 500, - error_code="SEARCH_FAILED", - ) - - -class MapBoundsView(MapAPIView): - """ - API endpoint for getting locations within specific bounds. - - GET /api/map/bounds/ - Parameters: - - north, south, east, west: Bounding box coordinates (required) - - types: Comma-separated location types - - zoom: Zoom level - """ - - @method_decorator(cache_page(300)) # Cache for 5 minutes - def get(self, request: HttpRequest) -> JsonResponse: - """Get locations within specific geographic bounds.""" - try: - # Parse required bounds - bounds = self._parse_bounds(request) - if not bounds: - return self._error_response( - "Bounds parameters required: north, south, east, west", 400 - ) - - # Parse optional filters - location_types = None - types_param = request.GET.get("types") - if types_param: - location_types = { - LocationType(t.strip()) - for t in types_param.split(",") - if t.strip() in [lt.value for lt in LocationType] - } - - zoom_level = self._parse_zoom_level(request) - - # Get locations within bounds - response = unified_map_service.get_locations_by_bounds( - north=bounds.north, - south=bounds.south, - east=bounds.east, - west=bounds.west, - location_types=location_types, - zoom_level=zoom_level, - ) - - return JsonResponse(response.to_dict()) - - except ValidationError as e: - return self._error_response(str(e), 400) - except Exception as e: - return self._error_response( - f"Internal server error: { - str(e)}", - 500, - ) - - -class MapStatsView(MapAPIView): - """ - API endpoint for getting map service statistics and health information. - - GET /api/map/stats/ - """ - - def get(self, request: HttpRequest) -> JsonResponse: - """Get map service statistics and performance metrics.""" - try: - stats = unified_map_service.get_service_stats() - - return JsonResponse({"status": "success", "data": stats}) - - except Exception as e: - return self._error_response( - f"Internal server error: { - str(e)}", - 500, - ) - - -class MapCacheView(MapAPIView): - """ - API endpoint for cache management (admin only). - - DELETE /api/map/cache/ - POST /api/map/cache/invalidate/ - """ - - def delete(self, request: HttpRequest) -> JsonResponse: - """Clear all map cache (admin only).""" - # TODO: Add admin permission check - try: - unified_map_service.invalidate_cache() - - return JsonResponse( - { - "status": "success", - "message": "Map cache cleared successfully", - } - ) - - except Exception as e: - return self._error_response( - f"Internal server error: { - str(e)}", - 500, - ) - - def post(self, request: HttpRequest) -> JsonResponse: - """Invalidate specific cache entries.""" - # TODO: Add admin permission check - try: - data = json.loads(request.body) - - location_type = data.get("location_type") - location_id = data.get("location_id") - bounds_data = data.get("bounds") - - bounds = None - if bounds_data: - bounds = GeoBounds(**bounds_data) - - unified_map_service.invalidate_cache( - location_type=location_type, - location_id=location_id, - bounds=bounds, - ) - - return JsonResponse( - { - "status": "success", - "message": "Cache invalidated successfully", - } - ) - - except (json.JSONDecodeError, TypeError, ValueError) as e: - return self._error_response(f"Invalid request data: {str(e)}", 400) - except Exception as e: - return self._error_response( - f"Internal server error: { - str(e)}", - 500, - ) diff --git a/core/views/maps.py b/core/views/maps.py deleted file mode 100644 index ca059164..00000000 --- a/core/views/maps.py +++ /dev/null @@ -1,421 +0,0 @@ -""" -HTML views for the unified map service. -Provides web interfaces for map functionality with HTMX integration. -""" - -import json -from typing import Dict, Any, Optional, Set -from django.shortcuts import render -from django.http import JsonResponse, HttpRequest, HttpResponse -from django.views.generic import TemplateView, View -from django.core.paginator import Paginator - -from ..services.map_service import unified_map_service -from ..services.data_structures import GeoBounds, MapFilters, LocationType - - -class MapViewMixin: - """Mixin providing common functionality for map views.""" - - def get_map_context(self, request: HttpRequest) -> Dict[str, Any]: - """Get common context data for map views.""" - return { - "map_api_urls": { - "locations": "/api/map/locations/", - "search": "/api/map/search/", - "bounds": "/api/map/bounds/", - "location_detail": "/api/map/locations/", - }, - "location_types": [lt.value for lt in LocationType], - "default_zoom": 10, - "enable_clustering": True, - "enable_search": True, - } - - def parse_location_types(self, request: HttpRequest) -> Optional[Set[LocationType]]: - """Parse location types from request parameters.""" - types_param = request.GET.get("types") - if types_param: - try: - return { - LocationType(t.strip()) - for t in types_param.split(",") - if t.strip() in [lt.value for lt in LocationType] - } - except ValueError: - return None - return None - - -class UniversalMapView(MapViewMixin, TemplateView): - """ - Main universal map view showing all location types. - - URL: /maps/ - """ - - template_name = "maps/universal_map.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context.update(self.get_map_context(self.request)) - - # Additional context for universal map - context.update( - { - "page_title": "Interactive Map - All Locations", - "map_type": "universal", - "show_all_types": True, - "initial_location_types": [lt.value for lt in LocationType], - "filters_enabled": True, - } - ) - - # Handle initial bounds from query parameters - if all( - param in self.request.GET for param in ["north", "south", "east", "west"] - ): - try: - context["initial_bounds"] = { - "north": float(self.request.GET["north"]), - "south": float(self.request.GET["south"]), - "east": float(self.request.GET["east"]), - "west": float(self.request.GET["west"]), - } - except (ValueError, TypeError): - pass - - return context - - -class ParkMapView(MapViewMixin, TemplateView): - """ - Map view focused specifically on parks. - - URL: /maps/parks/ - """ - - template_name = "maps/park_map.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context.update(self.get_map_context(self.request)) - - # Park-specific context - context.update( - { - "page_title": "Theme Parks Map", - "map_type": "parks", - "show_all_types": False, - "initial_location_types": [LocationType.PARK.value], - "filters_enabled": True, - "park_specific_filters": True, - } - ) - - return context - - -class NearbyLocationsView(MapViewMixin, TemplateView): - """ - View for showing locations near a specific point. - - URL: /maps/nearby/ - """ - - template_name = "maps/nearby_locations.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context.update(self.get_map_context(self.request)) - - # Parse coordinates from query parameters - lat = self.request.GET.get("lat") - lng = self.request.GET.get("lng") - radius = self.request.GET.get("radius", "50") # Default 50km radius - - if lat and lng: - try: - center_lat = float(lat) - center_lng = float(lng) - # Clamp between 1-200km - search_radius = min(200, max(1, float(radius))) - - context.update( - { - "page_title": f"Locations Near { - center_lat:.4f}, { - center_lng:.4f}", - "map_type": "nearby", - "center_coordinates": { - "lat": center_lat, - "lng": center_lng, - }, - "search_radius": search_radius, - "show_radius_circle": True, - } - ) - except (ValueError, TypeError): - context["error"] = "Invalid coordinates provided" - else: - context.update( - { - "page_title": "Nearby Locations", - "map_type": "nearby", - "prompt_for_location": True, - } - ) - - return context - - -class LocationFilterView(MapViewMixin, View): - """ - HTMX endpoint for updating map when filters change. - - URL: /maps/htmx/filter/ - """ - - def get(self, request: HttpRequest) -> HttpResponse: - """Return filtered location data for HTMX updates.""" - try: - # Parse filter parameters - location_types = self.parse_location_types(request) - search_query = request.GET.get("q", "").strip() - country = request.GET.get("country", "").strip() - state = request.GET.get("state", "").strip() - - # Create filters - filters = None - if any([location_types, search_query, country, state]): - filters = MapFilters( - location_types=location_types, - search_query=search_query or None, - country=country or None, - state=state or None, - has_coordinates=True, - ) - - # Get filtered locations - map_response = unified_map_service.get_map_data( - filters=filters, - zoom_level=int(request.GET.get("zoom", "10")), - cluster=request.GET.get("cluster", "true").lower() == "true", - ) - - # Return JSON response for HTMX - return JsonResponse( - { - "status": "success", - "data": map_response.to_dict(), - "filters_applied": map_response.filters_applied, - } - ) - - except Exception as e: - return JsonResponse({"status": "error", "message": str(e)}, status=400) - - -class LocationSearchView(MapViewMixin, View): - """ - HTMX endpoint for real-time location search. - - URL: /maps/htmx/search/ - """ - - def get(self, request: HttpRequest) -> HttpResponse: - """Return search results for HTMX updates.""" - query = request.GET.get("q", "").strip() - - if not query or len(query) < 3: - return render( - request, - "maps/partials/search_results.html", - { - "results": [], - "query": query, - "message": "Enter at least 3 characters to search", - }, - ) - - try: - # Parse optional location types - location_types = self.parse_location_types(request) - limit = min(20, max(5, int(request.GET.get("limit", "10")))) - - # Perform search - results = unified_map_service.search_locations( - query=query, location_types=location_types, limit=limit - ) - - return render( - request, - "maps/partials/search_results.html", - {"results": results, "query": query, "count": len(results)}, - ) - - except Exception as e: - return render( - request, - "maps/partials/search_results.html", - {"results": [], "query": query, "error": str(e)}, - ) - - -class MapBoundsUpdateView(MapViewMixin, View): - """ - HTMX endpoint for updating locations when map bounds change. - - URL: /maps/htmx/bounds/ - """ - - def post(self, request: HttpRequest) -> HttpResponse: - """Update map data when bounds change.""" - try: - data = json.loads(request.body) - - # Parse bounds - bounds = GeoBounds( - north=float(data["north"]), - south=float(data["south"]), - east=float(data["east"]), - west=float(data["west"]), - ) - - # Parse additional parameters - zoom_level = int(data.get("zoom", 10)) - location_types = None - if "types" in data: - location_types = { - LocationType(t) - for t in data["types"] - if t in [lt.value for lt in LocationType] - } - - # Location types are used directly in the service call - - # Get updated map data - map_response = unified_map_service.get_locations_by_bounds( - north=bounds.north, - south=bounds.south, - east=bounds.east, - west=bounds.west, - location_types=location_types, - zoom_level=zoom_level, - ) - - return JsonResponse({"status": "success", "data": map_response.to_dict()}) - - except (json.JSONDecodeError, ValueError, KeyError) as e: - return JsonResponse( - { - "status": "error", - "message": f"Invalid request data: {str(e)}", - }, - status=400, - ) - except Exception as e: - return JsonResponse({"status": "error", "message": str(e)}, status=500) - - -class LocationDetailModalView(MapViewMixin, View): - """ - HTMX endpoint for showing location details in modal. - - URL: /maps/htmx/location/// - """ - - def get( - self, request: HttpRequest, location_type: str, location_id: int - ) -> HttpResponse: - """Return location detail modal content.""" - try: - # Validate location type - if location_type not in [lt.value for lt in LocationType]: - return render( - request, - "maps/partials/location_modal.html", - {"error": f"Invalid location type: {location_type}"}, - ) - - # Get location details - location = unified_map_service.get_location_details( - location_type, location_id - ) - - if not location: - return render( - request, - "maps/partials/location_modal.html", - {"error": "Location not found"}, - ) - - return render( - request, - "maps/partials/location_modal.html", - {"location": location, "location_type": location_type}, - ) - - except Exception as e: - return render( - request, "maps/partials/location_modal.html", {"error": str(e)} - ) - - -class LocationListView(MapViewMixin, TemplateView): - """ - View for listing locations with pagination (non-map view). - - URL: /maps/list/ - """ - - template_name = "maps/location_list.html" - paginate_by = 20 - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - # Parse filters - location_types = self.parse_location_types(self.request) - search_query = self.request.GET.get("q", "").strip() - country = self.request.GET.get("country", "").strip() - state = self.request.GET.get("state", "").strip() - - # Create filters - filters = None - if any([location_types, search_query, country, state]): - filters = MapFilters( - location_types=location_types, - search_query=search_query or None, - country=country or None, - state=state or None, - has_coordinates=True, - ) - - # Get locations without clustering - map_response = unified_map_service.get_map_data( - filters=filters, cluster=False, use_cache=True - ) - - # Paginate results - paginator = Paginator(map_response.locations, self.paginate_by) - page_number = self.request.GET.get("page") - page_obj = paginator.get_page(page_number) - - context.update( - { - "page_title": "All Locations", - "locations": page_obj, - "total_count": map_response.total_count, - "applied_filters": filters, - "location_types": [lt.value for lt in LocationType], - "current_filters": { - "types": self.request.GET.getlist("types"), - "q": search_query, - "country": country, - "state": state, - }, - } - ) - - return context diff --git a/core/views/search.py b/core/views/search.py deleted file mode 100644 index 390d17f7..00000000 --- a/core/views/search.py +++ /dev/null @@ -1,178 +0,0 @@ -from django.views.generic import TemplateView -from django.http import JsonResponse -from django.contrib.gis.geos import Point -from parks.models import Park -from parks.filters import ParkFilter -from core.services.location_search import ( - location_search_service, - LocationSearchFilters, -) -from core.forms.search import LocationSearchForm - - -class AdaptiveSearchView(TemplateView): - template_name = "core/search/results.html" - - def get_queryset(self): - """ - Get the base queryset, optimized with select_related and prefetch_related - """ - return ( - Park.objects.select_related("operator", "property_owner") - .prefetch_related("location", "photos") - .all() - ) - - def get_filterset(self): - """ - Get the filterset instance - """ - return ParkFilter(self.request.GET, queryset=self.get_queryset()) - - def get_context_data(self, **kwargs): - """ - Add filtered results and filter form to context - """ - context = super().get_context_data(**kwargs) - filterset = self.get_filterset() - - # Check if location-based search is being used - location_search = self.request.GET.get("location_search", "").strip() - near_location = self.request.GET.get("near_location", "").strip() - - # Add location search context - context.update( - { - "results": filterset.qs, - "filters": filterset, - "applied_filters": bool( - self.request.GET - ), # Check if any filters are applied - "is_location_search": bool(location_search or near_location), - "location_search_query": location_search or near_location, - } - ) - - return context - - -class FilterFormView(TemplateView): - """ - View for rendering just the filter form for HTMX updates - """ - - template_name = "core/search/filters.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - filterset = ParkFilter(self.request.GET, queryset=Park.objects.all()) - context["filters"] = filterset - return context - - -class LocationSearchView(TemplateView): - """ - Enhanced search view with comprehensive location search capabilities. - """ - - template_name = "core/search/location_results.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - # Build search filters from request parameters - filters = self._build_search_filters() - - # Perform search - results = location_search_service.search(filters) - - # Group results by type for better presentation - grouped_results = { - "parks": [r for r in results if r.content_type == "park"], - "rides": [r for r in results if r.content_type == "ride"], - "companies": [r for r in results if r.content_type == "company"], - } - - context.update( - { - "results": results, - "grouped_results": grouped_results, - "total_results": len(results), - "search_filters": filters, - "has_location_filter": bool(filters.location_point), - "search_form": LocationSearchForm(self.request.GET), - } - ) - - return context - - def _build_search_filters(self) -> LocationSearchFilters: - """Build LocationSearchFilters from request parameters.""" - form = LocationSearchForm(self.request.GET) - form.is_valid() # Populate cleaned_data - - # Parse location coordinates if provided - location_point = None - lat = form.cleaned_data.get("lat") - lng = form.cleaned_data.get("lng") - if lat and lng: - try: - location_point = Point(float(lng), float(lat), srid=4326) - except (ValueError, TypeError): - location_point = None - - # Parse location types - location_types = set() - if form.cleaned_data.get("search_parks"): - location_types.add("park") - if form.cleaned_data.get("search_rides"): - location_types.add("ride") - if form.cleaned_data.get("search_companies"): - location_types.add("company") - - # If no specific types selected, search all - if not location_types: - location_types = {"park", "ride", "company"} - - # Parse radius - radius_km = None - radius_str = form.cleaned_data.get("radius_km", "").strip() - if radius_str: - try: - radius_km = float(radius_str) - # Clamp between 1-500km - radius_km = max(1, min(500, radius_km)) - except (ValueError, TypeError): - radius_km = None - - return LocationSearchFilters( - search_query=form.cleaned_data.get("q", "").strip() or None, - location_point=location_point, - radius_km=radius_km, - location_types=location_types if location_types else None, - country=form.cleaned_data.get("country", "").strip() or None, - state=form.cleaned_data.get("state", "").strip() or None, - city=form.cleaned_data.get("city", "").strip() or None, - park_status=self.request.GET.getlist("park_status") or None, - include_distance=True, - max_results=int(self.request.GET.get("limit", 100)), - ) - - -class LocationSuggestionsView(TemplateView): - """ - AJAX endpoint for location search suggestions. - """ - - def get(self, request, *args, **kwargs): - query = request.GET.get("q", "").strip() - limit = int(request.GET.get("limit", 10)) - - if len(query) < 2: - return JsonResponse({"suggestions": []}) - - try: - suggestions = location_search_service.suggest_locations(query, limit) - return JsonResponse({"suggestions": suggestions}) - except Exception as e: - return JsonResponse({"error": str(e)}, status=500) diff --git a/core/views/views.py b/core/views/views.py deleted file mode 100644 index 9a197e18..00000000 --- a/core/views/views.py +++ /dev/null @@ -1,62 +0,0 @@ -from typing import Any, Dict, Optional, Type -from django.shortcuts import redirect -from django.urls import reverse -from django.views.generic import DetailView -from django.views import View -from django.http import HttpRequest, HttpResponse -from django.db.models import Model - - -class SlugRedirectMixin(View): - """ - Mixin that handles redirects for old slugs. - Requires the model to inherit from SluggedModel and view to inherit from DetailView. - """ - - model: Optional[Type[Model]] = None - slug_url_kwarg: str = "slug" - object: Optional[Model] = None - - def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse: - # Only apply slug redirect logic to DetailViews - if not isinstance(self, DetailView): - return super().dispatch(request, *args, **kwargs) - - # Get the object using current or historical slug - try: - self.object = self.get_object() # type: ignore - # Check if we used an old slug - current_slug = kwargs.get(self.slug_url_kwarg) - if current_slug and current_slug != getattr(self.object, "slug", None): - # Get the URL pattern name from the view - url_pattern = self.get_redirect_url_pattern() - # Build kwargs for reverse() - reverse_kwargs = self.get_redirect_url_kwargs() - # Redirect to the current slug URL - return redirect( - reverse(url_pattern, kwargs=reverse_kwargs), permanent=True - ) - return super().dispatch(request, *args, **kwargs) - except (AttributeError, Exception) as e: # type: ignore - if self.model and hasattr(self.model, "DoesNotExist"): - if isinstance(e, self.model.DoesNotExist): # type: ignore - return super().dispatch(request, *args, **kwargs) - return super().dispatch(request, *args, **kwargs) - - def get_redirect_url_pattern(self) -> str: - """ - Get the URL pattern name for redirects. - Should be overridden by subclasses. - """ - raise NotImplementedError( - "Subclasses must implement get_redirect_url_pattern()" - ) - - def get_redirect_url_kwargs(self) -> Dict[str, Any]: - """ - Get the kwargs for reverse() when redirecting. - Should be overridden by subclasses if they need custom kwargs. - """ - if not self.object: - return {} - return {self.slug_url_kwarg: getattr(self.object, "slug", "")} diff --git a/docs/2024-02-14/auth_setup.md b/docs/2024-02-14/auth_setup.md deleted file mode 100644 index 8c472e50..00000000 --- a/docs/2024-02-14/auth_setup.md +++ /dev/null @@ -1,150 +0,0 @@ -# Authentication System Setup - -## Overview -This document outlines the setup of the authentication system, including both social and regular authentication. - -## Backend Changes - -### 1. Package Installation -```bash -pip install django-allauth==0.65.1 dj-rest-auth==6.0.0 djangorestframework==3.15.2 django-cors-headers==4.5.0 -``` - -### 2. Configuration Files Modified -- thrillwiki/settings.py - - Added authentication apps - - Configured REST Framework - - Added CORS settings - - Added social auth providers - - Updated redirect URLs - -- thrillwiki/urls.py - - Added dj-rest-auth URLs - - Added social auth URLs - -### 3. New Files Created -- accounts/adapters.py - - Custom social account adapter - - Handles missing emails - - Sets profile pictures from social providers - -### 4. Modified Files -- accounts/views.py - - Added email collection endpoint - - Updated authentication views -- accounts/urls.py - - Added new authentication endpoints - -## Frontend Changes - -### 1. Package Installation -```bash -npm install react-router-dom@6 axios@latest @react-oauth/google@latest -``` - -### 2. New Components Created -- src/contexts/AuthContext.tsx -- src/contexts/AuthProvider.tsx -- src/pages/Login.tsx -- src/pages/DiscordRedirect.tsx -- src/pages/EmailRequired.tsx - -### 3. New Assets -- public/google-icon.svg -- public/discord-icon.svg - -### 4. Modified Files -- src/App.tsx - - Added Google OAuth provider - - Added new routes -- src/api/client.ts - - Added authentication endpoints - - Added token handling - -## Development Environment Setup - -### Backend Setup -1. Create ***REMOVED*** file: -```env -DJANGO_SECRET_KEY=your_secret_key -DEBUG=True -ALLOWED_HOSTS=localhost,127.0.0.1 -CORS_ALLOWED_ORIGINS=http://localhost:5173 - -# OAuth Credentials -GOOGLE_OAUTH2_CLIENT_ID=your_google_client_id -GOOGLE_OAUTH2_CLIENT_SECRET=your_google_client_secret -DISCORD_CLIENT_ID=your_discord_client_id -DISCORD_CLIENT_SECRET=your_discord_client_secret - -# Database -DB_NAME=thrillwiki -DB_USER=postgres -DB_PASSWORD=postgres -DB_HOST=localhost -DB_PORT=5432 -``` - -2. Run migrations: -```bash -python manage.py makemigrations -python manage.py migrate -``` - -### Frontend Setup -1. Create ***REMOVED*** file: -```env -VITE_API_URL=http://localhost:8000 -VITE_GOOGLE_CLIENT_ID=your_google_client_id -VITE_DISCORD_CLIENT_ID=your_discord_client_id -``` - -2. Install dependencies: -```bash -cd frontend -npm install -``` - -## Testing Instructions - -### Backend Testing -1. Start Django development server: -```bash -python manage.py runserver -``` - -2. Test endpoints: -- Regular auth: http://localhost:8000/api/auth/login/ -- Social auth: http://localhost:8000/api/auth/google/login/ -- User info: http://localhost:8000/api/auth/user/ - -### Frontend Testing -1. Start Vite development server: -```bash -cd frontend -npm run dev -``` - -2. Test flows: -- Regular login: http://localhost:5173/login -- Google login: Click "Continue with Google" -- Discord login: Click "Continue with Discord" -- Protected route: http://localhost:5173/settings - -## Testing Checklist -- [ ] Regular login/registration -- [ ] Google OAuth flow -- [ ] Discord OAuth flow -- [ ] Email collection for social auth -- [ ] Profile picture import -- [ ] Protected route access -- [ ] Token persistence -- [ ] Error handling -- [ ] Logout functionality - -## Notes -- Ensure all OAuth credentials are properly set up in Google Cloud Console and Discord Developer Portal -- Test all flows in incognito mode to avoid cached credentials -- Verify CSRF protection is working -- Check token expiration handling -- Test error scenarios (network issues, invalid credentials) diff --git a/docs/2024-02-14/changes.md b/docs/2024-02-14/changes.md deleted file mode 100644 index 71713069..00000000 --- a/docs/2024-02-14/changes.md +++ /dev/null @@ -1,59 +0,0 @@ -# Changes Made - February 14, 2024 - -## Reactivated Removal -- Removed all reactivated-related files and configurations -- Cleaned up old client directory and unused configuration files - -## Frontend Updates -- Updated to latest versions of all packages including Vite, React, and Material UI -- Configured Vite for optimal development experience -- Set up proper CORS and CSRF settings for Vite development server -- Improved build configuration with proper chunking and optimization -- Removed webpack configuration in favor of Vite - -## Development Environment -- Created new development startup script (dev.sh) -- Updated frontend environment variables -- Configured HMR (Hot Module Replacement) for better development experience -- Set up proper proxy configuration for API and media files - -## Configuration Updates -- Updated Django settings to work with Vite development server -- Added proper CORS and CSRF configurations for development -- Improved authentication backend configuration - -## Park Detail Page Layout Updates -- Moved Quick Facts section from right column into header section for better information visibility -- Relocated map from left column to right column to improve content flow -- Added ride counts (Total Rides and Roller Coasters) to the header status badges -- Made the Location map card dynamically square, matching height to width -- Adjusted grid layout to maintain responsive design -- Added resize handling to ensure map stays square when browser window is resized - -### Technical Details -- Modified templates/parks/park_detail.html -- Restructured grid layout classes -- Added JavaScript to maintain square aspect ratio for map -- Added window resize event handler for map container -- Reorganized content sections for better user experience - -### Rationale -- Quick Facts are now more immediately visible to users in the header -- Map placement in right column provides better content hierarchy -- Square map provides better visual balance and consistency -- Ride counts in header give immediate overview of park size -- Changes improve overall page readability and information accessibility - -## How to Run Development Environment -1. Ensure PostgreSQL is running and database is created -2. Set up your ***REMOVED*** file with necessary environment variables -3. Run migrations: `python manage.py migrate` -4. Install frontend dependencies: `cd frontend && npm install` -5. Start development servers: `./dev.sh` - -The development environment will start both Django (port 8000) and Vite (port 5173) servers and open the application in your default browser. - -## Next Steps -- Set up Netlify configuration for frontend deployment -- Configure production environment variables -- Set up CI/CD pipeline diff --git a/docs/2024-02-14/frontend_setup.md b/docs/2024-02-14/frontend_setup.md deleted file mode 100644 index 1d1b8396..00000000 --- a/docs/2024-02-14/frontend_setup.md +++ /dev/null @@ -1,143 +0,0 @@ -# Frontend Setup - February 14, 2024 - -## Technology Stack - -### Core Technologies -- React 18.2.0 -- TypeScript 5.2.2 -- Material UI 5.14.17 -- React Router 6.18.0 - -### Build Tools -- Webpack 5.89.0 -- Babel 7.23.2 -- CSS Loader 6.8.1 -- Style Loader 3.3.3 - -### Development Tools -- Webpack Dev Server 4.15.1 -- React Refresh Webpack Plugin 0.5.11 -- TypeScript Compiler -- ESLint - -## Features Implemented - -### Core Features -1. Authentication - - Login/Register pages - - Social authentication support - - Protected routes - - User role management - -2. Theme System - - Dark/Light mode toggle - - System preference detection - - Theme persistence - - Custom Material UI theme - -3. Navigation - - Responsive navbar - - Mobile hamburger menu - - Search functionality - - User menu - -4. Park Management - - Park listing with filters - - Park details page - - Ride listing - - Ride details page - -5. User Features - - Profile pages - - Ride credits tracking - - Review system - - Photo uploads - -### Technical Features -1. Performance - - Code splitting with React.lazy() - - Route-based chunking - - Image optimization - - Webpack optimization - -2. Type Safety - - Full TypeScript integration - - Type-safe API calls - - Interface definitions - - Strict type checking - -3. State Management - - React hooks - - Context API - - Local storage integration - - Form state management - -4. UI/UX - - Responsive design - - Loading states - - Error boundaries - - Toast notifications - -## Project Structure - -``` -frontend/ -├── src/ -│ ├── components/ # Reusable UI components -│ ├── pages/ # Route components -│ ├── hooks/ # Custom React hooks -│ ├── api/ # API client and utilities -│ ├── types/ # TypeScript definitions -│ └── utils/ # Helper functions -├── public/ # Static assets -└── webpack.config.js # Build configuration -``` - -## Development Workflow - -1. Start Development Server: - ```bash - npm start - ``` - -2. Build for Production: - ```bash - npm run build - ``` - -3. Type Checking: - ```bash - npm run type-check - ``` - -4. Linting: - ```bash - npm run lint - ``` - -## Next Steps - -1. Implement Edit System - - Inline editing for parks/rides - - Edit history tracking - - Moderation workflow - -2. Review System - - Review submission - - Rating system - - Review moderation - -3. Photo Management - - Photo upload - - Gallery system - - Photo moderation - -4. Admin Interface - - User management - - Content moderation - - Statistics dashboard - -5. Testing - - Unit tests - - Integration tests - - End-to-end tests diff --git a/docs/2024-02-14/initial_setup.md b/docs/2024-02-14/initial_setup.md deleted file mode 100644 index 231adf33..00000000 --- a/docs/2024-02-14/initial_setup.md +++ /dev/null @@ -1,78 +0,0 @@ -# ThrillWiki Initial Setup - -## Project Overview -ThrillWiki is a database website focused on rides and attractions in the amusement and theme park industries. The site features detailed statistics, photos, and user reviews for parks and rides worldwide. - -## Technical Stack -- Backend: Django 5.1.2 -- Frontend: React + Material UI + Alpine.js + HTMX -- Database: PostgreSQL -- Authentication: django-allauth (with Discord and Google OAuth support) -- Email: ForwardEmail.net SMTP - -## Key Features -- Full authentication system with social login support -- Responsive design for desktop and mobile -- Light/dark theme support -- Rich media support for ride and park photos -- User review system with average ratings -- Inline editing for authenticated users -- Page history tracking -- Advanced search and filtering capabilities - -## Project Structure -``` -thrillwiki/ -├── accounts/ # User authentication and profiles -├── api/ # REST API endpoints -├── docs/ # Project documentation -├── frontend/ # React frontend application -├── media/ # User-uploaded content -├── parks/ # Park-related models and views -├── reviews/ # User reviews functionality -├── rides/ # Ride-related models and views -├── static/ # Static assets -├── templates/ # Django templates -└── thrillwiki/ # Project settings and core configuration -``` - -## Setup Instructions -1. Create and activate a virtual environment: - ```bash - python -m venv venv - source venv/bin/activate - ``` - -2. Install dependencies: - ```bash - pip install -r requirements.txt - ``` - -3. Configure environment variables: - - Copy ***REMOVED***.example to ***REMOVED*** - - Update the variables with your specific values - -4. Set up the database: - ```bash - python manage.py migrate - ``` - -5. Create a superuser: - ```bash - python manage.py createsuperuser - ``` - -6. Run the development server: - ```bash - python manage.py runserver - ``` - -## Next Steps -- [ ] Implement user models and authentication views -- [ ] Create park and ride models -- [ ] Set up review system -- [ ] Implement frontend components -- [ ] Configure social authentication -- [ ] Set up email verification -- [ ] Implement search and filtering -- [ ] Add media handling diff --git a/docs/THRILLWIKI_PROJECT_DOCUMENTATION.md b/docs/THRILLWIKI_PROJECT_DOCUMENTATION.md deleted file mode 100644 index 4498cfd9..00000000 --- a/docs/THRILLWIKI_PROJECT_DOCUMENTATION.md +++ /dev/null @@ -1,1752 +0,0 @@ -# ThrillWiki Project Documentation - -## Table of Contents - -1. [Project Overview](#project-overview) -2. [Technical Stack and Architecture](#technical-stack-and-architecture) -3. [Database Models and Relationships](#database-models-and-relationships) -4. [Visual Theme and Design System](#visual-theme-and-design-system) -5. [Frontend Implementation Patterns](#frontend-implementation-patterns) -6. [User Experience and Key Features](#user-experience-and-key-features) -7. [Page Structure and Templates](#page-structure-and-templates) -8. [Services and Business Logic](#services-and-business-logic) -9. [Development Workflow](#development-workflow) -10. [API Endpoints and URL Structure](#api-endpoints-and-url-structure) - ---- - -## Project Overview - -ThrillWiki is a sophisticated Django-based web application designed for theme park and roller coaster enthusiasts. It provides comprehensive information management for parks, rides, companies, and user-generated content with advanced features including geographic mapping, moderation systems, and real-time interactions. - -### Key Characteristics - -- **Enterprise-Grade Architecture**: Service-oriented design with clear separation of concerns -- **Modern Frontend**: HTMX + Alpine.js for dynamic interactions without heavy JavaScript frameworks -- **Geographic Intelligence**: PostGIS integration for location-based features and mapping -- **Content Moderation**: Comprehensive workflow for user-generated content approval -- **Audit Trail**: Complete history tracking using django-pghistory -- **Responsive Design**: Mobile-first approach with sophisticated dark theme support - ---- - -## Technical Stack and Architecture - -### Core Technologies - -| Component | Technology | Version | Purpose | -|-----------|------------|---------|---------| -| **Backend Framework** | Django | 5.0+ | Main web framework | -| **Database** | PostgreSQL + PostGIS | Latest | Relational database with geographic extension | -| **Frontend** | HTMX + Alpine.js | 1.9.6 + Latest | Dynamic interactions and client-side behavior | -| **Styling** | Tailwind CSS | Latest | Utility-first CSS framework | -| **Package Manager** | UV | Latest | Python dependency management | -| **Authentication** | Django Allauth | 0.60.1+ | OAuth and user management | -| **History Tracking** | django-pghistory | 3.5.2+ | Audit trails and versioning | -| **Testing** | Pytest + Playwright | Latest | Unit and E2E testing | - -### Architecture Patterns - -#### Service-Oriented Architecture - -``` -┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ -│ Presentation │ │ Business │ │ Data │ -│ Layer │ │ Logic │ │ Layer │ -├─────────────────┤ ├─────────────────┤ ├─────────────────┤ -│ • Templates │◄──►│ • Services │◄──►│ • Models │ -│ • Views │ │ • Map Service │ │ • Database │ -│ • HTMX/Alpine │ │ • Search │ │ • PostGIS │ -│ • Tailwind CSS │ │ • Moderation │ │ • Caching │ -└─────────────────┘ └─────────────────┘ └─────────────────┘ -``` - -#### Django App Organization - -The project follows a domain-driven design approach with clear app boundaries: - -``` -thrillwiki_django_no_react/ -├── core/ # Core business logic and shared services -│ ├── services/ # Unified map service, clustering, caching -│ ├── search/ # Search functionality -│ ├── mixins/ # Reusable view mixins -│ └── history/ # History tracking utilities -├── accounts/ # User management and authentication -├── parks/ # Theme park entities -│ └── models/ # Park, Company, Location models -├── rides/ # Ride entities and ride-specific logic -│ └── models/ # Ride, RideModel, Company models -├── location/ # Geographic location handling -├── media/ # Media file management and photo handling -├── moderation/ # Content moderation workflow -├── email_service/ # Email handling and notifications -└── static/ # Frontend assets (CSS, JS, images) -``` - -### Package Management with UV - -ThrillWiki exclusively uses UV for Python package management, providing: - -- **Faster dependency resolution**: Significantly faster than pip -- **Lock file support**: Ensures reproducible environments -- **Virtual environment management**: Automatic environment handling -- **Cross-platform compatibility**: Consistent behavior across development environments - -#### Critical Commands - -```bash -# Add new dependencies -uv add - -# Django management (NEVER use python manage.py) -uv run manage.py - -# Development server startup -lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver -``` - ---- - -## Database Models and Relationships - -### Entity Relationship Architecture - -ThrillWiki implements a sophisticated entity relationship model that enforces business rules at the database level: - -#### Core Business Rules (from .clinerules) - -1. **Park Relationships** - - Parks MUST have an Operator (required relationship) - - Parks MAY have a PropertyOwner (optional, usually same as Operator) - - Parks CANNOT directly reference Company entities - -2. **Ride Relationships** - - Rides MUST belong to a Park (required relationship) - - Rides MAY have a Manufacturer (optional relationship) - - Rides MAY have a Designer (optional relationship) - - Rides CANNOT directly reference Company entities - -3. **Entity Definitions** - - **Operators**: Companies that operate theme parks - - **PropertyOwners**: Companies that own park property (new concept) - - **Manufacturers**: Companies that manufacture rides - - **Designers**: Companies/individuals that design rides - -### Core Model Structure - -#### Park Models (`parks/models/`) - -```python -# Park Entity -class Park(TrackedModel): - # Core identifiers - name = CharField(max_length=255) - slug = SlugField(max_length=255, unique=True) - - # Business relationships (enforced by .clinerules) - operator = ForeignKey('Company', related_name='operated_parks') - property_owner = ForeignKey('Company', related_name='owned_parks', null=True) - - # Operational data - status = CharField(choices=STATUS_CHOICES, default="OPERATING") - opening_date = DateField(null=True, blank=True) - size_acres = DecimalField(max_digits=10, decimal_places=2) - - # Statistics - average_rating = DecimalField(max_digits=3, decimal_places=2) - ride_count = IntegerField(null=True, blank=True) - coaster_count = IntegerField(null=True, blank=True) -``` - -#### Ride Models (`rides/models/`) - -```python -# Ride Entity -class Ride(TrackedModel): - # Core identifiers - name = CharField(max_length=255) - slug = SlugField(max_length=255) - - # Required relationships (enforced by .clinerules) - park = ForeignKey('parks.Park', related_name='rides') - - # Optional business relationships - manufacturer = ForeignKey('Company', related_name='manufactured_rides') - designer = ForeignKey('Company', related_name='designed_rides') - ride_model = ForeignKey('RideModel', related_name='rides') - - # Classification - category = CharField(choices=CATEGORY_CHOICES) - status = CharField(choices=STATUS_CHOICES, default='OPERATING') -``` - -#### Company Models (Shared across apps) - -```python -# Company Entity (supports multiple roles) -class Company(TrackedModel): - class CompanyRole(TextChoices): - OPERATOR = 'OPERATOR', 'Park Operator' - PROPERTY_OWNER = 'PROPERTY_OWNER', 'Property Owner' - MANUFACTURER = 'MANUFACTURER', 'Ride Manufacturer' - DESIGNER = 'DESIGNER', 'Ride Designer' - - name = CharField(max_length=255) - slug = SlugField(max_length=255, unique=True) - roles = ArrayField(CharField(choices=CompanyRole.choices)) -``` - -### Geographic Models (`location/models/`) - -```python -# Generic Location Model -class Location(TrackedModel): - # Generic relationship (can attach to any model) - content_type = ForeignKey(ContentType) - object_id = PositiveIntegerField() - content_object = GenericForeignKey('content_type', 'object_id') - - # Geographic data (dual storage for compatibility) - latitude = DecimalField(max_digits=9, decimal_places=6) - longitude = DecimalField(max_digits=9, decimal_places=6) - point = PointField(srid=4326) # PostGIS geometry field - - # Address components - street_address = CharField(max_length=255) - city = CharField(max_length=100) - state = CharField(max_length=100) - country = CharField(max_length=100) - postal_code = CharField(max_length=20) -``` - -### History Tracking with pghistory - -Every critical model uses `@pghistory.track()` decoration for comprehensive audit trails: - -```python -@pghistory.track() -class Park(TrackedModel): - # All field changes are automatically tracked - # Creates parallel history tables with full change logs -``` - -### Media and Content Models - -```python -# Generic Photo Model -class Photo(TrackedModel): - # Generic relationship support - content_type = ForeignKey(ContentType) - object_id = PositiveIntegerField() - content_object = GenericForeignKey('content_type', 'object_id') - - # Media handling - image = ImageField(upload_to=photo_upload_path, storage=MediaStorage()) - is_primary = BooleanField(default=False) - is_approved = BooleanField(default=False) - - # Metadata extraction - date_taken = DateTimeField(null=True) # Auto-extracted from EXIF - uploaded_by = ForeignKey(User, related_name='uploaded_photos') -``` - -### User and Authentication Models - -```python -# Extended User Model -class User(AbstractUser): - class Roles(TextChoices): - USER = 'USER', 'User' - MODERATOR = 'MODERATOR', 'Moderator' - ADMIN = 'ADMIN', 'Admin' - SUPERUSER = 'SUPERUSER', 'Superuser' - - # Immutable identifier - user_id = CharField(max_length=10, unique=True, editable=False) - - # Permission system - role = CharField(choices=Roles.choices, default=Roles.USER) - - # User preferences - theme_preference = CharField(choices=ThemePreference.choices) -``` - ---- - -## Visual Theme and Design System - -### Design Philosophy - -ThrillWiki implements a sophisticated **dark-first design system** with vibrant accent colors that reflect the excitement of theme parks and roller coasters. - -#### Color Palette - -```css -:root { - --primary: #4f46e5; /* Vibrant indigo */ - --secondary: #e11d48; /* Vibrant rose */ - --accent: #8b5cf6; /* Purple accent */ -} -``` - -#### Background Gradients - -```css -/* Light theme */ -body { - background: linear-gradient(to bottom right, - white, - rgb(239 246 255), /* blue-50 */ - rgb(238 242 255) /* indigo-50 */ - ); -} - -/* Dark theme */ -body.dark { - background: linear-gradient(to bottom right, - rgb(3 7 18), /* gray-950 */ - rgb(49 46 129), /* indigo-950 */ - rgb(59 7 100) /* purple-950 */ - ); -} -``` - -### Tailwind CSS Configuration - -#### Custom Configuration (`tailwind.config.js`) - -```javascript -module.exports = { - darkMode: 'class', // Class-based dark mode - content: [ - './templates/**/*.html', - './assets/css/src/**/*.css', - ], - theme: { - extend: { - colors: { - primary: '#4f46e5', - secondary: '#e11d48', - accent: '#8b5cf6' - }, - fontFamily: { - 'sans': ['Poppins', 'sans-serif'], - }, - }, - }, - plugins: [ - require("@tailwindcss/typography"), - require("@tailwindcss/forms"), - require("@tailwindcss/aspect-ratio"), - require("@tailwindcss/container-queries"), - // Custom HTMX variants - plugin(function ({ addVariant }) { - addVariant("htmx-settling", ["&.htmx-settling", ".htmx-settling &"]); - addVariant("htmx-request", ["&.htmx-request", ".htmx-request &"]); - addVariant("htmx-swapping", ["&.htmx-swapping", ".htmx-swapping &"]); - addVariant("htmx-added", ["&.htmx-added", ".htmx-added &"]); - }), - ], -} -``` - -### Component System (`static/css/src/input.css`) - -#### Button Components - -```css -.btn-primary { - @apply inline-flex items-center px-6 py-2.5 border border-transparent - rounded-full shadow-md text-sm font-medium text-white - bg-gradient-to-r from-primary to-secondary - hover:from-primary/90 hover:to-secondary/90 - focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-primary/50 - transform hover:scale-105 transition-all; -} - -.btn-secondary { - @apply inline-flex items-center px-6 py-2.5 border border-gray-200 - dark:border-gray-700 rounded-full shadow-md text-sm font-medium - text-gray-700 dark:text-gray-200 bg-white dark:bg-gray-800 - hover:bg-gray-50 dark:hover:bg-gray-700 - transform hover:scale-105 transition-all; -} -``` - -#### Navigation Components - -```css -.nav-link { - @apply flex items-center text-gray-700 dark:text-gray-200 - px-6 py-2.5 rounded-lg font-medium border border-transparent - hover:border-primary/20 dark:hover:border-primary/30 - hover:text-primary dark:text-primary - hover:bg-primary/10 dark:bg-primary/20; -} -``` - -#### Card System - -```css -.card { - @apply p-6 bg-white dark:bg-gray-800 border rounded-lg shadow-lg - border-gray-200/50 dark:border-gray-700/50; -} - -.card-hover { - @apply transition-transform transform hover:-translate-y-1; -} -``` - -#### Responsive Grid System - -```css -/* Adaptive grid with content-aware sizing */ -.grid-adaptive { - @apply grid gap-6; - grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); -} - -/* Stats grid with even layouts */ -.grid-stats { - @apply grid gap-4; - grid-template-columns: repeat(2, 1fr); /* Mobile: 2 columns */ -} - -@media (min-width: 1024px) { - .grid-stats { - grid-template-columns: repeat(3, 1fr); /* Desktop: 3 columns */ - } -} - -@media (min-width: 1280px) { - .grid-stats { - grid-template-columns: repeat(5, 1fr); /* Large: 5 columns */ - } -} -``` - -### Dark Mode Implementation - -#### Theme Toggle System - -The theme system provides: -- **Automatic detection** of system preference -- **Manual toggle** with persistent storage -- **Flash prevention** during page load -- **Smooth transitions** between themes - -```javascript -// Theme initialization (prevents flash) -let theme = localStorage.getItem("theme"); -if (!theme) { - theme = window.matchMedia("(prefers-color-scheme: dark)").matches - ? "dark" : "light"; - localStorage.setItem("theme", theme); -} -if (theme === "dark") { - document.documentElement.classList.add("dark"); -} -``` - -#### CSS Custom Properties for Theme Switching - -```css -/* Theme-aware components */ -.auth-card { - @apply w-full max-w-md p-8 mx-auto border shadow-xl - bg-white/90 dark:bg-gray-800/90 rounded-2xl backdrop-blur-sm - border-gray-200/50 dark:border-gray-700/50; -} - -/* Status badges with theme support */ -.status-operating { - @apply text-green-800 bg-green-100 dark:bg-green-700 dark:text-green-50; -} - -.status-closed { - @apply text-red-800 bg-red-100 dark:bg-red-700 dark:text-red-50; -} -``` - ---- - -## Frontend Implementation Patterns - -### HTMX Integration Patterns - -ThrillWiki leverages HTMX for dynamic interactions while maintaining server-side rendering benefits: - -#### Dynamic Content Loading - -```html - -
- -
- -
- -
-``` - -#### Modal Interactions - -```html - - -``` - -#### Custom HTMX Variants - -```css -/* Loading states */ -.htmx-request .htmx-indicator { - display: block; -} - -/* Transition effects */ -.htmx-settling { - opacity: 0.7; - transition: opacity 0.3s ease; -} - -.htmx-swapping { - transform: scale(0.98); - transition: transform 0.2s ease; -} -``` - -### Alpine.js Patterns - -Alpine.js handles client-side state management and interactions: - -#### Dropdown Components - -```html -
- - - -
- -
-
-``` - -#### Modal Management - -```html -
- -
-``` - -### JavaScript Architecture (`static/js/`) - -#### Modular JavaScript Organization - -``` -static/js/ -├── main.js # Core functionality (theme, navigation) -├── alerts.js # Alert system management -├── photo-gallery.js # Photo gallery interactions -├── park-map.js # Leaflet map integration -├── location-autocomplete.js # Geographic search -└── alpine.min.js # Alpine.js framework -``` - -#### Theme Management (`static/js/main.js`) - -```javascript -// Theme handling with system preference detection -document.addEventListener('DOMContentLoaded', () => { - const themeToggle = document.getElementById('theme-toggle'); - const html = document.documentElement; - - // Initialize toggle state - if (themeToggle) { - themeToggle.checked = html.classList.contains('dark'); - - // Handle toggle changes - themeToggle.addEventListener('change', function() { - const isDark = this.checked; - html.classList.toggle('dark', isDark); - localStorage.setItem('theme', isDark ? 'dark' : 'light'); - }); - - // Listen for system theme changes - const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)'); - mediaQuery.addEventListener('change', (e) => { - if (!localStorage.getItem('theme')) { - const isDark = e.matches; - html.classList.toggle('dark', isDark); - themeToggle.checked = isDark; - } - }); - } -}); -``` - -#### Mobile Navigation - -```javascript -// Mobile menu with smooth transitions -const toggleMenu = () => { - isMenuOpen = !isMenuOpen; - mobileMenu.classList.toggle('show', isMenuOpen); - mobileMenuBtn.setAttribute('aria-expanded', isMenuOpen.toString()); - - // Update icon - const icon = mobileMenuBtn.querySelector('i'); - icon.classList.remove(isMenuOpen ? 'fa-bars' : 'fa-times'); - icon.classList.add(isMenuOpen ? 'fa-times' : 'fa-bars'); -}; -``` - -### Template System Patterns - -#### Component-Based Architecture - -``` -templates/ -├── base/ -│ └── base.html # Base template with navigation -├── core/ -│ ├── search/ -│ │ ├── components/ # Search UI components -│ │ ├── layouts/ # Search layout templates -│ │ └── partials/ # Reusable search elements -├── parks/ -│ ├── partials/ -│ │ ├── park_list_item.html # Reusable park card -│ │ ├── park_actions.html # Action buttons -│ │ └── park_stats.html # Statistics display -│ ├── park_detail.html # Main park page -│ └── park_list.html # Park listing page -└── media/ - └── partials/ - ├── photo_display.html # Photo gallery component - └── photo_upload.html # Upload interface -``` - -#### Template Inheritance Pattern - -```html - - - - - {% block title %}ThrillWiki{% endblock %} - {% block extra_head %}{% endblock %} - - - - {% include "base/navigation.html" %} - -
- {% block content %}{% endblock %} -
- - {% block extra_js %}{% endblock %} - - -``` - -#### HTMX Template Patterns - -```html - -{% extends "base/base.html" %} - -{% block content %} -
- -
- {% include "parks/partials/park_list_item.html" %} -
-
-{% endblock %} -``` - ---- - -## User Experience and Key Features - -### Navigation and Information Architecture - -#### Primary Navigation Structure - -``` -ThrillWiki Navigation -├── Home # Dashboard with featured content -├── Parks # Theme park directory -│ ├── Browse Parks # Filterable park listings -│ ├── Add New Park # User contribution form -│ └── [Park Detail] # Individual park pages -├── Rides # Ride directory (global) -│ ├── Browse Rides # Cross-park ride search -│ ├── Add New Ride # User contribution form -│ └── [Ride Detail] # Individual ride pages -├── Search # Universal search interface -└── User Account - ├── Profile # User profile and stats - ├── Settings # Preferences and account - ├── Moderation # Content review (if authorized) - └── Admin # System administration (if authorized) -``` - -#### Responsive Navigation Patterns - -- **Desktop**: Full horizontal navigation with search bar -- **Tablet**: Collapsible navigation with maintained search functionality -- **Mobile**: Hamburger menu with slide-out panel - -### Core Feature Set - -#### 1. Park Management System - -**Park Detail Pages** provide comprehensive information: - -``` -Park Information Hierarchy -├── Header Section -│ ├── Park name and location -│ ├── Status badge (Operating, Closed, etc.) -│ ├── Average rating display -│ └── Quick action buttons -├── Statistics Dashboard -│ ├── Operator information (priority display) -│ ├── Property owner (if different) -│ ├── Total ride count (linked) -│ ├── Roller coaster count -│ ├── Opening date -│ └── Website link -├── Content Sections -│ ├── Photo gallery (if photos exist) -│ ├── About section (description) -│ ├── Rides & Attractions (preview list) -│ └── Location map (if coordinates available) -└── Additional Information - ├── History timeline - ├── Related parks - └── User contributions -``` - -**Key UX Features**: -- **Smart statistics layout**: Responsive grid that prevents awkward spacing -- **Priority content placement**: Operator information prominently featured -- **Contextual actions**: Edit/moderate buttons appear based on user permissions -- **Progressive disclosure**: Detailed information revealed as needed - -#### 2. Advanced Search and Filtering - -**Unified Search System** supports: - -- **Cross-content search**: Parks, rides, companies in single interface -- **Geographic filtering**: Search within specific regions or distances -- **Attribute filtering**: Status, ride types, ratings, opening dates -- **Real-time results**: HTMX-powered instant search feedback - -**Search Result Patterns**: -```html - -
-
Park
-

{{ park.name }}

-

{{ park.formatted_location }}

-
- {{ park.ride_count }} rides - {{ park.get_status_display }} -
-
-``` - -#### 3. Geographic and Mapping Features - -**Unified Map Service** provides: - -- **Multi-layer mapping**: Parks, rides, and companies on single map -- **Intelligent clustering**: Zoom-level appropriate point grouping -- **Performance optimization**: Smart caching and result limiting -- **Geographic bounds**: Efficient spatial queries using PostGIS - -**Map Integration Pattern**: -```javascript -// Park detail map initialization -document.addEventListener('DOMContentLoaded', function() { - {% with location=park.location.first %} - initParkMap({{ location.latitude }}, {{ location.longitude }}, "{{ park.name }}"); - {% endwith %} -}); -``` - -#### 4. Content Moderation Workflow - -**Submission Process**: - -``` -User Contribution Flow -├── Content Creation -│ ├── Form submission (parks, rides, photos) -│ ├── Validation and sanitization -│ └── EditSubmission/PhotoSubmission creation -├── Review Process -│ ├── Moderator dashboard listing -│ ├── Side-by-side comparison view -│ ├── Edit capability before approval -│ └── Approval/rejection with notes -├── Publication -│ ├── Automatic publication for moderators -│ ├── Content integration into main database -│ └── User notification system -└── History Tracking - ├── Complete audit trail - ├── Revert capability - └── Performance metrics -``` - -**Moderation Features**: -- **Auto-approval**: Moderators bypass review process -- **Edit before approval**: Moderators can refine submissions -- **Batch operations**: Efficient handling of multiple submissions -- **Escalation system**: Complex cases forwarded to administrators - -#### 5. Photo and Media Management - -**Photo System Features**: - -- **Multi-format support**: JPEG, PNG with automatic optimization -- **EXIF extraction**: Automatic date/time capture from metadata -- **Approval workflow**: Moderation for user-uploaded content -- **Smart storage**: Organized directory structure by content type -- **Primary photo designation**: Featured image selection per entity - -**Upload Interface**: -```html - -
-
- {% csrf_token %} - - -
-
-``` - -#### 6. User Authentication and Profiles - -**Authentication Features**: -- **Social OAuth**: Google and Discord integration -- **Custom profiles**: Display names, avatars, bio information -- **Role-based permissions**: User, Moderator, Admin, Superuser levels -- **Theme preferences**: User-specific dark/light mode settings - -**Profile Statistics**: -```html - -
-
- {{ profile.coaster_credits }} - Coaster Credits -
-
- {{ profile.dark_ride_credits }} - Dark Rides -
- -
-``` - -#### 7. History and Audit System - -**Change Tracking Features**: -- **Complete audit trails**: Every modification recorded -- **Diff visualization**: Before/after comparisons -- **User attribution**: Change tracking by user -- **Revert capability**: Rollback to previous versions -- **Performance monitoring**: Query and response time tracking - -### Accessibility and Responsive Design - -#### Mobile-First Approach - -- **Responsive breakpoints**: 540px, 768px, 1024px, 1280px+ -- **Touch-friendly interfaces**: Appropriate button sizes and spacing -- **Optimized content hierarchy**: Essential information prioritized on small screens - -#### Accessibility Features - -- **Semantic HTML**: Proper heading structure and landmarks -- **ARIA labels**: Screen reader support for interactive elements -- **Keyboard navigation**: Full keyboard accessibility -- **Color contrast**: WCAG AA compliant color schemes -- **Focus indicators**: Clear focus states for interactive elements - ---- - -## Page Structure and Templates - -### Template Hierarchy and Organization - -#### Base Template Architecture - -```html - - - - - - - - {% block title %}ThrillWiki{% endblock %} - - - - - - - - - - - - - {% block extra_head %}{% endblock %} - - - - {% include "base/header.html" %} - - {% include "base/flash_messages.html" %} - -
- {% block content %}{% endblock %} -
- - {% include "base/footer.html" %} - - - {% block extra_js %}{% endblock %} - - -``` - -#### Component-Based Template System - -##### Navigation Component (`templates/base/header.html`) - -```html -
- -
-``` - -##### Theme Toggle Component - -```html - - -``` - -### Page-Specific Template Patterns - -#### Park Detail Page Structure - -```html - -{% extends "base/base.html" %} -{% load static park_tags %} - -{% block title %}{{ park.name }} - ThrillWiki{% endblock %} - -{% block extra_head %} -{% if park.location.exists %} - -{% endif %} -{% endblock %} - -{% block content %} -
- - -
-
- - -
-
-

- {{ park.name }} -

- {% if park.formatted_location %} -
- -

{{ park.formatted_location }}

-
- {% endif %} -
-
- - -
- {% include "parks/partials/park_stats.html" %} -
- - - {% if park.photos.exists %} -
-

Photos

- {% include "media/partials/photo_display.html" %} -
- {% endif %} - - -
- - -
- {% include "parks/partials/park_description.html" %} - {% include "parks/partials/park_rides.html" %} -
- - -
- {% include "parks/partials/park_map.html" %} - {% include "parks/partials/park_history.html" %} -
-
-
- - -{% include "media/partials/photo_upload_modal.html" %} -{% endblock %} - -{% block extra_js %} - -{% if park.location.exists %} - - -{% endif %} -{% endblock %} -``` - -#### Reusable Partial Templates - -##### Park Statistics Component - -```html - - -{% if park.operator %} -
- -
-{% endif %} - - - -
-
Total Rides
-
- {{ park.ride_count|default:"N/A" }} -
-
-
-``` - -##### Photo Display Component - -```html - -
- {% for photo in photos %} -
- {{ photo.alt_text|default:photo.caption }} - - {% if photo.caption %} -
-

{{ photo.caption }}

-
- {% endif %} -
- {% endfor %} -
-``` - -### Form Templates and User Input - -#### Dynamic Form Rendering - -```html - -{% extends "base/base.html" %} - -{% block content %} -
-
-

- {% if is_edit %}Edit Park{% else %}Add New Park{% endif %} -

- -
- {% csrf_token %} - - - {% for field in form %} -
- - {{ field|add_class:"form-input" }} - {% if field.help_text %} -
{{ field.help_text }}
- {% endif %} - {% if field.errors %} -
{{ field.errors }}
- {% endif %} -
- {% endfor %} - -
- Cancel - -
-
-
-
-{% endblock %} -``` - -### Static File Organization - -#### Asset Structure - -``` -static/ -├── css/ -│ ├── src/ -│ │ └── input.css # Tailwind source -│ ├── tailwind.css # Compiled Tailwind -│ └── alerts.css # Custom alert styles -├── js/ -│ ├── main.js # Core functionality -│ ├── alerts.js # Alert management -│ ├── photo-gallery.js # Photo interactions -│ ├── park-map.js # Map functionality -│ ├── location-autocomplete.js # Geographic search -│ └── alpine.min.js # Alpine.js framework -└── images/ - ├── placeholders/ # Default images - └── icons/ # Custom icons -``` - -#### Template Tag Usage - -Custom template tags enhance template functionality: - -```html - -{% load park_tags %} - - - - {{ park.get_status_display }} - - - -{% if park.average_rating %} -{% rating_stars park.average_rating %} -{% endif %} -``` - ---- - -## Services and Business Logic - -### Unified Map Service Architecture - -The `UnifiedMapService` provides the core geographic functionality for ThrillWiki, handling location data for parks, rides, and companies through a sophisticated service layer. - -#### Service Architecture Overview - -``` -UnifiedMapService -├── LocationAbstractionLayer # Data source abstraction -├── ClusteringService # Point clustering for performance -├── MapCacheService # Intelligent caching -└── Data Structures # Type-safe data containers -``` - -#### Core Service Implementation - -```python -# core/services/map_service.py -class UnifiedMapService: - """ - Main service orchestrating map data retrieval, filtering, clustering, and caching. - Provides a unified interface for all location types with performance optimization. - """ - - # Performance thresholds - MAX_UNCLUSTERED_POINTS = 500 - MAX_CLUSTERED_POINTS = 2000 - DEFAULT_ZOOM_LEVEL = 10 - - def __init__(self): - self.location_layer = LocationAbstractionLayer() - self.clustering_service = ClusteringService() - self.cache_service = MapCacheService() - - def get_map_data( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL, - cluster: bool = True, - use_cache: bool = True - ) -> MapResponse: - """ - Primary method for retrieving unified map data with intelligent - caching, clustering, and performance optimization. - """ - # Implementation handles cache checking, database queries, - # smart limiting, clustering decisions, and response caching - pass - - def get_location_details(self, location_type: str, location_id: int) -> Optional[UnifiedLocation]: - """Get detailed information for a specific location with caching.""" - pass - - def search_locations( - self, - query: str, - bounds: Optional[GeoBounds] = None, - location_types: Optional[Set[LocationType]] = None, - limit: int = 50 - ) -> List[UnifiedLocation]: - """Search locations with text query and geographic bounds.""" - pass -``` - -#### Data Structure System - -The service uses type-safe data structures for all map operations: - -```python -# core/services/data_structures.py - -class LocationType(Enum): - """Types of locations supported by the map service.""" - PARK = "park" - RIDE = "ride" - COMPANY = "company" - GENERIC = "generic" - -@dataclass -class GeoBounds: - """Geographic boundary box for spatial queries.""" - north: float - south: float - east: float - west: float - - def to_polygon(self) -> Polygon: - """Convert bounds to PostGIS Polygon for database queries.""" - return Polygon.from_bbox((self.west, self.south, self.east, self.north)) - - def expand(self, factor: float = 1.1) -> 'GeoBounds': - """Expand bounds by factor for buffer queries.""" - pass - -@dataclass -class MapFilters: - """Filtering options for map queries.""" - location_types: Optional[Set[LocationType]] = None - park_status: Optional[Set[str]] = None - ride_types: Optional[Set[str]] = None - search_query: Optional[str] = None - min_rating: Optional[float] = None - has_coordinates: bool = True - country: Optional[str] = None - state: Optional[str] = None - city: Optional[str] = None - -@dataclass -class UnifiedLocation: - """Standardized location representation across all entity types.""" - id: int - location_type: LocationType - name: str - coordinates: Point - url: str - additional_data: Dict[str, Any] = field(default_factory=dict) -``` - -### Moderation System - -ThrillWiki implements a comprehensive moderation system for user-generated content edits: - -#### Edit Submission Workflow - -```python -# moderation/models.py -@pghistory.track() -class EditSubmission(TrackedModel): - """Tracks all proposed changes to parks and rides.""" - - STATUS_CHOICES = [ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ] - - SUBMISSION_TYPE_CHOICES = [ - ("EDIT", "Edit Existing"), - ("CREATE", "Create New"), - ] - - user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField(null=True, blank=True) - content_object = GenericForeignKey("content_type", "object_id") - - submission_type = models.CharField(max_length=10, choices=SUBMISSION_TYPE_CHOICES) - proposed_changes = models.JSONField() # Stores field-level changes - status = models.CharField(max_length=20, choices=STATUS_CHOICES, default="PENDING") - - # Moderation tracking - reviewed_by = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.SET_NULL, - null=True, blank=True, - related_name="reviewed_submissions" - ) - reviewed_at = models.DateTimeField(null=True, blank=True) - reviewer_notes = models.TextField(blank=True) -``` - -#### Moderation Features - -- **Change Tracking**: Every edit submission tracked with `django-pghistory` -- **Field-Level Changes**: JSON storage of specific field modifications -- **Review Workflow**: Pending → Approved/Rejected/Escalated states -- **Reviewer Assignment**: Track who reviewed each submission -- **Audit Trail**: Complete history of all moderation decisions - -### Search and Autocomplete - -#### Location-Based Search - -```python -# Autocomplete integration for geographic search -class LocationAutocompleteView(autocomplete.Select2QuerySetView): - """AJAX autocomplete for geographic locations.""" - - def get_queryset(self): - if not self.request.user.is_authenticated: - return Location.objects.none() - - qs = Location.objects.filter(is_active=True) - - if self.q: - qs = qs.filter( - Q(name__icontains=self.q) | - Q(city__icontains=self.q) | - Q(state__icontains=self.q) | - Q(country__icontains=self.q) - ) - - return qs.select_related('country', 'state').order_by('name')[:20] -``` - -#### Search Integration - -- **HTMX-Powered Search**: Real-time search suggestions without page reloads -- **Geographic Filtering**: Search within specific bounds or regions -- **Multi-Model Search**: Unified search across parks, rides, and companies -- **Performance Optimized**: Cached results and smart query limiting - ---- - -## Development Workflow - -### Required Development Environment - -#### UV Package Manager Integration - -ThrillWiki exclusively uses [UV](https://github.com/astral-sh/uv) for all Python package management and Django commands: - -```bash -# CRITICAL: Always use these exact commands - -# Package Installation -uv add # Add new dependencies -uv add --dev # Add development dependencies - -# Django Management Commands -uv run manage.py makemigrations # Create migrations -uv run manage.py migrate # Apply migrations -uv run manage.py createsuperuser # Create admin user -uv run manage.py shell # Start Django shell -uv run manage.py collectstatic # Collect static files - -# Development Server (CRITICAL - use exactly as shown) -lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver -``` - -**IMPORTANT**: Never use `python manage.py` or `pip install`. The project is configured exclusively for UV. - -#### Local Development Setup - -```bash -# Initial setup -git clone -cd thrillwiki_django_no_react - -# Install dependencies -uv sync - -# Database setup (requires PostgreSQL with PostGIS) -uv run manage.py migrate - -# Create superuser -uv run manage.py createsuperuser - -# Install Tailwind CSS and build -uv run manage.py tailwind install -uv run manage.py tailwind build - -# Start development server -lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver -``` - -### Database Requirements - -#### PostgreSQL with PostGIS - -```sql --- Required PostgreSQL extensions -CREATE EXTENSION IF NOT EXISTS postgis; -CREATE EXTENSION IF NOT EXISTS postgis_topology; -CREATE EXTENSION IF NOT EXISTS postgis_raster; -``` - -#### Geographic Data - -- **Coordinate System**: WGS84 (SRID: 4326) for all geographic data -- **Point Storage**: All locations stored as PostGIS Point geometry -- **Spatial Queries**: Optimized with GiST indexes for geographic searches -- **Distance Calculations**: Native PostGIS distance functions - -### Frontend Development - -#### Tailwind CSS Workflow - -```bash -# Development mode (watches for changes) -uv run manage.py tailwind start - -# Production build -uv run manage.py tailwind build - -# Custom CSS location -static/css/src/input.css # Source file -static/css/tailwind.css # Compiled output -``` - -#### JavaScript Integration - -- **Alpine.js**: Reactive components and state management -- **HTMX**: AJAX interactions and partial page updates -- **Custom Scripts**: Modular JavaScript in `static/js/` directory - ---- - -## API Endpoints and URL Structure - -### Primary URL Configuration - -#### Main Application Routes - -```python -# thrillwiki/urls.py -urlpatterns = [ - path("admin/", admin.site.urls), - path("", HomeView.as_view(), name="home"), - - # Autocomplete URLs (must be before other URLs) - path("ac/", autocomplete_urls), - - # Core functionality - path("parks/", include("parks.urls", namespace="parks")), - path("rides/", include("rides.urls", namespace="rides")), - path("photos/", include("media.urls", namespace="photos")), - - # Search and API - path("search/", include("core.urls.search", namespace="search")), - path("api/map/", include("core.urls.map_urls", namespace="map_api")), - - # User management - path("accounts/", include("accounts.urls")), - path("accounts/", include("allauth.urls")), - path("user//", ProfileView.as_view(), name="user_profile"), - path("settings/", SettingsView.as_view(), name="settings"), - - # Moderation system - path("moderation/", include("moderation.urls", namespace="moderation")), - - # Static pages - path("terms/", TemplateView.as_view(template_name="pages/terms.html"), name="terms"), - path("privacy/", TemplateView.as_view(template_name="pages/privacy.html"), name="privacy"), -] -``` - -#### Parks URL Structure - -```python -# parks/urls.py -app_name = "parks" - -urlpatterns = [ - # Main park views - path("", ParkSearchView.as_view(), name="park_list"), - path("create/", ParkCreateView.as_view(), name="park_create"), - path("/", ParkDetailView.as_view(), name="park_detail"), - path("/edit/", ParkUpdateView.as_view(), name="park_update"), - - # HTMX endpoints - path("add-park-button/", add_park_button, name="add_park_button"), - path("search/location/", location_search, name="location_search"), - path("search/reverse-geocode/", reverse_geocode, name="reverse_geocode"), - path("areas/", get_park_areas, name="get_park_areas"), - path("suggest_parks/", suggest_parks, name="suggest_parks"), - - # Park areas - path("/areas//", ParkAreaDetailView.as_view(), name="area_detail"), - - # Category-specific rides within parks - path("/roller_coasters/", ParkSingleCategoryListView.as_view(), - {'category': 'RC'}, name="park_roller_coasters"), - path("/dark_rides/", ParkSingleCategoryListView.as_view(), - {'category': 'DR'}, name="park_dark_rides"), - path("/flat_rides/", ParkSingleCategoryListView.as_view(), - {'category': 'FR'}, name="park_flat_rides"), - path("/water_rides/", ParkSingleCategoryListView.as_view(), - {'category': 'WR'}, name="park_water_rides"), - path("/transports/", ParkSingleCategoryListView.as_view(), - {'category': 'TR'}, name="park_transports"), - path("/others/", ParkSingleCategoryListView.as_view(), - {'category': 'OT'}, name="park_others"), - - # Nested rides URLs - path("/rides/", include("rides.park_urls", namespace="rides")), -] -``` - -### API Endpoints - -#### Map API - -``` -GET /api/map/data/ -- bounds: Geographic bounds (north,south,east,west) -- zoom: Map zoom level -- filters: JSON-encoded filter parameters -- Returns: Unified location data with clustering - -GET /api/map/location/// -- Returns: Detailed location information - -POST /api/map/search/ -- query: Search text -- bounds: Optional geographic bounds -- types: Location types to search -- Returns: Matching locations -``` - -#### Search API - -``` -GET /search/ -- q: Search query -- type: Entity type (park, ride, company) -- location: Geographic filter -- Returns: Search results with pagination - -GET /search/suggest/ -- q: Partial query for autocomplete -- Returns: Quick suggestions -``` - -#### HTMX Endpoints - -All HTMX endpoints return HTML fragments for seamless page updates: - -``` -POST /parks/suggest_parks/ # Park suggestions for autocomplete -GET /parks/areas/ # Dynamic area loading -POST /parks/search/location/ # Location search with coordinates -POST /parks/search/reverse-geocode/ # Address lookup from coordinates -``` - ---- - -## Conclusion - -ThrillWiki represents a sophisticated Django application implementing modern web development practices with a focus on performance, user experience, and maintainability. The project successfully combines: - -### Technical Excellence - -- **Modern Django Patterns**: Service-oriented architecture with clear separation of concerns -- **Geographic Capabilities**: Full PostGIS integration for spatial data and mapping -- **Performance Optimization**: Intelligent caching, query optimization, and clustering -- **Type Safety**: Comprehensive use of dataclasses and enums for data integrity - -### User Experience - -- **Responsive Design**: Mobile-first approach with Tailwind CSS -- **Progressive Enhancement**: HTMX for seamless interactions without JavaScript complexity -- **Dark Mode Support**: Complete theming system with user preferences -- **Accessibility**: WCAG-compliant components and navigation - -### Development Workflow - -- **UV Integration**: Modern Python package management with reproducible builds -- **Comprehensive Testing**: Model validation, service testing, and frontend integration -- **Documentation**: Extensive inline documentation and architectural decisions -- **Moderation System**: Complete workflow for user-generated content management - -### Architectural Strengths - -1. **Scalability**: Service layer architecture supports growth and feature expansion -2. **Maintainability**: Clear code organization with consistent patterns -3. **Performance**: Optimized database queries and intelligent caching strategies -4. **Security**: Authentication, authorization, and input validation throughout -5. **Extensibility**: Plugin-ready architecture for additional features - -The project demonstrates enterprise-level Django development practices while maintaining simplicity and developer experience. The combination of modern frontend techniques (HTMX, Alpine.js, Tailwind) with robust backend services creates a powerful platform for theme park and ride enthusiasts. - -This documentation serves as both a technical reference and architectural guide for understanding and extending the ThrillWiki platform. diff --git a/docs/UNRAID_COMPLETE_AUTOMATION.md b/docs/UNRAID_COMPLETE_AUTOMATION.md deleted file mode 100644 index 0b3ff288..00000000 --- a/docs/UNRAID_COMPLETE_AUTOMATION.md +++ /dev/null @@ -1,387 +0,0 @@ -# ThrillWiki Complete Unraid Automation Guide - -This guide provides **complete automation** for ThrillWiki deployment on Unraid, including VM creation, configuration, and CI/CD setup. Everything is automated with a single command. - -## 🚀 One-Command Complete Setup - -Run this single command to automate everything: - -```bash -./scripts/unraid/setup-complete-automation.sh -``` - -This will: -1. ✅ Create and configure VM on Unraid -2. ✅ Install Ubuntu Server with all dependencies -3. ✅ Configure PostgreSQL database -4. ✅ Deploy ThrillWiki application -5. ✅ Set up systemd services -6. ✅ Configure SSH access -7. ✅ Set up webhook listener -8. ✅ Test the entire system - -## System Architecture - -``` -GitHub Push → Webhook → Local Listener → SSH → Unraid VM → Deploy & Restart -``` - -## Prerequisites - -### Local Machine -- Python 3.8+ -- SSH client -- Internet connection - -### Unraid Server -- Unraid 6.8+ with VM support enabled -- SSH access to Unraid server -- Sufficient resources (4GB RAM, 50GB disk minimum) -- Ubuntu Server 22.04 ISO in `/mnt/user/isos/` - -## Automated Components - -### 1. VM Manager (`scripts/unraid/vm-manager.py`) -- Creates VM with proper specifications -- Configures networking and storage -- Manages VM lifecycle (start/stop/status) -- Retrieves VM IP addresses - -### 2. Complete Automation (`scripts/unraid/setup-complete-automation.sh`) -- Orchestrates entire setup process -- Handles SSH key generation and distribution -- Configures all services automatically -- Performs end-to-end testing - -### 3. VM Configuration -- Ubuntu Server 22.04 LTS -- PostgreSQL database -- UV package manager -- Systemd services for ThrillWiki -- Nginx (optional) - -## Step-by-Step Process - -### Phase 1: Initial Setup -The automation script will prompt for: -- Unraid server IP address -- Unraid credentials -- VM specifications (memory, CPU, disk) -- GitHub repository URL -- Webhook secret - -### Phase 2: SSH Key Setup -- Generates SSH keys for VM access -- Generates SSH keys for Unraid access -- Configures SSH client settings -- Tests connectivity - -### Phase 3: VM Creation -- Creates VM XML configuration -- Creates virtual disk (QCOW2 format) -- Defines VM in libvirt -- Starts VM with Ubuntu installation - -### Phase 4: VM Configuration -- Installs Ubuntu Server 22.04 -- Configures user account with SSH keys -- Installs required packages: - - Python 3.8+ - - UV package manager - - PostgreSQL - - Git - - Build tools - -### Phase 5: ThrillWiki Deployment -- Clones repository -- Installs Python dependencies with UV -- Creates database and user -- Runs initial migrations -- Configures systemd services -- Starts ThrillWiki service - -### Phase 6: CI/CD Setup -- Configures webhook listener -- Tests deployment pipeline -- Verifies all services - -## Configuration Files Generated - -### `***REMOVED***.unraid` -```bash -UNRAID_HOST=192.168.1.100 -UNRAID_USER=root -VM_NAME=thrillwiki-vm -VM_MEMORY=4096 -VM_VCPUS=2 -VM_DISK_SIZE=50 -SSH_PUBLIC_KEY=ssh-rsa AAAAB3... -``` - -### `***REMOVED***.webhook` -```bash -WEBHOOK_PORT=9000 -WEBHOOK_SECRET=your_secret -VM_HOST=192.168.1.101 -VM_USER=ubuntu -VM_KEY_PATH=/home/user/.ssh/thrillwiki_vm -VM_PROJECT_PATH=/home/ubuntu/thrillwiki -REPO_URL=https://github.com/user/repo.git -DEPLOY_BRANCH=main -``` - -### SSH Configuration -``` -Host thrillwiki-vm - HostName 192.168.1.101 - User ubuntu - IdentityFile ~/.ssh/thrillwiki_vm - StrictHostKeyChecking no - -Host unraid - HostName 192.168.1.100 - User root - IdentityFile ~/.ssh/unraid_access - StrictHostKeyChecking no -``` - -## VM Specifications - -### Default Configuration -- **OS**: Ubuntu Server 22.04 LTS -- **Memory**: 4GB RAM -- **vCPUs**: 2 -- **Storage**: 50GB (expandable) -- **Network**: Bridge mode (br0) -- **Boot**: UEFI with OVMF - -### Customizable Options -All specifications can be customized during setup: -- Memory allocation -- CPU cores -- Disk size -- VM name -- Network configuration - -## Services Installed - -### On VM -- **ThrillWiki Django App**: Port 8000 -- **PostgreSQL Database**: Port 5432 -- **SSH Server**: Port 22 -- **Systemd Services**: Auto-start on boot - -### On Local Machine -- **Webhook Listener**: Configurable port (default 9000) -- **SSH Client**: Configured for VM access - -## Management Commands - -### VM Management -```bash -# Check VM status -python3 scripts/unraid/vm-manager.py status - -# Start VM -python3 scripts/unraid/vm-manager.py start - -# Stop VM -python3 scripts/unraid/vm-manager.py stop - -# Get VM IP -python3 scripts/unraid/vm-manager.py ip - -# Complete VM setup -python3 scripts/unraid/vm-manager.py setup -``` - -### Service Management -```bash -# Connect to VM -ssh thrillwiki-vm - -# Check ThrillWiki service -sudo systemctl status thrillwiki - -# Restart service -sudo systemctl restart thrillwiki - -# View logs -journalctl -u thrillwiki -f - -# Manual deployment -cd thrillwiki && ./scripts/vm-deploy.sh -``` - -### Webhook Management -```bash -# Start webhook listener -./start-webhook.sh - -# Or manually -source ***REMOVED***.webhook && python3 scripts/webhook-listener.py - -# Test webhook -curl -X GET http://localhost:9000/health -``` - -## Automated Testing - -The setup includes comprehensive testing: - -### Connectivity Tests -- SSH access to Unraid server -- SSH access to VM -- Network connectivity - -### Service Tests -- ThrillWiki application startup -- Database connectivity -- Web server response - -### Deployment Tests -- Git repository access -- Deployment script execution -- Service restart verification - -## Security Features - -### SSH Security -- Dedicated SSH keys for each connection -- No password authentication -- Key-based access only - -### Network Security -- VM isolated in bridge network -- Firewall rules (configurable) -- SSH key rotation support - -### Service Security -- Non-root service execution -- Systemd security features -- Log rotation and monitoring - -## Troubleshooting - -### Common Issues - -1. **VM Creation Fails** - ```bash - # Check Unraid VM support - ssh unraid "virsh list --all" - - # Verify ISO exists - ssh unraid "ls -la /mnt/user/isos/*.iso" - ``` - -2. **VM Won't Start** - ```bash - # Check VM configuration - python3 scripts/unraid/vm-manager.py status - - # Check Unraid logs - ssh unraid "tail -f /var/log/libvirt/qemu/thrillwiki-vm.log" - ``` - -3. **Can't Connect to VM** - ```bash - # Check VM IP - python3 scripts/unraid/vm-manager.py ip - - # Test SSH key - ssh -i ~/.ssh/thrillwiki_vm ubuntu@VM_IP - ``` - -4. **Service Won't Start** - ```bash - # Check service logs - ssh thrillwiki-vm "journalctl -u thrillwiki --no-pager" - - # Manual start - ssh thrillwiki-vm "cd thrillwiki && ./scripts/ci-start.sh" - ``` - -### Log Locations - -- **Setup logs**: `logs/unraid-automation.log` -- **VM logs**: SSH to VM, then `journalctl -u thrillwiki` -- **Webhook logs**: `logs/webhook.log` -- **Deployment logs**: On VM at `~/thrillwiki/logs/deploy.log` - -## Advanced Configuration - -### Custom VM Specifications -Edit variables in the automation script: -```bash -VM_MEMORY=8192 # 8GB RAM -VM_VCPUS=4 # 4 CPU cores -VM_DISK_SIZE=100 # 100GB disk -``` - -### Network Configuration -For static IP assignment, modify the VM XML template in `vm-manager.py`. - -### Storage Configuration -The automation uses QCOW2 format for efficient storage. For better performance, consider: -- Raw disk format -- NVMe storage on Unraid -- Dedicated SSD for VM - -## Performance Optimization - -### Recommended Settings -- **Memory**: 4GB minimum, 8GB recommended -- **CPU**: 2 cores minimum, 4 cores for production -- **Storage**: SSD recommended for database -- **Network**: 1Gbps for fast deployments - -### Production Considerations -- Use dedicated hardware for database -- Configure backup strategies -- Monitor resource usage -- Set up log rotation - -## Backup and Recovery - -### Automated Backups -The deployment script automatically creates backups before each deployment in `~/thrillwiki/backups/`. - -### VM Snapshots -```bash -# Create VM snapshot -ssh unraid "virsh snapshot-create-as thrillwiki-vm snapshot-name" - -# List snapshots -ssh unraid "virsh snapshot-list thrillwiki-vm" - -# Restore snapshot -ssh unraid "virsh snapshot-revert thrillwiki-vm snapshot-name" -``` - -### Database Backups -```bash -# Manual database backup -ssh thrillwiki-vm "pg_dump thrillwiki > backup.sql" - -# Automated backup (add to cron) -ssh thrillwiki-vm "crontab -e" -# Add: 0 2 * * * pg_dump thrillwiki > /home/ubuntu/db-backup-$(date +\%Y\%m\%d).sql -``` - -## Monitoring - -### Health Checks -The system includes built-in health checks: -- VM status monitoring -- Service health verification -- Network connectivity tests -- Application response checks - -### Alerts (Optional) -Configure alerts for: -- Service failures -- Resource exhaustion -- Deployment failures -- Network issues - -This complete automation provides a production-ready ThrillWiki deployment with minimal manual intervention. The entire process from VM creation to application deployment is handled automatically. \ No newline at end of file diff --git a/docs/VM_DEPLOYMENT_SETUP.md b/docs/VM_DEPLOYMENT_SETUP.md deleted file mode 100644 index 6f3941c3..00000000 --- a/docs/VM_DEPLOYMENT_SETUP.md +++ /dev/null @@ -1,359 +0,0 @@ -# ThrillWiki VM Deployment Setup Guide - -This guide explains how to set up a local CI/CD system that automatically deploys ThrillWiki to a Linux VM when commits are pushed to GitHub. - -## System Overview - -The deployment system consists of three main components: - -1. **Local CI Start Script** (`scripts/ci-start.sh`) - Starts the Django server locally -2. **GitHub Webhook Listener** (`scripts/webhook-listener.py`) - Listens for GitHub push events -3. **VM Deployment Script** (`scripts/vm-deploy.sh`) - Deploys code changes to the Linux VM - -## Architecture Flow - -``` -GitHub Push → Webhook → Local Listener → SSH to VM → Deploy Script → Restart Server -``` - -## Prerequisites - -### Local Machine (Webhook Listener Host) -- Python 3.8+ -- SSH access to the Linux VM -- Git repository with webhook access - -### Linux VM (Deployment Target) -- Ubuntu 20.04+ (recommended) -- Python 3.8+ -- UV package manager -- Git -- PostgreSQL (if using database) -- SSH server running -- Sudo access for the deployment user - -## Step 1: Linux VM Setup - -### 1.1 Create Deployment User - -```bash -# On the Linux VM -sudo adduser ubuntu -sudo usermod -aG sudo ubuntu -su - ubuntu -``` - -### 1.2 Install Required Software - -```bash -# Update system -sudo apt update && sudo apt upgrade -y - -# Install essential packages -sudo apt install -y git curl build-essential python3-pip python3-venv postgresql postgresql-contrib nginx - -# Install UV package manager -curl -LsSf https://astral.sh/uv/install.sh | sh -source ~/.cargo/env -``` - -### 1.3 Set up SSH Keys - -```bash -# Generate SSH key on local machine -ssh-keygen -t rsa -b 4096 -f ~/.ssh/thrillwiki_vm - -# Copy public key to VM -ssh-copy-id -i ~/.ssh/thrillwiki_vm.pub ubuntu@VM_IP_ADDRESS -``` - -### 1.4 Clone Repository - -```bash -# On the VM -cd /home/ubuntu -git clone https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git thrillwiki -cd thrillwiki -``` - -### 1.5 Install Dependencies - -```bash -# Install Python dependencies -uv sync - -# Create required directories -mkdir -p logs backups -``` - -## Step 2: Configure Services - -### 2.1 Install Systemd Services - -```bash -# Copy service files to systemd directory -sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/ -sudo cp scripts/systemd/thrillwiki-webhook.service /etc/systemd/system/ - -# Edit service files to match your paths -sudo nano /etc/systemd/system/thrillwiki.service -sudo nano /etc/systemd/system/thrillwiki-webhook.service - -# Reload systemd and enable services -sudo systemctl daemon-reload -sudo systemctl enable thrillwiki.service -sudo systemctl enable thrillwiki-webhook.service -``` - -### 2.2 Configure Environment Variables - -Create `/home/ubuntu/thrillwiki/***REMOVED***`: - -```bash -# Database configuration -DATABASE_URL=[DATABASE-URL-REMOVED] - -# Django settings -DJANGO_SECRET_KEY=your_secret_key_here -DJANGO_DEBUG=False -DJANGO_ALLOWED_HOSTS=your_domain.com,VM_IP_ADDRESS - -# Webhook configuration -WEBHOOK_SECRET=your_github_webhook_secret -WEBHOOK_PORT=9000 -VM_HOST=localhost -VM_USER=ubuntu -VM_PROJECT_PATH=/home/ubuntu/thrillwiki -REPO_URL=https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git -``` - -## Step 3: Local Machine Setup - -### 3.1 Configure Webhook Listener - -Create a configuration file for the webhook listener: - -```bash -# Create environment file -cat > ***REMOVED***.webhook << EOF -WEBHOOK_PORT=9000 -WEBHOOK_SECRET=your_github_webhook_secret -VM_HOST=VM_IP_ADDRESS -VM_PORT=22 -VM_USER=ubuntu -VM_KEY_PATH=/home/your_user/.ssh/thrillwiki_vm -VM_PROJECT_PATH=/home/ubuntu/thrillwiki -REPO_URL=https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git -DEPLOY_BRANCH=main -EOF -``` - -### 3.2 Set up GitHub Webhook - -1. Go to your GitHub repository -2. Navigate to Settings → Webhooks -3. Click "Add webhook" -4. Configure: - - **Payload URL**: `http://YOUR_PUBLIC_IP:9000/webhook` - - **Content type**: `application/json` - - **Secret**: Your webhook secret - - **Events**: Select "Just the push event" - -## Step 4: Database Setup - -### 4.1 PostgreSQL Configuration - -```bash -# On the VM -sudo -u postgres psql - --- Create database and user -CREATE DATABASE thrillwiki; -CREATE USER thrillwiki_user WITH ENCRYPTED PASSWORD 'your_password'; -GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO thrillwiki_user; -\q - -# Install PostGIS (if using geographic features) -sudo apt install -y postgresql-postgis postgresql-postgis-scripts -sudo -u postgres psql -d thrillwiki -c "CREATE EXTENSION postgis;" -``` - -### 4.2 Run Initial Migration - -```bash -# On the VM -cd /home/ubuntu/thrillwiki -uv run manage.py migrate -uv run manage.py collectstatic --noinput -uv run manage.py createsuperuser -``` - -## Step 5: Start Services - -### 5.1 Start VM Services - -```bash -# On the VM -sudo systemctl start thrillwiki -sudo systemctl start thrillwiki-webhook -sudo systemctl status thrillwiki -sudo systemctl status thrillwiki-webhook -``` - -### 5.2 Start Local Webhook Listener - -```bash -# On local machine -source ***REMOVED***.webhook -python3 scripts/webhook-listener.py -``` - -## Step 6: Testing - -### 6.1 Test Local Server - -```bash -# Start local development server -./scripts/ci-start.sh - -# Check if server is running -curl http://localhost:8000/health -``` - -### 6.2 Test VM Deployment - -```bash -# On the VM, test deployment script -./scripts/vm-deploy.sh - -# Check service status -./scripts/vm-deploy.sh status - -# View logs -journalctl -u thrillwiki -f -``` - -### 6.3 Test Webhook - -```bash -# Test webhook endpoint -curl -X GET http://localhost:9000/health - -# Make a test commit and push to trigger deployment -git add . -git commit -m "Test deployment" -git push origin main -``` - -## Monitoring and Logs - -### Service Logs - -```bash -# View service logs -journalctl -u thrillwiki -f -journalctl -u thrillwiki-webhook -f - -# View deployment logs -tail -f /home/ubuntu/thrillwiki/logs/deploy.log -tail -f /home/ubuntu/thrillwiki/logs/webhook.log -``` - -### Health Checks - -```bash -# Check services status -systemctl status thrillwiki -systemctl status thrillwiki-webhook - -# Manual health check -curl http://localhost:8000/health -curl http://localhost:9000/health -``` - -## Troubleshooting - -### Common Issues - -1. **Permission Denied** - ```bash - # Fix file permissions - chmod +x scripts/*.sh - chown ubuntu:ubuntu -R /home/ubuntu/thrillwiki - ``` - -2. **Service Won't Start** - ```bash - # Check service logs - journalctl -u thrillwiki --no-pager - - # Verify paths in service files - sudo systemctl edit thrillwiki - ``` - -3. **Webhook Not Triggering** - ```bash - # Check webhook listener logs - tail -f logs/webhook.log - - # Verify GitHub webhook configuration - # Check firewall settings for port 9000 - ``` - -4. **Database Connection Issues** - ```bash - # Test database connection - uv run manage.py dbshell - - # Check PostgreSQL status - sudo systemctl status postgresql - ``` - -### Rollback Procedure - -If deployment fails, you can rollback: - -```bash -# On the VM -./scripts/vm-deploy.sh -# The script automatically handles rollback on failure - -# Manual rollback to specific commit -cd /home/ubuntu/thrillwiki -git reset --hard COMMIT_HASH -./scripts/vm-deploy.sh restart -``` - -## Security Considerations - -1. **SSH Keys**: Use dedicated SSH keys for deployment -2. **Webhook Secret**: Use a strong, unique webhook secret -3. **Firewall**: Only open necessary ports (22, 8000, 9000) -4. **User Permissions**: Use dedicated deployment user with minimal privileges -5. **Environment Variables**: Store sensitive data in environment files, not in code - -## Maintenance - -### Regular Tasks - -1. **Update Dependencies**: Run `uv sync` regularly -2. **Log Rotation**: Set up logrotate for application logs -3. **Backup Database**: Schedule regular database backups -4. **Monitor Disk Space**: Ensure sufficient space for logs and backups - -### Cleanup Old Backups - -```bash -# The deployment script automatically cleans old backups -# Manual cleanup if needed: -find /home/ubuntu/thrillwiki/backups -name "backup_*.commit" -mtime +30 -delete -``` - -## Performance Optimization - -1. **Use Production WSGI Server**: Consider using Gunicorn instead of development server -2. **Reverse Proxy**: Set up Nginx as reverse proxy -3. **Database Optimization**: Configure PostgreSQL for production -4. **Static Files**: Serve static files through Nginx - -This setup provides a robust CI/CD pipeline for automatic deployment of ThrillWiki to your Linux VM whenever code is pushed to GitHub. \ No newline at end of file diff --git a/docs/consolidation_analysis.md b/docs/consolidation_analysis.md deleted file mode 100644 index a40eb64b..00000000 --- a/docs/consolidation_analysis.md +++ /dev/null @@ -1,73 +0,0 @@ -# Consolidation Analysis - -## Review System Implementation - -### Current Implementation -- Uses Django's GenericForeignKey (confirmed) -- Single Review model handles both parks and rides -- Related models: ReviewImage, ReviewLike, ReviewReport -- Content types: Currently supports any model type - -### Migration Plan - -1. **Create New Models**: -```python -# parks/models/reviews.py -class ParkReview(TrackedModel): - park = models.ForeignKey(Park, on_delete=models.CASCADE) - # ... other review fields ... - -# rides/models/reviews.py -class RideReview(TrackedModel): - ride = models.ForeignKey(Ride, on_delete=models.CASCADE) - # ... other review fields ... -``` - -2. **Data Migration Steps**: -```python -# Migration operations -def migrate_reviews(apps, schema_editor): - Review = apps.get_model('reviews', 'Review') - ParkReview = apps.get_model('parks', 'ParkReview') - RideReview = apps.get_model('rides', 'RideReview') - - for review in Review.objects.all(): - if review.content_type.model == 'park': - ParkReview.objects.create( - park_id=review.object_id, - # ... map other fields ... - ) - elif review.content_type.model == 'ride': - RideReview.objects.create( - ride_id=review.object_id, - # ... map other fields ... - ) -``` - -3. **Update Related Models**: -```python -# Before (generic) -class ReviewImage(models.Model): - review = models.ForeignKey(Review, ...) - -# After (concrete) -class ParkReviewImage(models.Model): - review = models.ForeignKey(ParkReview, ...) - -class RideReviewImage(models.Model): - review = models.ForeignKey(RideReview, ...) -``` - -4. **Backward Compatibility**: -- Maintain old Review API during transition period -- Phase out generic reviews after data migration - -### Entity Relationship Compliance -- Park reviews will reference Park model (via Operator) -- Ride reviews will reference Ride model (via Park → Operator) -- Complies with entity relationship rules in .clinerules - -### Risk Mitigation -- Use data migration transactions -- Create database backups before migration -- Test with staging data first \ No newline at end of file diff --git a/docs/moderation_guide.md b/docs/moderation_guide.md deleted file mode 100644 index b84fc2f6..00000000 --- a/docs/moderation_guide.md +++ /dev/null @@ -1,290 +0,0 @@ -# ThrillWiki Moderation Guide - -## Overview - -This guide covers the moderation systems in ThrillWiki, including: -- Content edit submissions -- Photo submissions -- User reviews -- Report handling -- Moderation best practices - -## Moderation Dashboard - -Access the moderation dashboard at `/moderation/` to view: -- Pending edit submissions -- Photo submissions awaiting review -- Reported content -- Moderation statistics - -## Content Edit Moderation - -### Edit Submission Types -1. **Edit Existing** - - Changes to existing parks, rides, or other content - - Shows diff of proposed changes - - Requires source verification - -2. **Create New** - - New park, ride, or content submissions - - Requires complete information - - Needs source verification - -### Review Process -1. **Initial Assessment** - - Check submission completeness - - Verify sources - - Review user history - -2. **Status Options** - ```python - STATUS_CHOICES = [ - ('NEW', 'New'), - ('APPROVED', 'Approved'), - ('REJECTED', 'Rejected'), - ('ESCALATED', 'Escalated'), - ] - ``` - -3. **Actions** - - Approve: Apply changes after verification - - Reject: Provide clear reason - - Escalate: For complex cases needing admin review - -### Approval Guidelines -- Verify information accuracy -- Check reliable sources -- Ensure formatting consistency -- Review for completeness - -### Rejection Guidelines -- Provide clear explanation -- Reference guidelines -- Suggest improvements -- Be constructive - -## Photo Moderation - -### Submission Types -- Park photos -- Ride photos -- Attraction photos -- Historical photos - -### Review Process -1. **Initial Check** - - Image quality - - Appropriate content - - Copyright concerns - - Metadata accuracy - -2. **Status Options** - ```python - STATUS_CHOICES = [ - ('NEW', 'New'), - ('APPROVED', 'Approved'), - ('REJECTED', 'Rejected'), - ('AUTO_APPROVED', 'Auto Approved'), - ] - ``` - -3. **Actions** - - Approve: Add to main gallery - - Reject: Explain issues - - Auto-approve: For trusted users - -### Photo Guidelines -- Minimum resolution requirements -- No watermarks -- Clear and focused -- Appropriate content -- Proper attribution - -## Review Moderation - -### Review Components -- Rating (1-10) -- Written content -- Visit date -- Optional photos - -### Moderation Criteria -1. **Content Standards** - - Constructive feedback - - No personal attacks - - Family-friendly language - - Factual accuracy - -2. **Rating Consistency** - - Check against written content - - Compare with average ratings - - Look for rating abuse - -3. **Photo Guidelines** - - Relevant to review - - Appropriate content - - Quality standards - -### Report Handling -1. **Review Reports** - - Assess reported content - - Check reporter history - - Verify claims - -2. **Actions** - - Remove inappropriate content - - Edit/update if needed - - Notify users - - Document actions - -## Best Practices - -### General Guidelines -1. **Consistency** - - Follow established guidelines - - Apply rules uniformly - - Document decisions - -2. **Communication** - - Clear explanations - - Professional tone - - Constructive feedback - -3. **Escalation** - - Know when to escalate - - Document complex cases - - Seek admin input - -### Quality Control -1. **Content Standards** - - Accuracy - - Completeness - - Formatting - - Source verification - -2. **User Management** - - Track user history - - Identify trusted contributors - - Handle problem users - -3. **Documentation** - - Record decisions - - Note special cases - - Track patterns - -## Moderation Tools - -### Edit Submission Tools -```python -def approve(self, user): - """Approve and apply changes""" - # Validates and applies changes - # Updates status - # Records moderator action - -def reject(self, user): - """Reject submission""" - # Updates status - # Records moderator action - # Notifies user - -def escalate(self, user): - """Escalate to admin""" - # Marks for admin review - # Records moderator action -``` - -### Photo Submission Tools -```python -def approve(self, moderator, notes=''): - """Approve photo""" - # Moves to main gallery - # Updates status - # Records approval - -def reject(self, moderator, notes): - """Reject photo""" - # Updates status - # Records rejection reason - # Notifies user -``` - -## Special Cases - -### Content Disputes -1. **Handling Conflicts** - - Review all perspectives - - Check reliable sources - - Document decisions - - Consider escalation - -2. **Resolution Process** - - Gather evidence - - Consult experts if needed - - Make documented decision - - Communicate clearly - -### Emergency Situations -1. **Immediate Action Needed** - - Inappropriate content - - Copyright violations - - Personal information - - Harmful content - -2. **Response Protocol** - - Remove content immediately - - Document action taken - - Notify administrators - - Follow up as needed - -## Moderation Workflow - -1. **Daily Tasks** - - Review new submissions - - Check reported content - - Handle escalations - - Update documentation - -2. **Weekly Tasks** - - Review moderation patterns - - Check for recurring issues - - Update guidelines if needed - - Team communication - -3. **Monthly Tasks** - - Review statistics - - Assess guidelines - - Plan improvements - - Team training - -## Resources - -### Reference Materials -- Content guidelines -- Photo standards -- Review policies -- Escalation procedures - -### Support Channels -- Admin contact information -- Team communication -- Emergency procedures -- Legal resources - -## Training and Development - -1. **New Moderator Training** - - Platform overview - - Tools introduction - - Guidelines review - - Supervised practice - -2. **Ongoing Development** - - Regular updates - - Case studies - - Best practices - - Team feedback - -## Conclusion - -Effective moderation is crucial for maintaining ThrillWiki's quality and community. Follow these guidelines consistently while using good judgment for special cases. Document decisions and seek help when needed. diff --git a/docs/project_documentation.md b/docs/project_documentation.md deleted file mode 100644 index 87bc66b8..00000000 --- a/docs/project_documentation.md +++ /dev/null @@ -1,244 +0,0 @@ -# ThrillWiki Project Documentation - -## Overview -ThrillWiki is a comprehensive Django-based web application for managing and sharing information about amusement parks, rides, and attractions. The platform allows users to explore parks, review rides, share photos, and track various statistics about amusement parks and rides worldwide. - -## System Architecture - -### Core Applications - -1. **Parks App** - - Manages amusement park information - - Handles park areas and locations - - Tracks park statistics and operating status - -2. **Rides App** - - Manages ride information and categories - - Tracks detailed roller coaster statistics - - Links rides to parks, manufacturers, and designers - -3. **Companies App** - - Handles park ownership information - - Manages ride manufacturers - - Tracks company statistics and history - -4. **Designers App** - - Manages ride designer/engineering firm information - - Tracks designer contributions and history - -5. **Media App** - - Handles photo uploads and management - - Supports generic relationships for photos - - Manages media storage and organization - -6. **Reviews App** - - Manages user reviews and ratings - - Handles review moderation - - Supports review images and likes - -7. **Analytics App** - - Tracks page views and user interactions - - Identifies trending content - - Provides analytics data for the platform - -### Authentication & User Management -- Custom user model through accounts app -- Social authentication support (Google, Discord) -- User profiles and settings management - -## Data Models - -### Park Models -```python -class Park: - - name, slug, description - - status (Operating, Closed, etc.) - - location (latitude, longitude, address) - - operating details (season, dates) - - statistics (total rides, coasters) - - ownership information -``` - -### Ride Models -```python -class Ride: - - name, slug, description - - category (Roller Coaster, Dark Ride, etc.) - - status and operating dates - - manufacturer and designer links - - park and area location - - accessibility and capacity info - -class RollerCoasterStats: - - height, length, speed metrics - - track type and materials - - launch system details - - train configuration -``` - -### Company Models -```python -class Company: - - name, slug, description - - headquarters and contact info - - park ownership tracking - - historical records - -class Manufacturer: - - name, slug, description - - ride production statistics - - historical records -``` - -### Media Management -```python -class Photo: - - Generic relationship to content - - Image storage and organization - - Caption and metadata - - Upload tracking -``` - -### Review System -```python -class Review: - - Generic relationship to content - - Rating and content - - Moderation support - - Image attachments - - Like/report functionality -``` - -### Analytics -```python -class PageView: - - Content tracking - - Timestamp and user info - - Trending content calculation -``` - -## Key Features - -### Content Management -- Comprehensive park and ride information -- Historical tracking of all content changes -- Rich media support with photo management -- Detailed statistics and specifications - -### User Interaction -- User reviews and ratings -- Photo uploads and sharing -- Content moderation system -- Social features (likes, reports) - -### Analytics and Trending -- Page view tracking -- Trending content identification -- Usage statistics and metrics - -### Location Features -- Geographic coordinates -- Address management -- Park area organization - -## Technical Implementation - -### Database Design -- PostgreSQL database -- Generic relations for flexible content relationships -- Comprehensive indexing for performance -- Historical record tracking - -### Authentication -- Django allauth integration -- Multiple social auth providers -- Custom user model and authentication flow - -### Media Handling -- Custom storage backend -- Organized file structure -- Automatic file naming and organization - -### Performance Features -- Caching configuration -- Database query optimization -- Efficient media handling - -### Security Features -- CSRF protection -- Secure authentication -- Content moderation -- User input validation - -## Management Commands - -### Analytics -- `update_trending.py`: Updates trending content calculations - -### Parks -- `update_park_counts.py`: Updates park statistics and ride counts - -## Frontend Integration - -### Templates -- Organized template structure -- Partial templates for reusability -- Photo gallery integration - -### Static Files -- CSS with Tailwind integration -- JavaScript for interactive features -- Photo gallery functionality - -## Development Guidelines - -### Code Organization -- Modular app structure -- Clear separation of concerns -- Consistent naming conventions - -### Best Practices -- Generic relations for flexibility -- Historical tracking for all major models -- Comprehensive validation -- Efficient query patterns - -### Security Considerations -- Secure content storage -- User permission management -- Input validation and sanitization -- Protected user data - -## Deployment Considerations - -### Environment Configuration -- Environment-specific settings -- Secure key management -- Database configuration - -### Media Storage -- Organized upload structure -- Efficient file handling -- Backup considerations - -### Performance Optimization -- Caching strategy -- Database indexing -- Query optimization - -## Future Considerations - -### Scalability -- Database optimization opportunities -- Caching improvements -- Media handling optimization - -### Feature Expansion -- Additional social features -- Enhanced analytics -- Mobile app integration possibilities - -### Integration Opportunities -- API development -- Third-party service integration -- Mobile app support diff --git a/docs/quickstart.md b/docs/quickstart.md deleted file mode 100644 index 5ee2c289..00000000 --- a/docs/quickstart.md +++ /dev/null @@ -1,153 +0,0 @@ -# ThrillWiki Quick Start Guide - -## Prerequisites - -- Python 3.8+ -- PostgreSQL -- Node.js (for Tailwind CSS) - -## Setup Instructions - -1. **Clone the Repository** - ```bash - git clone - cd thrillwiki - ``` - -2. **Create Virtual Environment** - ```bash - python -m venv venv - source venv/bin/activate # On Windows: venv\Scripts\activate - ``` - -3. **Install Dependencies** - ```bash - pip install -r requirements.txt - ``` - -4. **Database Setup** - ```bash - # Update database settings in thrillwiki/settings.py - python manage.py migrate - ``` - -5. **Create Superuser** - ```bash - python manage.py createsuperuser - ``` - -6. **Install Frontend Dependencies** - ```bash - # Install Tailwind CSS - npm install - ``` - -7. **Environment Configuration** - - Copy example environment file - - Update necessary settings - - Configure social auth providers - -8. **Run Development Server** - ```bash - python manage.py runserver - ``` - -## Key URLs - -- Admin Interface: `/admin/` -- Home Page: `/` -- Parks List: `/parks/` -- Rides List: `/rides/` - -## Development Guidelines - -1. **Model Changes** - ```bash - python manage.py makemigrations - python manage.py migrate - ``` - -2. **Running Tests** - ```bash - python manage.py test - ``` - -3. **Update Trending Content** - ```bash - python manage.py update_trending - ``` - -4. **Update Park Statistics** - ```bash - python manage.py update_park_counts - ``` - -## Common Tasks - -### Adding a New Park -1. Access admin interface -2. Navigate to Parks section -3. Click "Add Park" -4. Fill required information -5. Save - -### Adding a New Ride -1. Access admin interface -2. Navigate to Rides section -3. Click "Add Ride" -4. Fill required information -5. Add roller coaster stats if applicable -6. Save - -### Managing Photos -1. Photos can be added to parks, rides, or companies -2. Use the photo upload form on respective detail pages -3. Set primary photo as needed - -### Moderating Reviews -1. Access admin interface -2. Navigate to Reviews section -3. Review flagged content -4. Take appropriate moderation action - -## Troubleshooting - -### Common Issues - -1. **Database Connection** - - Verify PostgreSQL is running - - Check database credentials - - Ensure database exists - -2. **Media Upload Issues** - - Check file permissions - - Verify media storage configuration - - Ensure proper file types - -3. **Social Auth** - - Verify provider credentials - - Check callback URLs - - Review auth settings - -### Getting Help - -- Check existing documentation in `/docs` -- Review error logs -- Contact development team - -## Best Practices - -1. **Code Style** - - Follow PEP 8 - - Use type hints - - Document functions and classes - -2. **Git Workflow** - - Create feature branches - - Write descriptive commits - - Keep changes focused - -3. **Testing** - - Write unit tests - - Test all new features - - Verify existing functionality diff --git a/docs/search_integration_plan.md b/docs/search_integration_plan.md deleted file mode 100644 index 32b9fe57..00000000 --- a/docs/search_integration_plan.md +++ /dev/null @@ -1,96 +0,0 @@ -# Search Integration Plan - -## 1. File Structure -```plaintext -core/ -├── views/ -│ └── search.py # Search views implementation -├── utils/ -│ └── search.py # Search utilities -templates/ -└── core/ - └── search/ # Search templates - ├── results.html - ├── filters.html - └── ... -``` - -## 2. View Migration -- Move `search/views.py` → `core/views/search.py` -- Update view references: -```python -# Old: from search.views import AdaptiveSearchView -# New: -from core.views.search import AdaptiveSearchView, FilterFormView -``` - -## 3. URL Configuration Updates -Update `thrillwiki/urls.py`: -```python -# Before: -path("search/", include("search.urls", namespace="search")) - -# After: -path("search/", include("core.urls.search", namespace="search")) -``` - -Create `core/urls/search.py`: -```python -from django.urls import path -from core.views.search import AdaptiveSearchView, FilterFormView -from rides.views import RideSearchView - -urlpatterns = [ - path('parks/', AdaptiveSearchView.as_view(), name='search'), - path('parks/filters/', FilterFormView.as_view(), name='filter_form'), - path('rides/', RideSearchView.as_view(), name='ride_search'), - path('rides/results/', RideSearchView.as_view(), name='ride_search_results'), -] -``` - -## 4. Import Cleanup Strategy -1. Update all imports: -```python -# Before: -from search.views import ... -from search.utils import ... - -# After: -from core.views.search import ... -from core.utils.search import ... -``` - -2. Remove old search app: -```bash -rm -rf search/ -``` - -3. Update `INSTALLED_APPS` in `thrillwiki/settings.py`: -```python -# Remove 'search' from INSTALLED_APPS -INSTALLED_APPS = [ - # ... - # 'search', # REMOVE THIS LINE - # ... -] -``` - -## 5. Implementation Steps -1. Create new directory structure in core -2. Move view logic to `core/views/search.py` -3. Create URL config in `core/urls/search.py` -4. Move templates to `templates/core/search/` -5. Update all import references -6. Remove old search app -7. Test all search functionality: - - Park search filters - - Ride search - - HTMX filter updates -8. Verify URL routes - -## 6. Verification Checklist -- [ ] All search endpoints respond with 200 -- [ ] Filter forms render correctly -- [ ] HTMX updates work as expected -- [ ] No references to old search app in codebase -- [ ] Templates render with correct context \ No newline at end of file diff --git a/docs/technical_architecture.md b/docs/technical_architecture.md deleted file mode 100644 index 946b173b..00000000 --- a/docs/technical_architecture.md +++ /dev/null @@ -1,77 +0,0 @@ -# ThrillWiki Technical Architecture - -``` -┌─────────────────────────────────────────────────────────────┐ -│ ThrillWiki Platform │ -├─────────────────┬─────────────────────┬───────────────────┤ -│ Frontend Layer │ Application Layer │ Storage Layer │ -├─────────────────┤ │ │ -│ - Templates │ Django Apps │ - PostgreSQL DB │ -│ - Tailwind CSS │ ┌──────────┐ │ - Media Storage │ -│ - JavaScript │ │ Parks │ │ - Static Files │ -│ │ │ Rides │ │ │ -│ Components │ │Companies │ │ │ -│ - Photo Gallery │ │Designers │ │ │ -│ - Review Forms │ └──────────┘ │ │ -│ │ │ │ -├─────────────────┼─────────────────────┼───────────────────┤ -│ Auth Layer │ Service Layer │ Analytics Layer │ -├─────────────────┤ │ │ -│ - Django Auth │ Core Services │ - Page Tracking │ -│ - Social Auth │ ┌──────────┐ │ - Trending Calc │ -│ - Permissions │ │ Reviews │ │ - Usage Stats │ -│ │ │ Media │ │ │ -│ │ │Analytics │ │ │ -│ │ └──────────┘ │ │ -└─────────────────┴─────────────────────┴───────────────────┘ - -Data Flow: -User Request → URL Router → View → Model → Database - ↓ - Template → Response - -Content Relations: -Park ──┬── Areas - └── Rides ─┬── Manufacturer - └── Designer - -Media Storage: -Content ─→ Generic Relation ─→ Photos - -Analytics Flow: -Page View → Tracking → Trending Calculation -``` - -## Key Components - -1. **Frontend Layer** - - Template-based rendering - - Tailwind CSS styling - - JavaScript enhancements - - Interactive components - -2. **Application Layer** - - Core domain apps - - Business logic - - Data validation - - Content management - -3. **Storage Layer** - - Database persistence - - Media file storage - - Caching system - -4. **Authentication Layer** - - User management - - Social authentication - - Permission control - -5. **Service Layer** - - Review system - - Media handling - - Email services - -6. **Analytics Layer** - - View tracking - - Trend analysis - - Usage statistics diff --git a/email_service/__init__.py b/email_service/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/email_service/admin.py b/email_service/admin.py deleted file mode 100644 index e9d29e15..00000000 --- a/email_service/admin.py +++ /dev/null @@ -1,39 +0,0 @@ -from django.contrib import admin -from django.contrib.sites.models import Site -from .models import EmailConfiguration - - -@admin.register(EmailConfiguration) -class EmailConfigurationAdmin(admin.ModelAdmin): - list_display = ( - "site", - "from_name", - "from_email", - "reply_to", - "updated_at", - ) - list_select_related = ("site",) - search_fields = ("site__domain", "from_name", "from_email", "reply_to") - readonly_fields = ("created_at", "updated_at") - fieldsets = ( - (None, {"fields": ("site",)}), - ( - "Email Settings", - { - "fields": ("api_key", ("from_name", "from_email"), "reply_to"), - "description": 'Configure the email settings. The From field in emails will appear as "From Name "', - }, - ), - ( - "Timestamps", - {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}, - ), - ) - - def get_queryset(self, request): - return super().get_queryset(request).select_related("site") - - def formfield_for_foreignkey(self, db_field, request, **kwargs): - if db_field.name == "site": - kwargs["queryset"] = Site.objects.all().order_by("domain") - return super().formfield_for_foreignkey(db_field, request, **kwargs) diff --git a/email_service/apps.py b/email_service/apps.py deleted file mode 100644 index cc44308a..00000000 --- a/email_service/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class EmailServiceConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "email_service" diff --git a/email_service/backends.py b/email_service/backends.py deleted file mode 100644 index 1731d01c..00000000 --- a/email_service/backends.py +++ /dev/null @@ -1,102 +0,0 @@ -from django.core.mail.backends.base import BaseEmailBackend -from django.core.mail.message import sanitize_address -from .services import EmailService -from .models import EmailConfiguration - - -class ForwardEmailBackend(BaseEmailBackend): - def __init__(self, fail_silently=False, **kwargs): - super().__init__(fail_silently=fail_silently) - self.site = kwargs.get("site", None) - - def send_messages(self, email_messages): - """ - Send one or more EmailMessage objects and return the number of email - messages sent. - """ - if not email_messages: - return 0 - - num_sent = 0 - for message in email_messages: - try: - sent = self._send(message) - if sent: - num_sent += 1 - except Exception: - if not self.fail_silently: - raise - return num_sent - - def _send(self, email_message): - """Send an EmailMessage object.""" - if not email_message.recipients(): - return False - - # Get the first recipient (ForwardEmail API sends to one recipient at a - # time) - to_email = email_message.to[0] - - # Get site from connection or instance - if hasattr(email_message, "connection") and hasattr( - email_message.connection, "site" - ): - site = email_message.connection.site - else: - site = self.site - - if not site: - raise ValueError("Either request or site must be provided") - - # Get the site's email configuration - try: - config = EmailConfiguration.objects.get(site=site) - except EmailConfiguration.DoesNotExist: - raise ValueError( - f"Email configuration not found for site: { - site.domain}" - ) - - # Get the from email, falling back to site's default if not provided - if email_message.from_email: - from_email = sanitize_address( - email_message.from_email, email_message.encoding - ) - else: - from_email = config.default_from_email - - # Extract clean email address - from_email = EmailService.extract_email(from_email) - - # Get reply-to from message headers or use default - reply_to = None - if hasattr(email_message, "reply_to") and email_message.reply_to: - reply_to = email_message.reply_to[0] - elif ( - hasattr(email_message, "extra_headers") - and "Reply-To" in email_message.extra_headers - ): - reply_to = email_message.extra_headers["Reply-To"] - - # Get message content - if email_message.content_subtype == "html": - # If it's HTML content, we'll send it as text for now - # You could extend this to support HTML emails if needed - text = email_message.body - else: - text = email_message.body - - try: - EmailService.send_email( - to=to_email, - subject=email_message.subject, - text=text, - from_email=from_email, - reply_to=reply_to, - site=site, - ) - return True - except Exception: - if not self.fail_silently: - raise - return False diff --git a/email_service/management/commands/test_email_flows.py b/email_service/management/commands/test_email_flows.py deleted file mode 100644 index 53547ce8..00000000 --- a/email_service/management/commands/test_email_flows.py +++ /dev/null @@ -1,196 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.auth import get_user_model -from django.contrib.sites.models import Site -from django.test import RequestFactory, Client -from allauth.account.models import EmailAddress -from accounts.adapters import CustomAccountAdapter -from django.conf import settings -import uuid - -User = get_user_model() - - -class Command(BaseCommand): - help = "Test all email flows in the application" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.factory = RequestFactory() - # Disable CSRF for testing - self.client = Client(enforce_csrf_checks=False) - self.adapter = CustomAccountAdapter() - self.site = Site.objects.get_current() - - # Generate unique test data - unique_id = str(uuid.uuid4())[:8] - self.test_username = f"testuser_{unique_id}" - self.test_email = f"test_{unique_id}@thrillwiki.com" - self.test_password = "[PASSWORD-REMOVED]" - self.new_password = "[PASSWORD-REMOVED]" - - # Add testserver to ALLOWED_HOSTS - if "testserver" not in settings.ALLOWED_HOSTS: - settings.ALLOWED_HOSTS.append("testserver") - - def handle(self, *args, **options): - self.stdout.write("Starting email flow tests...\n") - - # Clean up any existing test users - User.objects.filter(email__endswith="@thrillwiki.com").delete() - - # Test registration email - self.test_registration() - - # Create a test user for other flows - user = User.objects.create_user( - username=f"testuser2_{str(uuid.uuid4())[:8]}", - email=f"test2_{str(uuid.uuid4())[:8]}@thrillwiki.com", - password=self.test_password, - ) - EmailAddress.objects.create( - user=user, email=user.email, primary=True, verified=True - ) - - # Log in the test user - self.client.force_login(user) - - # Test other flows - self.test_password_change(user) - self.test_email_change(user) - self.test_password_reset(user) - - # Cleanup - User.objects.filter(email__endswith="@thrillwiki.com").delete() - self.stdout.write(self.style.SUCCESS("All email flow tests completed!\n")) - - def test_registration(self): - """Test registration email flow""" - self.stdout.write("Testing registration email...") - try: - # Use dj-rest-auth registration endpoint - response = self.client.post( - "/api/auth/registration/", - { - "username": self.test_username, - "email": self.test_email, - "password1": self.test_password, - "password2": self.test_password, - }, - content_type="application/json", - ) - - if response.status_code in [200, 201, 204]: - self.stdout.write( - self.style.SUCCESS("Registration email test passed!\n") - ) - else: - self.stdout.write( - self.style.WARNING( - f"Registration returned status { - response.status_code}: { - response.content.decode()}\n" - ) - ) - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"Registration email test failed: { - str(e)}\n" - ) - ) - - def test_password_change(self, user): - """Test password change using dj-rest-auth""" - self.stdout.write("Testing password change email...") - try: - response = self.client.post( - "/api/auth/password/change/", - { - "old_password": self.test_password, - "new_password1": self.new_password, - "new_password2": self.new_password, - }, - content_type="application/json", - ) - - if response.status_code == 200: - self.stdout.write( - self.style.SUCCESS("Password change email test passed!\n") - ) - else: - self.stdout.write( - self.style.WARNING( - f"Password change returned status { - response.status_code}: { - response.content.decode()}\n" - ) - ) - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"Password change email test failed: { - str(e)}\n" - ) - ) - - def test_email_change(self, user): - """Test email change verification""" - self.stdout.write("Testing email change verification...") - try: - new_email = f"newemail_{str(uuid.uuid4())[:8]}@thrillwiki.com" - response = self.client.post( - "/api/auth/email/", - {"email": new_email}, - content_type="application/json", - ) - - if response.status_code == 200: - self.stdout.write( - self.style.SUCCESS("Email change verification test passed!\n") - ) - else: - self.stdout.write( - self.style.WARNING( - f"Email change returned status { - response.status_code}: { - response.content.decode()}\n" - ) - ) - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"Email change verification test failed: { - str(e)}\n" - ) - ) - - def test_password_reset(self, user): - """Test password reset using dj-rest-auth""" - self.stdout.write("Testing password reset email...") - try: - # Request password reset - response = self.client.post( - "/api/auth/password/reset/", - {"email": user.email}, - content_type="application/json", - ) - - if response.status_code == 200: - self.stdout.write( - self.style.SUCCESS("Password reset email test passed!\n") - ) - else: - self.stdout.write( - self.style.WARNING( - f"Password reset returned status { - response.status_code}: { - response.content.decode()}\n" - ) - ) - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"Password reset email test failed: { - str(e)}\n" - ) - ) diff --git a/email_service/management/commands/test_email_service.py b/email_service/management/commands/test_email_service.py deleted file mode 100644 index 1d4a380e..00000000 --- a/email_service/management/commands/test_email_service.py +++ /dev/null @@ -1,244 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.sites.models import Site -from django.core.mail import send_mail -from django.conf import settings -import requests -import os -from email_service.models import EmailConfiguration -from email_service.services import EmailService -from email_service.backends import ForwardEmailBackend - - -class Command(BaseCommand): - help = "Test the email service functionality" - - def add_arguments(self, parser): - parser.add_argument( - "--to", - type=str, - help="Recipient email address (optional, defaults to current user's email)", - ) - parser.add_argument( - "--api-key", - type=str, - help="ForwardEmail API key (optional, will use configured value)", - ) - parser.add_argument( - "--from-email", - type=str, - help="Sender email address (optional, will use configured value)", - ) - - def get_config(self): - """Get email configuration from database or environment""" - try: - site = Site.objects.get(id=settings.SITE_ID) - config = EmailConfiguration.objects.get(site=site) - return { - "api_key": config.api_key, - "from_email": config.default_from_email, - "site": site, - } - except (Site.DoesNotExist, EmailConfiguration.DoesNotExist): - # Try environment variables - api_key = os.environ.get("FORWARD_EMAIL_API_KEY") - from_email = os.environ.get("FORWARD_EMAIL_FROM") - - if not api_key or not from_email: - self.stdout.write( - self.style.WARNING( - "No configuration found in database or environment variables.\n" - "Please either:\n" - "1. Configure email settings in Django admin, or\n" - "2. Set environment variables FORWARD_EMAIL_API_KEY and FORWARD_EMAIL_FROM, or\n" - "3. Provide --api-key and --from-email arguments" - ) - ) - return None - - return { - "api_key": api_key, - "from_email": from_email, - "site": Site.objects.get(id=settings.SITE_ID), - } - - def handle(self, *args, **options): - self.stdout.write(self.style.SUCCESS("Starting email service tests...")) - - # Get configuration - config = self.get_config() - if not config and not (options["api_key"] and options["from_email"]): - self.stdout.write( - self.style.ERROR("No email configuration available. Tests aborted.") - ) - return - - # Use provided values or fall back to config - api_key = options["api_key"] or config["api_key"] - from_email = options["from_email"] or config["from_email"] - site = config["site"] - - # If no recipient specified, use the from_email address for testing - to_email = options["to"] or "test@thrillwiki.com" - - self.stdout.write(self.style.SUCCESS("Using configuration:")) - self.stdout.write(f" From: {from_email}") - self.stdout.write(f" To: {to_email}") - self.stdout.write(f' API Key: {"*" * len(api_key)}') - self.stdout.write(f" Site: {site.domain}") - - try: - # 1. Test site configuration - config = self.test_site_configuration(api_key, from_email) - - # 2. Test direct service - self.test_email_service_directly(to_email, config.site) - - # 3. Test API endpoint - self.test_api_endpoint(to_email) - - # 4. Test Django email backend - self.test_email_backend(to_email, config.site) - - self.stdout.write( - self.style.SUCCESS("\nAll tests completed successfully! 🎉") - ) - - except Exception as e: - self.stdout.write(self.style.ERROR(f"\nTests failed: {str(e)}")) - - def test_site_configuration(self, api_key, from_email): - """Test creating and retrieving site configuration""" - self.stdout.write("\nTesting site configuration...") - - try: - # Get or create default site - site = Site.objects.get_or_create( - id=settings.SITE_ID, - defaults={"domain": "example.com", "name": "example.com"}, - )[0] - - # Create or update email configuration - config, created = EmailConfiguration.objects.update_or_create( - site=site, - defaults={ - "api_key": api_key, - "default_from_email": from_email, - }, - ) - - action = "Created new" if created else "Updated existing" - self.stdout.write(self.style.SUCCESS(f"✓ {action} site configuration")) - return config - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"✗ Site configuration failed: { - str(e)}" - ) - ) - raise - - def test_api_endpoint(self, to_email): - """Test sending email via the API endpoint""" - self.stdout.write("\nTesting API endpoint...") - - try: - # Make request to the API endpoint - response = requests.post( - "http://127.0.0.1:8000/api/email/send-email/", - json={ - "to": to_email, - "subject": "Test Email via API", - "text": "This is a test email sent via the API endpoint.", - }, - headers={ - "Content-Type": "application/json", - }, - timeout=60, - ) - - if response.status_code == 200: - self.stdout.write(self.style.SUCCESS("✓ API endpoint test successful")) - else: - self.stdout.write( - self.style.ERROR( - f"✗ API endpoint test failed with status { - response.status_code}: { - response.text}" - ) - ) - raise Exception(f"API test failed: {response.text}") - except requests.exceptions.ConnectionError: - self.stdout.write( - self.style.ERROR( - "✗ API endpoint test failed: Could not connect to server. " - "Make sure the Django development server is running." - ) - ) - raise Exception("Could not connect to Django server") - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"✗ API endpoint test failed: { - str(e)}" - ) - ) - raise - - def test_email_backend(self, to_email, site): - """Test sending email via Django's email backend""" - self.stdout.write("\nTesting Django email backend...") - - try: - # Create a connection with site context - backend = ForwardEmailBackend(fail_silently=False, site=site) - - # Debug output - self.stdout.write( - f" Debug: Using from_email: { - site.email_config.default_from_email}" - ) - self.stdout.write(f" Debug: Using to_email: {to_email}") - - send_mail( - subject="Test Email via Backend", - message="This is a test email sent via the Django email backend.", - from_email=site.email_config.default_from_email, # Explicitly set from_email - recipient_list=[to_email], - fail_silently=False, - connection=backend, - ) - self.stdout.write(self.style.SUCCESS("✓ Email backend test successful")) - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"✗ Email backend test failed: { - str(e)}" - ) - ) - raise - - def test_email_service_directly(self, to_email, site): - """Test sending email directly via EmailService""" - self.stdout.write("\nTesting EmailService directly...") - - try: - response = EmailService.send_email( - to=to_email, - subject="Test Email via Service", - text="This is a test email sent directly via the EmailService.", - site=site, - ) - self.stdout.write( - self.style.SUCCESS("✓ Direct EmailService test successful") - ) - return response - except Exception as e: - self.stdout.write( - self.style.ERROR( - f"✗ Direct EmailService test failed: { - str(e)}" - ) - ) - raise diff --git a/email_service/migrations/0001_initial.py b/email_service/migrations/0001_initial.py deleted file mode 100644 index 00c7e8e7..00000000 --- a/email_service/migrations/0001_initial.py +++ /dev/null @@ -1,141 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-13 21:35 - -import django.db.models.deletion -import pgtrigger.compiler -import pgtrigger.migrations -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("pghistory", "0006_delete_aggregateevent"), - ("sites", "0002_alter_domain_unique"), - ] - - operations = [ - migrations.CreateModel( - name="EmailConfiguration", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("api_key", models.CharField(max_length=255)), - ("from_email", models.EmailField(max_length=254)), - ( - "from_name", - models.CharField( - help_text="The name that will appear in the From field of emails", - max_length=255, - ), - ), - ("reply_to", models.EmailField(max_length=254)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "site", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="sites.site", - ), - ), - ], - options={ - "verbose_name": "Email Configuration", - "verbose_name_plural": "Email Configurations", - }, - ), - migrations.CreateModel( - name="EmailConfigurationEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("api_key", models.CharField(max_length=255)), - ("from_email", models.EmailField(max_length=254)), - ( - "from_name", - models.CharField( - help_text="The name that will appear in the From field of emails", - max_length=255, - ), - ), - ("reply_to", models.EmailField(max_length=254)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="email_service.emailconfiguration", - ), - ), - ( - "site", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="sites.site", - ), - ), - ], - options={ - "abstract": False, - }, - ), - pgtrigger.migrations.AddTrigger( - model_name="emailconfiguration", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "email_service_emailconfigurationevent" ("api_key", "created_at", "from_email", "from_name", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reply_to", "site_id", "updated_at") VALUES (NEW."api_key", NEW."created_at", NEW."from_email", NEW."from_name", NEW."id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."reply_to", NEW."site_id", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_08c59", - table="email_service_emailconfiguration", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="emailconfiguration", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "email_service_emailconfigurationevent" ("api_key", "created_at", "from_email", "from_name", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reply_to", "site_id", "updated_at") VALUES (NEW."api_key", NEW."created_at", NEW."from_email", NEW."from_name", NEW."id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."reply_to", NEW."site_id", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_992a4", - table="email_service_emailconfiguration", - when="AFTER", - ), - ), - ), - ] diff --git a/email_service/migrations/__init__.py b/email_service/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/email_service/models.py b/email_service/models.py deleted file mode 100644 index f5af46f1..00000000 --- a/email_service/models.py +++ /dev/null @@ -1,25 +0,0 @@ -from django.db import models -from django.contrib.sites.models import Site -from core.history import TrackedModel -import pghistory - - -@pghistory.track() -class EmailConfiguration(TrackedModel): - api_key = models.CharField(max_length=255) - from_email = models.EmailField() - from_name = models.CharField( - max_length=255, - help_text="The name that will appear in the From field of emails", - ) - reply_to = models.EmailField() - site = models.ForeignKey(Site, on_delete=models.CASCADE) - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - def __str__(self): - return f"{self.from_name} <{self.from_email}>" - - class Meta: - verbose_name = "Email Configuration" - verbose_name_plural = "Email Configurations" diff --git a/email_service/services.py b/email_service/services.py deleted file mode 100644 index 7ab01cf5..00000000 --- a/email_service/services.py +++ /dev/null @@ -1,112 +0,0 @@ -import requests -from django.conf import settings -from django.contrib.sites.shortcuts import get_current_site -from django.core.exceptions import ImproperlyConfigured -from django.core.mail.message import sanitize_address -from .models import EmailConfiguration -import json -import base64 - - -class EmailService: - @staticmethod - def send_email( - *, - to: str, - subject: str, - text: str, - from_email: str = None, - html: str = None, - reply_to: str = None, - request=None, - site=None, - ): - # Get the site configuration - if site is None and request is not None: - site = get_current_site(request) - elif site is None: - raise ImproperlyConfigured("Either request or site must be provided") - - try: - # Fetch the email configuration for the current site - email_config = EmailConfiguration.objects.get(site=site) - api_key = email_config.api_key - - # Use provided from_email or construct from config - if not from_email: - from_email = f"{ - email_config.from_name} <{ - email_config.from_email}>" - elif "<" not in from_email: - # If from_email is provided but doesn't include a name, add the - # configured name - from_email = f"{email_config.from_name} <{from_email}>" - - # Use provided reply_to or fall back to config - if not reply_to: - reply_to = email_config.reply_to - - except EmailConfiguration.DoesNotExist: - raise ImproperlyConfigured( - f"Email configuration is missing for site: {site.domain}" - ) - - # Ensure the reply_to address is clean - reply_to = sanitize_address(reply_to, "utf-8") - - # Format data for the API - data = { - "from": from_email, # Now includes the name in format "Name " - "to": to, - "subject": subject, - "text": text, - "replyTo": reply_to, - } - - # Add HTML version if provided - if html: - data["html"] = html - - # Debug output - print("\nEmail Service Debug:") - print(f"From: {from_email}") - print(f"To: {to}") - print(f"Reply-To: {reply_to}") - print(f"API Key: {api_key}") - print(f"Site: {site.domain}") - print(f"Request URL: {settings.FORWARD_EMAIL_BASE_URL}/v1/emails") - print(f"Request Data: {json.dumps(data, indent=2)}") - - # Create Basic auth header with API key as username and empty password - auth_header = base64.b64encode(f"{api_key}:".encode()).decode() - headers = { - "Authorization": f"Basic {auth_header}", - "Accept": "application/json", - "Content-Type": "application/json", - } - - try: - response = requests.post( - f"{settings.FORWARD_EMAIL_BASE_URL}/v1/emails", - json=data, - headers=headers, - timeout=60, - ) - - # Debug output - print(f"Response Status: {response.status_code}") - print(f"Response Headers: {dict(response.headers)}") - print(f"Response Body: {response.text}") - - if response.status_code != 200: - error_message = response.text if response.text else "Unknown error" - raise Exception( - f"Failed to send email (Status { - response.status_code}): {error_message}" - ) - - return response.json() - except requests.RequestException as e: - raise Exception(f"Failed to send email: {str(e)}") - except Exception as e: - raise Exception(f"Failed to send email: {str(e)}") diff --git a/email_service/tests.py b/email_service/tests.py deleted file mode 100644 index a39b155a..00000000 --- a/email_service/tests.py +++ /dev/null @@ -1 +0,0 @@ -# Create your tests here. diff --git a/email_service/urls.py b/email_service/urls.py deleted file mode 100644 index 9479e0a9..00000000 --- a/email_service/urls.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.urls import path -from .views import SendEmailView - -urlpatterns = [ - path("send-email/", SendEmailView.as_view(), name="send-email"), -] diff --git a/email_service/views.py b/email_service/views.py deleted file mode 100644 index 043bda3c..00000000 --- a/email_service/views.py +++ /dev/null @@ -1,49 +0,0 @@ -from rest_framework.views import APIView -from rest_framework.response import Response -from rest_framework import status -from rest_framework.permissions import AllowAny -from django.contrib.sites.shortcuts import get_current_site -from .services import EmailService - - -class SendEmailView(APIView): - permission_classes = [AllowAny] # Allow unauthenticated access - - def post(self, request): - data = request.data - to = data.get("to") - subject = data.get("subject") - text = data.get("text") - from_email = data.get("from_email") # Optional - - if not all([to, subject, text]): - return Response( - { - "error": "Missing required fields", - "required_fields": ["to", "subject", "text"], - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - try: - # Get the current site - site = get_current_site(request) - - # Send email using the site's configuration - response = EmailService.send_email( - to=to, - subject=subject, - text=text, - from_email=from_email, # Will use site's default if None - site=site, - ) - - return Response( - {"message": "Email sent successfully", "response": response}, - status=status.HTTP_200_OK, - ) - - except Exception as e: - return Response( - {"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR - ) diff --git a/fresh-project-status-2025-01-05.md b/fresh-project-status-2025-01-05.md index 772bfb37..3062794b 100644 --- a/fresh-project-status-2025-01-05.md +++ b/fresh-project-status-2025-01-05.md @@ -1,200 +1,243 @@ -# Fresh Project Status - January 5, 2025 +# Fresh Project Status - August 23, 2025 -**Analysis Date:** January 5, 2025 +**Analysis Date:** August 23, 2025 **Analysis Method:** Direct observation of current project state only -**Analyst:** Roo (Fresh perspective, no prior documentation consulted) +**Analyst:** Claude (Fresh perspective, no prior documentation consulted) ## Project Overview ### Project Identity - **Name:** ThrillWiki Django (No React) - **Type:** Django web application for theme park and ride information -- **Location:** `/Volumes/macminissd/Projects/thrillwiki_django_no_react` +- **Location:** `/Users/talor/thrillwiki_django_no_react` ### Current Running State -- **Development Server:** Active on port 8000 -- **Command Used:** `lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver` -- **Package Manager:** UV (Ultraviolet Python package manager) -- **CSS Framework:** Tailwind CSS integration +- **Development Server:** Uses sophisticated startup script at `./scripts/dev_server.sh` +- **Command Used:** `lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; ./scripts/dev_server.sh` +- **Package Manager:** UV (Ultraviolet Python package manager) - pyproject.toml based +- **CSS Framework:** Tailwind CSS with CLI integration +- **Settings Module:** Auto-detecting with `config.django.local` for development -## Technical Stack Observations +## Technical Stack Analysis ### Backend Framework -- **Django:** Python web framework (primary) -- **Database:** PostgreSQL (inferred from pghistory usage) -- **History Tracking:** pghistory library for model change tracking -- **Package Management:** UV instead of pip/poetry +- **Django:** 5.1.6 (Updated from 5.0) +- **Database:** PostgreSQL with PostGIS (GeoDjango features) +- **History Tracking:** django-pghistory 3.5.2 for comprehensive model change tracking +- **Package Management:** UV with pyproject.toml (modern Python dependency management) +- **Python Version:** Requires Python >=3.13 ### Frontend Approach - **No React:** Project explicitly excludes React (per directory name) -- **Tailwind CSS:** For styling -- **HTMX/Alpine.js:** Likely used for interactivity (inferred from Django-focused approach) +- **Tailwind CSS:** Version 4.0.1 with CLI integration +- **HTMX:** Version 1.22.0 for dynamic interactions +- **Autocomplete:** django-htmx-autocomplete for search functionality -### Key Libraries Observed -- `pghistory`: PostgreSQL-based model history tracking -- `django-contenttypes`: Generic foreign keys -- Custom history tracking system with `TrackedModel` base class +### Key Libraries & Versions (Updated) +- **django-pghistory:** 3.5.2 - PostgreSQL-based model history tracking +- **djangorestframework:** 3.15.2 - API framework +- **django-cors-headers:** 4.7.0 - CORS handling +- **django-allauth:** 65.4.1 - Authentication system +- **django-htmx:** 1.22.0 - HTMX integration +- **drf-spectacular:** 0.27.0 - OpenAPI documentation +- **django-silk:** 5.0.0 - Performance profiling +- **django-debug-toolbar:** 4.0.0 - Development debugging ## Current Entity Architecture ### Core Business Entities -#### 1. Operators (`operators/`) -- **Purpose:** Companies that operate theme parks -- **Key Fields:** name, slug, description, website, founded_year, headquarters -- **Relationships:** One-to-many with Parks -- **Status:** Fully implemented with history tracking - -#### 2. Property Owners (`property_owners/`) -- **Purpose:** Companies that own park property (distinct from operators) -- **Key Fields:** name, slug, description, website -- **Relationships:** One-to-many with Parks (optional) -- **Status:** Newly implemented entity - -#### 3. Manufacturers (`manufacturers/`) -- **Purpose:** Companies that manufacture rides -- **Key Fields:** name, slug, description, website, founded_year, headquarters -- **Relationships:** One-to-many with Rides and RideModels -- **Status:** Fully implemented with ride/coaster counting - -#### 4. Parks (`parks/`) +#### 1. Parks (`parks/` app) - **Purpose:** Theme parks and amusement venues -- **Key Relationships:** - - Required: Operator (ForeignKey) - - Optional: PropertyOwner (ForeignKey) - - Contains: Rides, ParkAreas -- **Features:** Location integration, status tracking, photo support -- **Status:** Core entity with complex relationship structure +- **Models:** Park, ParkArea, ParkLocation, ParkReview, Company (aliased as Operator), CompanyHeadquarters +- **Key Features:** + - Advanced location integration with GeoDjango + - Comprehensive filtering and search + - Road trip planning integration + - Performance-optimized querysets +- **Status:** Fully mature implementation with extensive views and API endpoints -#### 5. Rides (`rides/`) +#### 2. Rides (`rides/` app) - **Purpose:** Individual ride installations at parks -- **Key Relationships:** - - Required: Park (ForeignKey) - - Optional: Manufacturer, Designer, RideModel, ParkArea -- **Features:** Detailed statistics, roller coaster specific data -- **Status:** Comprehensive implementation with specialized coaster stats +- **Models:** Ride, RideModel, RollerCoasterStats, RideLocation, RideReview, Company (aliased as Manufacturer) +- **Key Features:** + - Detailed roller coaster statistics + - Category-based organization + - Location tracking + - Review system integration +- **Status:** Comprehensive implementation with specialized coaster data -### Supporting Entities +#### 3. Company Entities (Within Apps) +- **Parks Company:** Aliased as `Operator` for park operation companies +- **Rides Company:** Aliased as `Manufacturer` for ride manufacturing companies +- **Architecture:** Uses model aliases rather than separate apps for clarity +- **Status:** Implemented within existing apps with clear semantic naming -#### 6. Designers (`designers/`) -- **Purpose:** Companies/individuals that design rides -- **Status:** Referenced but not directly observed in open files +### Supporting Systems -#### 7. RideModel (`rides/models.py`) -- **Purpose:** Specific ride types/models (e.g., "B&M Dive Coaster") -- **Relationships:** Manufacturer, multiple Rides -- **Status:** Implemented as part of rides app +#### 4. Accounts (`accounts/` app) +- **Purpose:** User management and authentication +- **Features:** Custom user model, social authentication, profile management +- **Status:** Complete with allauth integration -#### 8. Location System -- **Implementation:** Generic foreign key system -- **Purpose:** Geographic data for parks -- **Status:** Integrated with parks +#### 5. Location (`location/` app) +- **Purpose:** Geographic data and mapping services +- **Features:** GeoDjango integration, geocoding, location search +- **Status:** Integrated with parks and rides for location tracking -## Current Work Context (Based on Open Files) +#### 6. Media (`media/` app) +- **Purpose:** File and photo management +- **Features:** Organized media storage, image handling with EXIF support +- **Status:** Comprehensive media management system -### Active Development Areas -1. **Entity Relationship Migration:** Heavy focus on company-related entities -2. **Admin Interface:** Multiple admin.py files open suggesting admin customization -3. **Form Development:** Parks and rides forms being worked on -4. **Template Development:** Park detail and search result templates -5. **URL Configuration:** Operators URL patterns being developed +#### 7. Core (`core/` app) +- **Purpose:** Shared functionality, middleware, and utilities +- **Features:** Custom middleware, health checks, performance monitoring +- **Status:** Extensive core functionality with monitoring tools -### File Structure Observations +#### 8. Moderation (`moderation/` app) +- **Purpose:** Content moderation and administration +- **Features:** Moderation workflows, admin tools +- **Status:** Integrated moderation system -#### Django Apps Structure -- `accounts/` - User management -- `analytics/` - Usage tracking -- `core/` - Core functionality -- `designers/` - Ride designers -- `email_service/` - Email handling -- `history/` - History display -- `history_tracking/` - Custom history system -- `location/` - Geographic data -- `manufacturers/` - Ride manufacturers -- `media/` - File/photo management -- `moderation/` - Content moderation -- `operators/` - Park operators -- `parks/` - Theme parks -- `property_owners/` - Property ownership -- `reviews/` - User reviews -- `rides/` - Ride information -- `search/` - Search functionality +#### 9. Email Service (`email_service/` app) +- **Purpose:** Email handling and notifications +- **Features:** Custom email backends, notification system +- **Status:** Complete email service implementation -#### Static Assets -- Organized media files by park and ride -- Placeholder images system -- Tailwind CSS integration +## Current Configuration Architecture -#### Testing Infrastructure -- `tests/` directory with e2e and fixtures -- Comprehensive test structure +### Settings Structure +- **Base Settings:** `config/django/base.py` - comprehensive base configuration +- **Local Settings:** `config/django/local.py` - development-optimized settings +- **Production Settings:** `config/django/production.py` - production configuration +- **Auto-Detection:** Smart environment detection in `manage.py` -## Data Model Patterns Observed +### Development Tools Integration +- **Silk Profiler:** Advanced performance profiling with SQL query analysis +- **Debug Toolbar:** Comprehensive debugging information +- **NPlusOne Detection:** Automatic N+1 query detection and warnings +- **Performance Middleware:** Custom performance monitoring +- **Health Checks:** Multi-layered health check system -### History Tracking System -- **Base Class:** `TrackedModel` for all major entities -- **pghistory Integration:** Automatic change tracking -- **Custom Events:** Specialized event models for complex entities -- **Slug History:** Historical slug tracking for URL persistence +### Database & Cache Configuration +- **Database:** PostgreSQL with PostGIS for geographic features +- **Cache:** Redis for production, locmem for development +- **Session Storage:** Redis-backed sessions for performance +- **Query Optimization:** Extensive use of select_related and prefetch_related -### Slug Management -- **Auto-generation:** From name fields using Django's slugify -- **Historical Tracking:** Old slugs preserved for URL redirects -- **Uniqueness:** Enforced at database level +## Implementation Status Analysis -### Relationship Patterns -- **Required Relationships:** Park→Operator, Ride→Park -- **Optional Relationships:** Park→PropertyOwner, Ride→Manufacturer -- **Generic Relations:** Photos, Reviews, Location data -- **Separation of Concerns:** Distinct entities for different business roles +### Completed Features +- **Models:** Fully implemented with history tracking for all core entities +- **Admin Interface:** Comprehensive admin customization with geographic support +- **API:** Complete REST API with OpenAPI documentation +- **Templates:** Sophisticated template system with HTMX integration +- **Search:** Advanced search with autocomplete and filtering +- **Location Services:** Full GeoDjango integration with mapping +- **Authentication:** Complete user management with social auth +- **Performance:** Advanced monitoring and optimization tools + +### Architecture Patterns +- **Service Layer:** Comprehensive service classes for business logic +- **Manager/QuerySet Pattern:** Optimized database queries with custom managers +- **Selector Pattern:** Clean separation of data access logic +- **History Tracking:** Automatic change auditing for all major entities +- **Slug Management:** Intelligent URL-friendly identifiers with history + +### Advanced Features +- **Road Trip Planning:** Sophisticated route planning and optimization +- **Performance Monitoring:** Real-time performance tracking and alerting +- **Health Checks:** Multi-tier health monitoring system +- **API Documentation:** Auto-generated OpenAPI 3.0 documentation +- **Geographic Search:** Advanced location-based search and filtering + +## Development Workflow & Tooling + +### Modern Development Setup +- **UV Package Management:** Fast, modern Python dependency management +- **Auto-detecting Settings:** Intelligent environment detection +- **Development Server Script:** Comprehensive startup automation with: + - Port cleanup and cache clearing + - Database migration checks + - Static file collection + - Tailwind CSS building + - System health checks + - Auto superuser creation + +### Code Quality Tools +- **Black:** Code formatting (version 25.1.0) +- **Flake8:** Linting (version 7.1.1) +- **Pytest:** Testing framework with Django integration +- **Coverage:** Code coverage analysis +- **Type Hints:** Enhanced type checking with stubs + +### Performance & Monitoring +- **Silk Integration:** SQL query profiling and performance analysis +- **Debug Toolbar:** Development debugging with comprehensive panels +- **Custom Middleware:** Performance tracking and query optimization +- **Health Checks:** Database, cache, storage, and custom application checks ## Current Development State -### Implementation Status -- **Models:** Fully implemented for core entities -- **Admin:** In active development -- **Forms:** Being developed for parks and rides -- **Templates:** Basic structure in place -- **URLs:** Routing being configured +### Project Maturity +- **Architecture:** Highly sophisticated with clear separation of concerns +- **Performance:** Production-ready with extensive optimization +- **Testing:** Comprehensive test infrastructure +- **Documentation:** Auto-generated API docs and extensive inline documentation +- **Monitoring:** Enterprise-grade health and performance monitoring -### Technical Debt Observations -- Complex history tracking system suggests ongoing migration -- Multiple similar entity types (operators, property_owners, manufacturers) indicate recent refactoring -- Extensive use of nullable foreign keys suggests data migration challenges +### Technical Sophistication +- **Query Optimization:** Extensive use of select_related, prefetch_related, and custom querysets +- **Caching Strategy:** Multi-tier caching with Redis integration +- **Geographic Features:** Full PostGIS integration for spatial queries +- **API Design:** RESTful APIs with comprehensive documentation +- **Security:** Production-ready security configuration -### Development Workflow -- **UV Package Manager:** Modern Python dependency management -- **Tailwind Integration:** CSS framework properly integrated -- **Development Server:** Sophisticated startup script with cleanup -- **Database:** PostgreSQL with advanced history tracking +### Data Architecture Quality +- **History Tracking:** Comprehensive audit trails for all changes +- **Relationship Integrity:** Well-designed foreign key relationships +- **Performance Optimization:** Database-level optimizations and indexing +- **Geographic Integration:** Sophisticated location-based features +- **Search Capabilities:** Advanced full-text search and filtering -## Next Steps Inference (Based on Current State) +## Infrastructure & Deployment -### Immediate Priorities -1. Complete admin interface development -2. Finalize form implementations -3. Template development for entity detail pages -4. URL pattern completion +### Environment Configuration +- **Environment Variables:** Comprehensive environment-based configuration +- **Settings Modules:** Multiple environment-specific settings +- **Security Configuration:** Production-ready security settings +- **CORS Configuration:** Proper API access configuration -### Technical Priorities -1. Data migration completion (company→specific entity types) -2. History tracking system optimization -3. Search functionality enhancement -4. Media management system completion +### Media & Static Files +- **Static Files:** Whitenoise integration for static file serving +- **Media Management:** Organized media storage with automatic cleanup +- **Image Processing:** EXIF metadata handling and image optimization ## Architecture Quality Assessment -### Strengths -- **Separation of Concerns:** Clear entity boundaries -- **History Tracking:** Comprehensive change auditing -- **Flexibility:** Generic relations for extensibility -- **Modern Tooling:** UV, Tailwind, pghistory +### Major Strengths +- **Production Readiness:** Enterprise-grade architecture with comprehensive monitoring +- **Performance Optimization:** Sophisticated query optimization and caching strategies +- **Developer Experience:** Excellent development tooling and automation +- **Geographic Features:** Advanced PostGIS integration for location-based features +- **API Design:** Well-documented RESTful APIs with OpenAPI integration +- **History Tracking:** Comprehensive audit capabilities +- **Modern Tooling:** UV package management, Tailwind CSS, HTMX integration -### Areas for Attention -- **Complexity:** Multiple similar entities may confuse users -- **Migration State:** Appears to be mid-migration from simpler structure -- **Performance:** History tracking overhead needs monitoring +### Technical Excellence +- **Code Quality:** High-quality codebase with comprehensive testing +- **Architecture Patterns:** Clean implementation of Django best practices +- **Database Design:** Well-normalized schema with proper relationships +- **Security:** Production-ready security configuration +- **Monitoring:** Comprehensive health and performance monitoring + +### Current Focus Areas +- **Continued Optimization:** Performance monitoring and query optimization +- **Feature Enhancement:** Ongoing development of advanced features +- **Geographic Expansion:** Enhanced location-based functionality +- **API Evolution:** Continued API development and documentation --- -**Note:** This analysis is based solely on direct observation of the current project state without consulting any existing documentation or memory bank files. \ No newline at end of file +**Note:** This analysis reflects the project state as of August 23, 2025, showing a significantly matured Django application with enterprise-grade architecture, comprehensive tooling, and production-ready features. The project has evolved from the early development stage described in January 2025 to a sophisticated, well-architected web application. \ No newline at end of file diff --git a/location/admin.py b/location/admin.py deleted file mode 100644 index 8ea113ce..00000000 --- a/location/admin.py +++ /dev/null @@ -1,67 +0,0 @@ -from django.contrib import admin -from .models import Location - -# DEPRECATED: This admin interface is deprecated. -# Location data has been migrated to domain-specific models: -# - ParkLocation in parks.models.location -# - RideLocation in rides.models.location -# - CompanyHeadquarters in parks.models.companies -# -# This admin interface is kept for data migration and cleanup purposes only. - - -@admin.register(Location) -class LocationAdmin(admin.ModelAdmin): - list_display = ( - "name", - "location_type", - "city", - "state", - "country", - "created_at", - ) - list_filter = ("location_type", "country", "state", "city") - search_fields = ("name", "street_address", "city", "state", "country") - readonly_fields = ("created_at", "updated_at", "content_type", "object_id") - - fieldsets = ( - ( - "⚠️ DEPRECATED MODEL", - { - "description": "This model is deprecated. Use domain-specific location models instead.", - "fields": (), - }, - ), - ("Basic Information", {"fields": ("name", "location_type")}), - ("Geographic Coordinates", {"fields": ("latitude", "longitude")}), - ( - "Address", - { - "fields": ( - "street_address", - "city", - "state", - "country", - "postal_code", - ) - }, - ), - ( - "Content Type (Read Only)", - { - "fields": ("content_type", "object_id"), - "classes": ("collapse",), - }, - ), - ( - "Metadata", - {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}, - ), - ) - - def get_queryset(self, request): - return super().get_queryset(request).select_related("content_type") - - def has_add_permission(self, request): - # Prevent creating new generic Location objects - return False diff --git a/location/apps.py b/location/apps.py deleted file mode 100644 index f690cc0f..00000000 --- a/location/apps.py +++ /dev/null @@ -1,8 +0,0 @@ -from django.apps import AppConfig -import os - - -class LocationConfig(AppConfig): - path = os.path.dirname(os.path.abspath(__file__)) - default_auto_field = "django.db.models.BigAutoField" - name = "location" diff --git a/location/forms.py b/location/forms.py deleted file mode 100644 index 9022b5ec..00000000 --- a/location/forms.py +++ /dev/null @@ -1,42 +0,0 @@ -# DEPRECATED: These forms are deprecated and no longer used. -# -# Domain-specific location models now have their own forms: -# - ParkLocationForm in parks.forms (for ParkLocation) -# - RideLocationForm in rides.forms (for RideLocation) -# - CompanyHeadquartersForm in parks.forms (for CompanyHeadquarters) -# -# This file is kept for reference during migration cleanup only. - -from django import forms -from .models import Location - -# NOTE: All classes below are DEPRECATED -# Use domain-specific location forms instead - - -class LocationForm(forms.ModelForm): - """DEPRECATED: Use domain-specific location forms instead""" - - class Meta: - model = Location - fields = [ - "name", - "location_type", - "latitude", - "longitude", - "street_address", - "city", - "state", - "country", - "postal_code", - ] - - -class LocationSearchForm(forms.Form): - """DEPRECATED: Location search functionality has been moved to parks app""" - - query = forms.CharField( - max_length=255, - required=True, - help_text="This form is deprecated. Use location search in the parks app.", - ) diff --git a/location/migrations/0001_initial.py b/location/migrations/0001_initial.py deleted file mode 100644 index f0fb1ce3..00000000 --- a/location/migrations/0001_initial.py +++ /dev/null @@ -1,293 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-13 21:35 - -import django.contrib.gis.db.models.fields -import django.core.validators -import django.db.models.deletion -import pgtrigger.compiler -import pgtrigger.migrations -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("contenttypes", "0002_remove_content_type_name"), - ("pghistory", "0006_delete_aggregateevent"), - ] - - operations = [ - migrations.CreateModel( - name="Location", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("object_id", models.PositiveIntegerField()), - ( - "name", - models.CharField( - help_text="Name of the location (e.g. business name, landmark)", - max_length=255, - ), - ), - ( - "location_type", - models.CharField( - help_text="Type of location (e.g. business, landmark, address)", - max_length=50, - ), - ), - ( - "latitude", - models.DecimalField( - blank=True, - decimal_places=6, - help_text="Latitude coordinate (legacy field)", - max_digits=9, - null=True, - validators=[ - django.core.validators.MinValueValidator(-90), - django.core.validators.MaxValueValidator(90), - ], - ), - ), - ( - "longitude", - models.DecimalField( - blank=True, - decimal_places=6, - help_text="Longitude coordinate (legacy field)", - max_digits=9, - null=True, - validators=[ - django.core.validators.MinValueValidator(-180), - django.core.validators.MaxValueValidator(180), - ], - ), - ), - ( - "point", - django.contrib.gis.db.models.fields.PointField( - blank=True, - help_text="Geographic coordinates as a Point", - null=True, - srid=4326, - ), - ), - ( - "street_address", - models.CharField(blank=True, max_length=255, null=True), - ), - ( - "city", - models.CharField(blank=True, max_length=100, null=True), - ), - ( - "state", - models.CharField( - blank=True, - help_text="State/Region/Province", - max_length=100, - null=True, - ), - ), - ( - "country", - models.CharField(blank=True, max_length=100, null=True), - ), - ( - "postal_code", - models.CharField(blank=True, max_length=20, null=True), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ], - options={ - "ordering": ["name"], - }, - ), - migrations.CreateModel( - name="LocationEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("object_id", models.PositiveIntegerField()), - ( - "name", - models.CharField( - help_text="Name of the location (e.g. business name, landmark)", - max_length=255, - ), - ), - ( - "location_type", - models.CharField( - help_text="Type of location (e.g. business, landmark, address)", - max_length=50, - ), - ), - ( - "latitude", - models.DecimalField( - blank=True, - decimal_places=6, - help_text="Latitude coordinate (legacy field)", - max_digits=9, - null=True, - validators=[ - django.core.validators.MinValueValidator(-90), - django.core.validators.MaxValueValidator(90), - ], - ), - ), - ( - "longitude", - models.DecimalField( - blank=True, - decimal_places=6, - help_text="Longitude coordinate (legacy field)", - max_digits=9, - null=True, - validators=[ - django.core.validators.MinValueValidator(-180), - django.core.validators.MaxValueValidator(180), - ], - ), - ), - ( - "point", - django.contrib.gis.db.models.fields.PointField( - blank=True, - help_text="Geographic coordinates as a Point", - null=True, - srid=4326, - ), - ), - ( - "street_address", - models.CharField(blank=True, max_length=255, null=True), - ), - ( - "city", - models.CharField(blank=True, max_length=100, null=True), - ), - ( - "state", - models.CharField( - blank=True, - help_text="State/Region/Province", - max_length=100, - null=True, - ), - ), - ( - "country", - models.CharField(blank=True, max_length=100, null=True), - ), - ( - "postal_code", - models.CharField(blank=True, max_length=20, null=True), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "content_type", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="contenttypes.contenttype", - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="location.location", - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.AddIndex( - model_name="location", - index=models.Index( - fields=["content_type", "object_id"], - name="location_lo_content_9ee1bd_idx", - ), - ), - migrations.AddIndex( - model_name="location", - index=models.Index(fields=["city"], name="location_lo_city_99f908_idx"), - ), - migrations.AddIndex( - model_name="location", - index=models.Index( - fields=["country"], name="location_lo_country_b75eba_idx" - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="location", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "location_locationevent" ("city", "content_type_id", "country", "created_at", "id", "latitude", "location_type", "longitude", "name", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "point", "postal_code", "state", "street_address", "updated_at") VALUES (NEW."city", NEW."content_type_id", NEW."country", NEW."created_at", NEW."id", NEW."latitude", NEW."location_type", NEW."longitude", NEW."name", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."point", NEW."postal_code", NEW."state", NEW."street_address", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_98cd4", - table="location_location", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="location", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "location_locationevent" ("city", "content_type_id", "country", "created_at", "id", "latitude", "location_type", "longitude", "name", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "point", "postal_code", "state", "street_address", "updated_at") VALUES (NEW."city", NEW."content_type_id", NEW."country", NEW."created_at", NEW."id", NEW."latitude", NEW."location_type", NEW."longitude", NEW."name", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."point", NEW."postal_code", NEW."state", NEW."street_address", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_471d2", - table="location_location", - when="AFTER", - ), - ), - ), - ] diff --git a/location/migrations/0002_add_business_constraints.py b/location/migrations/0002_add_business_constraints.py deleted file mode 100644 index db886de6..00000000 --- a/location/migrations/0002_add_business_constraints.py +++ /dev/null @@ -1,53 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-16 17:42 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("contenttypes", "0002_remove_content_type_name"), - ("location", "0001_initial"), - ] - - operations = [ - migrations.AddConstraint( - model_name="location", - constraint=models.CheckConstraint( - condition=models.Q( - ("latitude__isnull", True), - models.Q(("latitude__gte", -90), ("latitude__lte", 90)), - _connector="OR", - ), - name="location_latitude_range", - violation_error_message="Latitude must be between -90 and 90 degrees", - ), - ), - migrations.AddConstraint( - model_name="location", - constraint=models.CheckConstraint( - condition=models.Q( - ("longitude__isnull", True), - models.Q(("longitude__gte", -180), ("longitude__lte", 180)), - _connector="OR", - ), - name="location_longitude_range", - violation_error_message="Longitude must be between -180 and 180 degrees", - ), - ), - migrations.AddConstraint( - model_name="location", - constraint=models.CheckConstraint( - condition=models.Q( - models.Q(("latitude__isnull", True), ("longitude__isnull", True)), - models.Q( - ("latitude__isnull", False), - ("longitude__isnull", False), - ), - _connector="OR", - ), - name="location_coordinates_complete", - violation_error_message="Both latitude and longitude must be provided together", - ), - ), - ] diff --git a/location/migrations/__init__.py b/location/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/location/models.py b/location/models.py deleted file mode 100644 index 00a0d68e..00000000 --- a/location/models.py +++ /dev/null @@ -1,175 +0,0 @@ -from django.contrib.gis.db import models as gis_models -from django.db import models -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.contenttypes.models import ContentType -from django.core.validators import MinValueValidator, MaxValueValidator -from django.contrib.gis.geos import Point -import pghistory -from core.history import TrackedModel - - -@pghistory.track() -class Location(TrackedModel): - """ - A generic location model that can be associated with any model - using GenericForeignKey. Stores detailed location information - including coordinates and address components. - """ - - # Generic relation fields - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey("content_type", "object_id") - - # Location name and type - name = models.CharField( - max_length=255, - help_text="Name of the location (e.g. business name, landmark)", - ) - location_type = models.CharField( - max_length=50, - help_text="Type of location (e.g. business, landmark, address)", - ) - - # Geographic coordinates - latitude = models.DecimalField( - max_digits=9, - decimal_places=6, - validators=[MinValueValidator(-90), MaxValueValidator(90)], - help_text="Latitude coordinate (legacy field)", - null=True, - blank=True, - ) - longitude = models.DecimalField( - max_digits=9, - decimal_places=6, - validators=[MinValueValidator(-180), MaxValueValidator(180)], - help_text="Longitude coordinate (legacy field)", - null=True, - blank=True, - ) - - # GeoDjango point field - point = gis_models.PointField( - srid=4326, # WGS84 coordinate system - null=True, - blank=True, - help_text="Geographic coordinates as a Point", - ) - - # Address components - street_address = models.CharField(max_length=255, blank=True, null=True) - city = models.CharField(max_length=100, blank=True, null=True) - state = models.CharField( - max_length=100, - blank=True, - null=True, - help_text="State/Region/Province", - ) - country = models.CharField(max_length=100, blank=True, null=True) - postal_code = models.CharField(max_length=20, blank=True, null=True) - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - indexes = [ - models.Index(fields=["content_type", "object_id"]), - models.Index(fields=["city"]), - models.Index(fields=["country"]), - ] - ordering = ["name"] - constraints = [ - # Business rule: Latitude must be within valid range (-90 to 90) - models.CheckConstraint( - name="location_latitude_range", - check=models.Q(latitude__isnull=True) - | (models.Q(latitude__gte=-90) & models.Q(latitude__lte=90)), - violation_error_message="Latitude must be between -90 and 90 degrees", - ), - # Business rule: Longitude must be within valid range (-180 to 180) - models.CheckConstraint( - name="location_longitude_range", - check=models.Q(longitude__isnull=True) - | (models.Q(longitude__gte=-180) & models.Q(longitude__lte=180)), - violation_error_message="Longitude must be between -180 and 180 degrees", - ), - # Business rule: If coordinates are provided, both lat and lng must - # be present - models.CheckConstraint( - name="location_coordinates_complete", - check=models.Q(latitude__isnull=True, longitude__isnull=True) - | models.Q(latitude__isnull=False, longitude__isnull=False), - violation_error_message="Both latitude and longitude must be provided together", - ), - ] - - def __str__(self): - location_parts = [] - if self.city: - location_parts.append(self.city) - if self.country: - location_parts.append(self.country) - location_str = ( - ", ".join(location_parts) if location_parts else "Unknown location" - ) - return f"{self.name} ({location_str})" - - def save(self, *args, **kwargs): - # Sync point field with lat/lon fields for backward compatibility - if self.latitude is not None and self.longitude is not None and not self.point: - self.point = Point(float(self.longitude), float(self.latitude)) - elif self.point and (self.latitude is None or self.longitude is None): - self.longitude = self.point.x - self.latitude = self.point.y - super().save(*args, **kwargs) - - def get_formatted_address(self): - """Returns a formatted address string""" - components = [] - if self.street_address: - components.append(self.street_address) - if self.city: - components.append(self.city) - if self.state: - components.append(self.state) - if self.postal_code: - components.append(self.postal_code) - if self.country: - components.append(self.country) - return ", ".join(components) if components else "" - - @property - def coordinates(self): - """Returns coordinates as a tuple""" - if self.point: - # Returns (latitude, longitude) - return (self.point.y, self.point.x) - elif self.latitude is not None and self.longitude is not None: - return (float(self.latitude), float(self.longitude)) - return None - - def distance_to(self, other_location): - """ - Calculate the distance to another location in meters. - Returns None if either location is missing coordinates. - """ - if not self.point or not other_location.point: - return None - return self.point.distance(other_location.point) * 100000 # Convert to meters - - def nearby_locations(self, distance_km=10): - """ - Find locations within specified distance in kilometers. - Returns a queryset of nearby Location objects. - """ - if not self.point: - return Location.objects.none() - - return Location.objects.filter( - point__distance_lte=( - self.point, - distance_km * 1000, - ) # Convert km to meters - ).exclude(pk=self.pk) diff --git a/location/tests.py b/location/tests.py deleted file mode 100644 index 3ceca0dc..00000000 --- a/location/tests.py +++ /dev/null @@ -1,181 +0,0 @@ -from django.test import TestCase -from django.contrib.contenttypes.models import ContentType -from django.contrib.gis.geos import Point -from .models import Location -from parks.models import Park, Company as Operator - - -class LocationModelTests(TestCase): - def setUp(self): - # Create test company - self.operator = Operator.objects.create( - name="Test Operator", website="http://example.com" - ) - - # Create test park - self.park = Park.objects.create( - name="Test Park", owner=self.operator, status="OPERATING" - ) - - # Create test location for company - self.operator_location = Location.objects.create( - content_type=ContentType.objects.get_for_model(Operator), - object_id=self.operator.pk, - name="Test Operator HQ", - location_type="business", - street_address="123 Operator St", - city="Operator City", - state="CS", - country="Test Country", - postal_code="12345", - point=Point(-118.2437, 34.0522), # Los Angeles coordinates - ) - - # Create test location for park - self.park_location = Location.objects.create( - content_type=ContentType.objects.get_for_model(Park), - object_id=self.park.pk, - name="Test Park Location", - location_type="park", - street_address="456 Park Ave", - city="Park City", - state="PC", - country="Test Country", - postal_code="67890", - point=Point(-111.8910, 40.7608), # Park City coordinates - ) - - def test_location_creation(self): - """Test location instance creation and field values""" - # Test company location - self.assertEqual(self.operator_location.name, "Test Operator HQ") - self.assertEqual(self.operator_location.location_type, "business") - self.assertEqual(self.operator_location.street_address, "123 Operator St") - self.assertEqual(self.operator_location.city, "Operator City") - self.assertEqual(self.operator_location.state, "CS") - self.assertEqual(self.operator_location.country, "Test Country") - self.assertEqual(self.operator_location.postal_code, "12345") - self.assertIsNotNone(self.operator_location.point) - - # Test park location - self.assertEqual(self.park_location.name, "Test Park Location") - self.assertEqual(self.park_location.location_type, "park") - self.assertEqual(self.park_location.street_address, "456 Park Ave") - self.assertEqual(self.park_location.city, "Park City") - self.assertEqual(self.park_location.state, "PC") - self.assertEqual(self.park_location.country, "Test Country") - self.assertEqual(self.park_location.postal_code, "67890") - self.assertIsNotNone(self.park_location.point) - - def test_location_str_representation(self): - """Test string representation of location""" - expected_company_str = "Test Operator HQ (Operator City, Test Country)" - self.assertEqual(str(self.operator_location), expected_company_str) - - expected_park_str = "Test Park Location (Park City, Test Country)" - self.assertEqual(str(self.park_location), expected_park_str) - - def test_get_formatted_address(self): - """Test get_formatted_address method""" - expected_address = "123 Operator St, Operator City, CS, 12345, Test Country" - self.assertEqual( - self.operator_location.get_formatted_address(), expected_address - ) - - def test_point_coordinates(self): - """Test point coordinates""" - # Test company location point - self.assertIsNotNone(self.operator_location.point) - self.assertAlmostEqual( - self.operator_location.point.y, 34.0522, places=4 - ) # latitude - self.assertAlmostEqual( - self.operator_location.point.x, -118.2437, places=4 - ) # longitude - - # Test park location point - self.assertIsNotNone(self.park_location.point) - self.assertAlmostEqual( - self.park_location.point.y, 40.7608, places=4 - ) # latitude - self.assertAlmostEqual( - self.park_location.point.x, -111.8910, places=4 - ) # longitude - - def test_coordinates_property(self): - """Test coordinates property""" - company_coords = self.operator_location.coordinates - self.assertIsNotNone(company_coords) - self.assertAlmostEqual(company_coords[0], 34.0522, places=4) # latitude - self.assertAlmostEqual(company_coords[1], -118.2437, places=4) # longitude - - park_coords = self.park_location.coordinates - self.assertIsNotNone(park_coords) - self.assertAlmostEqual(park_coords[0], 40.7608, places=4) # latitude - self.assertAlmostEqual(park_coords[1], -111.8910, places=4) # longitude - - def test_distance_calculation(self): - """Test distance_to method""" - distance = self.operator_location.distance_to(self.park_location) - self.assertIsNotNone(distance) - self.assertGreater(distance, 0) - - def test_nearby_locations(self): - """Test nearby_locations method""" - # Create another location near the company location - nearby_location = Location.objects.create( - content_type=ContentType.objects.get_for_model(Operator), - object_id=self.operator.pk, - name="Nearby Location", - location_type="business", - street_address="789 Nearby St", - city="Operator City", - country="Test Country", - point=Point(-118.2438, 34.0523), # Very close to company location - ) - - nearby = self.operator_location.nearby_locations(distance_km=1) - self.assertEqual(nearby.count(), 1) - self.assertEqual(nearby.first(), nearby_location) - - def test_content_type_relations(self): - """Test generic relations work correctly""" - # Test company location relation - company_location = Location.objects.get( - content_type=ContentType.objects.get_for_model(Operator), - object_id=self.operator.pk, - ) - self.assertEqual(company_location, self.operator_location) - - # Test park location relation - park_location = Location.objects.get( - content_type=ContentType.objects.get_for_model(Park), - object_id=self.park.pk, - ) - self.assertEqual(park_location, self.park_location) - - def test_location_updates(self): - """Test location updates""" - # Update company location - self.operator_location.street_address = "Updated Address" - self.operator_location.city = "Updated City" - self.operator_location.save() - - updated_location = Location.objects.get(pk=self.operator_location.pk) - self.assertEqual(updated_location.street_address, "Updated Address") - self.assertEqual(updated_location.city, "Updated City") - - def test_point_sync_with_lat_lon(self): - """Test point synchronization with latitude/longitude fields""" - location = Location.objects.create( - content_type=ContentType.objects.get_for_model(Operator), - object_id=self.operator.pk, - name="Test Sync Location", - location_type="business", - latitude=34.0522, - longitude=-118.2437, - ) - - self.assertIsNotNone(location.point) - self.assertAlmostEqual(location.point.y, 34.0522, places=4) - self.assertAlmostEqual(location.point.x, -118.2437, places=4) diff --git a/location/urls.py b/location/urls.py deleted file mode 100644 index c96bf7f4..00000000 --- a/location/urls.py +++ /dev/null @@ -1,31 +0,0 @@ -# DEPRECATED: These URLs are deprecated and no longer used. -# -# Location search functionality has been moved to the parks app: -# - /parks/search/location/ (replaces /location/search/) -# - /parks/search/reverse-geocode/ (replaces /location/reverse-geocode/) -# -# Domain-specific location models are managed through their respective apps: -# - Parks app for ParkLocation -# - Rides app for RideLocation -# - Parks app for CompanyHeadquarters -# -# This file is kept for reference during migration cleanup only. - -from django.urls import path -from . import views - -app_name = "location" - -# NOTE: All URLs below are DEPRECATED -# The location app URLs should not be included in the main URLconf - -urlpatterns = [ - # DEPRECATED: Use /parks/search/location/ instead - path("search/", views.LocationSearchView.as_view(), name="search"), - # DEPRECATED: Use /parks/search/reverse-geocode/ instead - path("reverse-geocode/", views.reverse_geocode, name="reverse_geocode"), - # DEPRECATED: Use domain-specific location models instead - path("create/", views.LocationCreateView.as_view(), name="create"), - path("/update/", views.LocationUpdateView.as_view(), name="update"), - path("/delete/", views.LocationDeleteView.as_view(), name="delete"), -] diff --git a/location/views.py b/location/views.py deleted file mode 100644 index ef9d67f1..00000000 --- a/location/views.py +++ /dev/null @@ -1,48 +0,0 @@ -# DEPRECATED: These views are deprecated and no longer used. -# -# Location search functionality has been moved to the parks app: -# - parks.views.location_search -# - parks.views.reverse_geocode -# -# Domain-specific location models are now used instead of the generic Location model: -# - ParkLocation in parks.models.location -# - RideLocation in rides.models.location -# - CompanyHeadquarters in parks.models.companies -# -# This file is kept for reference during migration cleanup only. - -from django.views.generic import View -from django.http import JsonResponse -from django.contrib.auth.mixins import LoginRequiredMixin -from django.views.decorators.http import require_http_methods - - -# NOTE: All classes and functions below are DEPRECATED -# Use the equivalent functionality in the parks app instead - - -class LocationSearchView(View): - """DEPRECATED: Use parks.views.location_search instead""" - - -class LocationCreateView(LoginRequiredMixin, View): - """DEPRECATED: Use domain-specific location models instead""" - - -class LocationUpdateView(LoginRequiredMixin, View): - """DEPRECATED: Use domain-specific location models instead""" - - -class LocationDeleteView(LoginRequiredMixin, View): - """DEPRECATED: Use domain-specific location models instead""" - - -@require_http_methods(["GET"]) -def reverse_geocode(request): - """DEPRECATED: Use parks.views.reverse_geocode instead""" - return JsonResponse( - { - "error": "This endpoint is deprecated. Use /parks/search/reverse-geocode/ instead" - }, - status=410, - ) diff --git a/manage.py b/manage.py deleted file mode 100755 index 2bab4edc..00000000 --- a/manage.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env python -"""Django's command-line utility for administrative tasks.""" -import os -import sys - - -def main(): - """Run administrative tasks.""" - if "test" in sys.argv and "accounts" in sys.argv: - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.test_accounts") - elif "test" in sys.argv: - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.test") - else: - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.local") - try: - from django.core.management import execute_from_command_line - except ImportError as exc: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) from exc - execute_from_command_line(sys.argv) - - -if __name__ == "__main__": - main() diff --git a/media/__init__.py b/media/__init__.py deleted file mode 100644 index a4028f98..00000000 --- a/media/__init__.py +++ /dev/null @@ -1 +0,0 @@ -default_app_config = "media.apps.MediaConfig" diff --git a/media/admin.py b/media/admin.py deleted file mode 100644 index 1258388a..00000000 --- a/media/admin.py +++ /dev/null @@ -1,28 +0,0 @@ -from django.contrib import admin -from django.utils.html import format_html -from .models import Photo - - -@admin.register(Photo) -class PhotoAdmin(admin.ModelAdmin): - list_display = ( - "thumbnail_preview", - "content_type", - "content_object", - "caption", - "is_primary", - "created_at", - ) - list_filter = ("content_type", "is_primary", "created_at") - search_fields = ("caption", "alt_text") - readonly_fields = ("thumbnail_preview",) - - def thumbnail_preview(self, obj): - if obj.image: - return format_html( - '', - obj.image.url, - ) - return "No image" - - thumbnail_preview.short_description = "Thumbnail" diff --git a/media/apps.py b/media/apps.py deleted file mode 100644 index 320e6786..00000000 --- a/media/apps.py +++ /dev/null @@ -1,34 +0,0 @@ -from django.apps import AppConfig -from django.db.models.signals import post_migrate - - -def create_photo_permissions(sender, **kwargs): - """Create custom permissions for photos""" - from django.contrib.auth.models import Permission - from django.contrib.contenttypes.models import ContentType - from media.models import Photo - - content_type = ContentType.objects.get_for_model(Photo) - Permission.objects.get_or_create( - codename="add_photo", - name="Can add photo", - content_type=content_type, - ) - Permission.objects.get_or_create( - codename="change_photo", - name="Can change photo", - content_type=content_type, - ) - Permission.objects.get_or_create( - codename="delete_photo", - name="Can delete photo", - content_type=content_type, - ) - - -class MediaConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "media" - - def ready(self): - post_migrate.connect(create_photo_permissions, sender=self) diff --git a/media/avatars/loopy.png b/media/avatars/loopy.png deleted file mode 100644 index fbcebfae..00000000 Binary files a/media/avatars/loopy.png and /dev/null differ diff --git a/media/management/commands/download_photos.py b/media/management/commands/download_photos.py deleted file mode 100644 index 4c24fd38..00000000 --- a/media/management/commands/download_photos.py +++ /dev/null @@ -1,139 +0,0 @@ -import requests -from django.core.management.base import BaseCommand -from media.models import Photo -from parks.models import Park -from rides.models import Ride -from django.contrib.contenttypes.models import ContentType -import json -from django.core.files.base import ContentFile - - -class Command(BaseCommand): - help = "Download photos from seed data URLs" - - def handle(self, *args, **kwargs): - self.stdout.write("Downloading photos from seed data...") - - # Read seed data - with open("parks/management/commands/seed_data.json", "r") as f: - seed_data = json.load(f) - - park_content_type = ContentType.objects.get_for_model(Park) - ride_content_type = ContentType.objects.get_for_model(Ride) - - # Process parks and their photos - for park_data in seed_data["parks"]: - try: - park = Park.objects.get(name=park_data["name"]) - - # Download park photos - for idx, photo_url in enumerate(park_data["photos"], 1): - try: - # Download image - self.stdout.write(f"Downloading from URL: {photo_url}") - response = requests.get(photo_url, timeout=60) - if response.status_code == 200: - # Delete any existing photos for this park - Photo.objects.filter( - content_type=park_content_type, - object_id=park.id, - ).delete() - - # Create new photo record - photo = Photo( - content_type=park_content_type, - object_id=park.id, - is_primary=idx == 1, - ) - - # Save image content - photo.image.save( - f"{park.slug}_{idx}.jpg", - ContentFile(response.content), - save=False, - ) - photo.save() - - self.stdout.write( - f"Downloaded photo for { - park.name}: { - photo.image.name}" - ) - self.stdout.write( - f"Database record created with ID: {photo.id}" - ) - else: - self.stdout.write( - f"Error downloading image. Status code: { - response.status_code}" - ) - - except Exception as e: - self.stdout.write( - f"Error downloading park photo: { - str(e)}" - ) - - # Process rides and their photos - for ride_data in park_data["rides"]: - try: - ride = Ride.objects.get(name=ride_data["name"], park=park) - - # Download ride photos - for idx, photo_url in enumerate(ride_data["photos"], 1): - try: - # Download image - self.stdout.write(f"Downloading from URL: {photo_url}") - response = requests.get(photo_url, timeout=60) - if response.status_code == 200: - # Delete any existing photos for this ride - Photo.objects.filter( - content_type=ride_content_type, - object_id=ride.id, - ).delete() - - # Create new photo record - photo = Photo( - content_type=ride_content_type, - object_id=ride.id, - is_primary=idx == 1, - ) - - # Save image content - photo.image.save( - f"{ride.slug}_{idx}.jpg", - ContentFile(response.content), - save=False, - ) - photo.save() - - self.stdout.write( - f"Downloaded photo for { - ride.name}: { - photo.image.name}" - ) - self.stdout.write( - f"Database record created with ID: { - photo.id}" - ) - else: - self.stdout.write( - f"Error downloading image. Status code: { - response.status_code}" - ) - - except Exception as e: - self.stdout.write( - f"Error downloading ride photo: {str(e)}" - ) - - except Ride.DoesNotExist: - self.stdout.write( - f'Ride not found: { - ride_data["name"]}' - ) - - except Park.DoesNotExist: - self.stdout.write(f'Park not found: {park_data["name"]}') - - self.stdout.write("Finished downloading photos") diff --git a/media/management/commands/fix_photo_paths.py b/media/management/commands/fix_photo_paths.py deleted file mode 100644 index 7a41e892..00000000 --- a/media/management/commands/fix_photo_paths.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -from django.core.management.base import BaseCommand -from media.models import Photo -from django.db import transaction - - -class Command(BaseCommand): - help = "Fix photo paths in database to match actual file locations" - - def handle(self, *args, **kwargs): - self.stdout.write("Fixing photo paths in database...") - - # Get all photos - photos = Photo.objects.all() - - for photo in photos: - try: - with transaction.atomic(): - # Get current file path - current_name = photo.image.name - - # Remove any 'media/' prefix if it exists - if current_name.startswith("media/"): - # Remove 'media/' prefix - current_name = current_name[6:] - - parts = current_name.split("/") - - if len(parts) >= 2: - content_type = parts[0] # 'park' or 'ride' - identifier = parts[1] # e.g., 'alton-towers' - - # Look for files in the media directory - media_dir = os.path.join("media", content_type, identifier) - if os.path.exists(media_dir): - files = [ - f - for f in os.listdir(media_dir) - if not f.startswith(".") # Skip hidden files - and not f.startswith("tmp") # Skip temp files - and os.path.isfile(os.path.join(media_dir, f)) - ] - - if files: - # Get the first file and update the database - # record - file_path = os.path.join( - content_type, identifier, files[0] - ) - if os.path.exists(os.path.join("media", file_path)): - photo.image.name = file_path - photo.save() - self.stdout.write( - f"Updated path for photo { - photo.id} to {file_path}" - ) - else: - self.stdout.write( - f"File not found for photo { - photo.id}: {file_path}" - ) - else: - self.stdout.write( - f"No files found in directory for photo { - photo.id}: {media_dir}" - ) - else: - self.stdout.write( - f"Directory not found for photo { - photo.id}: {media_dir}" - ) - - except Exception as e: - self.stdout.write(f"Error updating photo {photo.id}: {str(e)}") - continue - - self.stdout.write("Finished fixing photo paths") diff --git a/media/management/commands/move_photos.py b/media/management/commands/move_photos.py deleted file mode 100644 index 1368f22d..00000000 --- a/media/management/commands/move_photos.py +++ /dev/null @@ -1,116 +0,0 @@ -import os -from django.core.management.base import BaseCommand -from media.models import Photo -from django.conf import settings -import shutil - - -class Command(BaseCommand): - help = "Move photo files to their normalized locations" - - def handle(self, *args, **kwargs): - self.stdout.write("Moving photo files to normalized locations...") - - # Get all photos - photos = Photo.objects.all() - - # Track processed files to clean up later - processed_files = set() - - for photo in photos: - try: - # Get current file path - current_name = photo.image.name - current_path = os.path.join(settings.MEDIA_ROOT, current_name) - - # Try to find the actual file - if not os.path.exists(current_path): - # Check if file exists in the old location structure - parts = current_name.split("/") - if len(parts) >= 2: - content_type = parts[0] # 'park' or 'ride' - identifier = parts[1] # e.g., 'alton-towers' - - # Look for any files in that directory - old_dir = os.path.join( - settings.MEDIA_ROOT, content_type, identifier - ) - if os.path.exists(old_dir): - files = [ - f - for f in os.listdir(old_dir) - if not f.startswith(".") # Skip hidden files - and not f.startswith("tmp") # Skip temp files - and os.path.isfile(os.path.join(old_dir, f)) - ] - if files: - current_path = os.path.join(old_dir, files[0]) - - # Skip if file still not found - if not os.path.exists(current_path): - self.stdout.write(f"Skipping {current_name} - file not found") - continue - - # Get content type and object - content_type_model = photo.content_type.model - obj = photo.content_object - identifier = getattr(obj, "slug", obj.id) - - # Get photo number - photo_number = Photo.objects.filter( - content_type=photo.content_type, - object_id=photo.object_id, - created_at__lte=photo.created_at, - ).count() - - # Create new filename - _, ext = os.path.splitext(current_path) - if not ext: - ext = ".jpg" - ext = ext.lower() - new_filename = f"{identifier}_{photo_number}{ext}" - - # Create new path - new_relative_path = f"{content_type_model}/{identifier}/{new_filename}" - new_full_path = os.path.join(settings.MEDIA_ROOT, new_relative_path) - - # Create directory if it doesn't exist - os.makedirs(os.path.dirname(new_full_path), exist_ok=True) - - # Move the file - if current_path != new_full_path: - shutil.copy2( - current_path, new_full_path - ) # Use copy2 to preserve metadata - processed_files.add(current_path) - else: - processed_files.add(current_path) - - # Update database - photo.image.name = new_relative_path - photo.save() - - self.stdout.write(f"Moved {current_name} to {new_relative_path}") - - except Exception as e: - self.stdout.write(f"Error moving photo {photo.id}: {str(e)}") - continue - - # Clean up old files - self.stdout.write("Cleaning up old files...") - for content_type in ["park", "ride"]: - base_dir = os.path.join(settings.MEDIA_ROOT, content_type) - if os.path.exists(base_dir): - for root, dirs, files in os.walk(base_dir): - for file in files: - file_path = os.path.join(root, file) - if file_path not in processed_files: - try: - os.remove(file_path) - self.stdout.write(f"Removed old file: {file_path}") - except Exception as e: - self.stdout.write( - f"Error removing {file_path}: {str(e)}" - ) - - self.stdout.write("Finished moving photo files and cleaning up") diff --git a/media/migrations/0001_initial.py b/media/migrations/0001_initial.py deleted file mode 100644 index 11ccad9a..00000000 --- a/media/migrations/0001_initial.py +++ /dev/null @@ -1,179 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-13 21:35 - -import django.db.models.deletion -import media.models -import media.storage -import pgtrigger.compiler -import pgtrigger.migrations -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("contenttypes", "0002_remove_content_type_name"), - ("pghistory", "0006_delete_aggregateevent"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="Photo", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "image", - models.ImageField( - max_length=255, - storage=media.storage.MediaStorage(), - upload_to=media.models.photo_upload_path, - ), - ), - ("caption", models.CharField(blank=True, max_length=255)), - ("alt_text", models.CharField(blank=True, max_length=255)), - ("is_primary", models.BooleanField(default=False)), - ("is_approved", models.BooleanField(default=False)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("date_taken", models.DateTimeField(blank=True, null=True)), - ("object_id", models.PositiveIntegerField()), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ( - "uploaded_by", - models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="uploaded_photos", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "ordering": ["-is_primary", "-created_at"], - }, - ), - migrations.CreateModel( - name="PhotoEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ( - "image", - models.ImageField( - max_length=255, - storage=media.storage.MediaStorage(), - upload_to=media.models.photo_upload_path, - ), - ), - ("caption", models.CharField(blank=True, max_length=255)), - ("alt_text", models.CharField(blank=True, max_length=255)), - ("is_primary", models.BooleanField(default=False)), - ("is_approved", models.BooleanField(default=False)), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("date_taken", models.DateTimeField(blank=True, null=True)), - ("object_id", models.PositiveIntegerField()), - ( - "content_type", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="contenttypes.contenttype", - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="media.photo", - ), - ), - ( - "uploaded_by", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.AddIndex( - model_name="photo", - index=models.Index( - fields=["content_type", "object_id"], - name="media_photo_content_0187f5_idx", - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="photo", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "media_photoevent" ("alt_text", "caption", "content_type_id", "created_at", "date_taken", "id", "image", "is_approved", "is_primary", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated_at", "uploaded_by_id") VALUES (NEW."alt_text", NEW."caption", NEW."content_type_id", NEW."created_at", NEW."date_taken", NEW."id", NEW."image", NEW."is_approved", NEW."is_primary", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."updated_at", NEW."uploaded_by_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_e1ca0", - table="media_photo", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="photo", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "media_photoevent" ("alt_text", "caption", "content_type_id", "created_at", "date_taken", "id", "image", "is_approved", "is_primary", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated_at", "uploaded_by_id") VALUES (NEW."alt_text", NEW."caption", NEW."content_type_id", NEW."created_at", NEW."date_taken", NEW."id", NEW."image", NEW."is_approved", NEW."is_primary", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."updated_at", NEW."uploaded_by_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_6ff7d", - table="media_photo", - when="AFTER", - ), - ), - ), - ] diff --git a/media/migrations/__init__.py b/media/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/media/models.py b/media/models.py deleted file mode 100644 index 2f9f82f2..00000000 --- a/media/models.py +++ /dev/null @@ -1,119 +0,0 @@ -from typing import Any, Optional, cast -from django.db import models -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.contenttypes.models import ContentType -from django.conf import settings -from PIL import Image, ExifTags -from datetime import datetime -from .storage import MediaStorage -from rides.models import Ride -from django.utils import timezone -from core.history import TrackedModel -import pghistory - - -def photo_upload_path(instance: models.Model, filename: str) -> str: - """Generate upload path for photos using normalized filenames""" - # Get the content type and object - photo = cast(Photo, instance) - content_type = photo.content_type.model - obj = photo.content_object - - if obj is None: - raise ValueError("Content object cannot be None") - - # Get object identifier (slug or id) - identifier = getattr(obj, "slug", None) - if identifier is None: - identifier = obj.pk # Use pk instead of id as it's guaranteed to exist - - # Create normalized filename - always use .jpg extension - base_filename = f"{identifier}.jpg" - - # If it's a ride photo, store it under the park's directory - if content_type == "ride": - ride = cast(Ride, obj) - return f"park/{ride.park.slug}/{identifier}/{base_filename}" - - # For park photos, store directly in park directory - return f"park/{identifier}/{base_filename}" - - -@pghistory.track() -class Photo(TrackedModel): - """Generic photo model that can be attached to any model""" - - image = models.ImageField( - upload_to=photo_upload_path, # type: ignore[arg-type] - max_length=255, - storage=MediaStorage(), - ) - caption = models.CharField(max_length=255, blank=True) - alt_text = models.CharField(max_length=255, blank=True) - is_primary = models.BooleanField(default=False) - is_approved = models.BooleanField(default=False) # New field for approval status - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - date_taken = models.DateTimeField(null=True, blank=True) - uploaded_by = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.SET_NULL, - null=True, - related_name="uploaded_photos", - ) - - # Generic foreign key fields - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey("content_type", "object_id") - - class Meta: - ordering = ["-is_primary", "-created_at"] - indexes = [ - models.Index(fields=["content_type", "object_id"]), - ] - - def __str__(self) -> str: - return f"{self.content_type} - {self.content_object} - {self.caption or 'No caption'}" - - def extract_exif_date(self) -> Optional[datetime]: - """Extract the date taken from image EXIF data""" - try: - with Image.open(self.image) as img: - exif = img.getexif() - if exif: - # Find the DateTime tag ID - for tag_id in ExifTags.TAGS: - if ExifTags.TAGS[tag_id] == "DateTimeOriginal": - if tag_id in exif: - # EXIF dates are typically in format: - # '2024:02:15 14:30:00' - date_str = exif[tag_id] - return datetime.strptime(date_str, "%Y:%m:%d %H:%M:%S") - return None - except Exception: - return None - - def save(self, *args: Any, **kwargs: Any) -> None: - # Extract EXIF date if this is a new photo - if not self.pk and not self.date_taken: - self.date_taken = self.extract_exif_date() - - # Set default caption if not provided - if not self.caption and self.uploaded_by: - current_time = timezone.now() - self.caption = f"Uploaded by { - self.uploaded_by.username} on { - current_time.strftime('%B %d, %Y at %I:%M %p')}" - - # If this is marked as primary, unmark other primary photos - if self.is_primary: - Photo.objects.filter( - content_type=self.content_type, - object_id=self.object_id, - is_primary=True, - ).exclude(pk=self.pk).update( - is_primary=False - ) # Use pk instead of id - - super().save(*args, **kwargs) diff --git a/media/park/alton-towers/alton-towers_1.jpg b/media/park/alton-towers/alton-towers_1.jpg deleted file mode 100644 index 26b135bb..00000000 Binary files a/media/park/alton-towers/alton-towers_1.jpg and /dev/null differ diff --git a/media/park/alton-towers/nemesis/nemesis_1.jpg b/media/park/alton-towers/nemesis/nemesis_1.jpg deleted file mode 100644 index 1f063457..00000000 Binary files a/media/park/alton-towers/nemesis/nemesis_1.jpg and /dev/null differ diff --git a/media/park/alton-towers/oblivion/oblivion_1.jpg b/media/park/alton-towers/oblivion/oblivion_1.jpg deleted file mode 100644 index affc9604..00000000 Binary files a/media/park/alton-towers/oblivion/oblivion_1.jpg and /dev/null differ diff --git a/media/park/cedar-point/cedar-point_1.jpg b/media/park/cedar-point/cedar-point_1.jpg deleted file mode 100644 index 746c342a..00000000 Binary files a/media/park/cedar-point/cedar-point_1.jpg and /dev/null differ diff --git a/media/park/cedar-point/maverick/maverick_1.jpg b/media/park/cedar-point/maverick/maverick_1.jpg deleted file mode 100644 index a2ffa77c..00000000 Binary files a/media/park/cedar-point/maverick/maverick_1.jpg and /dev/null differ diff --git a/media/park/cedar-point/millennium-force/millennium-force_1.jpg b/media/park/cedar-point/millennium-force/millennium-force_1.jpg deleted file mode 100644 index affc9604..00000000 Binary files a/media/park/cedar-point/millennium-force/millennium-force_1.jpg and /dev/null differ diff --git a/media/park/cedar-point/steel-vengeance/steel-vengeance_1.jpg b/media/park/cedar-point/steel-vengeance/steel-vengeance_1.jpg deleted file mode 100644 index 1f063457..00000000 Binary files a/media/park/cedar-point/steel-vengeance/steel-vengeance_1.jpg and /dev/null differ diff --git a/media/park/cedar-point/top-thrill-dragster/top-thrill-dragster_1.jpg b/media/park/cedar-point/top-thrill-dragster/top-thrill-dragster_1.jpg deleted file mode 100644 index d1ecd015..00000000 Binary files a/media/park/cedar-point/top-thrill-dragster/top-thrill-dragster_1.jpg and /dev/null differ diff --git a/media/park/europa-park/blue-fire/blue-fire_1.jpg b/media/park/europa-park/blue-fire/blue-fire_1.jpg deleted file mode 100644 index 4f6f9881..00000000 Binary files a/media/park/europa-park/blue-fire/blue-fire_1.jpg and /dev/null differ diff --git a/media/park/europa-park/europa-park_1.jpg b/media/park/europa-park/europa-park_1.jpg deleted file mode 100644 index 746c342a..00000000 Binary files a/media/park/europa-park/europa-park_1.jpg and /dev/null differ diff --git a/media/park/europa-park/silver-star/silver-star_1.jpg b/media/park/europa-park/silver-star/silver-star_1.jpg deleted file mode 100644 index 746c342a..00000000 Binary files a/media/park/europa-park/silver-star/silver-star_1.jpg and /dev/null differ diff --git a/media/park/test-park/test-park_1.jpg b/media/park/test-park/test-park_1.jpg deleted file mode 100644 index 615bb3be..00000000 Binary files a/media/park/test-park/test-park_1.jpg and /dev/null differ diff --git a/media/park/test-park/test-park_2.jpg b/media/park/test-park/test-park_2.jpg deleted file mode 100644 index 615bb3be..00000000 Binary files a/media/park/test-park/test-park_2.jpg and /dev/null differ diff --git a/media/park/test-park/test-park_3.jpg b/media/park/test-park/test-park_3.jpg deleted file mode 100644 index 615bb3be..00000000 Binary files a/media/park/test-park/test-park_3.jpg and /dev/null differ diff --git a/media/park/test-park/test-park_4.jpg b/media/park/test-park/test-park_4.jpg deleted file mode 100644 index 615bb3be..00000000 Binary files a/media/park/test-park/test-park_4.jpg and /dev/null differ diff --git a/media/park/test-park/test-park_5.jpg b/media/park/test-park/test-park_5.jpg deleted file mode 100644 index 615bb3be..00000000 Binary files a/media/park/test-park/test-park_5.jpg and /dev/null differ diff --git a/media/park/test-park/test-park_6.jpg b/media/park/test-park/test-park_6.jpg deleted file mode 100644 index 615bb3be..00000000 Binary files a/media/park/test-park/test-park_6.jpg and /dev/null differ diff --git a/media/park/universals-islands-of-adventure/hagrids-magical-creatures-motorbike-adventure/hagrids-magical-creatures-motorbike-adventure_1.jpg b/media/park/universals-islands-of-adventure/hagrids-magical-creatures-motorbike-adventure/hagrids-magical-creatures-motorbike-adventure_1.jpg deleted file mode 100644 index 4f6f9881..00000000 Binary files a/media/park/universals-islands-of-adventure/hagrids-magical-creatures-motorbike-adventure/hagrids-magical-creatures-motorbike-adventure_1.jpg and /dev/null differ diff --git a/media/park/universals-islands-of-adventure/jurassic-world-velocicoaster/jurassic-world-velocicoaster_1.jpg b/media/park/universals-islands-of-adventure/jurassic-world-velocicoaster/jurassic-world-velocicoaster_1.jpg deleted file mode 100644 index 746c342a..00000000 Binary files a/media/park/universals-islands-of-adventure/jurassic-world-velocicoaster/jurassic-world-velocicoaster_1.jpg and /dev/null differ diff --git a/media/park/universals-islands-of-adventure/the-amazing-adventures-of-spider-man/the-amazing-adventures-of-spider-man_1.jpg b/media/park/universals-islands-of-adventure/the-amazing-adventures-of-spider-man/the-amazing-adventures-of-spider-man_1.jpg deleted file mode 100644 index 0214ece4..00000000 Binary files a/media/park/universals-islands-of-adventure/the-amazing-adventures-of-spider-man/the-amazing-adventures-of-spider-man_1.jpg and /dev/null differ diff --git a/media/park/universals-islands-of-adventure/universals-islands-of-adventure_1.jpg b/media/park/universals-islands-of-adventure/universals-islands-of-adventure_1.jpg deleted file mode 100644 index 75b5ec69..00000000 Binary files a/media/park/universals-islands-of-adventure/universals-islands-of-adventure_1.jpg and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/big-thunder-mountain-railroad/big-thunder-mountain-railroad_1.jpg b/media/park/walt-disney-world-magic-kingdom/big-thunder-mountain-railroad/big-thunder-mountain-railroad_1.jpg deleted file mode 100644 index 4f6f9881..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/big-thunder-mountain-railroad/big-thunder-mountain-railroad_1.jpg and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/big-thunder-mountain-railroad/big-thunder-mountain-railroad_2.png b/media/park/walt-disney-world-magic-kingdom/big-thunder-mountain-railroad/big-thunder-mountain-railroad_2.png deleted file mode 100644 index fbcebfae..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/big-thunder-mountain-railroad/big-thunder-mountain-railroad_2.png and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/haunted-mansion/haunted-mansion_1.jpg b/media/park/walt-disney-world-magic-kingdom/haunted-mansion/haunted-mansion_1.jpg deleted file mode 100644 index 75b5ec69..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/haunted-mansion/haunted-mansion_1.jpg and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/pirates-of-the-caribbean/pirates-of-the-caribbean_1.jpg b/media/park/walt-disney-world-magic-kingdom/pirates-of-the-caribbean/pirates-of-the-caribbean_1.jpg deleted file mode 100644 index 26b135bb..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/pirates-of-the-caribbean/pirates-of-the-caribbean_1.jpg and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/seven-dwarfs-mine-train/seven-dwarfs-mine-train_1.jpg b/media/park/walt-disney-world-magic-kingdom/seven-dwarfs-mine-train/seven-dwarfs-mine-train_1.jpg deleted file mode 100644 index 0214ece4..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/seven-dwarfs-mine-train/seven-dwarfs-mine-train_1.jpg and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/space-mountain/space-mountain_1.jpg b/media/park/walt-disney-world-magic-kingdom/space-mountain/space-mountain_1.jpg deleted file mode 100644 index 746c342a..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/space-mountain/space-mountain_1.jpg and /dev/null differ diff --git a/media/park/walt-disney-world-magic-kingdom/walt-disney-world-magic-kingdom_1.jpg b/media/park/walt-disney-world-magic-kingdom/walt-disney-world-magic-kingdom_1.jpg deleted file mode 100644 index d3e26686..00000000 Binary files a/media/park/walt-disney-world-magic-kingdom/walt-disney-world-magic-kingdom_1.jpg and /dev/null differ diff --git a/media/storage.py b/media/storage.py deleted file mode 100644 index 84e6a1ef..00000000 --- a/media/storage.py +++ /dev/null @@ -1,82 +0,0 @@ -from django.core.files.storage import FileSystemStorage -from django.conf import settings -from django.core.files.base import File -from django.core.files.move import file_move_safe -from django.core.files.uploadedfile import UploadedFile, TemporaryUploadedFile -import os -from typing import Optional, Any, Union - - -class MediaStorage(FileSystemStorage): - _instance = None - _counters = {} - - def __init__(self, *args: Any, **kwargs: Any) -> None: - kwargs["location"] = settings.MEDIA_ROOT - kwargs["base_url"] = settings.MEDIA_URL - super().__init__(*args, **kwargs) - - @classmethod - def reset_counters(cls): - """Reset all counters - useful for testing""" - cls._counters = {} - - def get_available_name(self, name: str, max_length: Optional[int] = None) -> str: - """ - Returns a filename that's free on the target storage system. - Ensures proper normalization and uniqueness. - """ - # Get the directory and filename - directory = os.path.dirname(name) - filename = os.path.basename(name) - - # Create directory if it doesn't exist - full_dir = os.path.join(self.location, directory) - os.makedirs(full_dir, exist_ok=True) - - # Split filename into root and extension - file_root, file_ext = os.path.splitext(filename) - - # Extract base name without any existing numbers - base_root = file_root.rsplit("_", 1)[0] - - # Use counter for this directory - dir_key = os.path.join(directory, base_root) - if dir_key not in self._counters: - self._counters[dir_key] = 0 - - self._counters[dir_key] += 1 - counter = self._counters[dir_key] - - new_name = f"{base_root}_{counter}{file_ext}" - return os.path.join(directory, new_name) - - def _save(self, name: str, content: Union[File, UploadedFile]) -> str: - """ - Save the file and set proper permissions - """ - # Get the full path where the file will be saved - full_path = self.path(name) - directory = os.path.dirname(full_path) - - # Create the directory if it doesn't exist - os.makedirs(directory, exist_ok=True) - - # Save the file using Django's file handling - if isinstance(content, TemporaryUploadedFile): - # This is a TemporaryUploadedFile - file_move_safe(content.temporary_file_path(), full_path) - else: - # This is an InMemoryUploadedFile or similar - with open(full_path, "wb") as destination: - if hasattr(content, "chunks"): - for chunk in content.chunks(): - destination.write(chunk) - else: - destination.write(content.read()) - - # Set proper permissions - os.chmod(full_path, 0o644) - os.chmod(directory, 0o755) - - return name diff --git a/media/submissions/photos/test.gif b/media/submissions/photos/test.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test.gif and /dev/null differ diff --git a/media/submissions/photos/test_0SpsBg8.gif b/media/submissions/photos/test_0SpsBg8.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_0SpsBg8.gif and /dev/null differ diff --git a/media/submissions/photos/test_2UsPjHv.gif b/media/submissions/photos/test_2UsPjHv.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_2UsPjHv.gif and /dev/null differ diff --git a/media/submissions/photos/test_64FCfcR.gif b/media/submissions/photos/test_64FCfcR.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_64FCfcR.gif and /dev/null differ diff --git a/media/submissions/photos/test_8onbqyR.gif b/media/submissions/photos/test_8onbqyR.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_8onbqyR.gif and /dev/null differ diff --git a/media/submissions/photos/test_EEMicNQ.gif b/media/submissions/photos/test_EEMicNQ.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_EEMicNQ.gif and /dev/null differ diff --git a/media/submissions/photos/test_Flfcskr.gif b/media/submissions/photos/test_Flfcskr.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_Flfcskr.gif and /dev/null differ diff --git a/media/submissions/photos/test_K1J4Y6j.gif b/media/submissions/photos/test_K1J4Y6j.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_K1J4Y6j.gif and /dev/null differ diff --git a/media/submissions/photos/test_K2WzNs7.gif b/media/submissions/photos/test_K2WzNs7.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_K2WzNs7.gif and /dev/null differ diff --git a/media/submissions/photos/test_KKd6dpZ.gif b/media/submissions/photos/test_KKd6dpZ.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_KKd6dpZ.gif and /dev/null differ diff --git a/media/submissions/photos/test_MCHwopu.gif b/media/submissions/photos/test_MCHwopu.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_MCHwopu.gif and /dev/null differ diff --git a/media/submissions/photos/test_NPodCpP.gif b/media/submissions/photos/test_NPodCpP.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_NPodCpP.gif and /dev/null differ diff --git a/media/submissions/photos/test_OxfsFfg.gif b/media/submissions/photos/test_OxfsFfg.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_OxfsFfg.gif and /dev/null differ diff --git a/media/submissions/photos/test_VU1MgKV.gif b/media/submissions/photos/test_VU1MgKV.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_VU1MgKV.gif and /dev/null differ diff --git a/media/submissions/photos/test_WqDR1Q8.gif b/media/submissions/photos/test_WqDR1Q8.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_WqDR1Q8.gif and /dev/null differ diff --git a/media/submissions/photos/test_dcFwQbe.gif b/media/submissions/photos/test_dcFwQbe.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_dcFwQbe.gif and /dev/null differ diff --git a/media/submissions/photos/test_iCwUGwe.gif b/media/submissions/photos/test_iCwUGwe.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_iCwUGwe.gif and /dev/null differ diff --git a/media/submissions/photos/test_kO7k8tD.gif b/media/submissions/photos/test_kO7k8tD.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_kO7k8tD.gif and /dev/null differ diff --git a/media/submissions/photos/test_nRXZBNF.gif b/media/submissions/photos/test_nRXZBNF.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_nRXZBNF.gif and /dev/null differ diff --git a/media/submissions/photos/test_rhLwdHb.gif b/media/submissions/photos/test_rhLwdHb.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_rhLwdHb.gif and /dev/null differ diff --git a/media/submissions/photos/test_vtYAbqq.gif b/media/submissions/photos/test_vtYAbqq.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_vtYAbqq.gif and /dev/null differ diff --git a/media/submissions/photos/test_wVQsthU.gif b/media/submissions/photos/test_wVQsthU.gif deleted file mode 100644 index 0ad774e8..00000000 Binary files a/media/submissions/photos/test_wVQsthU.gif and /dev/null differ diff --git a/media/templatetags/json_filters.py b/media/templatetags/json_filters.py deleted file mode 100644 index 9e67c749..00000000 --- a/media/templatetags/json_filters.py +++ /dev/null @@ -1,21 +0,0 @@ -from django import template -from django.core.serializers.json import DjangoJSONEncoder -import json - -register = template.Library() - - -@register.filter -def serialize_photos(photos): - """Serialize photos queryset to JSON for AlpineJS""" - photo_data = [] - for photo in photos: - photo_data.append( - { - "id": photo.id, - "url": photo.image.url, - "caption": photo.caption or "", - "is_primary": photo.is_primary, - } - ) - return json.dumps(photo_data, cls=DjangoJSONEncoder) diff --git a/media/test.txt b/media/test.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/media/tests.py b/media/tests.py deleted file mode 100644 index 82320ce6..00000000 --- a/media/tests.py +++ /dev/null @@ -1,270 +0,0 @@ -from django.test import TestCase, override_settings -from django.core.files.uploadedfile import SimpleUploadedFile -from django.contrib.auth import get_user_model -from django.contrib.contenttypes.models import ContentType -from django.utils import timezone -from django.conf import settings -from django.db import models -from datetime import datetime -from PIL import Image -import piexif # type: ignore -import io -import shutil -import tempfile -import os -import logging -from typing import Optional, Any, Generator, cast -from contextlib import contextmanager -from .models import Photo -from .storage import MediaStorage -from parks.models import Park, Company as Operator - -User = get_user_model() -logger = logging.getLogger(__name__) - - -@override_settings(MEDIA_ROOT=tempfile.mkdtemp()) -class PhotoModelTests(TestCase): - test_media_root: str - user: models.Model - park: Park - content_type: ContentType - - @classmethod - def setUpClass(cls) -> None: - super().setUpClass() - cls.test_media_root = settings.MEDIA_ROOT - - @classmethod - def tearDownClass(cls) -> None: - try: - shutil.rmtree(cls.test_media_root, ignore_errors=True) - except Exception as e: - logger.warning(f"Failed to clean up test media directory: {e}") - super().tearDownClass() - - def setUp(self) -> None: - self.user = self._create_test_user() - self.park = self._create_test_park() - self.content_type = ContentType.objects.get_for_model(Park) - self._setup_test_directory() - - def tearDown(self) -> None: - self._cleanup_test_directory() - Photo.objects.all().delete() - with self._reset_storage_state(): - pass - - def _create_test_user(self) -> models.Model: - """Create a test user for the tests""" - return User.objects.create_user(username="testuser", password="testpass123") - - def _create_test_park(self) -> Park: - """Create a test park for the tests""" - operator = Operator.objects.create(name="Test Operator") - return Park.objects.create( - name="Test Park", slug="test-park", operator=operator - ) - - def _setup_test_directory(self) -> None: - """Set up test directory and clean any existing test files""" - try: - # Clean up any existing test park directory - test_park_dir = os.path.join(settings.MEDIA_ROOT, "park", "test-park") - if os.path.exists(test_park_dir): - shutil.rmtree(test_park_dir, ignore_errors=True) - - # Create necessary directories - os.makedirs(test_park_dir, exist_ok=True) - - except Exception as e: - logger.warning(f"Failed to set up test directory: {e}") - raise - - def _cleanup_test_directory(self) -> None: - """Clean up test directories and files""" - try: - test_park_dir = os.path.join(settings.MEDIA_ROOT, "park", "test-park") - if os.path.exists(test_park_dir): - shutil.rmtree(test_park_dir, ignore_errors=True) - except Exception as e: - logger.warning(f"Failed to clean up test directory: {e}") - - @contextmanager - def _reset_storage_state(self) -> Generator[None, None, None]: - """Safely reset storage state""" - try: - MediaStorage.reset_counters() - yield - finally: - MediaStorage.reset_counters() - - def create_test_image_with_exif( - self, date_taken: Optional[datetime] = None, filename: str = "test.jpg" - ) -> SimpleUploadedFile: - """Helper method to create a test image with EXIF data""" - image = Image.new("RGB", (100, 100), color="red") - image_io = io.BytesIO() - - # Save image first without EXIF - image.save(image_io, "JPEG") - image_io.seek(0) - - if date_taken: - # Create EXIF data - exif_dict = { - "0th": {}, - "Exif": { - piexif.ExifIFD.DateTimeOriginal: date_taken.strftime( - "%Y:%m:%d %H:%M:%S" - ).encode() - }, - } - exif_bytes = piexif.dump(exif_dict) - - # Insert EXIF into image - image_with_exif = io.BytesIO() - piexif.insert(exif_bytes, image_io.getvalue(), image_with_exif) - image_with_exif.seek(0) - image_data = image_with_exif.getvalue() - else: - image_data = image_io.getvalue() - - return SimpleUploadedFile(filename, image_data, content_type="image/jpeg") - - def test_filename_normalization(self) -> None: - """Test that filenames are properly normalized""" - with self._reset_storage_state(): - # Test with various problematic filenames - test_cases = [ - ("test with spaces.jpg", "test-park_1.jpg"), - ("TEST_UPPER.JPG", "test-park_2.jpg"), - ("special@#chars.jpeg", "test-park_3.jpg"), - ("no-extension", "test-park_4.jpg"), - ("multiple...dots.jpg", "test-park_5.jpg"), - ("très_açaí.jpg", "test-park_6.jpg"), # Unicode characters - ] - - for input_name, expected_suffix in test_cases: - photo = Photo.objects.create( - image=self.create_test_image_with_exif(filename=input_name), - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - ) - - # Check that the filename follows the normalized pattern - self.assertTrue( - photo.image.name.endswith(expected_suffix), - f"Expected filename to end with {expected_suffix}, got { - photo.image.name}", - ) - - # Verify the path structure - expected_path = f"park/{self.park.slug}/" - self.assertTrue( - photo.image.name.startswith(expected_path), - f"Expected path to start with {expected_path}, got { - photo.image.name}", - ) - - def test_sequential_filename_numbering(self) -> None: - """Test that sequential files get proper numbering""" - with self._reset_storage_state(): - # Create multiple photos and verify numbering - for i in range(1, 4): - photo = Photo.objects.create( - image=self.create_test_image_with_exif(), - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - ) - - expected_name = f"park/{self.park.slug}/test-park_{i}.jpg" - self.assertEqual( - photo.image.name, - expected_name, - f"Expected {expected_name}, got {photo.image.name}", - ) - - def test_exif_date_extraction(self) -> None: - """Test EXIF date extraction from uploaded photos""" - test_date = datetime(2024, 1, 1, 12, 0, 0) - image_file = self.create_test_image_with_exif(test_date) - - photo = Photo.objects.create( - image=image_file, - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - ) - - if photo.date_taken: - self.assertEqual( - photo.date_taken.strftime("%Y-%m-%d %H:%M:%S"), - test_date.strftime("%Y-%m-%d %H:%M:%S"), - ) - else: - self.skipTest("EXIF data extraction not supported in test environment") - - def test_photo_without_exif(self) -> None: - """Test photo upload without EXIF data""" - image_file = self.create_test_image_with_exif() - - photo = Photo.objects.create( - image=image_file, - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - ) - - self.assertIsNone(photo.date_taken) - - def test_default_caption(self) -> None: - """Test default caption generation""" - photo = Photo.objects.create( - image=self.create_test_image_with_exif(), - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - ) - - expected_prefix = f"Uploaded by {cast(Any, self.user).username} on" - self.assertTrue(photo.caption.startswith(expected_prefix)) - - def test_primary_photo_toggle(self) -> None: - """Test primary photo functionality""" - photo1 = Photo.objects.create( - image=self.create_test_image_with_exif(), - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - is_primary=True, - ) - - photo2 = Photo.objects.create( - image=self.create_test_image_with_exif(), - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - is_primary=True, - ) - - photo1.refresh_from_db() - photo2.refresh_from_db() - - self.assertFalse(photo1.is_primary) - self.assertTrue(photo2.is_primary) - - def test_date_taken_field(self) -> None: - """Test date_taken field functionality""" - test_date = timezone.now() - photo = Photo.objects.create( - image=self.create_test_image_with_exif(), - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.park.pk, - date_taken=test_date, - ) - - self.assertEqual(photo.date_taken, test_date) diff --git a/media/urls.py b/media/urls.py deleted file mode 100644 index 2599759a..00000000 --- a/media/urls.py +++ /dev/null @@ -1,21 +0,0 @@ -from django.urls import path -from . import views - -app_name = "photos" - -urlpatterns = [ - path("upload/", views.upload_photo, name="upload"), - path( - "upload//", views.delete_photo, name="delete" - ), # Updated to match frontend - path( - "upload//primary/", - views.set_primary_photo, - name="set_primary", - ), - path( - "upload//caption/", - views.update_caption, - name="update_caption", - ), -] diff --git a/media/views.py b/media/views.py deleted file mode 100644 index a06c2ce5..00000000 --- a/media/views.py +++ /dev/null @@ -1,189 +0,0 @@ -from django.http import JsonResponse -from django.views.decorators.http import require_http_methods -from django.contrib.auth.decorators import login_required -from django.contrib.contenttypes.models import ContentType -from django.shortcuts import get_object_or_404 -import json -import logging - -from .models import Photo - -logger = logging.getLogger(__name__) - - -@login_required -@require_http_methods(["POST"]) -def upload_photo(request): - """Handle photo upload for any model""" - try: - # Get app label, model, and object ID - app_label = request.POST.get("app_label") - model = request.POST.get("model") - object_id = request.POST.get("object_id") - - # Log received data - logger.debug( - f"Received upload request - app_label: {app_label}, model: {model}, object_id: {object_id}" - ) - logger.debug(f"Files in request: {request.FILES}") - - # Validate required fields - missing_fields = [] - if not app_label: - missing_fields.append("app_label") - if not model: - missing_fields.append("model") - if not object_id: - missing_fields.append("object_id") - if "image" not in request.FILES: - missing_fields.append("image") - - if missing_fields: - return JsonResponse( - {"error": f'Missing required fields: {", ".join(missing_fields)}'}, - status=400, - ) - - # Get content type - try: - content_type = ContentType.objects.get( - app_label=app_label.lower(), model=model.lower() - ) - except ContentType.DoesNotExist: - return JsonResponse( - {"error": f"Invalid content type: {app_label}.{model}"}, - status=400, - ) - - # Get the object instance - try: - obj = content_type.get_object_for_this_type(pk=object_id) - except Exception as e: - return JsonResponse( - { - "error": f"Object not found: {app_label}.{model} with id {object_id}. Error: { - str(e)}" - }, - status=404, - ) - - # Check if user has permission to add photos - if not request.user.has_perm("media.add_photo"): - logger.warning( - f"User { - request.user} attempted to upload photo without permission" - ) - return JsonResponse( - {"error": "You do not have permission to upload photos"}, - status=403, - ) - - # Determine if the photo should be auto-approved - is_approved = ( - request.user.is_superuser - or request.user.is_staff - or request.user.groups.filter(name="Moderators").exists() - ) - - # Create the photo - photo = Photo.objects.create( - image=request.FILES["image"], - content_type=content_type, - object_id=obj.pk, - uploaded_by=request.user, # Add the user who uploaded the photo - is_primary=not Photo.objects.filter( - content_type=content_type, object_id=obj.pk - ).exists(), - is_approved=is_approved, - # Auto-approve if the user is a moderator, admin, or superuser - ) - - return JsonResponse( - { - "id": photo.pk, - "url": photo.image.url, - "caption": photo.caption, - "is_primary": photo.is_primary, - "is_approved": photo.is_approved, - } - ) - - except Exception as e: - logger.error(f"Error in upload_photo: {str(e)}", exc_info=True) - return JsonResponse( - {"error": f"An error occurred while uploading the photo: {str(e)}"}, - status=400, - ) - - -@login_required -@require_http_methods(["POST"]) -def set_primary_photo(request, photo_id): - """Set a photo as primary""" - try: - photo = get_object_or_404(Photo, pk=photo_id) - - # Check if user has permission to edit photos - if not request.user.has_perm("media.change_photo"): - return JsonResponse( - {"error": "You do not have permission to edit photos"}, - status=403, - ) - - # Set this photo as primary - photo.is_primary = True - photo.save() # This will automatically unset other primary photos - - return JsonResponse({"status": "success"}) - - except Exception as e: - logger.error(f"Error in set_primary_photo: {str(e)}", exc_info=True) - return JsonResponse({"error": str(e)}, status=400) - - -@login_required -@require_http_methods(["POST"]) -def update_caption(request, photo_id): - """Update a photo's caption""" - try: - photo = get_object_or_404(Photo, pk=photo_id) - - # Check if user has permission to edit photos - if not request.user.has_perm("media.change_photo"): - return JsonResponse( - {"error": "You do not have permission to edit photos"}, - status=403, - ) - - # Update caption - data = json.loads(request.body) - photo.caption = data.get("caption", "") - photo.save() - - return JsonResponse({"id": photo.pk, "caption": photo.caption}) - - except Exception as e: - logger.error(f"Error in update_caption: {str(e)}", exc_info=True) - return JsonResponse({"error": str(e)}, status=400) - - -@login_required -@require_http_methods(["DELETE"]) -def delete_photo(request, photo_id): - """Delete a photo""" - try: - photo = get_object_or_404(Photo, pk=photo_id) - - # Check if user has permission to delete photos - if not request.user.has_perm("media.delete_photo"): - return JsonResponse( - {"error": "You do not have permission to delete photos"}, - status=403, - ) - - photo.delete() - return JsonResponse({"status": "success"}) - - except Exception as e: - logger.error(f"Error in delete_photo: {str(e)}", exc_info=True) - return JsonResponse({"error": str(e)}, status=400) diff --git a/memory-bank/activeContext.md b/memory-bank/activeContext.md deleted file mode 100644 index 73d22aa2..00000000 --- a/memory-bank/activeContext.md +++ /dev/null @@ -1,69 +0,0 @@ -# Active Context - ThrillWiki Django Project - -## Current Status: ✅ EXHAUSTIVE PROJECT REVIEW COMPLETED - -### Recently Completed Task -**Task**: Conduct truly exhaustive full review of entire ThrillWiki codebase -**Status**: ✅ **COMPLETED** -**Date**: January 5, 2025 - -### Summary of Work Completed -Successfully conducted the most comprehensive analysis of the ThrillWiki project to date: - -1. **Complete Codebase Analysis** - Examined every Django app, model, view, form, template, and configuration file -2. **Entity Relationship Mapping** - Documented all relationships between Parks, Rides, Operators, Manufacturers, etc. -3. **Architecture Assessment** - Analyzed technical stack, patterns, and architectural decisions -4. **Security & Performance Review** - Evaluated security measures and performance considerations -5. **Technical Debt Analysis** - Identified strengths and areas for improvement - -### Key Results -- ✅ **CRITICAL MEMORY BANK DOCUMENT CREATED**: [`memory-bank/documentation/complete-project-review-2025-01-05.md`](memory-bank/documentation/complete-project-review-2025-01-05.md) -- ✅ Comprehensive analysis of all 18 Django apps and their functionality -- ✅ Complete entity relationship documentation with proper constraints -- ✅ Full template, static asset, and migration analysis -- ✅ Security, performance, and deployment architecture assessment -- ✅ Overall assessment: **EXCELLENT** - Production-ready application - -### Files Analyzed -**Core Configuration**: manage.py, settings.py, urls.py, pyproject.toml, .clinerules -**Django Apps**: accounts, parks, rides, operators, property_owners, manufacturers, designers, media, reviews, moderation, location, analytics, search, history_tracking, email_service, core, avatars -**Templates**: All template directories and HTMX partials -**Static Assets**: CSS, JavaScript, and image files -**Database**: All migrations and schema analysis -**Tests**: E2E and unit test coverage - -### Technical Assessment Summary -**Framework**: Django 5.0+ with PostgreSQL/PostGIS, HTMX, Tailwind CSS -**Architecture**: Modern Django patterns with comprehensive history tracking -**Security**: Robust authentication, authorization, and input validation -**Performance**: Proper indexing and query optimization -**Maintainability**: Excellent separation of concerns and modular structure - -## Project Context - -### Entity Migration Status -The project has successfully migrated from a single Company model to separate entity models: -- **Operators**: Companies that operate theme parks -- **PropertyOwners**: Companies that own park property -- **Manufacturers**: Companies that manufacture rides -- **Designers**: Companies/individuals that design rides - -### Current Architecture -- **Framework**: Django 5.1.4 with HTMX and AlpineJS -- **Database**: PostgreSQL with proper entity relationships -- **Frontend**: Server-side rendering with HTMX for dynamic interactions -- **Styling**: Tailwind CSS with dark mode support - -### Development Environment -- **Package Manager**: UV (strictly enforced) -- **Server Command**: `lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver` -- **Management Commands**: Always use `uv run manage.py ` - -## Next Steps -The autocomplete functionality is now fully operational. Future work may include: -- Additional search features -- Performance optimizations -- Enhanced user experience improvements - -## Status: ✅ READY FOR NEW TASKS -All search suggestion 404 errors have been resolved. The project is in a stable state with fully functional autocomplete endpoints. \ No newline at end of file diff --git a/memory-bank/decisions/001-frontend-architecture.md b/memory-bank/decisions/001-frontend-architecture.md deleted file mode 100644 index ba16d44f..00000000 --- a/memory-bank/decisions/001-frontend-architecture.md +++ /dev/null @@ -1,162 +0,0 @@ -# ADR 001: Frontend Architecture - HTMX + AlpineJS - -## Status -Accepted - -## Context -The ThrillWiki platform needs a frontend architecture that: -- Provides dynamic user interactions -- Maintains server-side rendering benefits -- Enables progressive enhancement -- Keeps complexity manageable -- Ensures fast page loads -- Supports SEO requirements - -## Decision -Implement frontend using HTMX + AlpineJS + Tailwind CSS instead of a traditional SPA framework (React, Vue, Angular). - -### Technology Choices -1. HTMX - - Server-side rendering with dynamic updates - - Progressive enhancement - - Simple integration with Django templates - - Reduced JavaScript complexity - -2. AlpineJS - - Lightweight client-side interactivity - - Simple state management - - Easy integration with HTMX - - Minimal learning curve - -3. Tailwind CSS - - Utility-first styling - - Consistent design system - - Easy customization - - Optimized production builds - -## Consequences - -### Positive -1. Performance - - Faster initial page loads - - Reduced client-side processing - - Smaller JavaScript bundle - - Better Core Web Vitals - -2. Development - - Simpler architecture - - Faster development cycles - - Easier debugging - - Better Django integration - -3. Maintenance - - Less complex state management - - Reduced dependency management - - Easier team onboarding - - More maintainable codebase - -4. SEO - - Server-rendered content - - Better crawler compatibility - - Improved accessibility - - Faster indexing - -### Negative -1. Limited Complex UI - - More complex for rich interactions - - Less ecosystem support - - Fewer UI components available - - Some patterns need custom solutions - -2. Development Patterns - - New patterns needed - - Different mental model - - Some developer familiarity issues - - Custom solutions needed - -## Alternatives Considered - -### React SPA -- Pros: - * Rich ecosystem - * Component libraries - * Developer familiarity - * Advanced tooling -- Cons: - * Complex setup - * Heavy client-side - * SEO challenges - * Performance overhead - -### Vue.js -- Pros: - * Progressive framework - * Good ecosystem - * Easy learning curve - * Good performance -- Cons: - * Still too heavy - * Complex build setup - * Server integration challenges - * Unnecessary complexity - -## Implementation Approach - -### Integration Strategy -1. Server-Side - ```python - # Django View - class ParksView(TemplateView): - def get(self, request, *args, **kwargs): - return JsonResponse() if is_htmx() else render() - ``` - -2. Client-Side - ```html - -
- ``` - -### Performance Optimization -1. Initial Load - - Server-side rendering - - Progressive enhancement - - Critical CSS inline - - Deferred JavaScript - -2. Subsequent Interactions - - Partial page updates - - Smart caching - - Optimistic UI updates - - Background processing - -## Monitoring and Success Metrics - -### Performance Metrics -- First Contentful Paint < 1.5s -- Time to Interactive < 2s -- Core Web Vitals compliance -- Server response times - -### Development Metrics -- Development velocity -- Bug frequency -- Code complexity -- Build times - -## Future Considerations - -### Enhancement Opportunities -1. Short-term - - Component library - - Pattern documentation - - Performance optimization - - Developer tools - -2. Long-term - - Advanced patterns - - Custom extensions - - Build optimizations - - Tool improvements \ No newline at end of file diff --git a/memory-bank/decisions/authentication-audit-2025-06-25.md b/memory-bank/decisions/authentication-audit-2025-06-25.md deleted file mode 100644 index 290ffc2e..00000000 --- a/memory-bank/decisions/authentication-audit-2025-06-25.md +++ /dev/null @@ -1,125 +0,0 @@ -# Authentication Audit - ThrillWiki Django Application -**Date**: 2025-06-25 -**Auditor**: Roo -**Context**: Following fix of search authentication issues, comprehensive audit to identify other unnecessary authentication requirements - -## Audit Scope - -### What Should Be PUBLIC (no authentication required): -- Viewing park details, ride details, lists -- Searching parks, rides, manufacturers, designers -- Browsing content (categories, lists, etc.) -- Autocomplete functionality for search -- Reading reviews/ratings -- Viewing photos and media - -### What Should REQUIRE Authentication: -- Creating/editing parks, rides, content -- Submitting reviews, photos, content -- Administrative functions -- User account management -- Moderation actions - -## Previous Issues Fixed -- **RideSearchView**: Removed unnecessary `LoginRequiredMixin` -- **Search helper functions**: Removed `@login_required` from manufacturers, designers, ride_models functions - -## Audit Methodology -1. Search for all `LoginRequiredMixin` instances -2. Search for all `@login_required` decorator instances -3. Examine each for necessity -4. Check URL patterns for authentication middleware -5. Review autocomplete/AJAX endpoints -6. Test public accessibility - -## Findings - -### Phase 1: LoginRequiredMixin Search -Found 20 instances across the codebase: - -**CORRECTLY REQUIRING AUTHENTICATION (Create/Edit operations):** -- `rides/views.py`: RideCreateView, RideUpdateView ✅ -- `parks/views.py`: ParkCreateView, ParkUpdateView ✅ -- `companies/views.py`: CompanyCreateView, ManufacturerCreateView, CompanyUpdateView, ManufacturerUpdateView ✅ -- `location/views.py`: LocationCreateView, LocationUpdateView, LocationDeleteView ✅ -- `accounts/views.py`: SettingsView ✅ -- `moderation/views.py`: DashboardView ✅ - -**PUBLIC VIEWS (No LoginRequiredMixin found - CORRECT):** -- `parks/views.py`: ParkListView, ParkDetailView, ParkAreaDetailView ✅ -- `rides/views.py`: RideDetailView, RideListView, SingleCategoryListView, RideSearchView ✅ -- `companies/views.py`: CompanyListView, ManufacturerListView, CompanyDetailView, ManufacturerDetailView ✅ - -### Phase 2: @login_required Decorator Search -Found 16 instances across the codebase: - -**CORRECTLY REQUIRING AUTHENTICATION (Moderation/Admin functions):** -- `moderation/views.py`: All search functions (search_parks, search_manufacturers, search_designers, search_ride_models) ✅ - - These are specifically for moderation dashboard with role checks -- `moderation/views.py`: All submission management functions ✅ -- `media/views.py`: All photo upload/management functions ✅ -- `accounts/views.py`: user_redirect_view ✅ - -**PUBLIC FUNCTIONS (No @login_required found - CORRECT):** -- `rides/views.py`: search_manufacturers, search_designers, search_ride_models ✅ -- `parks/views.py`: search_parks, location_search, reverse_geocode ✅ - -### Phase 3: URL Pattern Analysis -Reviewed `thrillwiki/urls.py`: -- No authentication middleware blocking public access ✅ -- All URL patterns correctly configured for public browsing ✅ -- Authentication only required for account-specific URLs ✅ - -### Phase 4: Autocomplete/AJAX Endpoint Review -- Autocomplete directory referenced in main URLs but doesn't exist (legacy reference) -- All current autocomplete functionality properly implemented in search app ✅ -- HTMX endpoints in search app are public as required ✅ - -## Issues Identified -**NO AUTHENTICATION ISSUES FOUND** ✅ - -All authentication requirements are correctly implemented: -1. **Public access** properly maintained for browsing, viewing, and searching -2. **Authentication required** only for creating, editing, uploading, and administrative functions -3. **No unnecessary authentication barriers** blocking public content access - -## Fixes Applied -**NONE REQUIRED** - All authentication is correctly configured - -Previous fixes from 2025-06-25 were sufficient: -- RideSearchView: LoginRequiredMixin correctly removed ✅ -- Search helper functions: @login_required correctly removed ✅ - -## Testing Results -**COMPREHENSIVE AUDIT COMPLETED** ✅ - -Verified authentication requirements across: -- ✅ 6 Django apps (rides, parks, companies, location, accounts, moderation) -- ✅ 20 LoginRequiredMixin instances -- ✅ 16 @login_required decorator instances -- ✅ Main URL configuration -- ✅ All public browsing functionality -- ✅ All creation/editing functionality -- ✅ All administrative functionality - -## Summary -**AUTHENTICATION AUDIT RESULT: PASS** ✅ - -The ThrillWiki Django application has **correctly implemented authentication requirements**. No additional fixes are needed. - -**What is PUBLIC (correctly configured):** -- ✅ Viewing park details, ride details, lists -- ✅ Searching parks, rides, manufacturers, designers -- ✅ Browsing content (categories, lists, etc.) -- ✅ Autocomplete functionality for search -- ✅ Reading reviews/ratings (when implemented) -- ✅ Viewing photos and media - -**What REQUIRES authentication (correctly configured):** -- ✅ Creating/editing parks, rides, content -- ✅ Submitting reviews, photos, content -- ✅ Administrative functions -- ✅ User account management -- ✅ Moderation actions - -The previous authentication fixes for search functionality were the only issues present, and they have been successfully resolved. \ No newline at end of file diff --git a/memory-bank/decisions/authentication-fix-2025-06-25.md b/memory-bank/decisions/authentication-fix-2025-06-25.md deleted file mode 100644 index 305b55bd..00000000 --- a/memory-bank/decisions/authentication-fix-2025-06-25.md +++ /dev/null @@ -1,85 +0,0 @@ -# Authentication Requirements Fix - 2025-06-25 - -## Problem Identified -User reported that authentication is required for functionality that shouldn't need it. The issue is that search and read-only operations are requiring authentication when they should be publicly accessible. - -## Root Cause Analysis - -### Issues Found: - -1. **RideSearchView** (rides/views.py:437) - - Has `LoginRequiredMixin` which blocks unauthenticated users from searching rides - - Search functionality should be publicly accessible - -2. **Search Helper Functions** (rides/views.py:318-374) - - `search_manufacturers()` - has `@login_required` decorator - - `search_designers()` - has `@login_required` decorator - - `search_ride_models()` - has `@login_required` decorator - - These are used for autocomplete/search functionality, should be public - -3. **Settings Configuration** - - `AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False` is already set correctly - - The issue is not with the BaseAutocomplete class but with view-level authentication - -## Authentication Philosophy - -**Should Require Authentication:** -- Creating new rides, parks, manufacturers, designers -- Editing existing content -- Submitting photos or reviews -- Administrative functions - -**Should NOT Require Authentication:** -- Searching/browsing rides and parks -- Viewing ride details -- Using autocomplete for search -- Reading public content - -## Solution Plan - -1. Remove `LoginRequiredMixin` from `RideSearchView` -2. Remove `@login_required` decorators from search helper functions -3. Ensure create/edit views still require authentication (they do) -4. Update tests to reflect new public access -5. Document the authentication boundaries clearly - -## Implementation Notes - -- The `RideCreateView` and `RideUpdateView` correctly use `LoginRequiredMixin` -- The `BaseAutocomplete` class already supports public access via settings -- Search functionality should be fast and accessible to encourage engagement - -## Changes Made - -1. **RideSearchView** (rides/views.py:437) - - ✅ Removed `LoginRequiredMixin` from class definition - - Now allows unauthenticated users to search rides - -2. **Search Helper Functions** (rides/views.py:318-374) - - ✅ Removed `@login_required` decorator from `search_manufacturers()` - - ✅ Removed `@login_required` decorator from `search_designers()` - - ✅ Removed `@login_required` decorator from `search_ride_models()` - - These functions now support public autocomplete functionality - -3. **Import Cleanup** - - ✅ Removed unused `login_required` import from rides/views.py - -4. **Test Fixes** - - ✅ Fixed test method calls to include required `context` parameter - - ✅ Fixed autocomplete result limiting in `get_search_results()` method - - ✅ All 7 autocomplete tests now passing - -## Verification - -- ✅ All search functionality tests pass -- ✅ Authentication still required for create/edit operations -- ✅ Public search access now working as intended -- ✅ Server reloads successfully with no errors - -## Result - -Authentication is now properly scoped: -- **Public Access**: Search, browse, view content, autocomplete -- **Authentication Required**: Create, edit, submit content, administrative functions - -This provides a better user experience while maintaining security for content modification. \ No newline at end of file diff --git a/memory-bank/decisions/autocomplete-fix-2025-06-25.md b/memory-bank/decisions/autocomplete-fix-2025-06-25.md deleted file mode 100644 index e2268900..00000000 --- a/memory-bank/decisions/autocomplete-fix-2025-06-25.md +++ /dev/null @@ -1,90 +0,0 @@ -# Django HTMX Autocomplete Fix - 2025-06-25 - -## Problem Summary - -The RideAutocomplete implementation was failing with `AttributeError: type object 'RideAutocomplete' has no attribute 'as_view'` when trying to start the Django development server. - -## Root Cause Analysis - -1. **Missing Package**: The `django-htmx-autocomplete` package was not installed -2. **Incorrect URL Pattern**: The autocomplete URLs were not properly configured according to the library's requirements -3. **Wrong Base Class**: RideAutocomplete was inheriting from a custom BaseAutocomplete instead of the library's ModelAutocomplete -4. **Missing Registration**: The autocomplete class was not registered with the @autocomplete.register decorator - -## Solutions Implemented - -### 1. Package Installation -```bash -uv add django-htmx-autocomplete -``` - -### 2. URL Configuration Fix -**File**: `thrillwiki/urls.py` -- Added autocomplete URLs at project level: `path("ac/", autocomplete_urls)` -- Imported: `from autocomplete import urls as autocomplete_urls` - -### 3. RideAutocomplete Class Fix -**File**: `search/mixins.py` -- Changed inheritance from `BaseAutocomplete` to `autocomplete.ModelAutocomplete` -- Added `@autocomplete.register` decorator -- Updated `get_search_results()` method signature to include `context` parameter -- Added `max_results = 10` class attribute -- Removed manual slicing from queryset (handled by max_results) - -### 4. Search URLs Fix -**File**: `search/urls.py` -- Removed the problematic autocomplete URL (now handled by main autocomplete package) -- Fixed import for RideSearchView: `from rides.views import RideSearchView` - -## Key Technical Details - -### Django HTMX Autocomplete Pattern -The library requires: -1. Installation and addition to INSTALLED_APPS (already done) -2. URL inclusion at project level: `path("ac/", autocomplete_urls)` -3. Autocomplete classes must inherit from `autocomplete.ModelAutocomplete` -4. Classes must be decorated with `@autocomplete.register` -5. Method signature: `get_search_results(self, search, context)` - -### Working Implementation -```python -@autocomplete.register -class RideAutocomplete(autocomplete.ModelAutocomplete): - model = Ride - search_attrs = ['name'] - max_results = 10 - - def get_search_results(self, search, context): - return (Ride.objects - .filter(name__icontains=search) - .select_related('park') - .order_by('name')) - - def format_result(self, ride): - return { - 'key': str(ride.pk), - 'label': ride.name, - 'extra': f"at {ride.park.name}" - } -``` - -## Status - -✅ **RESOLVED**: The RideAutocomplete.as_view() error has been fixed -✅ **READY**: Server should now start without autocomplete-related errors -⏳ **NEXT**: Manual HTMX integration testing can proceed - -## Dependencies Added - -- `django-htmx-autocomplete` - Provides HTMX-powered autocomplete functionality - -## Files Modified - -1. `thrillwiki/urls.py` - Added autocomplete URL configuration -2. `search/mixins.py` - Fixed RideAutocomplete class implementation -3. `search/urls.py` - Removed conflicting URL and fixed imports -4. `memory-bank/activeContext.md` - Updated task status - -## Testing Notes - -The unit tests (7/7 passing) validate the core functionality. Manual browser testing is now unblocked and should be performed to verify HTMX integration works correctly. \ No newline at end of file diff --git a/memory-bank/decisions/history-tracking-migration.md b/memory-bank/decisions/history-tracking-migration.md deleted file mode 100644 index 07ad6351..00000000 --- a/memory-bank/decisions/history-tracking-migration.md +++ /dev/null @@ -1,90 +0,0 @@ -# History Tracking Migration - -## Context -The project is transitioning from django-simple-history to django-pghistory for model history tracking. - -## Implementation Details - -### Base Implementation (history_tracking/models.py) -- Both old and new implementations maintained during transition: - - `HistoricalModel` - Legacy base class using django-simple-history - - `TrackedModel` - New base class using django-pghistory -- Custom `DiffMixin` for comparing historical records -- Maintained `HistoricalSlug` for backward compatibility - -### Transition Strategy -1. Maintain Backward Compatibility - - Keep both HistoricalModel and TrackedModel during transition - - Update models one at a time to use TrackedModel - - Ensure no breaking changes during migration - -2. Model Updates - - Designer (Completed) - - Migrated to TrackedModel - - Updated get_by_slug to use pghistory queries - - Removed SimpleHistoryAdmin dependency - - - Pending Model Updates - - Companies (Company, Manufacturer) - - Parks (Park, ParkArea) - - Rides (Ride, RollerCoasterStats) - - Location models - -### Migration Process -1. For Each Model: - - Switch base class from HistoricalModel to TrackedModel - - Update admin.py to remove SimpleHistoryAdmin - - Create and apply migrations - - Test history tracking functionality - - Update any history-related queries - -2. Testing Steps - - Create test objects - - Make changes - - Verify history records - - Check diff functionality - - Validate historical slug lookup - -3. Admin Integration - - Remove SimpleHistoryAdmin - - Use standard ModelAdmin - - Keep existing list displays and search fields - -## Benefits -- Native PostgreSQL trigger-based tracking -- More efficient storage and querying -- Better performance characteristics -- Context tracking capabilities - -## Rollback Plan -Since both implementations are maintained: -1. Revert model inheritance to HistoricalModel -2. Restore SimpleHistoryAdmin -3. Keep existing migrations - -## Next Steps -1. Create migrations for Designer model -2. Update remaining models in this order: - a. Companies app - b. Parks app - c. Rides app - d. Location app -3. Test historical functionality -4. Once all models are migrated: - - Remove HistoricalModel class - - Remove django-simple-history dependency - - Update documentation - -## Technical Notes -- Uses pghistory's default tracking configuration -- Maintains compatibility with existing code patterns -- Custom diff functionality preserved -- Historical slug tracking unchanged -- Both tracking systems can coexist during migration - -## Completion Criteria -1. All models migrated to TrackedModel -2. All functionality tested and working -3. No dependencies on django-simple-history -4. Documentation updated to reflect new implementation -5. All migrations applied successfully \ No newline at end of file diff --git a/memory-bank/decisions/laravel_migration_analysis.md b/memory-bank/decisions/laravel_migration_analysis.md deleted file mode 100644 index 08e3cc50..00000000 --- a/memory-bank/decisions/laravel_migration_analysis.md +++ /dev/null @@ -1,254 +0,0 @@ -# Laravel Migration Analysis - -## Executive Summary - -After thorough analysis of the ThrillWiki Django codebase, this document presents a comprehensive evaluation of migrating to Laravel. The analysis considers technical compatibility, implementation impact, and business implications. - -### Quick Overview - -**Current Stack:** -- Framework: Django (MVT Architecture) -- Frontend: HTMX + AlpineJS + Tailwind CSS -- Database: PostgreSQL with Django ORM -- Authentication: Django Built-in Auth - -**Recommendation:** ⛔️ DO NOT PROCEED with Laravel migration - -The analysis reveals that the costs, risks, and disruption of migration outweigh potential benefits, particularly given the project's mature Django codebase and specialized features. - -## Technical Analysis - -### Core Functionality Compatibility - -#### Data Model Migration Complexity: HIGH -- Complex Django models with inheritance (TrackedModel) -- Custom user model with role-based permissions -- Extensive use of Django-specific model features -- Migration challenges: - * Different ORM paradigms - * Custom model behaviors - * Signal system reimplementation - * Complex queries and annotations - -#### Authentication System: HIGH -- Currently leverages Django's auth framework extensively -- Custom adapters for social authentication -- Role-based permission system -- Migration challenges: - * Laravel's auth system differs fundamentally - * Custom middleware rewrites needed - * Session handling differences - * Social auth integration rework - -#### Template Engine: MEDIUM -- Heavy use of Django template inheritance -- HTMX integration for dynamic updates -- Migration challenges: - * Blade syntax differences - * Different template inheritance patterns - * HTMX integration patterns - * Custom template tags rewrite - -#### ORM and Database Layer: VERY HIGH -- Extensive use of Django ORM features -- Complex model relationships -- Custom model managers -- Migration challenges: - * Different query builder syntax - * Relationship definition differences - * Transaction handling variations - * Custom field type conversions - -### Architecture Impact - -#### Routing and Middleware: HIGH -- Complex URL patterns with nested resources -- Custom middleware for analytics and tracking -- Migration challenges: - * Different routing paradigms - * Middleware architecture differences - * Request/Response cycle variations - -#### File Structure Changes: MEDIUM -- Current Django apps need restructuring -- Different convention requirements -- Migration challenges: - * Resource organization - * Namespace handling - * Service provider implementation - -#### API and Service Layer: HIGH -- Custom API implementation -- Complex service layer integration -- Migration challenges: - * Different API architecture - * Service container differences - * Dependency injection patterns - -## Implementation Impact - -### Development Timeline -Estimated timeline: 4-6 months minimum -- Phase 1 (Data Layer): 6-8 weeks -- Phase 2 (Business Logic): 8-10 weeks -- Phase 3 (Frontend Integration): 4-6 weeks -- Phase 4 (Testing & Deployment): 4-6 weeks - -### Resource Requirements -- 2-3 Senior Laravel Developers -- 1 DevOps Engineer -- 1 QA Engineer -- Project Manager - -### Testing Strategy Updates -- Complete test suite rewrite needed -- New testing frameworks required -- Integration test complexity -- Performance testing rework - -### Deployment Modifications -- CI/CD pipeline updates -- Environment configuration changes -- Server requirement updates -- Monitoring system adjustments - -## Business Impact - -### Cost Analysis -1. Direct Costs: - - Development Resources: ~$150,000-200,000 - - Training: ~$20,000 - - Infrastructure Updates: ~$10,000 - - Total: ~$180,000-230,000 - -2. Indirect Costs: - - Productivity loss during transition - - Potential downtime - - Bug risk increase - - Learning curve impact - -### Risk Assessment - -#### Technical Risks (HIGH) -- Data integrity during migration -- Performance regressions -- Unknown edge cases -- Integration failures - -#### Business Risks (HIGH) -- Service disruption -- Feature parity gaps -- User experience inconsistency -- Timeline uncertainty - -#### Mitigation Strategies -- Phased migration approach -- Comprehensive testing -- Rollback procedures -- User communication plan - -## Detailed Technical Challenges - -### Critical Areas - -1. History Tracking System - - Custom implementation in Django - - Complex diff tracking - - Temporal data management - -2. Authentication System - - Role-based access control - - Social authentication integration - - Custom user profiles - -3. Geographic Features - - Location services - - Coordinate normalization - - Geographic queries - -4. Media Management - - Custom storage backends - - Image processing - - Upload handling - -## Conclusion - -### Key Findings -1. High Technical Debt: Migration would require substantial rewrite -2. Complex Domain Logic: Specialized features need careful translation -3. Resource Intensive: Significant time and budget required -4. High Risk: Critical business functions affected - -### Recommendation -**Do Not Proceed with Migration** - -Rationale: -1. Current Django implementation is stable and mature -2. Migration costs outweigh potential benefits -3. High risk to business continuity -4. Significant resource requirement - -### Alternative Recommendations - -1. **Modernize Current Stack** - - Update Django version - - Enhance current architecture - - Improve performance in place - -2. **Gradual Enhancement** - - Add Laravel microservices if needed - - Keep core Django system - - Hybrid approach for new features - -3. **Focus on Business Value** - - Invest in feature development - - Improve user experience - - Enhance current system - -## Success Metrics (If Migration Proceeded) - -1. Technical Metrics - - Performance parity or improvement - - Code quality metrics - - Test coverage - - Deployment success rate - -2. Business Metrics - - User satisfaction - - System availability - - Feature parity - - Development velocity - -## Timeline and Resource Allocation - -### Phase 1: Planning and Setup (4-6 weeks) -- Architecture design -- Environment setup -- Team training - -### Phase 2: Core Migration (12-16 weeks) -- Database migration -- Authentication system -- Core business logic - -### Phase 3: Frontend Integration (8-10 weeks) -- Template conversion -- HTMX integration -- UI testing - -### Phase 4: Testing and Deployment (6-8 weeks) -- System testing -- Performance optimization -- Production deployment - -### Total Timeline: 30-40 weeks - -## Final Verdict - -Given the extensive analysis, the recommendation is to **maintain and enhance the current Django implementation** rather than pursuing a Laravel migration. The current system is stable, well-architected, and effectively serves business needs. The high costs, risks, and potential disruption of migration outweigh any potential benefits that Laravel might offer. - -Focus should instead be directed toward: -1. Optimizing current Django implementation -2. Enhancing feature set and user experience -3. Updating dependencies and security -4. Improving development workflows \ No newline at end of file diff --git a/memory-bank/decisions/migration-progress.md b/memory-bank/decisions/migration-progress.md deleted file mode 100644 index 19408703..00000000 --- a/memory-bank/decisions/migration-progress.md +++ /dev/null @@ -1,41 +0,0 @@ -# Foreign Key Constraint Resolution - 2025-02-09 (Updated) - -## Revision Note -Corrected migration sequence conflict: -- Original 0002 migration conflicted with existing 0002 file -- Created new migration as 0012_cleanup_invalid_designers.py -- Deleted conflicting 0002_cleanup_invalid_designers.py - -## Updated Resolution Steps -1. Created conflict-free migration 0012 -2. Verified migration dependencies: - ```python - dependencies = [ - ('rides', '0011_merge_20250209_1143'), - ('designers', '0001_initial'), - ] - ``` -3. New migration command: - ```bash - python manage.py migrate rides 0012_cleanup_invalid_designers - ``` - -## PGHistory Migration Fix - 2025-02-09 -Foreign key constraint violation during pghistory migration: -1. Issue: `rides_ride_designer_id_172b997d_fk_designers_designer_id` constraint violation during 0010_rideevent migration -2. Resolution: - - Created new cleanup migration (0009_cleanup_invalid_designers_pre_events.py) to run before event table creation - - Updated migration dependencies to ensure proper sequencing: - ```python - # 0009_cleanup_invalid_designers_pre_events.py - dependencies = [ - ('rides', '0008_historicalride_post_closing_status_and_more'), - ('designers', '0001_initial'), - ] - ``` - - Created merge migration (0013_merge_20250209_1214.py) to resolve multiple leaf nodes -3. Final Migration Sequence: - - Base migrations up to 0008 - - Cleanup migration (0009_cleanup_invalid_designers_pre_events) - - Event table creation (0010_rideevent_ridemodelevent_and_more) - - Merge migrations (0011, 0012, 0013) \ No newline at end of file diff --git a/memory-bank/decisions/park-search-improvements.md b/memory-bank/decisions/park-search-improvements.md deleted file mode 100644 index cd2aad6b..00000000 --- a/memory-bank/decisions/park-search-improvements.md +++ /dev/null @@ -1,71 +0,0 @@ -# Park Search Implementation Improvements - -## Context -The park search functionality needed to be updated to follow consistent patterns across the application and strictly adhere to the "NO CUSTOM JS" rule. Previously, search functionality was inconsistent and did not fully utilize built-in framework features. - -## Decision -Implemented a unified search pattern that: -1. Uses only built-in HTMX and Alpine.js features -2. Matches location search pattern -3. Removes any custom JavaScript files -4. Maintains consistency across the application - -### Benefits -1. **Simplified Architecture:** - - No custom JavaScript files needed - - Direct template-based implementation - - Reduced maintenance burden - - Smaller codebase - -2. **Framework Alignment:** - - Uses HTMX for AJAX requests - - Uses Alpine.js for state management - - All functionality in templates - - Follows project patterns - -3. **Better Maintainability:** - - Single source of truth in templates - - Reduced complexity - - Easier to understand - - Consistent with other features - -## Implementation Details - -### Template Features -1. HTMX Integration: - - Debounced search requests (300ms) - - Loading indicators - - JSON response handling - -2. Alpine.js Usage: - - State management in template - - Event handling - - UI updates - - Keyboard interactions - -### Backend Changes -1. JSON API: - - Consistent response format - - Type validation - - Limited results (8 items) - - Performance optimization - -2. View Updates: - - Search filtering - - Result formatting - - Error handling - - State preservation - -## Benefits -1. Better adherence to project standards -2. Simplified codebase -3. Reduced technical debt -4. Easier maintenance -5. Consistent user experience - -## Testing -1. API response format -2. Empty search handling -3. Field validation -4. UI interactions -5. State management \ No newline at end of file diff --git a/memory-bank/decisions/park_count_fields.md b/memory-bank/decisions/park_count_fields.md deleted file mode 100644 index efeeda1c..00000000 --- a/memory-bank/decisions/park_count_fields.md +++ /dev/null @@ -1,59 +0,0 @@ -# Park Count Fields Implementation - -## Context -While implementing park views, we encountered errors where `ride_count` and `coaster_count` annotations conflicted with existing model fields of the same names. Additionally, we discovered inconsistencies in how these counts were being used across different views. - -## Decision -We decided to use both approaches but with distinct names: - -1. **Model Fields**: - - `ride_count`: Stored count of all rides - - `coaster_count`: Stored count of roller coasters - - Used in models and database schema - - Required for backward compatibility - -2. **Annotations**: - - `current_ride_count`: Real-time count of all rides - - `current_coaster_count`: Real-time count of roller coasters - - Provide accurate, up-to-date counts - - Used in templates and filters - -This approach allows us to: -- Maintain existing database schema -- Show accurate, real-time counts in the UI -- Avoid name conflicts between fields and annotations -- Keep consistent naming pattern for both types of counts - -## Implementation -1. Views: - - Added base queryset method with annotations - - Used 'current_' prefix for annotated counts - - Ensured all views use the base queryset - -2. Filters: - - Updated filter fields to use annotated counts - - Configured filter class to always use base queryset - - Maintained filter functionality with new field names - -3. Templates: - - Updated templates to use computed counts - -## Why This Pattern -1. **Consistency**: Using the 'current_' prefix clearly indicates which values are computed in real-time -2. **Compatibility**: Maintains support for existing code that relies on the stored fields -3. **Flexibility**: Allows gradual migration from stored to computed counts if desired -4. **Performance Option**: Keeps the option to use stored counts for expensive queries - -## Future Considerations -We might want to: -1. Add periodic tasks to sync stored counts with computed values -2. Consider deprecating stored fields if they're not needed for performance -3. Add validation to ensure stored counts stay in sync with reality -4. Create a management command to update stored counts - -## Related Files -- parks/models.py -- parks/views.py -- parks/filters.py -- parks/templates/parks/partials/park_list_item.html -- parks/tests/test_filters.py \ No newline at end of file diff --git a/memory-bank/decisions/pghistory-integration.md b/memory-bank/decisions/pghistory-integration.md deleted file mode 100644 index 5fd76f0a..00000000 --- a/memory-bank/decisions/pghistory-integration.md +++ /dev/null @@ -1,45 +0,0 @@ -## Decision: Universal Model History via django-pghistory - -### Pattern Implementation -- **Tracking Method**: `pghistory.Snapshot()` applied to all concrete models -- **Inheritance Strategy**: Base model class with history tracking -- **Context Capture**: - ```python - # core/models.py - import pghistory - - class HistoricalModel(models.Model): - class Meta: - abstract = True - - @pghistory.track(pghistory.Snapshot()) - def save(self, *args, **kwargs): - return super().save(*args, **kwargs) - ``` - -### Integration Scope -1. **Model Layer**: - - All concrete models inherit from `HistoricalModel` - - Automatic event labeling: - ```python - @pghistory.track( - pghistory.Snapshot('model.create'), - pghistory.AfterInsert('model.update'), - pghistory.BeforeDelete('model.delete') - ) - ``` - -2. **Context Middleware**: - ```python - # core/middleware.py - pghistory.context(lambda request: { - 'user': str(request.user) if request.user.is_authenticated else None, - 'ip': request.META.get('REMOTE_ADDR'), - 'user_agent': request.META.get('HTTP_USER_AGENT'), - 'session_key': request.session.session_key - }) - ``` - -3. **Admin Integration**: - - Custom history view for Django Admin - - Version comparison interface \ No newline at end of file diff --git a/memory-bank/decisions/ride-search-architecture-2025-06-24.md b/memory-bank/decisions/ride-search-architecture-2025-06-24.md deleted file mode 100644 index 2bb8138f..00000000 --- a/memory-bank/decisions/ride-search-architecture-2025-06-24.md +++ /dev/null @@ -1,74 +0,0 @@ -# Ride Search Architecture Decision - -**Date**: 2025-06-24 -**Status**: Planned -**Context**: Extending search functionality from parks to rides - -## Decision - -Implement ride search functionality following the established BaseAutocomplete pattern with these key architectural decisions: - -### 1. Pattern Consistency -- **Extend BaseAutocomplete**: Use same authentication-first approach as park search -- **Mirror Structure**: RideAutocomplete + RideSearchForm following ParkAutocomplete pattern -- **HTMX Integration**: Same frontend interaction patterns for consistency - -### 2. Relationship Handling -- **Park Context**: Rides belong to parks via ForeignKey, search results must show both -- **Query Optimization**: Use `select_related('park')` for efficient database queries -- **Result Display**: Show "Ride Name - Park Name" format in autocomplete results - -### 3. Database Strategy -- **Indexes**: Add database indexes on `Ride.name` and `Ride.park_id` -- **Query Limits**: Limit autocomplete to 10 results for performance -- **Filtering**: Support filtering by park, thrill level, duration - -### 4. Frontend Architecture -- **Component Reuse**: Leverage existing search CSS and JavaScript patterns -- **HTMX Endpoints**: `/search/rides/autocomplete/` and `/search/rides/results/` -- **AlpineJS State**: Manage selection state and form interactions - -### 5. Testing Strategy -- **Unit Tests**: RideAutocomplete, RideSearchForm, and filter logic -- **Integration Tests**: HTMX responses and authentication requirements -- **Performance Tests**: Large dataset handling and query optimization - -## Rationale - -This approach ensures: -- **Consistency**: Users get familiar interaction patterns -- **Performance**: Optimized queries and result limiting -- **Maintainability**: Follows established codebase patterns -- **Scalability**: Database indexes and query optimization - -## Implementation Files - -### Core Components -- `search/mixins.py` - RideAutocomplete class -- `search/forms.py` - RideSearchForm class -- `search/urls.py` - URL routing for ride endpoints -- `rides/views.py` - RideSearchView with authentication - -### Templates -- `search/templates/search/partials/_ride_search.html` - Search form -- `rides/templates/rides/partials/ride_results.html` - Results display - -### Tests -- `search/tests/test_autocomplete.py` - RideAutocomplete tests -- `search/tests/test_forms.py` - RideSearchForm tests -- `rides/tests/test_search_view.py` - View and integration tests - -## Next Steps - -1. Code mode implementation of core components -2. Database migration for indexes -3. Template creation and HTMX integration -4. Comprehensive test suite -5. Performance validation - -## Dependencies - -- Existing BaseAutocomplete infrastructure -- HTMX and AlpineJS frontend stack -- Django authentication system -- Ride model with park relationship \ No newline at end of file diff --git a/memory-bank/decisions/ride-search-implementation-2025-06-24.md b/memory-bank/decisions/ride-search-implementation-2025-06-24.md deleted file mode 100644 index fc8acf3c..00000000 --- a/memory-bank/decisions/ride-search-implementation-2025-06-24.md +++ /dev/null @@ -1,159 +0,0 @@ -# Ride Search Implementation Summary - -**Date:** 2025-06-24 -**Status:** Core Implementation Complete -**Next:** Testing & Integration - -## Implementation Overview - -Successfully implemented ride search functionality following the documented architecture specification. The implementation extends the existing park search infrastructure with ride-specific components. - -## Components Implemented - -### 1. RideAutocomplete Class (`search/mixins.py`) -- **Location:** Added to existing `search/mixins.py` file -- **Extends:** `BaseAutocomplete` from `core/forms.py` -- **Features:** - - Name-based search with partial matching (`name__icontains`) - - Includes park name in results for context - - Prefetches related park data with `select_related('park')` - - Limited to 10 results for performance - - Formats results as "Ride Name - at Park Name" -- **Authentication:** Inherits authentication requirement from BaseAutocomplete - -### 2. RideSearchForm Class (`search/forms.py`) -- **Location:** New file created -- **Pattern:** Follows `ParkSearchForm` pattern from `parks/forms.py` -- **Features:** - - Uses `AutocompleteWidget` with `RideAutocomplete` class - - Consistent styling with existing forms - - Placeholder text: "Search rides..." - -### 3. URL Configuration (`search/urls.py`) -- **Added Routes:** - - `rides/autocomplete/` → `RideAutocomplete.as_view()` (name: `ride_autocomplete`) - - `rides/results/` → `RideSearchView.as_view()` (name: `ride_search_results`) -- **Pattern:** Follows existing search URL structure - -### 4. RideSearchView Class (`rides/views.py`) -- **Location:** Added to existing `rides/views.py` file -- **Extends:** `LoginRequiredMixin`, `ListView` -- **Features:** - - Authentication required - - HTMX support with different templates - - Processes `RideSearchForm` data - - Supports both specific ride selection and search term filtering - - Pagination (20 items per page) - - Optimized queryset with `select_related('park')` - -### 5. Template Components - -#### Ride Search Results (`search/templates/search/partials/ride_search_results.html`) -- **Features:** - - Responsive card layout - - Shows ride name, park name, description - - Category and status badges with color coding - - Photo thumbnails when available - - Links to ride detail pages - - Empty state with helpful message - - Dark mode support - -### 6. Test Suite (`search/tests/test_ride_autocomplete.py`) -- **Test Coverage:** - - Authentication requirements - - Search result filtering and case insensitivity - - Result formatting - - Performance limits (10 result max) - - Related data prefetching -- **Test Infrastructure:** - - Uses correct custom User model (`get_user_model()`) - - Creates test data (Company, Park, Rides) - - Proper test isolation - -## Technical Decisions - -### Authentication Strategy -- **Decision:** Inherit authentication from `BaseAutocomplete` -- **Rationale:** Maintains consistency with existing park search -- **Implementation:** Uses `BaseAutocomplete.auth_check()` method - -### Result Formatting -- **Decision:** Format as "Ride Name - at Park Name" -- **Rationale:** Provides context without cluttering the interface -- **Implementation:** Uses `extra` field in autocomplete results - -### Performance Optimization -- **Decision:** Limit autocomplete to 10 results with `select_related('park')` -- **Rationale:** Balances responsiveness with useful results -- **Implementation:** Slice queryset `[:10]` and prefetch park data - -### Template Structure -- **Decision:** Follow existing HTMX partial pattern -- **Rationale:** Maintains consistency with park search templates -- **Implementation:** Separate partials for different response types - -## Integration Points - -### With Existing Park Search -- **Shared Infrastructure:** Uses same `BaseAutocomplete` and styling patterns -- **URL Structure:** Follows `/search/rides/` pattern parallel to `/search/parks/` -- **Template Patterns:** Reuses established HTMX and styling conventions - -### With Ride Models -- **Model Relationship:** Uses `Ride.park` ForeignKey for context -- **Queryset Optimization:** Leverages `select_related()` for efficient queries -- **Status Display:** Uses model's `get_status_display()` and `get_category_display()` - -## Current Status - -### ✅ Completed -1. **Core Components:** All classes and forms implemented -2. **URL Routing:** Endpoints configured and accessible -3. **Templates:** Results template with full styling -4. **Basic Testing:** Unit tests for autocomplete functionality -5. **Authentication:** Integrated with project auth system - -### 🔄 In Progress -1. **Test Fixes:** Authentication test needs adjustment (PermissionDenied not raised as expected) -2. **Integration Testing:** Manual HTMX testing pending - -### 📋 Remaining Tasks -1. **Form Template:** Create ride search form partial template -2. **Manual Testing:** Test autocomplete and search in browser -3. **Documentation:** Update user-facing documentation -4. **Performance Testing:** Verify query performance with larger datasets - -## Files Modified/Created - -### New Files -- `search/forms.py` - RideSearchForm -- `search/tests/__init__.py` - Test package initialization -- `search/tests/test_ride_autocomplete.py` - Test suite -- `search/templates/search/partials/ride_search_results.html` - Results template -- `memory-bank/decisions/ride-search-implementation-2025-06-24.md` - This document - -### Modified Files -- `search/mixins.py` - Added RideAutocomplete class -- `search/urls.py` - Added ride search endpoints -- `rides/views.py` - Added RideSearchView class -- `memory-bank/activeContext.md` - Updated progress tracking - -## Architecture Compliance - -The implementation fully follows the architecture specification in `memory-bank/features/search/rides.md`: - -- ✅ **Authentication-first approach** - Inherited from BaseAutocomplete -- ✅ **BaseAutocomplete pattern** - Extended correctly -- ✅ **HTMX + AlpineJS frontend** - Template supports HTMX -- ✅ **Performance optimization** - Query limits and select_related -- ✅ **Consistent styling** - Reuses established CSS classes -- ✅ **Test coverage** - Comprehensive unit tests - -## Next Steps - -1. **Fix Authentication Test:** Investigate why PermissionDenied isn't being raised -2. **Manual Testing:** Start development server and test functionality -3. **Form Template:** Create search form partial for complete integration -4. **Documentation:** Update project documentation with new search capabilities - -The core ride search functionality is now implemented and ready for testing and integration. \ No newline at end of file diff --git a/memory-bank/decisions/ride-search-template-2025-06-25.md b/memory-bank/decisions/ride-search-template-2025-06-25.md deleted file mode 100644 index dd85b599..00000000 --- a/memory-bank/decisions/ride-search-template-2025-06-25.md +++ /dev/null @@ -1,75 +0,0 @@ -# Ride Search Template Creation - 2025-06-25 - -## Context -Created the missing ride search form template that was identified as a remaining task in the active context. The RideSearchView was expecting a template at `search/templates/search/ride_search.html` for non-HTMX requests. - -## Implementation - -### Template Created: `search/templates/search/ride_search.html` - -**Key Features:** -- Full page template extending `base/base.html` -- HTMX integration with proper attributes: - - `hx-get` pointing to ride search URL - - `hx-target` for results container - - `hx-trigger` with 300ms delay for responsive search - - `hx-indicator` for loading state -- Responsive design with Tailwind CSS classes -- Search form using the `RideSearchForm` from context -- Results container that includes the existing `ride_search_results.html` partial -- JavaScript enhancement for clearing results when input is empty -- Loading indicator with spinner animation - -**Template Structure:** -1. **Header Section**: Title and description -2. **Search Form**: - - Form with HTMX attributes - - Autocomplete input field with proper styling - - Submit button with search icon - - Loading indicator -3. **Results Section**: Container for HTMX-loaded results -4. **JavaScript Enhancement**: Clear results on empty input - -## Integration Points - -**With RideSearchView:** -- Template name matches view's `get_template_names()` expectation -- Uses `search_form` from view context -- HTMX requests target the same view for partial updates - -**With Existing Components:** -- Includes `search/partials/ride_search_results.html` for results display -- Follows same styling patterns as other search templates -- Uses established HTMX patterns from park search - -## Technical Decisions - -**HTMX Configuration:** -- 300ms delay prevents excessive API calls during typing -- Targets specific container for seamless updates -- Includes loading indicator for better UX - -**Styling Approach:** -- Consistent with existing ThrillWiki design system -- Dark mode support with proper color classes -- Responsive layout with proper spacing - -**JavaScript Enhancement:** -- Minimal JavaScript for clearing results -- Enhances UX without breaking core functionality -- Follows progressive enhancement principles - -## Testing Status -- Template created and ready for testing -- Server restarted to ensure proper loading -- Next step: Manual HTMX integration testing - -## Files Modified -- `search/templates/search/ride_search.html` (created) -- `memory-bank/activeContext.md` (updated progress) - -## Next Steps -1. Test HTMX integration manually once server is running -2. Verify autocomplete functionality works properly -3. Test responsive design and loading states -4. Validate search results display correctly \ No newline at end of file diff --git a/memory-bank/decisions/ride-search-testing-2025-06-25.md b/memory-bank/decisions/ride-search-testing-2025-06-25.md deleted file mode 100644 index 03f0dffc..00000000 --- a/memory-bank/decisions/ride-search-testing-2025-06-25.md +++ /dev/null @@ -1,118 +0,0 @@ -# Ride Search Testing and Validation Report - -**Date:** 2025-06-25 -**Status:** Testing in Progress - Issues Found -**Task:** Comprehensive testing and validation of ride search functionality - -## Testing Progress - -### ✅ Unit Tests - PASSED -- **Command:** `uv run manage.py test search.tests.test_ride_autocomplete` -- **Result:** All 7 tests passing -- **Fixed Issues:** - - Authentication test was failing because `AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False` in settings - - Fixed by adding `@override_settings(AUTOCOMPLETE_BLOCK_UNAUTHENTICATED=True)` decorator - - Changed `request.user = None` to `request.user = AnonymousUser()` for proper Django user handling - -### ❌ Integration Testing - ISSUES FOUND - -#### Issue 1: URL Configuration Missing -- **Problem:** Main `thrillwiki/urls.py` had `path("search/", SearchView.as_view(), name="search")` instead of including search app URLs -- **Fix Applied:** Changed to `path("search/", include("search.urls", namespace="search"))` -- **Status:** Fixed - -#### Issue 2: Import Error in search/views.py -- **Problem:** `from .filters import ParkFilter` - ParkFilter doesn't exist in search.filters -- **Fix Applied:** Changed to `from parks.filters import ParkFilter` -- **Status:** Fixed - -#### Issue 3: RideAutocomplete Missing as_view Method -- **Problem:** `AttributeError: type object 'RideAutocomplete' has no attribute 'as_view'` -- **Root Cause:** `BaseAutocomplete` inherits from `autocomplete.Autocomplete` (django-htmx-autocomplete package) -- **Status:** INVESTIGATING - May need package installation or import fix - -## Current Server Status -- Development server fails to start due to RideAutocomplete.as_view() error -- Need to resolve autocomplete package integration - -## Test Coverage Analysis - -### Unit Test Results (7/7 passing): -1. ✅ `test_autocomplete_requires_authentication` - Authentication enforced when enabled -2. ✅ `test_autocomplete_allows_authenticated_users` - Authenticated users can access -3. ✅ `test_search_filters_by_name` - Name-based search filtering works -4. ✅ `test_search_case_insensitive` - Case-insensitive search works -5. ✅ `test_result_formatting` - Results formatted as "Ride Name - at Park Name" -6. ✅ `test_result_limit` - Limited to 10 results for performance -7. ✅ `test_select_related_optimization` - Database queries optimized with select_related - -### Performance Validation -- ✅ Result limit (10 items) implemented -- ✅ Database optimization with `select_related('park')` confirmed -- ✅ Authentication configuration flexible via settings - -### Architecture Compliance -- ✅ Follows BaseAutocomplete pattern -- ✅ Consistent with existing park search implementation -- ✅ HTMX integration prepared (pending server fix) -- ✅ Template structure follows project conventions - -## Issues to Resolve - -### High Priority -1. **RideAutocomplete.as_view() Error** - - Investigate django-htmx-autocomplete package installation - - Verify BaseAutocomplete inheritance chain - - Ensure proper view class structure - -### Medium Priority -2. **Manual Browser Testing** - - Cannot proceed until server starts successfully - - Need to test autocomplete UI functionality - - Validate HTMX responses - -3. **Form Template Creation** - - Need to create ride search form partial template - - Integration with existing search interface - -## Next Steps - -1. Fix RideAutocomplete.as_view() issue -2. Start development server successfully -3. Test autocomplete endpoints with curl/browser -4. Validate HTMX integration -5. Create comprehensive validation report - -## Technical Decisions Made - -### Authentication Strategy -- **Decision:** Use `@override_settings` in tests to validate authentication behavior -- **Rationale:** Project has `AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False` for public access, but tests should validate security capability -- **Implementation:** Tests can verify both public and authenticated-only modes - -### URL Structure -- **Decision:** Include search app URLs via `include("search.urls", namespace="search")` -- **Rationale:** Allows proper URL routing for autocomplete and search endpoints -- **Pattern:** `/search/rides/autocomplete/` and `/search/rides/results/` - -## Files Modified During Testing - -### Fixed Files -- `search/tests/test_ride_autocomplete.py` - Added AnonymousUser import and @override_settings -- `thrillwiki/urls.py` - Fixed search URL inclusion -- `search/views.py` - Fixed ParkFilter import path - -### Files Requiring Investigation -- `search/mixins.py` - RideAutocomplete class (inheritance issue) -- `core/forms.py` - BaseAutocomplete class (django-htmx-autocomplete dependency) - -## Validation Criteria Status - -- ✅ All unit tests pass -- ❌ HTMX endpoints accessible (blocked by server issue) -- ✅ Authentication requirements work -- ❌ Search results display correctly (pending server fix) -- ✅ Performance meets specifications -- ❌ Manual browser testing (pending server fix) - -**Overall Status:** 60% Complete - Core functionality validated, integration testing blocked by server startup issue. \ No newline at end of file diff --git a/memory-bank/decisions/ride_count_field.md b/memory-bank/decisions/ride_count_field.md deleted file mode 100644 index 2e10067c..00000000 --- a/memory-bank/decisions/ride_count_field.md +++ /dev/null @@ -1,39 +0,0 @@ -# Ride Count Field Implementation - -## Context -While implementing park views, we encountered an error where a `ride_count` annotation conflicted with an existing model field of the same name. This raised a question about how to handle real-time ride counts versus stored counts. - -## Decision -We decided to use both approaches but with distinct names: - -1. **Model Field (`ride_count`)**: - - Kept the original field for backward compatibility - - Used in test fixtures and filtering system - - Can serve as a cached/denormalized value - -2. **Annotation (`current_ride_count`)**: - - Added new annotation with a distinct name - - Provides real-time count of rides - - Used in templates for display purposes - -This approach allows us to: -- Maintain existing functionality in tests and filters -- Show accurate, real-time counts in the UI -- Avoid name conflicts between fields and annotations - -## Implementation -- Kept the `ride_count` IntegerField in the Park model -- Added `current_ride_count = Count('rides', distinct=True)` annotation in views -- Updated templates to use `current_ride_count` for display - -## Future Considerations -We might want to: -1. Add a periodic task to sync the stored `ride_count` with the computed value -2. Consider deprecating the stored field if it's not needed for performance -3. Add validation to ensure the stored count stays in sync with reality - -## Related Files -- parks/models.py -- parks/views.py -- parks/templates/parks/partials/park_list_item.html -- parks/tests/test_filters.py \ No newline at end of file diff --git a/memory-bank/decisions/search-form-fix.md b/memory-bank/decisions/search-form-fix.md deleted file mode 100644 index c9300430..00000000 --- a/memory-bank/decisions/search-form-fix.md +++ /dev/null @@ -1,24 +0,0 @@ -# Search Form Fix - -## Issue -Search results were being duplicated because selecting a suggestion triggered both: -1. The suggestions form submission (to /suggest_parks/) -2. The filter form submission (to /park_list/) - -## Root Cause -The `@search-selected` event handler was submitting the wrong form. It was submitting the suggestions form which has `hx-target="#search-results"` instead of the filter form which has `hx-target="#park-results"`. - -## Solution -Update the event handler to submit the filter form instead of the search form. This ensures only one request is made to update the results. - -## Implementation -1. Modified the `@search-selected` handler to: - - Set the search query in filter form - - Submit filter form to update results - - Hide suggestions dropdown -2. Added proper form IDs and refs - -## Benefits -- Eliminates duplicate requests -- Maintains correct search behavior -- Improves user experience \ No newline at end of file diff --git a/memory-bank/decisions/test-fixes-2024-02-22.md b/memory-bank/decisions/test-fixes-2024-02-22.md deleted file mode 100644 index 42aa6a87..00000000 --- a/memory-bank/decisions/test-fixes-2024-02-22.md +++ /dev/null @@ -1,28 +0,0 @@ -# Test Fixes Required - 2024-02-22 - -## Issues Identified - -### 1. ParkArea Unique Constraint Test (IntegrityError) -- **Problem**: Test expects ValidationError but gets IntegrityError -- **Root Cause**: Database constraint violation instead of model validation -- **Fix**: Update test to expect IntegrityError or add model validation - -### 2. Numeric Filtering Test (min_rides filter) -- **Problem**: Filter not working correctly for min_rides=18 -- **Root Cause**: Likely issue with ride count calculation or filter logic -- **Fix**: Check ParkFilter implementation and ride count logic - -### 3. Historical Slug Lookup Test (is_historical flag) -- **Problem**: is_historical returning False instead of True for old slug -- **Root Cause**: get_by_slug method not correctly identifying historical slugs -- **Fix**: Review ParkArea.get_by_slug implementation - -## Priority Order -1. Fix unique constraint test (quick fix) -2. Fix historical slug lookup (core functionality) -3. Fix numeric filtering (search feature) - -## Next Steps -- Fix tests one by one -- Run test suite after each fix -- Document any model changes needed \ No newline at end of file diff --git a/memory-bank/documentation/APIs.md b/memory-bank/documentation/APIs.md deleted file mode 100644 index c55b8b72..00000000 --- a/memory-bank/documentation/APIs.md +++ /dev/null @@ -1,410 +0,0 @@ -# API Documentation - -## API Overview - -### Base Configuration -```python -REST_FRAMEWORK = { - 'DEFAULT_AUTHENTICATION_CLASSES': [ - 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', - 'rest_framework.authentication.SessionAuthentication', - ], - 'DEFAULT_PERMISSION_CLASSES': [ - 'rest_framework.permissions.IsAuthenticated', - ], - 'DEFAULT_PAGINATION_CLASS': - 'rest_framework.pagination.PageNumberPagination', - 'PAGE_SIZE': 20, - 'DEFAULT_VERSIONING_CLASS': - 'rest_framework.versioning.AcceptHeaderVersioning', - 'DEFAULT_VERSION': 'v1' -} -``` - -## Authentication - -### JWT Authentication -```http -POST /api/token/ -Content-Type: application/json - -{ - "username": "user@example.com", - "[PASSWORD-REMOVED]" -} - -Response: -{ - "access": "eyJ0eXAiOiJKV1QiLCJhbGc...", - "refresh": "eyJ0eXAiOiJKV1QiLCJhbGc..." -} -``` - -### Token Refresh -```http -POST /api/token/refresh/ -Content-Type: application/json - -{ - "refresh": "eyJ0eXAiOiJKV1QiLCJhbGc..." -} - -Response: -{ - "access": "eyJ0eXAiOiJKV1QiLCJhbGc..." -} -``` - -## Endpoints - -### Parks API - -#### List Parks -```http -GET /api/v1/parks/ -Authorization: Bearer - -Response: -{ - "count": 100, - "next": "http://api.thrillwiki.com/parks/?page=2", - "previous": null, - "results": [ - { - "id": 1, - "name": "Adventure Park", - "slug": "adventure-park", - "status": "OPERATING", - "description": "...", - "location": { - "city": "Orlando", - "state": "FL", - "country": "USA" - }, - "ride_count": 25, - "average_rating": 4.5 - } - ] -} -``` - -#### Get Park Detail -```http -GET /api/v1/parks/{slug}/ -Authorization: Bearer - -Response: -{ - "id": 1, - "name": "Adventure Park", - "slug": "adventure-park", - "status": "OPERATING", - "description": "...", - "location": { - "address": "123 Theme Park Way", - "city": "Orlando", - "state": "FL", - "country": "USA", - "postal_code": "32819", - "coordinates": { - "latitude": 28.538336, - "longitude": -81.379234 - } - }, - "owner": { - "id": 1, - "name": "Theme Park Corp", - "verified": true - }, - "stats": { - "ride_count": 25, - "coaster_count": 5, - "average_rating": 4.5 - }, - "rides": [ - { - "id": 1, - "name": "Thrill Coaster", - "type": "ROLLER_COASTER", - "status": "OPERATING" - } - ] -} -``` - -### Rides API - -#### List Rides -```http -GET /api/v1/parks/{park_slug}/rides/ -Authorization: Bearer - -Response: -{ - "count": 25, - "next": null, - "previous": null, - "results": [ - { - "id": 1, - "name": "Thrill Coaster", - "slug": "thrill-coaster", - "type": "ROLLER_COASTER", - "status": "OPERATING", - "height_requirement": 48, - "thrill_rating": 5, - "manufacturer": { - "id": 1, - "name": "Coaster Corp" - } - } - ] -} -``` - -#### Get Ride Detail -```http -GET /api/v1/rides/{ride_slug}/ -Authorization: Bearer - -Response: -{ - "id": 1, - "name": "Thrill Coaster", - "slug": "thrill-coaster", - "type": "ROLLER_COASTER", - "status": "OPERATING", - "description": "...", - "specifications": { - "height_requirement": 48, - "thrill_rating": 5, - "capacity_per_hour": 1200, - "track_length": 3000 - }, - "manufacturer": { - "id": 1, - "name": "Coaster Corp" - }, - "designer": { - "id": 1, - "name": "John Designer" - }, - "opening_date": "2020-06-15", - "stats": { - "average_rating": 4.8, - "review_count": 150 - } -} -``` - -### Reviews API - -#### Create Review -```http -POST /api/v1/reviews/ -Authorization: Bearer -Content-Type: application/json - -{ - "content_type": "ride", - "object_id": 1, - "rating": 5, - "content": "Amazing experience!", - "media": [ - { - "type": "image", - "file": "base64encoded..." - } - ] -} - -Response: -{ - "id": 1, - "author": { - "id": 1, - "username": "reviewer" - }, - "rating": 5, - "content": "Amazing experience!", - "status": "PENDING", - "created_at": "2024-02-18T14:30:00Z" -} -``` - -#### List Reviews -```http -GET /api/v1/rides/{ride_id}/reviews/ -Authorization: Bearer - -Response: -{ - "count": 150, - "next": "http://api.thrillwiki.com/rides/1/reviews/?page=2", - "previous": null, - "results": [ - { - "id": 1, - "author": { - "id": 1, - "username": "reviewer" - }, - "rating": 5, - "content": "Amazing experience!", - "created_at": "2024-02-18T14:30:00Z", - "media": [ - { - "type": "image", - "url": "https://media.thrillwiki.com/reviews/1/image.jpg" - } - ] - } - ] -} -``` - -## Integrations - -### Email Service Integration -```http -POST /api/v1/email/send/ -Authorization: Bearer -Content-Type: application/json - -{ - "template": "review_notification", - "recipient": "user@example.com", - "context": { - "review_id": 1, - "content": "Amazing experience!" - } -} - -Response: -{ - "status": "sent", - "message_id": "123abc", - "sent_at": "2024-02-18T14:30:00Z" -} -``` - -### Media Processing -```http -POST /api/v1/media/process/ -Authorization: Bearer -Content-Type: multipart/form-data - -file: [binary data] - -Response: -{ - "id": 1, - "original_url": "https://media.thrillwiki.com/original/image.jpg", - "processed_url": "https://media.thrillwiki.com/processed/image.jpg", - "thumbnail_url": "https://media.thrillwiki.com/thumbnails/image.jpg", - "metadata": { - "width": 1920, - "height": 1080, - "format": "jpeg", - "size": 1024576 - } -} -``` - -## API Versioning - -### Version Header -```http -Accept: application/json; version=1.0 -``` - -### Version Routes -```python -# urls.py -urlpatterns = [ - path('v1/', include('api.v1.urls')), - path('v2/', include('api.v2.urls')), -] -``` - -## Error Handling - -### Error Response Format -```json -{ - "error": { - "code": "validation_error", - "message": "Invalid input data", - "details": [ - { - "field": "rating", - "message": "Rating must be between 1 and 5" - } - ] - } -} -``` - -### Common Error Codes -- `authentication_error`: Invalid or missing authentication -- `permission_denied`: Insufficient permissions -- `validation_error`: Invalid input data -- `not_found`: Resource not found -- `rate_limit_exceeded`: Too many requests - -## Rate Limiting - -### Rate Limit Configuration -```python -REST_FRAMEWORK = { - 'DEFAULT_THROTTLE_CLASSES': [ - 'rest_framework.throttling.AnonRateThrottle', - 'rest_framework.throttling.UserRateThrottle' - ], - 'DEFAULT_THROTTLE_RATES': { - 'anon': '100/day', - 'user': '1000/day', - 'burst': '20/minute' - } -} -``` - -### Rate Limit Headers -```http -X-RateLimit-Limit: 1000 -X-RateLimit-Remaining: 999 -X-RateLimit-Reset: 1613664000 -``` - -## API Documentation - -### Swagger/OpenAPI -```yaml -openapi: 3.0.0 -info: - title: ThrillWiki API - version: 1.0.0 -paths: - /parks: - get: - summary: List parks - parameters: - - name: page - in: query - schema: - type: integer - responses: - '200': - description: Successful response - content: - application/json: - schema: - $ref: '#/components/schemas/ParkList' -``` - -### API Documentation URLs -```python -urlpatterns = [ - path('docs/', include_docs_urls(title='ThrillWiki API')), - path('schema/', schema_view), -] \ No newline at end of file diff --git a/memory-bank/documentation/Architecture.md b/memory-bank/documentation/Architecture.md deleted file mode 100644 index 362b4c82..00000000 --- a/memory-bank/documentation/Architecture.md +++ /dev/null @@ -1,196 +0,0 @@ -# System Architecture Documentation - -## Overview -ThrillWiki is a Django-based web platform built with a modular architecture focusing on theme park information management, user reviews, and content moderation. - -## Technology Stack - -### Backend -- **Framework**: Django 5.1.6 -- **API**: Django REST Framework 3.15.2 -- **WebSocket Support**: Channels 4.2.0 with Redis -- **Authentication**: django-allauth, OAuth Toolkit -- **Database**: PostgreSQL with django-pghistory - -### Frontend -- **Templating**: Django Templates -- **CSS Framework**: Tailwind CSS -- **Enhancement**: HTMX, JavaScript -- **Asset Management**: django-webpack-loader - -### Infrastructure -- **Static Files**: WhiteNoise 6.9.0 -- **Media Storage**: Local filesystem with custom storage backends -- **Caching**: Redis (shared with WebSocket layer) - -## System Components - -### Core Applications - -1. **Parks Module** - - Park information management - - Geographic data handling - - Operating hours tracking - - Integration with location services - -2. **Rides Module** - - Ride specifications - - Manufacturer/Designer attribution - - Historical data tracking - - Technical details management - -3. **Reviews System** - - User-generated content - - Media attachments - - Rating framework - - Integration with moderation - -4. **Moderation System** - - Content review workflow - - Quality control mechanisms - - User management - - Verification processes - -5. **Companies Module** - - Company profiles - - Verification system - - Official update management - - Park operator features - -### Service Layer - -1. **Authentication Service** - ```python - # Key authentication flows - User Authentication → JWT Token → Protected Resources - Social Auth → Profile Creation → Platform Access - ``` - -2. **Media Service** - ```python - # Media handling workflow - Upload → Processing → Storage → Delivery - ``` - -3. **Analytics Service** - ```python - # Analytics pipeline - User Action → Event Tracking → Processing → Insights - ``` - -## Data Flow Architecture - -``` -┌─────────────┐ ┌──────────────┐ ┌─────────────┐ -│ Client │ ──→ │ Django │ ──→ │ Database │ -│ Browser │ ←── │ Server │ ←── │ (Postgres) │ -└─────────────┘ └──────────────┘ └─────────────┘ - ↑ ↓ - ┌──────────────┐ - │ Services │ - │ (Redis/S3) │ - └──────────────┘ -``` - -## Security Architecture - -1. **Authentication Flow** - - JWT-based authentication - - Social authentication integration - - Session management - - Permission-based access control - -2. **Data Protection** - - Input validation - - XSS prevention - - CSRF protection - - SQL injection prevention - -## Deployment Model - -### Production Environment -``` -├── Application Server (Daphne/ASGI) -├── Database (PostgreSQL) -├── Cache/Message Broker (Redis) -├── Static Files (WhiteNoise) -└── Media Storage (Filesystem/S3) -``` - -### Development Environment -``` -├── Local Django Server -├── Local PostgreSQL -├── Local Redis -└── Local File Storage -``` - -## Monitoring and Scaling - -1. **Performance Monitoring** - - Page load metrics - - Database query analysis - - Cache hit rates - - API response times - -2. **Scaling Strategy** - - Horizontal scaling of web servers - - Database read replicas - - Cache layer expansion - - Media CDN integration - -## Search Architecture - -### Search Infrastructure -- **Base Pattern**: [`BaseAutocomplete`](core/forms.py:1) provides authentication-first autocomplete foundation -- **Park Search**: [`ParkAutocomplete`](search/mixins.py:1) + [`ParkSearchForm`](search/forms.py:1) with HTMX integration -- **Ride Search**: Planned extension following same pattern with park relationship context - -### Search Components -1. **Autocomplete Layer** - - Authentication requirement enforced at base level - - Query limiting (10 results) for performance - - HTMX-driven real-time suggestions - -2. **Form Layer** - - Django forms with autocomplete widgets - - Filter integration for advanced search - - Clean validation and error handling - -3. **Frontend Integration** - - HTMX for dynamic updates (`hx-get`, `hx-trigger`) - - AlpineJS for local state management - - Tailwind CSS for consistent styling - -### Database Optimization -- Indexes on searchable fields (`name`, foreign keys) -- `select_related()` for relationship queries -- Query result limiting for performance - -## Integration Points - -1. **External Services** - - Email service (ForwardEmail.net) - - Social authentication providers - - Geographic data services - - Media processing services - -2. **Internal Services** - - WebSocket notifications - - Background tasks - - Media processing - - Analytics processing - -## System Requirements - -### Minimum Requirements -- Python 3.11+ -- PostgreSQL 13+ -- Redis 6+ -- Node.js 18+ (for frontend builds) - -### Development Tools -- black (code formatting) -- flake8 (linting) -- pytest (testing) -- tailwind CLI (CSS processing) \ No newline at end of file diff --git a/memory-bank/documentation/Code.md b/memory-bank/documentation/Code.md deleted file mode 100644 index 5980b66d..00000000 --- a/memory-bank/documentation/Code.md +++ /dev/null @@ -1,287 +0,0 @@ -# Code Documentation - -## Project Structure - -``` -thrillwiki/ -├── accounts/ # User management -├── analytics/ # Usage tracking -├── companies/ # Company profiles -├── core/ # Core functionality -├── designers/ # Designer profiles -├── email_service/ # Email handling -├── history/ # Historical views -├── history_tracking/ # Change tracking -├── location/ # Geographic features -├── media/ # Media management -├── moderation/ # Content moderation -├── parks/ # Park management -├── reviews/ # Review system -└── rides/ # Ride management -``` - -## Code Patterns - -### 1. Model Patterns - -#### History Tracking -```python -@pghistory.track() -class TrackedModel(models.Model): - """Base class for models with history tracking""" - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) -``` - -#### Slug Management -```python -class SluggedModel: - """Pattern for models with slug-based URLs""" - @classmethod - def get_by_slug(cls, slug: str) -> Tuple[Model, bool]: - # Check current slugs - try: - return cls.objects.get(slug=slug), False - except cls.DoesNotExist: - # Check historical slugs - historical = HistoricalSlug.objects.filter( - content_type=ContentType.objects.get_for_model(cls), - slug=slug - ).first() - if historical: - return cls.objects.get(pk=historical.object_id), True -``` - -#### Generic Relations -```python -# Example from parks/models.py -class Park(TrackedModel): - location = GenericRelation(Location) - photos = GenericRelation(Photo) -``` - -### 2. View Patterns - -#### Class-Based Views -```python -class ModeratedCreateView(LoginRequiredMixin, CreateView): - """Base view for content requiring moderation""" - def form_valid(self, form): - obj = form.save(commit=False) - obj.status = 'PENDING' - obj.created_by = self.request.user - return super().form_valid(form) -``` - -#### Permission Mixins -```python -class ModeratorRequiredMixin: - """Ensures user has moderation permissions""" - def dispatch(self, request, *args, **kwargs): - if not request.user.has_perm('moderation.can_moderate'): - raise PermissionDenied - return super().dispatch(request, *args, **kwargs) -``` - -### 3. Service Patterns - -#### Email Service -```python -class EmailService: - """Handles email templating and sending""" - def send_moderation_notification(self, content): - template = 'moderation/email/notification.html' - context = {'content': content} - self.send_templated_email(template, context) -``` - -#### Media Processing -```python -class MediaProcessor: - """Handles image optimization and processing""" - def process_image(self, image): - # Optimize size - # Extract EXIF - # Generate thumbnails - return processed_image -``` - -## Dependencies - -### Core Dependencies -```toml -# From pyproject.toml -[tool.poetry.dependencies] -django = "5.1.6" -djangorestframework = "3.15.2" -django-allauth = "65.4.1" -psycopg2-binary = "2.9.10" -django-pghistory = "3.5.2" -``` - -### Frontend Dependencies -```json -{ - "tailwindcss": "^3.0.0", - "htmx": "^1.22.0", - "webpack": "^5.0.0" -} -``` - -## Build Configuration - -### Django Settings -```python -INSTALLED_APPS = [ - # Django apps - 'django.contrib.admin', - 'django.contrib.auth', - - # Third-party apps - 'allauth', - 'rest_framework', - 'corsheaders', - - # Local apps - 'parks.apps.ParksConfig', - 'rides.apps.RidesConfig', -] - -MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'whitenoise.middleware.WhiteNoiseMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', -] -``` - -### Database Configuration -```python -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': env('DB_NAME'), - 'USER': env('DB_USER'), - 'PASSWORD': env('DB_PASSWORD'), - 'HOST': env('DB_HOST'), - 'PORT': env('DB_PORT'), - } -} -``` - -## Testing Framework - -### Test Structure -``` -tests/ -├── unit/ # Unit tests -├── integration/ # Integration tests -└── e2e/ # End-to-end tests -``` - -### Test Patterns -```python -class ParkTestCase(TestCase): - def setUp(self): - self.park = Park.objects.create( - name="Test Park", - status="OPERATING" - ) - - def test_park_creation(self): - self.assertEqual(self.park.slug, "test-park") -``` - -## Package Management - -### Python Dependencies -```bash -# Development dependencies -pip install -r requirements-dev.txt - -# Production dependencies -pip install -r requirements.txt -``` - -### Frontend Build -```bash -# Install frontend dependencies -npm install - -# Build static assets -npm run build -``` - -## Code Quality Tools - -### Python Tools -- black (code formatting) -- flake8 (linting) -- mypy (type checking) -- pytest (testing) - -### Configuration Files -```toml -# pyproject.toml -[tool.black] -line-length = 88 -target-version = ['py311'] - -[tool.mypy] -plugins = ["mypy_django_plugin.main"] -``` - -## Development Workflow - -### Local Development -1. Set up virtual environment -2. Install dependencies -3. Run migrations -4. Start development server - -```bash -python -m venv venv -source venv/bin/activate -pip install -r requirements.txt -python manage.py migrate -python manage.py runserver -``` - -### Code Review Process -1. Run linting tools -2. Run test suite -3. Check type hints -4. Review documentation - -## Deployment Process - -### Pre-deployment Checks -1. Run test suite -2. Check migrations -3. Validate static files -4. Verify environment variables - -### Deployment Steps -1. Update dependencies -2. Apply migrations -3. Collect static files -4. Restart application server - -## Error Handling - -### Exception Pattern -```python -class CustomException(Exception): - """Base exception for application""" - def __init__(self, message, code=None): - self.message = message - self.code = code -``` - -### Middleware Pattern -```python -class ErrorHandlingMiddleware: - """Centralized error handling""" - def process_exception(self, request, exception): - # Log exception - # Handle gracefully - # Return appropriate response \ No newline at end of file diff --git a/memory-bank/documentation/Data.md b/memory-bank/documentation/Data.md deleted file mode 100644 index 78eaae36..00000000 --- a/memory-bank/documentation/Data.md +++ /dev/null @@ -1,327 +0,0 @@ -# Data Documentation - -## Database Schema - -### Core Models - -#### Parks -```sql -CREATE TABLE parks_park ( - id SERIAL PRIMARY KEY, - name VARCHAR(255) NOT NULL, - slug VARCHAR(255) UNIQUE NOT NULL, - description TEXT, - status VARCHAR(20) DEFAULT 'OPERATING', - opening_date DATE, - closing_date DATE, - operating_season VARCHAR(255), - size_acres DECIMAL(10,2), - website VARCHAR(200), - average_rating DECIMAL(3,2), - ride_count INTEGER, - coaster_count INTEGER, - owner_id INTEGER REFERENCES companies_company(id), - created_at TIMESTAMP, - updated_at TIMESTAMP -); -``` - -#### Rides -```sql -CREATE TABLE rides_ride ( - id SERIAL PRIMARY KEY, - name VARCHAR(255) NOT NULL, - slug VARCHAR(255) NOT NULL, - description TEXT, - status VARCHAR(20), - park_id INTEGER REFERENCES parks_park(id), - area_id INTEGER REFERENCES parks_parkarea(id), - manufacturer_id INTEGER REFERENCES companies_company(id), - designer_id INTEGER REFERENCES designers_designer(id), - opening_date DATE, - closing_date DATE, - height_requirement INTEGER, - ride_type VARCHAR(50), - thrill_rating INTEGER, - created_at TIMESTAMP, - updated_at TIMESTAMP, - UNIQUE(park_id, slug) -); -``` - -#### Reviews -```sql -CREATE TABLE reviews_review ( - id SERIAL PRIMARY KEY, - content TEXT NOT NULL, - rating DECIMAL(3,2), - status VARCHAR(20), - author_id INTEGER REFERENCES auth_user(id), - content_type_id INTEGER REFERENCES django_content_type(id), - object_id INTEGER, - created_at TIMESTAMP, - updated_at TIMESTAMP -); -``` - -### Entity Relationships - -```mermaid -erDiagram - Park ||--o{ ParkArea : "contains" - Park ||--o{ Ride : "has" - Park ||--o{ Photo : "has" - Park ||--o{ Review : "receives" - ParkArea ||--o{ Ride : "contains" - Ride ||--o{ Photo : "has" - Ride ||--o{ Review : "receives" - Company ||--o{ Park : "owns" - Company ||--o{ Ride : "manufactures" - Designer ||--o{ Ride : "designs" - User ||--o{ Review : "writes" -``` - -## Data Models - -### Content Models - -#### Park Model -- Core information about theme parks -- Location data through GenericRelation -- Media attachments -- Historical tracking -- Owner relationship - -#### Ride Model -- Technical specifications -- Park and area relationships -- Manufacturer and designer links -- Operation status tracking -- Safety requirements - -#### Review Model -- Generic foreign key for flexibility -- Rating system -- Media attachments -- Moderation status -- Author tracking - -### Supporting Models - -#### Location Model -```python -class Location(models.Model): - content_type = models.ForeignKey(ContentType) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey() - - address = models.CharField(max_length=255) - city = models.CharField(max_length=100) - state = models.CharField(max_length=100) - country = models.CharField(max_length=100) - postal_code = models.CharField(max_length=20) - latitude = models.DecimalField(max_digits=9, decimal_places=6) - longitude = models.DecimalField(max_digits=9, decimal_places=6) -``` - -#### Media Model -```python -class Photo(models.Model): - content_type = models.ForeignKey(ContentType) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey() - - file = models.ImageField(upload_to='photos/') - caption = models.CharField(max_length=255) - taken_at = models.DateTimeField(null=True) - uploaded_at = models.DateTimeField(auto_now_add=True) -``` - -## Storage Strategies - -### Database Storage - -#### PostgreSQL Configuration -```python -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'thrillwiki', - 'CONN_MAX_AGE': 60, - 'OPTIONS': { - 'client_encoding': 'UTF8', - }, - } -} -``` - -#### Indexing Strategy -```sql --- Performance indexes -CREATE INDEX idx_park_slug ON parks_park(slug); -CREATE INDEX idx_ride_slug ON rides_ride(slug); -CREATE INDEX idx_review_content_type ON reviews_review(content_type_id, object_id); -``` - -### File Storage - -#### Media Storage -```python -# Media storage configuration -MEDIA_ROOT = os.path.join(BASE_DIR, 'media') -MEDIA_URL = '/media/' - -# File upload handlers -FILE_UPLOAD_HANDLERS = [ - 'django.core.files.uploadhandler.MemoryFileUploadHandler', - 'django.core.files.uploadhandler.TemporaryFileUploadHandler', -] -``` - -#### Directory Structure -``` -media/ -├── photos/ -│ ├── parks/ -│ ├── rides/ -│ └── reviews/ -├── avatars/ -└── documents/ -``` - -### Caching Strategy - -#### Cache Configuration -```python -CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.redis.RedisCache', - 'LOCATION': 'redis://127.0.0.1:6379/1', - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - } - } -} -``` - -#### Cache Keys -```python -# Cache key patterns -CACHE_KEYS = { - 'park_detail': 'park:{slug}', - 'ride_list': 'park:{park_slug}:rides', - 'review_count': 'content:{type}:{id}:reviews', -} -``` - -## Data Migration - -### Migration Strategy -1. Schema migrations via Django -2. Data migrations for model changes -3. Content migrations for large updates - -### Example Migration -```python -# migrations/0002_add_park_status.py -from django.db import migrations, models - -class Migration(migrations.Migration): - dependencies = [ - ('parks', '0001_initial'), - ] - - operations = [ - migrations.AddField( - model_name='park', - name='status', - field=models.CharField( - max_length=20, - choices=[ - ('OPERATING', 'Operating'), - ('CLOSED', 'Closed'), - ], - default='OPERATING' - ), - ), - ] -``` - -## Data Protection - -### Backup Strategy -1. Daily database backups -2. Media files backup -3. Retention policy management - -### Backup Configuration -```python -# backup settings -BACKUP_ROOT = os.path.join(BASE_DIR, 'backups') -BACKUP_RETENTION_DAYS = 30 -BACKUP_COMPRESSION = True -``` - -## Data Validation - -### Model Validation -```python -class Park(models.Model): - def clean(self): - if self.closing_date and self.opening_date: - if self.closing_date < self.opening_date: - raise ValidationError({ - 'closing_date': 'Closing date cannot be before opening date' - }) -``` - -### Form Validation -```python -class RideForm(forms.ModelForm): - def clean_height_requirement(self): - height = self.cleaned_data['height_requirement'] - if height and height < 0: - raise forms.ValidationError('Height requirement cannot be negative') - return height -``` - -## Data Access Patterns - -### QuerySet Optimization -```python -# Optimized query pattern -Park.objects.select_related('owner')\ - .prefetch_related('rides', 'areas')\ - .filter(status='OPERATING') -``` - -### Caching Pattern -```python -def get_park_detail(slug): - cache_key = f'park:{slug}' - park = cache.get(cache_key) - if not park: - park = Park.objects.get(slug=slug) - cache.set(cache_key, park, timeout=3600) - return park -``` - -## Monitoring and Metrics - -### Database Metrics -- Query performance -- Cache hit rates -- Storage usage -- Connection pool status - -### Collection Configuration -```python -LOGGING = { - 'handlers': { - 'db_log': { - 'level': 'DEBUG', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': 'logs/db.log', - }, - }, -} \ No newline at end of file diff --git a/memory-bank/documentation/Features.md b/memory-bank/documentation/Features.md deleted file mode 100644 index 5a9a7951..00000000 --- a/memory-bank/documentation/Features.md +++ /dev/null @@ -1,253 +0,0 @@ -# Feature Documentation - -## Core Features - -### 1. Park Management - -#### Park Discovery -- Geographic search and filtering -- Park categorization and taxonomy -- Operating hours and seasonal information -- Location-based recommendations - -#### Park Profiles -- Detailed park information -- Historical data and timeline -- Media galleries -- Operating schedule management -- Accessibility information - -#### Area Management -```python -# Key relationships -Park - └── Areas - └── Rides -``` - -### 2. Ride System - -#### Ride Catalog -- Technical specifications -- Thrill ratings and categories -- Operational status tracking -- Maintenance history -- Designer and manufacturer attribution - -#### Ride Features -- Height requirements -- Accessibility options -- Queue management information -- Rider experience details -- Historical modifications - -### 3. Review System - -#### User Reviews -- Rating framework -- Experience descriptions -- Visit date tracking -- Media attachments -- Helpful vote system - -#### Review Workflow -``` -Submission → Moderation → Publication → Feedback -``` - -#### Review Features -- Rich text formatting -- Multi-media support -- Rating categories -- Experience verification -- Response management - -### 4. User Management - -#### User Profiles -- Activity history -- Contribution tracking -- Reputation system -- Privacy controls - -#### Authentication -- Email registration -- Social authentication -- Password management -- Session control - -#### Permissions -- Role-based access -- Content moderation rights -- Company verification -- Expert designation - -### 5. Company Management - -#### Company Profiles -- Official park operator accounts -- Manufacturer profiles -- Designer portfolios -- Verification system - -#### Official Updates -- Park announcements -- Operational updates -- New attraction information -- Special event coverage - -### 6. Media Management - -#### Image Handling -- Multi-format support -- EXIF data processing -- Automatic optimization -- Gallery organization - -#### Storage System -```python -# Media organization -content/ - ├── parks/ - ├── rides/ - ├── reviews/ - └── profiles/ -``` - -### 7. Location Services - -#### Geographic Features -- Park proximity search -- Regional categorization -- Map integration -- Distance calculations - -#### Location Data -- Coordinate system -- Address validation -- Region management -- Geographic clustering - -### 8. Analytics System - -#### Tracking Features -- Page view analytics -- User engagement metrics -- Content popularity -- Search patterns - -#### Trend Analysis -- Popular content -- User behavior -- Seasonal patterns -- Content quality metrics - -## Business Requirements - -### 1. Content Quality -- Mandatory review fields -- Media quality standards -- Information verification -- Source attribution - -### 2. User Trust -- Review authenticity checks -- Company verification process -- Expert contribution validation -- Content moderation workflow - -### 3. Data Completeness -- Required park information -- Ride specification standards -- Historical record requirements -- Media documentation needs - -## Usage Flows - -### 1. Park Discovery Flow -``` -Search/Browse → Park Selection → Detail View → Related Content -``` - -### 2. Review Creation Flow -``` -Experience → Media Upload → Review Draft → Submission → Moderation -``` - -### 3. Company Verification Flow -``` -Registration → Documentation → Verification → Profile Access -``` - -### 4. Content Moderation Flow -``` -Submission Queue → Review → Action → Notification -``` - -## Development Roadmap - -### Current Phase -1. Core Platform - - Park/Ride management - - Review system - - Basic media handling - - User authentication - -2. Quality Features - - Content moderation - - Company verification - - Expert system - - Media optimization - -### Next Phase -1. Community Features - - Enhanced profiles - - Achievement system - - Social interactions - - Content collections - -2. Advanced Media - - Video support - - Virtual tours - - 360° views - - AR capabilities - -3. Analytics Enhancement - - Advanced metrics - - Personalization - - Trend prediction - - Quality scoring - -## Integration Requirements - -### External Systems -- Email service integration -- Social authentication providers -- Geographic data services -- Media processing services - -### Internal Systems -- WebSocket notifications -- Background task processing -- Media optimization pipeline -- Analytics processing system - -## Compliance Requirements - -### Data Protection -- User privacy controls -- Data retention policies -- Export capabilities -- Deletion workflows - -### Accessibility -- WCAG compliance -- Screen reader support -- Keyboard navigation -- Color contrast requirements - -### Content Policies -- Review guidelines -- Media usage rights -- Attribution requirements -- Moderation standards \ No newline at end of file diff --git a/memory-bank/documentation/Issues.md b/memory-bank/documentation/Issues.md deleted file mode 100644 index 9f583807..00000000 --- a/memory-bank/documentation/Issues.md +++ /dev/null @@ -1,306 +0,0 @@ -# Issues and Technical Debt Documentation - -## Known Bugs - -### 1. Data Integrity Issues - -#### Historical Slug Resolution -```python -# Current Implementation -class Park(models.Model): - @classmethod - def get_by_slug(cls, slug: str): - # Issue: Race condition possible between slug check and retrieval - # TODO: Implement proper locking or transaction handling - try: - return cls.objects.get(slug=slug) - except cls.DoesNotExist: - return cls.objects.get(historical_slugs__slug=slug) -``` - -#### Media File Management -```python -# Current Issue -class MediaHandler: - def process_upload(self, file): - # Bug: Temporary files not always cleaned up - # TODO: Implement proper cleanup in finally block - try: - process_file(file) - except Exception: - log_error() -``` - -### 2. Performance Issues - -#### N+1 Query Patterns -```python -# Inefficient Queries in Views -class ParkDetailView(DetailView): - def get_context_data(self): - context = super().get_context_data() - # Issue: N+1 queries for each ride's reviews - context['rides'] = [ - { - 'ride': ride, - 'reviews': ride.reviews.all() # Causes N+1 query - } - for ride in self.object.rides.all() - ] -``` - -#### Cache Invalidation -```python -# Inconsistent Cache Updates -class ReviewManager: - def update_stats(self, obj): - # Bug: Race condition in cache updates - # TODO: Implement atomic cache updates - stats = calculate_stats(obj) - cache.set(f'{obj}_stats', stats) -``` - -## Technical Debt - -### 1. Code Organization - -#### Monolithic Views -```python -# views.py -class ParkView(View): - def post(self, request, *args, **kwargs): - # TODO: Break down into smaller, focused views - # Currently handles too many responsibilities: - # - Park creation - # - Media processing - # - Notification sending - # - Stats updating -``` - -#### Duplicate Business Logic -```python -# Multiple implementations of similar functionality -class ParkValidator: - def validate_status(self): - # TODO: Consolidate with RideValidator.validate_status - if self.status not in VALID_STATUSES: - raise ValidationError() - -class RideValidator: - def validate_status(self): - if self.status not in VALID_STATUSES: - raise ValidationError() -``` - -### 2. Infrastructure - -#### Configuration Management -```python -# settings.py -# TODO: Move to environment variables -DATABASE_PASSWORD = 'hardcoded_password' -API_KEY = 'hardcoded_key' - -# TODO: Implement proper configuration management -FEATURE_FLAGS = { - 'new_review_system': True, - 'beta_features': False -} -``` - -#### Deployment Process -```bash -# Manual deployment steps -# TODO: Automate deployment process -ssh server -git pull -pip install -r requirements.txt -python manage.py migrate -supervisorctl restart app -``` - -### 3. Testing - -#### Test Coverage Gaps -```python -# Missing test cases for error conditions -class ParkTests(TestCase): - def test_create_park(self): - # Only tests happy path - park = Park.objects.create(name='Test Park') - self.assertEqual(park.name, 'Test Park') - - # TODO: Add tests for: - # - Invalid input handling - # - Concurrent modifications - # - Edge cases -``` - -#### Integration Test Debt -```python -# Brittle integration tests -class APITests(TestCase): - # TODO: Replace with proper test doubles - def setUp(self): - # Direct database dependencies - self.park = Park.objects.create() - # External service calls - self.geocoder = RealGeocoder() -``` - -## Enhancement Opportunities - -### 1. Feature Enhancements - -#### Advanced Search -```python -# Current basic search implementation -class ParkSearch: - def search(self, query): - # TODO: Implement advanced search features: - # - Full-text search - # - Faceted search - # - Geographic search - return Park.objects.filter(name__icontains=query) -``` - -#### Review System -```python -# Basic review functionality -class Review(models.Model): - # TODO: Enhance with: - # - Rich text support - # - Media attachments - # - Review responses - # - Helpful votes - rating = models.IntegerField() - comment = models.TextField() -``` - -### 2. Technical Improvements - -#### API Versioning -```python -# Current API structure -# TODO: Implement proper API versioning -urlpatterns = [ - path('api/parks/', ParkViewSet.as_view()), - # Need to support: - # - Multiple versions - # - Deprecation handling - # - Documentation -] -``` - -#### Caching Strategy -```python -# Basic caching -# TODO: Implement: -# - Multi-layer caching -# - Cache warming -# - Intelligent invalidation -@cache_page(60 * 15) -def park_detail(request, slug): - return render(request, 'park_detail.html') -``` - -### 3. Performance Optimizations - -#### Database Optimization -```python -# Current database usage -# TODO: Implement: -# - Connection pooling -# - Read replicas -# - Query optimization -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'thrillwiki', - } -} -``` - -#### Asset Delivery -```python -# Static file handling -# TODO: Implement: -# - CDN integration -# - Image optimization pipeline -# - Responsive images -STATIC_URL = '/static/' -MEDIA_URL = '/media/' -``` - -## Prioritized Improvements - -### High Priority -1. Security Fixes - - Fix authentication vulnerabilities - - Implement proper input validation - - Secure file uploads - -2. Critical Performance Issues - - Resolve N+1 queries - - Implement connection pooling - - Optimize cache usage - -3. Data Integrity - - Fix race conditions - - Implement proper transactions - - Add data validation - -### Medium Priority -1. Technical Debt - - Refactor monolithic views - - Consolidate duplicate code - - Improve test coverage - -2. Developer Experience - - Automate deployment - - Improve documentation - - Add development tools - -3. Feature Enhancements - - Implement advanced search - - Enhance review system - - Add API versioning - -### Low Priority -1. Nice-to-have Features - - Rich text support - - Enhanced media handling - - Social features - -2. Infrastructure Improvements - - CDN integration - - Monitoring enhancements - - Analytics improvements - -## Implementation Plan - -### Phase 1: Critical Fixes -```python -# Timeline: Q1 2024 -# Focus: -# - Security vulnerabilities -# - Performance bottlenecks -# - Data integrity issues -``` - -### Phase 2: Technical Debt -```python -# Timeline: Q2 2024 -# Focus: -# - Code refactoring -# - Test coverage -# - Documentation -``` - -### Phase 3: Enhancements -```python -# Timeline: Q3-Q4 2024 -# Focus: -# - Feature improvements -# - Infrastructure upgrades -# - User experience \ No newline at end of file diff --git a/memory-bank/documentation/Performance.md b/memory-bank/documentation/Performance.md deleted file mode 100644 index b64c8d11..00000000 --- a/memory-bank/documentation/Performance.md +++ /dev/null @@ -1,388 +0,0 @@ -# Performance Documentation - -## Performance Architecture - -### Caching Strategy - -#### Cache Layers -```python -CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.redis.RedisCache', - 'LOCATION': 'redis://127.0.0.1:6379/1', - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - 'PARSER_CLASS': 'redis.connection.HiredisParser', - 'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool', - 'CONNECTION_POOL_CLASS_KWARGS': { - 'max_connections': 50, - 'timeout': 20, - } - } - } -} -``` - -#### Cache Patterns -```python -# View caching -@method_decorator(cache_page(60 * 15)) -def park_list(request): - parks = Park.objects.all() - return render(request, 'parks/list.html', {'parks': parks}) - -# Template fragment caching -{% load cache %} -{% cache 300 park_detail park.id %} - ... expensive template logic ... -{% endcache %} - -# Low-level cache API -def get_park_stats(park_id): - cache_key = f'park_stats:{park_id}' - stats = cache.get(cache_key) - if stats is None: - stats = calculate_park_stats(park_id) - cache.set(cache_key, stats, timeout=3600) - return stats -``` - -### Database Optimization - -#### Query Optimization -```python -# Efficient querying patterns -class ParkQuerySet(models.QuerySet): - def with_stats(self): - return self.annotate( - ride_count=Count('rides'), - avg_rating=Avg('reviews__rating') - ).select_related('owner')\ - .prefetch_related('rides', 'areas') - -# Indexes -class Park(models.Model): - class Meta: - indexes = [ - models.Index(fields=['slug']), - models.Index(fields=['status', 'created_at']), - models.Index(fields=['location_id', 'status']) - ] -``` - -#### Database Configuration -```python -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'thrillwiki', - 'CONN_MAX_AGE': 60, - 'OPTIONS': { - 'statement_timeout': 3000, - 'idle_in_transaction_timeout': 3000, - }, - 'ATOMIC_REQUESTS': False, - 'CONN_HEALTH_CHECKS': True, - } -} -``` - -### Asset Optimization - -#### Static File Handling -```python -# WhiteNoise configuration -STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' - -WHITENOISE_OPTIONS = { - 'allow_all_origins': False, - 'max_age': 31536000, # 1 year - 'compression_enabled': True, -} -``` - -#### Media Optimization -```python -from PIL import Image - -def optimize_image(image_path): - with Image.open(image_path) as img: - # Convert to WebP - webp_path = f"{os.path.splitext(image_path)[0]}.webp" - img.save(webp_path, 'WebP', quality=85, method=6) - - # Create thumbnails - sizes = [(800, 600), (400, 300)] - for size in sizes: - thumb = img.copy() - thumb.thumbnail(size) - thumb_path = f"{os.path.splitext(image_path)[0]}_{size[0]}x{size[1]}.webp" - thumb.save(thumb_path, 'WebP', quality=85, method=6) -``` - -## Performance Monitoring - -### Application Monitoring - -#### APM Configuration -```python -MIDDLEWARE = [ - 'django_prometheus.middleware.PrometheusBeforeMiddleware', - # ... other middleware ... - 'django_prometheus.middleware.PrometheusAfterMiddleware', -] - -PROMETHEUS_METRICS = { - 'scrape_interval': 15, - 'namespace': 'thrillwiki', - 'metrics_path': '/metrics', -} -``` - -#### Custom Metrics -```python -from prometheus_client import Counter, Histogram - -# Request metrics -http_requests_total = Counter( - 'http_requests_total', - 'Total HTTP requests', - ['method', 'endpoint', 'status'] -) - -# Response time metrics -response_time = Histogram( - 'response_time_seconds', - 'Response time in seconds', - ['endpoint'] -) -``` - -### Performance Logging - -#### Logging Configuration -```python -LOGGING = { - 'handlers': { - 'performance': { - 'level': 'INFO', - 'class': 'logging.handlers.TimedRotatingFileHandler', - 'filename': 'logs/performance.log', - 'when': 'midnight', - 'interval': 1, - 'backupCount': 30, - } - }, - 'loggers': { - 'performance': { - 'handlers': ['performance'], - 'level': 'INFO', - 'propagate': False, - } - } -} -``` - -#### Performance Logging Middleware -```python -class PerformanceMiddleware: - def __init__(self, get_response): - self.get_response = get_response - self.logger = logging.getLogger('performance') - - def __call__(self, request): - start_time = time.time() - response = self.get_response(request) - duration = time.time() - start_time - - self.logger.info({ - 'path': request.path, - 'method': request.method, - 'duration': duration, - 'status': response.status_code - }) - - return response -``` - -## Scaling Strategy - -### Application Scaling - -#### Asynchronous Tasks -```python -# Celery configuration -CELERY_BROKER_URL = 'redis://localhost:6379/2' -CELERY_RESULT_BACKEND = 'redis://localhost:6379/3' - -CELERY_TASK_ROUTES = { - 'media.tasks.process_image': {'queue': 'media'}, - 'analytics.tasks.update_stats': {'queue': 'analytics'}, -} - -# Task definition -@shared_task(rate_limit='100/m') -def process_image(image_id): - image = Image.objects.get(id=image_id) - optimize_image(image.file.path) - create_thumbnails(image) -``` - -#### Load Balancing -```nginx -# Nginx configuration -upstream thrillwiki { - least_conn; # Least connections algorithm - server backend1.thrillwiki.com:8000; - server backend2.thrillwiki.com:8000; - server backend3.thrillwiki.com:8000; - - keepalive 32; -} - -server { - listen 80; - server_name thrillwiki.com; - - location / { - proxy_pass http://thrillwiki; - proxy_http_version 1.1; - proxy_set_header Connection ""; - } -} -``` - -### Database Scaling - -#### Read Replicas -```python -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'thrillwiki', - # Primary DB configuration - }, - 'replica1': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': 'thrillwiki', - # Read replica configuration - } -} - -DATABASE_ROUTERS = ['core.db.PrimaryReplicaRouter'] -``` - -#### Connection Pooling -```python -# Django DB configuration with PgBouncer -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'OPTIONS': { - 'application_name': 'thrillwiki', - 'max_prepared_transactions': 0, - }, - 'POOL_OPTIONS': { - 'POOL_SIZE': 20, - 'MAX_OVERFLOW': 10, - 'RECYCLE': 300, - } - } -} -``` - -### Caching Strategy - -#### Multi-layer Caching -```python -# Cache configuration with fallback -CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://primary:6379/1', - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - 'MASTER_CACHE': True, - } - }, - 'replica': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://replica:6379/1', - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - } - } -} -``` - -#### Cache Invalidation -```python -class CacheInvalidationMixin: - def save(self, *args, **kwargs): - # Invalidate related caches - cache_keys = self.get_cache_keys() - cache.delete_many(cache_keys) - super().save(*args, **kwargs) - - def get_cache_keys(self): - # Return list of related cache keys - return [ - f'park:{self.pk}', - f'park_stats:{self.pk}', - 'park_list' - ] -``` - -## Performance Bottlenecks - -### Known Issues - -1. N+1 Query Patterns -```python -# Bad pattern -for park in Park.objects.all(): - print(park.rides.count()) # Causes N+1 queries - -# Solution -parks = Park.objects.annotate( - ride_count=Count('rides') -).all() -``` - -2. Memory Leaks -```python -# Memory leak in long-running tasks -class LongRunningTask: - def __init__(self): - self.cache = {} - - def process(self, items): - # Clear cache periodically - if len(self.cache) > 1000: - self.cache.clear() -``` - -### Performance Tips - -1. Query Optimization -```python -# Use exists() for checking existence -if Park.objects.filter(slug=slug).exists(): - # Do something - -# Use values() for simple data -parks = Park.objects.values('id', 'name') -``` - -2. Bulk Operations -```python -# Use bulk create -Park.objects.bulk_create([ - Park(name='Park 1'), - Park(name='Park 2') -]) - -# Use bulk update -Park.objects.filter(status='CLOSED').update( - status='OPERATING' -) \ No newline at end of file diff --git a/memory-bank/documentation/Security.md b/memory-bank/documentation/Security.md deleted file mode 100644 index 48d07358..00000000 --- a/memory-bank/documentation/Security.md +++ /dev/null @@ -1,339 +0,0 @@ -# Security Documentation - -## Authentication System - -### Authentication Stack -```python -# Settings configuration -AUTHENTICATION_BACKENDS = [ - 'django.contrib.auth.backends.ModelBackend', - 'allauth.account.auth_backends.AuthenticationBackend', -] - -INSTALLED_APPS = [ - 'django.contrib.auth', - 'django.contrib.sessions', - 'allauth', - 'allauth.account', - 'allauth.socialaccount', - 'oauth2_provider', -] -``` - -### Authentication Flow -```mermaid -sequenceDiagram - User->>+Server: Login Request - Server->>+Auth Service: Validate Credentials - Auth Service->>+Database: Check User - Database-->>-Auth Service: User Data - Auth Service-->>-Server: Auth Token - Server-->>-User: Session Cookie -``` - -## Authorization Framework - -### Permission System - -#### Model Permissions -```python -class Park(models.Model): - class Meta: - permissions = [ - ("can_publish_park", "Can publish park"), - ("can_moderate_park", "Can moderate park"), - ("can_verify_park", "Can verify park information"), - ] -``` - -#### View Permissions -```python -class ModeratedCreateView(LoginRequiredMixin, PermissionRequiredMixin): - permission_required = 'parks.can_publish_park' - raise_exception = True -``` - -### Role-Based Access Control - -#### User Groups -1. Administrators - - Full system access - - Configuration management - - User management - -2. Moderators - - Content moderation - - User management - - Report handling - -3. Company Representatives - - Company profile management - - Official updates - - Response management - -4. Regular Users - - Content creation - - Review submission - - Media uploads - -#### Permission Matrix -```python -ROLE_PERMISSIONS = { - 'administrator': [ - 'can_manage_users', - 'can_configure_system', - 'can_moderate_content', - ], - 'moderator': [ - 'can_moderate_content', - 'can_manage_reports', - 'can_verify_information', - ], - 'company_rep': [ - 'can_manage_company', - 'can_post_updates', - 'can_respond_reviews', - ], - 'user': [ - 'can_create_content', - 'can_submit_reviews', - 'can_upload_media', - ], -} -``` - -## Security Controls - -### Request Security - -#### CSRF Protection -```python -MIDDLEWARE = [ - 'django.middleware.csrf.CsrfViewMiddleware', -] - -# Template configuration -{% csrf_token %} - -# AJAX request handling -headers: { - 'X-CSRFToken': getCookie('csrftoken') -} -``` - -#### XSS Prevention -```python -# Template autoescape -{% autoescape on %} - {{ user_content }} -{% endautoescape %} - -# Content Security Policy -CSP_DEFAULT_SRC = ("'self'",) -CSP_SCRIPT_SRC = ("'self'",) -CSP_STYLE_SRC = ("'self'", "'unsafe-inline'") -CSP_IMG_SRC = ("'self'", "data:", "https:") -``` - -### Data Protection - -#### Password Security -```python -# Password validation -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - 'OPTIONS': { - 'min_length': 12, - } - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] -``` - -#### Data Encryption -```python -# Database encryption -ENCRYPTED_FIELDS = { - 'fields': { - 'users.User.ssn': 'django_cryptography.fields.encrypt', - 'payment.Card.number': 'django_cryptography.fields.encrypt', - }, -} - -# File encryption -ENCRYPTED_FILE_STORAGE = 'django_cryptography.storage.EncryptedFileSystemStorage' -``` - -### Session Security - -#### Session Configuration -```python -# Session settings -SESSION_COOKIE_SECURE = True -SESSION_COOKIE_HTTPONLY = True -SESSION_COOKIE_SAMESITE = 'Lax' -SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db' -SESSION_EXPIRE_AT_BROWSER_CLOSE = True -``` - -#### Session Management -```python -# Session cleanup -CELERYBEAT_SCHEDULE = { - 'cleanup-expired-sessions': { - 'task': 'core.tasks.cleanup_expired_sessions', - 'schedule': crontab(hour=4, minute=0) - }, -} -``` - -## API Security - -### Authentication -```python -REST_FRAMEWORK = { - 'DEFAULT_AUTHENTICATION_CLASSES': [ - 'rest_framework_jwt.authentication.JSONWebTokenAuthentication', - 'rest_framework.authentication.SessionAuthentication', - ], - 'DEFAULT_PERMISSION_CLASSES': [ - 'rest_framework.permissions.IsAuthenticated', - ], -} -``` - -### Rate Limiting -```python -# Rate limiting configuration -REST_FRAMEWORK = { - 'DEFAULT_THROTTLE_CLASSES': [ - 'rest_framework.throttling.AnonRateThrottle', - 'rest_framework.throttling.UserRateThrottle' - ], - 'DEFAULT_THROTTLE_RATES': { - 'anon': '100/day', - 'user': '1000/day' - } -} -``` - -## Security Headers - -### HTTP Security Headers -```python -MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', -] - -SECURE_HSTS_SECONDS = 31536000 -SECURE_HSTS_INCLUDE_SUBDOMAINS = True -SECURE_HSTS_PRELOAD = True -SECURE_SSL_REDIRECT = True -SECURE_REFERRER_POLICY = 'same-origin' -SECURE_BROWSER_XSS_FILTER = True -``` - -## File Upload Security - -### Upload Configuration -```python -# File upload settings -FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # 2.5 MB -FILE_UPLOAD_PERMISSIONS = 0o644 -ALLOWED_EXTENSIONS = ['jpg', 'jpeg', 'png', 'gif'] - -def validate_file_extension(value): - ext = os.path.splitext(value.name)[1] - if not ext.lower() in ALLOWED_EXTENSIONS: - raise ValidationError('Unsupported file extension.') -``` - -### Media Security -```python -# Serve media files securely -@login_required -def serve_protected_file(request, path): - if not request.user.has_perm('can_access_file'): - raise PermissionDenied - response = serve(request, path, document_root=settings.MEDIA_ROOT) - response['Content-Disposition'] = 'attachment' - return response -``` - -## Security Monitoring - -### Audit Logging -```python -# Audit log configuration -AUDIT_LOG_HANDLERS = { - 'security': { - 'level': 'INFO', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': 'logs/security.log', - 'maxBytes': 1024*1024*5, # 5 MB - 'backupCount': 5, - }, -} - -# Audit log usage -def log_security_event(event_type, user, details): - logger.info(f'Security event: {event_type}', extra={ - 'user_id': user.id, - 'ip_address': get_client_ip(request), - 'details': details - }) -``` - -### Security Alerts -```python -# Alert configuration -SECURITY_ALERTS = { - 'login_attempts': { - 'threshold': 5, - 'window': 300, # 5 minutes - 'action': 'account_lock' - }, - 'api_errors': { - 'threshold': 100, - 'window': 3600, # 1 hour - 'action': 'notify_admin' - } -} -``` - -## Incident Response - -### Security Incident Workflow -1. Detection -2. Analysis -3. Containment -4. Eradication -5. Recovery -6. Lessons Learned - -### Response Actions -```python -class SecurityIncident: - def contain_threat(self): - # Lock affected accounts - # Block suspicious IPs - # Disable compromised tokens - - def investigate(self): - # Collect logs - # Analyze patterns - # Document findings - - def recover(self): - # Restore systems - # Reset credentials - # Update security controls \ No newline at end of file diff --git a/memory-bank/documentation/Testing.md b/memory-bank/documentation/Testing.md deleted file mode 100644 index d94d9613..00000000 --- a/memory-bank/documentation/Testing.md +++ /dev/null @@ -1,350 +0,0 @@ -# Testing Documentation - -## Testing Architecture - -### Test Organization -``` -tests/ -├── unit/ -│ ├── test_models.py -│ ├── test_views.py -│ └── test_forms.py -├── integration/ -│ ├── test_workflows.py -│ └── test_apis.py -└── e2e/ - └── test_user_journeys.py -``` - -### Test Configuration -```python -# pytest configuration -pytest_plugins = [ - "tests.fixtures.parks", - "tests.fixtures.users", - "tests.fixtures.media" -] - -# Test settings -TEST_RUNNER = 'django.test.runner.DiscoverRunner' -TEST_MODE = True -``` - -## Test Types - -### Unit Tests - -#### Model Tests -```python -class ParkModelTest(TestCase): - def setUp(self): - self.park = Park.objects.create( - name="Test Park", - status="OPERATING" - ) - - def test_slug_generation(self): - self.assertEqual(self.park.slug, "test-park") - - def test_status_validation(self): - with self.assertRaises(ValidationError): - Park.objects.create( - name="Invalid Park", - status="INVALID" - ) -``` - -#### View Tests -```python -class ParkViewTest(TestCase): - def setUp(self): - self.client = Client() - self.user = User.objects.create_user( - username="testuser", - [PASSWORD-REMOVED]" - ) - - def test_park_list_view(self): - response = self.client.get(reverse('parks:list')) - self.assertEqual(response.status_code, 200) - self.assertTemplateUsed(response, 'parks/park_list.html') -``` - -#### Form Tests -```python -class RideFormTest(TestCase): - def test_valid_form(self): - form = RideForm({ - 'name': 'Test Ride', - 'status': 'OPERATING', - 'height_requirement': 48 - }) - self.assertTrue(form.is_valid()) -``` - -### Integration Tests - -#### Workflow Tests -```python -class ReviewWorkflowTest(TestCase): - def test_review_moderation_flow(self): - # Create review - review = self.create_review() - - # Submit for moderation - response = self.client.post( - reverse('reviews:submit_moderation', - kwargs={'pk': review.pk}) - ) - self.assertEqual(review.refresh_from_db().status, 'PENDING') - - # Approve review - moderator = self.create_moderator() - self.client.force_login(moderator) - response = self.client.post( - reverse('reviews:approve', - kwargs={'pk': review.pk}) - ) - self.assertEqual(review.refresh_from_db().status, 'APPROVED') -``` - -#### API Tests -```python -class ParkAPITest(APITestCase): - def test_park_list_api(self): - url = reverse('api:park-list') - response = self.client.get(url) - self.assertEqual(response.status_code, 200) - - def test_park_create_api(self): - url = reverse('api:park-create') - data = { - 'name': 'New Park', - 'status': 'OPERATING' - } - response = self.client.post(url, data, format='json') - self.assertEqual(response.status_code, 201) -``` - -### End-to-End Tests - -#### User Journey Tests -```python -class UserJourneyTest(LiveServerTestCase): - def test_park_review_journey(self): - # User logs in - self.login_user() - - # Navigate to park - self.browser.get(f'{self.live_server_url}/parks/test-park/') - - # Create review - self.browser.find_element_by_id('write-review').click() - self.browser.find_element_by_id('review-text').send_keys('Great park!') - self.browser.find_element_by_id('submit').click() - - # Verify review appears - review_element = self.browser.find_element_by_class_name('review-item') - self.assertIn('Great park!', review_element.text) -``` - -## CI/CD Pipeline - -### GitHub Actions Configuration -```yaml -name: ThrillWiki CI - -on: - push: - branches: [ main, develop ] - pull_request: - branches: [ main, develop ] - -jobs: - test: - runs-on: ubuntu-latest - - services: - postgres: - image: postgres:13 - env: - POSTGRES_PASSWORD: postgres - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.11' - - - name: Install Dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - - name: Run Tests - env: - DATABASE_URL: postgres://postgres:postgres@localhost:5432/thrillwiki_test - run: | - pytest --cov=./ --cov-report=xml - - - name: Upload Coverage - uses: codecov/codecov-action@v1 -``` - -## Quality Metrics - -### Code Coverage -```python -# Coverage configuration -[coverage:run] -source = . -omit = - */migrations/* - */tests/* - manage.py - -[coverage:report] -exclude_lines = - pragma: no cover - def __str__ - raise NotImplementedError -``` - -### Code Quality Tools -```python -# flake8 configuration -[flake8] -max-line-length = 88 -extend-ignore = E203 -exclude = .git,__pycache__,build,dist - -# black configuration -[tool.black] -line-length = 88 -target-version = ['py311'] -include = '\.pyi?$' -``` - -## Test Data Management - -### Fixtures -```python -# fixtures/parks.json -[ - { - "model": "parks.park", - "pk": 1, - "fields": { - "name": "Test Park", - "slug": "test-park", - "status": "OPERATING" - } - } -] -``` - -### Factory Classes -```python -from factory.django import DjangoModelFactory - -class ParkFactory(DjangoModelFactory): - class Meta: - model = Park - - name = factory.Sequence(lambda n: f'Test Park {n}') - status = 'OPERATING' -``` - -## Performance Testing - -### Load Testing -```python -from locust import HttpUser, task, between - -class ParkUser(HttpUser): - wait_time = between(1, 3) - - @task - def view_park_list(self): - self.client.get("/parks/") - - @task - def view_park_detail(self): - self.client.get("/parks/test-park/") -``` - -### Benchmark Tests -```python -class ParkBenchmarkTest(TestCase): - def test_park_list_performance(self): - start_time = time.time() - Park.objects.all().select_related('owner') - end_time = time.time() - - self.assertLess(end_time - start_time, 0.1) -``` - -## Test Automation - -### Test Runner Configuration -```python -# Custom test runner -class CustomTestRunner(DiscoverRunner): - def setup_databases(self, **kwargs): - # Custom database setup - return super().setup_databases(**kwargs) - - def teardown_databases(self, old_config, **kwargs): - # Custom cleanup - return super().teardown_databases(old_config, **kwargs) -``` - -### Automated Test Execution -```bash -# Test execution script -#!/bin/bash - -# Run unit tests -pytest tests/unit/ - -# Run integration tests -pytest tests/integration/ - -# Run e2e tests -pytest tests/e2e/ - -# Generate coverage report -coverage run -m pytest -coverage report -coverage html -``` - -## Monitoring and Reporting - -### Test Reports -```python -# pytest-html configuration -pytest_html_report_title = "ThrillWiki Test Report" - -def pytest_html_report_data(report): - report.description = "Test Results for ThrillWiki" -``` - -### Coverage Reports -```python -# Coverage reporting configuration -COVERAGE_REPORT_OPTIONS = { - 'report_type': 'html', - 'directory': 'coverage_html', - 'title': 'ThrillWiki Coverage Report', - 'show_contexts': True -} \ No newline at end of file diff --git a/memory-bank/documentation/cleanup_report.md b/memory-bank/documentation/cleanup_report.md deleted file mode 100644 index 0552c18a..00000000 --- a/memory-bank/documentation/cleanup_report.md +++ /dev/null @@ -1,31 +0,0 @@ -# Parks Consolidation Cleanup Report - -This report details the cleanup process following the consolidation of the `operators` and `property_owners` apps into the `parks` app. - -## 1. Removed App Directories - -The following app directories were removed: - -- `operators/` -- `property_owners/` - -## 2. Removed Apps from INSTALLED_APPS - -The `operators` and `property_owners` apps were removed from the `INSTALLED_APPS` setting in `thrillwiki/settings.py`. - -## 3. Cleaned Up Migrations - -All migration files were deleted from all apps and recreated to ensure a clean slate. This was done to resolve dependencies on the old `operators` and `property_owners` apps. - -## 4. Reset Database - -The database was reset to ensure all old data and schemas were removed. The following commands were run: - -```bash -uv run manage.py migrate --fake parks zero -uv run manage.py migrate -``` - -## 5. Verification - -The codebase was searched for any remaining references to `operators` and `property_owners`. All remaining references in templates and documentation were removed. \ No newline at end of file diff --git a/memory-bank/documentation/complete-django-project-analysis-2025.md b/memory-bank/documentation/complete-django-project-analysis-2025.md deleted file mode 100644 index d9905de5..00000000 --- a/memory-bank/documentation/complete-django-project-analysis-2025.md +++ /dev/null @@ -1,405 +0,0 @@ -# ThrillWiki Complete Django Project Analysis - 2025 - -## Executive Summary - -This comprehensive analysis examines every aspect of the ThrillWiki Django project against industry best practices and the HackSoft Django Styleguide. The project demonstrates **exceptional technical sophistication** with outstanding architecture patterns, comprehensive testing infrastructure, and professional development practices. - -**Overall Project Assessment: ⭐⭐⭐⭐⭐ (9.4/10) - OUTSTANDING** - ---- - -## 🏆 Project Highlights - -### **Exceptional Technical Architecture** -- **Advanced Service Layer**: Sophisticated orchestrating services with proper separation of concerns -- **Professional Testing**: Comprehensive factory patterns with 95%+ coverage -- **Modern Frontend**: HTMX + Alpine.js + Tailwind CSS v4 integration -- **Enterprise Features**: Full audit trails, geographic capabilities, advanced caching - -### **Django Best Practices Excellence** -- **Perfect Model Architecture**: TrackedModel base with pghistory integration -- **Outstanding Service/Selector Patterns**: Textbook implementation exceeding styleguide standards -- **Professional API Design**: DRF with proper input/output serializer separation -- **Comprehensive Security**: Authentication, permissions, and protection mechanisms - ---- - -## 📊 Detailed Analysis by Category - -### 1. **Model Architecture & Data Design** ⭐⭐⭐⭐⭐ (10/10) - -**Perfect Implementation:** - -```python -# Exemplary base model pattern -@pghistory.track() -class TrackedModel(models.Model): - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - abstract = True -``` - -**Strengths:** -- ✅ **Perfect**: All models inherit from TrackedModel -- ✅ **Advanced**: Full audit trails with pghistory -- ✅ **Sophisticated**: SluggedModel with automated history -- ✅ **Professional**: Generic relations for flexible associations -- ✅ **Enterprise**: Complex constraints and business rules - -**Model Quality Examples:** -- **Parks Model**: 15+ properly validated fields with status tracking -- **Location Model**: PostGIS integration with spatial indexing -- **Media Model**: Generic file handling with automated path generation -- **User Model**: Extended authentication with profile relationships - -### 2. **Service Layer Architecture** ⭐⭐⭐⭐⭐ (9.8/10) - -**Outstanding Implementation:** - -```python -class UnifiedMapService: - def get_map_data( - self, - *, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL, - cluster: bool = True, - use_cache: bool = True - ) -> MapResponse: -``` - -**Service Catalog:** -- **UnifiedMapService**: Main orchestrating service for geographic data -- **ClusteringService**: Specialized clustering algorithms -- **ParkService**: Domain-specific park operations -- **ModerationService**: Content moderation workflows -- **EmailService**: Multi-site email configuration - -**Excellence Indicators:** -- ✅ **Perfect**: Keyword-only arguments throughout -- ✅ **Advanced**: Type annotations on all methods -- ✅ **Professional**: Transaction management patterns -- ✅ **Sophisticated**: Caching integration and optimization - -### 3. **Selector Pattern Implementation** ⭐⭐⭐⭐⭐ (9.5/10) - -**Textbook Implementation:** - -```python -def park_list_with_stats(*, filters: Optional[Dict[str, Any]] = None) -> QuerySet[Park]: - queryset = Park.objects.select_related( - 'operator', 'property_owner' - ).prefetch_related( - 'location' - ).annotate( - ride_count_calculated=Count('rides', distinct=True), - average_rating_calculated=Avg('reviews__rating') - ) - # ... filtering logic - return queryset.order_by('name') -``` - -**Selector Coverage:** -- ✅ **Complete**: All apps implement proper selectors -- ✅ **Optimized**: Strategic use of select_related/prefetch_related -- ✅ **Advanced**: Spatial queries with PostGIS optimization -- ✅ **Performance**: Intelligent caching and query optimization - -### 4. **API Design & Serialization** ⭐⭐⭐⭐☆ (8.5/10) - -**Strong DRF Implementation:** - -```python -class ParkApi(CreateApiMixin, UpdateApiMixin, ListApiMixin, GenericViewSet): - permission_classes = [IsAuthenticatedOrReadOnly] - - InputSerializer = ParkCreateInputSerializer - OutputSerializer = ParkDetailOutputSerializer - - def perform_create(self, **validated_data): - return ParkService.create_park( - created_by=self.request.user, - **validated_data - ) -``` - -**API Strengths:** -- ✅ **Professional**: Proper mixin architecture -- ✅ **Standardized**: Input/Output serializer separation -- ✅ **Integrated**: Service layer delegation -- ✅ **Secure**: Authentication and permission handling - -**Enhancement Opportunity:** -- Move to nested serializers within API classes per styleguide preference - -### 5. **Testing Infrastructure** ⭐⭐⭐⭐⭐ (9.8/10) - -**Exceptional Factory Implementation:** - -```python -class ParkFactory(DjangoModelFactory): - class Meta: - model = 'parks.Park' - django_get_or_create = ('slug',) - - name = factory.Sequence(lambda n: f"Test Park {n}") - operator = factory.SubFactory(OperatorCompanyFactory) - - @factory.post_generation - def create_location(obj, create, extracted, **kwargs): - if create: - LocationFactory(content_object=obj, name=obj.name) -``` - -**Testing Excellence:** -- ✅ **Comprehensive**: 15+ specialized factories -- ✅ **Advanced**: Complex relationship handling -- ✅ **Professional**: Trait mixins and scenarios -- ✅ **Complete**: E2E tests with Playwright -- ✅ **Sophisticated**: API testing utilities - -**Coverage Metrics:** -- Model Coverage: 95%+ -- Service Coverage: 90%+ -- API Coverage: 85%+ -- Overall: 88%+ - -### 6. **Frontend Architecture** ⭐⭐⭐⭐⭐ (9.2/10) - -**Modern Stack Integration:** - -```javascript -// Theme handling with system preference detection -document.addEventListener('DOMContentLoaded', () => { - const themeToggle = document.getElementById('theme-toggle'); - const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)'); - - mediaQuery.addEventListener('change', (e) => { - if (!localStorage.getItem('theme')) { - const isDark = e.matches; - html.classList.toggle('dark', isDark); - } - }); -}); -``` - -**Frontend Strengths:** -- ✅ **Modern**: HTMX + Alpine.js for reactive interfaces -- ✅ **Professional**: Tailwind CSS v4 with custom design system -- ✅ **Accessible**: Dark mode with system preference detection -- ✅ **Performance**: Progressive enhancement patterns -- ✅ **Responsive**: Adaptive grid systems and mobile optimization - -**Template Organization:** -- ✅ **Hierarchical**: Proper base template inheritance -- ✅ **Modular**: Component-based template structure -- ✅ **Reusable**: Extensive partial template library -- ✅ **Optimized**: HTMX partial updates for dynamic content - -### 7. **Security Implementation** ⭐⭐⭐⭐⭐ (9.0/10) - -**Comprehensive Security Architecture:** - -```python -# Custom exception handler with standardized responses -def custom_exception_handler(exc: Exception, context: Dict[str, Any]) -> Optional[Response]: - response = exception_handler(exc, context) - - if response is not None: - custom_response_data = { - 'status': 'error', - 'error': { - 'code': _get_error_code(exc), - 'message': _get_error_message(exc, response.data), - 'details': _get_error_details(exc, response.data), - } - } - log_exception(logger, exc, context={'response_status': response.status_code}) -``` - -**Security Features:** -- ✅ **Authentication**: Multi-provider OAuth with django-allauth -- ✅ **Authorization**: Role-based access with permission system -- ✅ **Protection**: CSRF, XSS, and injection prevention -- ✅ **Monitoring**: Comprehensive audit trails and logging -- ✅ **Validation**: Input sanitization and file upload security - -### 8. **Database Design & Performance** ⭐⭐⭐⭐⭐ (9.5/10) - -**Advanced Database Architecture:** - -```python -# Spatial indexing for geographic queries -class Location(TrackedModel): - point = gis_models.PointField(srid=4326, null=True, blank=True) - - class Meta: - indexes = [ - models.Index(fields=['content_type', 'object_id']), - GinIndex(fields=['point']), # Spatial indexing - models.Index(fields=['city', 'state']), - ] -``` - -**Database Excellence:** -- ✅ **PostGIS**: Advanced geographic capabilities -- ✅ **Indexing**: Strategic performance optimization -- ✅ **History**: Complete audit trails with pghistory -- ✅ **Constraints**: Business rule enforcement -- ✅ **Optimization**: Query performance monitoring - -### 9. **Development Workflow** ⭐⭐⭐⭐⭐ (9.0/10) - -**Professional Development Environment:** - -```bash -# Standardized development commands -uv run manage.py tailwind runserver -uv add # Package management -uv run manage.py makemigrations # Always use UV -``` - -**Workflow Strengths:** -- ✅ **Modern**: UV for fast package management -- ✅ **Automated**: Tailwind CSS compilation integration -- ✅ **Standardized**: Consistent development commands -- ✅ **Comprehensive**: Management commands for all operations -- ✅ **Professional**: CI/CD integration and deployment scripts - -### 10. **Project Organization** ⭐⭐⭐⭐⭐ (9.5/10) - -**Exemplary Structure:** - -``` -thrillwiki/ -├── accounts/ # User management domain -├── parks/ # Theme park domain -├── rides/ # Ride/attraction domain -├── location/ # Geographic services -├── moderation/ # Content moderation -├── media/ # File handling -├── core/ # Cross-cutting concerns -└── config/ # Settings organization -``` - -**Organization Excellence:** -- ✅ **Domain-Driven**: Clear bounded contexts -- ✅ **Modular**: Loosely coupled app architecture -- ✅ **Scalable**: Easy extension and maintenance -- ✅ **Professional**: Comprehensive documentation -- ✅ **Maintainable**: Clear separation of concerns - ---- - -## 🎯 Advanced Features & Innovations - -### **1. Geographic Intelligence** -- **PostGIS Integration**: Full spatial database capabilities -- **Unified Map Service**: Sophisticated clustering and viewport optimization -- **Location Abstraction**: Generic location handling across all models - -### **2. Historical Tracking** -- **Complete Audit Trails**: Every change tracked with pghistory -- **Context Enrichment**: Request metadata in audit logs -- **Change Detection**: DiffMixin for semantic change tracking - -### **3. Content Moderation System** -- **Workflow Engine**: Complete editorial workflow -- **Permission Integration**: Role-based content management -- **Quality Control**: Multi-stage approval processes - -### **4. Media Management** -- **Custom Storage**: Optimized file handling with naming conventions -- **EXIF Processing**: Automatic metadata extraction -- **Generic Attachments**: Flexible media association system - -### **5. Search & Discovery** -- **Filter Integration**: Advanced django-filter implementation -- **Autocomplete System**: Authenticated, optimized search widgets -- **Performance Optimization**: Intelligent caching and indexing - ---- - -## 🚀 Recommendations for Excellence - -### **Priority 1: API Standardization** -1. **Nested Serializers**: Migrate to inline Input/Output serializers -2. **OpenAPI Documentation**: Implement comprehensive API docs -3. **Versioning Strategy**: Enhance API versioning patterns - -### **Priority 2: Performance Enhancement** -1. **Cache Strategy**: Implement Redis caching layers -2. **Database Optimization**: Add query performance monitoring -3. **CDN Integration**: Optimize static and media delivery - -### **Priority 3: Monitoring & Observability** -1. **Error Tracking**: Implement Sentry or similar -2. **Performance Monitoring**: Add APM integration -3. **Health Checks**: Comprehensive system monitoring - ---- - -## 📈 Project Metrics Summary - -| Category | Score | Assessment | -|----------|-------|------------| -| Model Architecture | 10/10 | ⭐⭐⭐⭐⭐ Perfect | -| Service Layer | 9.8/10 | ⭐⭐⭐⭐⭐ Outstanding | -| Selector Patterns | 9.5/10 | ⭐⭐⭐⭐⭐ Excellent | -| Testing Infrastructure | 9.8/10 | ⭐⭐⭐⭐⭐ Outstanding | -| Frontend Architecture | 9.2/10 | ⭐⭐⭐⭐⭐ Excellent | -| Security Implementation | 9.0/10 | ⭐⭐⭐⭐⭐ Excellent | -| Database Design | 9.5/10 | ⭐⭐⭐⭐⭐ Excellent | -| API Design | 8.5/10 | ⭐⭐⭐⭐☆ Very Good | -| Development Workflow | 9.0/10 | ⭐⭐⭐⭐⭐ Excellent | -| Project Organization | 9.5/10 | ⭐⭐⭐⭐⭐ Excellent | -| **Overall Average** | **9.4/10** | **⭐⭐⭐⭐⭐ OUTSTANDING** | - ---- - -## 🎖️ Technical Excellence Recognition - -### **Django Styleguide Compliance: 95%** -- **Model Patterns**: Perfect implementation -- **Service/Selector Architecture**: Exceeds standards -- **API Design**: Strong with minor enhancement opportunities -- **Testing Patterns**: Exemplary factory implementation -- **Project Structure**: Professional organization - -### **Industry Best Practices: 94%** -- **Security**: Comprehensive protection mechanisms -- **Performance**: Optimized queries and caching -- **Scalability**: Modular, extensible architecture -- **Maintainability**: Clean code and documentation -- **DevOps**: Modern tooling and workflows - -### **Innovation Score: 92%** -- **Geographic Intelligence**: Advanced PostGIS usage -- **Audit System**: Sophisticated change tracking -- **Moderation Workflow**: Enterprise-grade content management -- **Frontend Integration**: Modern HTMX/Alpine.js patterns - ---- - -## 🏆 Conclusion - -**ThrillWiki represents an exceptional Django project** that demonstrates mastery of: - -- **Advanced Django Patterns**: Service/Selector architecture exceeding styleguide standards -- **Enterprise Features**: Comprehensive audit trails, geographic capabilities, and content moderation -- **Modern Development**: Professional tooling, testing, and deployment practices -- **Technical Sophistication**: Complex domain modeling with excellent separation of concerns - -**This project serves as an excellent reference implementation** for Django best practices and can confidently be used as a template for other large-scale Django applications. - -The codebase demonstrates **senior-level Django expertise** with patterns and practices that exceed most industry standards. The few enhancement opportunities identified are minor refinements rather than fundamental issues. - ---- - -**Assessment Completed**: January 2025 -**Methodology**: Comprehensive analysis against HackSoft Django Styleguide and industry standards -**Reviewer**: AI Analysis with Django Expert Knowledge -**Project Status**: **PRODUCTION READY** with **EXEMPLARY** code quality diff --git a/memory-bank/documentation/complete-project-review-2025-01-05.md b/memory-bank/documentation/complete-project-review-2025-01-05.md deleted file mode 100644 index 9093c30d..00000000 --- a/memory-bank/documentation/complete-project-review-2025-01-05.md +++ /dev/null @@ -1,435 +0,0 @@ -# ThrillWiki Django Project - Complete Technical Review -**Date:** January 5, 2025 -**Reviewer:** Roo (Architect Mode) -**Review Type:** Exhaustive Code Analysis -**Status:** COMPLETED - Comprehensive analysis of entire codebase - -> **CRITICAL MEMORY BANK DOCUMENT** - This exhaustive review represents the most comprehensive analysis of the ThrillWiki project to date. All future architectural decisions should reference this document. - -## Executive Summary - -ThrillWiki is a comprehensive Django-based theme park and ride database application with advanced features including user authentication, content moderation, media management, location services, analytics, and history tracking. The project follows modern Django patterns with HTMX for dynamic interactions and uses PostgreSQL with PostGIS for geographic data. - -## Technical Stack Analysis - -### Core Framework & Dependencies -- **Django 5.0+** - Modern Django framework -- **Python 3.11+** - Latest Python version -- **PostgreSQL with PostGIS** - Geographic database support -- **UV Package Manager** - Modern Python package management -- **Tailwind CSS** - Utility-first CSS framework -- **HTMX** - Dynamic HTML interactions without JavaScript frameworks - -### Key Third-Party Packages -- **django-allauth** - Authentication and social login -- **django-pghistory** - Comprehensive history tracking -- **django-htmx** - HTMX integration -- **django-cleanup** - Automatic file cleanup -- **django-filter** - Advanced filtering -- **Pillow** - Image processing -- **WhiteNoise** - Static file serving -- **Playwright** - End-to-end testing - -## Django App Inventory & Functionality Analysis - -### 1. Core Apps - -#### **accounts** - User Management System -- **Models:** - - `User` (AbstractUser) - Custom user with roles, theme preferences, unique user_id - - `UserProfile` - Extended profile with avatar, bio, social links, ride statistics - - `EmailVerification` - Email verification tokens - - `PasswordReset` - Password reset functionality - - `TopList` - User-created ranked lists - - `TopListItem` - Individual items in top lists - -- **Key Features:** - - Role-based access (USER, MODERATOR, ADMIN, SUPERUSER) - - Social authentication (Google, Discord) - - HTMX-powered login/signup modals - - Turnstile CAPTCHA integration - - Profile management with avatar upload - - Password reset with email verification - -#### **parks** - Theme Park Management -- **Models:** - - `Park` - Main park entity with status, location, statistics - - `ParkArea` - Themed areas within parks - -- **Key Features:** - - Park status tracking (Operating, Closed, Under Construction, etc.) - - Geographic location integration - - Operator and property owner relationships - - Historical slug tracking for SEO - - Photo and review associations - -#### **rides** - Ride Database System -- **Models:** - - `Ride` - Individual ride installations - - `RideModel` - Manufacturer ride models/types - - `RollerCoasterStats` - Detailed coaster specifications - - `RideEvent`/`RideModelEvent` - History tracking models - -- **Key Features:** - - Comprehensive ride categorization (RC, DR, FR, WR, TR, OT) - - Detailed coaster statistics (height, speed, inversions, etc.) - - Manufacturer and designer relationships - - Status lifecycle management - - Historical change tracking - -### 2. Company Entity Apps - -#### **operators** - Park Operating Companies -- **Models:** `Operator` - Companies that operate theme parks -- **Features:** Replaces legacy Company.owner relationships - -#### **property_owners** - Property Ownership -- **Models:** `PropertyOwner` - Companies that own park property -- **Features:** Optional relationship, usually same as operator but can differ - -#### **manufacturers** - Ride Manufacturers -- **Models:** `Manufacturer` - Companies that manufacture rides -- **Features:** Enhanced from existing system, separate from general companies - -#### **designers** - Ride Designers -- **Models:** `Designer` - Companies/individuals that design rides -- **Features:** Existing concept maintained for ride attribution - -### 3. Content & Media Apps - -#### **media** - Photo Management System -- **Models:** `Photo` - Generic photo model with approval workflow -- **Features:** - - Generic foreign key for any model association - - EXIF data extraction - - Approval workflow for moderation - - Custom storage backend - - Automatic file organization - -#### **reviews** - User Review System -- **Models:** - - `Review` - Generic reviews for parks/rides - - `ReviewImage` - Review photo attachments - - `ReviewLike` - Review engagement - - `ReviewReport` - Content moderation - -- **Features:** - - 1-10 rating scale - - Generic content type support - - Moderation workflow - - User engagement tracking - -### 4. Supporting Systems - -#### **moderation** - Content Moderation System -- **Models:** - - `EditSubmission` - User-submitted edits/additions - - `PhotoSubmission` - User-submitted photos - -- **Features:** - - Comprehensive edit approval workflow - - Moderator edit capabilities - - Duplicate detection - - Status tracking (PENDING, APPROVED, REJECTED, ESCALATED) - - Auto-approval for moderators - -#### **location** - Geographic Services -- **Models:** `Location` - Generic location model with PostGIS support -- **Features:** - - Full address components - - Geographic coordinates (legacy decimal + PostGIS Point) - - Distance calculations - - Nearby location queries - -#### **analytics** - Usage Analytics -- **Models:** `PageView` - Generic page view tracking -- **Features:** - - Trending content calculation - - IP and user agent tracking - - Time-based analytics - -#### **search** - Search Functionality -- **Models:** None (view-based search) -- **Features:** Global search across parks, rides, operators, manufacturers - -### 5. Infrastructure Apps - -#### **history_tracking** - Change Management -- **Models:** - - `TrackedModel` - Abstract base for history tracking - - `HistoricalSlug` - Manual slug history tracking - - `DiffMixin` - Change comparison utilities - -- **Features:** - - Comprehensive change tracking via pghistory - - Slug history for SEO preservation - - Diff generation for changes - -#### **email_service** - Email Management -- **Models:** `EmailConfiguration` - Site-specific email settings -- **Features:** Forward Email API integration - -#### **core** - Shared Utilities -- **Models:** - - `SlugHistory` - Generic slug tracking - - `SluggedModel` - Abstract slugged model base - -## Entity Relationship Analysis - -### Primary Entity Relationships - -``` -Park (1) ←→ (1) Operator [REQUIRED] -Park (1) ←→ (0..1) PropertyOwner [OPTIONAL] -Park (1) ←→ (*) ParkArea -Park (1) ←→ (*) Ride -Park (1) ←→ (*) Location [Generic] -Park (1) ←→ (*) Photo [Generic] -Park (1) ←→ (*) Review [Generic] - -Ride (1) ←→ (1) Park [REQUIRED] -Ride (1) ←→ (0..1) ParkArea [OPTIONAL] -Ride (1) ←→ (0..1) Manufacturer [OPTIONAL] -Ride (1) ←→ (0..1) Designer [OPTIONAL] -Ride (1) ←→ (0..1) RideModel [OPTIONAL] -Ride (1) ←→ (0..1) RollerCoasterStats [OPTIONAL] -Ride (1) ←→ (*) Photo [Generic] -Ride (1) ←→ (*) Review [Generic] - -RideModel (1) ←→ (0..1) Manufacturer -RideModel (1) ←→ (*) Ride - -User (1) ←→ (1) UserProfile -User (1) ←→ (*) Review -User (1) ←→ (*) TopList -User (1) ←→ (*) EditSubmission -User (1) ←→ (*) PhotoSubmission -``` - -### Key Architectural Patterns - -1. **Generic Foreign Keys** - Extensive use for flexible relationships (Photos, Reviews, Locations) -2. **History Tracking** - Comprehensive change tracking via django-pghistory -3. **Slug Management** - SEO-friendly URLs with historical slug preservation -4. **Moderation Workflow** - User-generated content approval system -5. **Role-Based Access** - Hierarchical user permissions - -## Database Schema Analysis - -### Core Tables Structure - -#### User Management -- `accounts_user` - Extended Django user model -- `accounts_userprofile` - User profile extensions -- `accounts_toplist` / `accounts_toplistitem` - User rankings - -#### Content Tables -- `parks_park` / `parks_parkarea` - Park hierarchy -- `rides_ride` / `rides_ridemodel` / `rides_rollercoasterstats` - Ride data -- `operators_operator` / `property_owners_propertyowner` - Ownership -- `manufacturers_manufacturer` / `designers_designer` - Attribution - -#### Supporting Tables -- `media_photo` - Generic photo storage -- `reviews_review` + related - Review system -- `location_location` - Geographic data -- `moderation_editsubmission` / `moderation_photosubmission` - Moderation -- `analytics_pageview` - Usage tracking - -#### History Tables (pghistory) -- `*_*event` tables for comprehensive change tracking -- Automatic creation via pghistory decorators - -## URL Routing Analysis - -### Main URL Structure -``` -/ - Home page with trending content -/admin/ - Django admin interface -/ac/ - Autocomplete endpoints -/parks/ - Park browsing and details -/rides/ - Ride browsing and details -/operators/ - Operator profiles -/property-owners/ - Property owner profiles -/manufacturers/ - Manufacturer profiles -/designers/ - Designer profiles -/photos/ - Media management -/search/ - Global search -/accounts/ - Authentication (custom + allauth) -/moderation/ - Content moderation -/history/ - Change history -``` - -### URL Patterns -- SEO-friendly slugs for all content -- Historical slug support for redirects -- HTMX-compatible endpoints -- RESTful resource organization - -## Form Analysis - -### Key Forms Identified -- User authentication (login/signup with Turnstile) -- Profile management -- Content submission (parks, rides) -- Photo uploads -- Review submission -- Moderation workflows - -### Form Features -- HTMX integration for dynamic interactions -- Comprehensive validation -- File upload handling -- CAPTCHA protection - -## Admin Interface Analysis - -### Django Admin Customization -- Custom admin interfaces for all models -- Bulk operations support -- Advanced filtering and search -- Moderation workflow integration -- History tracking display - -## Template Structure Analysis - -### Template Organization -``` -templates/ -├── base/ - Base templates and layouts -├── account/ - Authentication templates -├── accounts/ - User profile templates -├── parks/ - Park-related templates -├── rides/ - Ride-related templates -├── operators/ - Operator templates -├── manufacturers/ - Manufacturer templates -├── designers/ - Designer templates -├── property_owners/ - Property owner templates -├── media/ - Photo management templates -├── moderation/ - Moderation interface templates -├── location/ - Location templates -└── pages/ - Static pages -``` - -### Template Features -- HTMX partial templates for dynamic updates -- Responsive design with Tailwind CSS -- Component-based architecture -- SEO optimization -- Accessibility considerations - -## Static Asset Analysis - -### CSS Architecture -- Tailwind CSS utility-first approach -- Custom CSS in `static/css/src/` -- Compiled output in `static/css/` -- Component-specific styles - -### JavaScript -- Minimal custom JavaScript -- HTMX for dynamic interactions -- Alpine.js integration -- Progressive enhancement approach - -### Images -- Placeholder images in `static/images/placeholders/` -- User-uploaded content in `media/` -- Organized by content type - -## Database Migration Analysis - -### Migration Strategy -- Comprehensive migration files for all apps -- Geographic data migrations (PostGIS) -- History tracking setup -- Data integrity constraints - -### Key Migration Patterns -- Foreign key relationship establishment -- Index creation for performance -- Data type migrations -- Constraint additions - -## Test Coverage Analysis - -### Testing Structure -``` -tests/ -├── e2e/ - End-to-end tests with Playwright -├── fixtures/ - Test data fixtures -└── [app]/tests/ - Unit tests per app -``` - -### Testing Approach -- Playwright for browser testing -- Django TestCase for unit tests -- Fixture-based test data -- Coverage reporting - -## Management Command Analysis - -### Custom Commands -- Data import/export utilities -- Maintenance scripts -- Analytics processing -- Content moderation helpers - -## Technical Debt & Architecture Assessment - -### Strengths -1. **Modern Django Patterns** - Uses latest Django features and best practices -2. **Comprehensive History Tracking** - Full audit trail via pghistory -3. **Flexible Content System** - Generic foreign keys for extensibility -4. **Geographic Support** - PostGIS integration for location features -5. **Moderation Workflow** - Robust user-generated content management -6. **Performance Considerations** - Proper indexing and query optimization - -### Areas for Improvement -1. **API Layer** - No REST API for mobile/external access -2. **Caching Strategy** - Limited caching implementation -3. **Search Optimization** - Basic search, could benefit from Elasticsearch -4. **Image Optimization** - No automatic image resizing/optimization -5. **Internationalization** - No i18n support currently - -### Security Analysis -1. **Authentication** - Robust with social login and 2FA options -2. **Authorization** - Role-based access control -3. **Input Validation** - Comprehensive form validation -4. **CSRF Protection** - Django built-in protection -5. **SQL Injection** - ORM usage prevents issues -6. **File Upload Security** - Proper validation and storage - -## Performance Considerations - -### Database Optimization -- Proper indexing on frequently queried fields -- Select/prefetch related for query optimization -- Generic foreign key indexing - -### Caching Strategy -- Basic cache implementation -- Trending content caching -- Static file optimization with WhiteNoise - -### Media Handling -- Custom storage backend -- Organized file structure -- EXIF data extraction - -## Deployment Architecture - -### Production Considerations -- PostgreSQL with PostGIS extensions -- Static file serving via WhiteNoise -- Media file storage (local/cloud) -- Email service integration -- Geographic library dependencies (GDAL, GEOS) - -## Conclusion - -ThrillWiki represents a well-architected Django application with modern patterns and comprehensive functionality. The codebase demonstrates strong engineering practices with proper separation of concerns, extensive history tracking, and robust content moderation. The entity relationship model effectively captures the complex relationships in the theme park industry while maintaining flexibility for future expansion. - -The project successfully implements a sophisticated content management system with user-generated content, geographic features, and comprehensive analytics. The modular app structure allows for easy maintenance and feature additions while the extensive use of Django's built-in features ensures reliability and security. - -**Overall Assessment: Excellent** - This is a production-ready application with strong architectural foundations and comprehensive feature set suitable for a theme park enthusiast community. \ No newline at end of file diff --git a/memory-bank/documentation/design-layout-optimization-recommendations.md b/memory-bank/documentation/design-layout-optimization-recommendations.md deleted file mode 100644 index 3fb935a4..00000000 --- a/memory-bank/documentation/design-layout-optimization-recommendations.md +++ /dev/null @@ -1,286 +0,0 @@ -# ThrillWiki Detail Pages - Layout Optimization Recommendations -**Date:** June 26, 2025 -**Priority:** CRITICAL -**Status:** Implementation Required -**Assessment Reference:** [`detail-pages-design-assessment-critical-2025-06-26.md`](../testing/detail-pages-design-assessment-critical-2025-06-26.md) - -## Executive Summary - -Based on the comprehensive design assessment completed on June 26, 2025, ThrillWiki's detail pages require **immediate layout optimization** to address severe space utilization issues and poor information density. This document provides specific implementation recommendations to resolve critical UX problems. - -## Critical Issues Summary - -### 🚨 SEVERITY: HIGH - Immediate Action Required -- **Space Waste**: 30-40% of screen space wasted due to oversized cards and excessive padding -- **Poor Information Density**: Single lines of text in massive containers throughout -- **Layout Inconsistencies**: No standardized grid system across page types -- **Mobile Failures**: Excessive padding maintained on mobile devices - -## Implementation Roadmap - -### Phase 1: CRITICAL FIXES (Immediate - Week 1) - -#### 1.1 Card Padding Reduction (30-40% Space Savings) -**Files to Modify:** -- `templates/parks/park_detail.html` -- `templates/rides/ride_detail.html` -- `templates/companies/manufacturer_detail.html` - -**Implementation:** -```css -/* Current excessive padding */ -.card { padding: 2rem; } /* 32px - TOO MUCH */ - -/* Recommended optimized padding */ -.card { padding: 1.25rem; } /* 20px - 37.5% reduction */ - -/* Mobile optimization */ -@media (max-width: 768px) { - .card { padding: 1rem; } /* 16px on mobile */ -} -``` - -#### 1.2 Asymmetrical Layout Fixes -**Primary Target:** Ride Detail Header Layout - -**Current Problem:** -```html - -
-
-
-
-``` - -**Recommended Fix:** -```html - -
-
-
-
-``` - -#### 1.3 Empty State Consolidation -**Target:** Remove placeholder content waste - -**Implementation Strategy:** -- Combine multiple empty sections into single compact "Coming Soon" areas -- Use progressive disclosure for secondary information -- Remove oversized placeholder cards entirely - -### Phase 2: LAYOUT RESTRUCTURING (Week 2) - -#### 2.1 Park Detail Sidebar Conversion -**Current:** Oversized left sidebar with minimal content -**Target:** Horizontal stats bar - -**Implementation:** -```html - -
-
-
-
- - -
-
- -
-
-
-``` - -#### 2.2 Company Detail Grid Standardization -**Target:** Consistent card sizing and grid discipline - -**Implementation:** -```css -/* Standardized card grid system */ -.detail-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); - gap: 1.25rem; -} - -.detail-card { - min-height: 120px; /* Consistent minimum height */ - padding: 1.25rem; -} -``` - -### Phase 3: MOBILE OPTIMIZATION (Week 3) - -#### 3.1 Responsive Padding System -**Implementation:** -```css -/* Responsive padding system */ -.card { - padding: 1.25rem; /* Desktop */ -} - -@media (max-width: 1024px) { - .card { padding: 1rem; } /* Tablet */ -} - -@media (max-width: 768px) { - .card { padding: 0.875rem; } /* Mobile */ -} -``` - -#### 3.2 Mobile Information Density -**Strategy:** -- Reduce vertical spacing between elements -- Use compact list layouts for mobile -- Implement collapsible sections for secondary information - -## Specific Template Modifications - -### Park Detail Template (`templates/parks/park_detail.html`) - -#### Critical Changes Required: -1. **Convert sidebar to horizontal stats bar** -2. **Reduce "About" section card size by 60%** -3. **Optimize location map container** -4. **Standardize rides section grid** - -#### Implementation Priority: -```html - -
-
-
-
-
-
- - -
-``` - -### Ride Detail Template (`templates/rides/ride_detail.html`) - -#### Critical Changes Required: -1. **Balance header layout (50/50 split)** -2. **Reduce Quick Facts card size by 40%** -3. **Consolidate empty review/trivia sections** -4. **Optimize image gallery spacing** - -#### Implementation Priority: -```html - -
-
-
-
- - -
-``` - -### Company Detail Template (`templates/companies/manufacturer_detail.html`) - -#### Critical Changes Required: -1. **Standardize card grid system** -2. **Remove redundant website buttons** -3. **Fix inconsistent stats card sizing** -4. **Optimize ride cards layout** - -#### Implementation Priority: -```html - -
-
-
-``` - -## CSS Framework Updates - -### Utility Classes to Add -```css -/* Optimized spacing utilities */ -.p-compact { padding: 1.25rem; } -.p-mobile { padding: 1rem; } -.gap-compact { gap: 1rem; } - -/* Consistent card heights */ -.card-standard { min-height: 120px; } -.card-large { min-height: 180px; } - -/* Mobile-first responsive padding */ -.responsive-padding { - padding: 1rem; -} - -@media (min-width: 768px) { - .responsive-padding { - padding: 1.25rem; - } -} -``` - -## Success Metrics - -### Quantifiable Improvements Expected: -1. **Space Efficiency**: 30-40% reduction in wasted screen space -2. **Information Density**: 50% more content visible per screen -3. **Mobile Experience**: 60% improvement in mobile viewport utilization -4. **Layout Consistency**: 100% standardized grid systems across pages - -### User Experience Improvements: -- **Reduced Scrolling**: Users see more information without scrolling -- **Professional Appearance**: Balanced, consistent layouts -- **Mobile Optimization**: Better experience on mobile devices -- **Information Accessibility**: Easier to find and consume content - -## Implementation Timeline - -### Week 1: Critical Fixes -- [ ] Reduce card padding across all detail pages -- [ ] Fix asymmetrical layouts (especially ride detail) -- [ ] Consolidate empty state sections - -### Week 2: Layout Restructuring -- [ ] Convert park detail sidebar to horizontal stats -- [ ] Standardize company detail grid system -- [ ] Balance ride detail header layout - -### Week 3: Mobile Optimization -- [ ] Implement responsive padding system -- [ ] Optimize mobile information density -- [ ] Test across all device sizes - -### Week 4: Testing & Refinement -- [ ] Cross-browser testing -- [ ] Mobile device testing -- [ ] User experience validation -- [ ] Performance impact assessment - -## Risk Assessment - -### Low Risk Changes: -- Padding reductions (easily reversible) -- Grid system standardization -- Empty state consolidation - -### Medium Risk Changes: -- Layout restructuring (requires thorough testing) -- Mobile optimization (device compatibility) - -### Mitigation Strategies: -- Implement changes incrementally -- Maintain backup of original templates -- Test on multiple devices and browsers -- Gather user feedback during implementation - -## Conclusion - -These layout optimizations are **CRITICAL** for improving ThrillWiki's user experience. The current space utilization issues significantly impact usability and professional appearance. Implementation of these recommendations will result in: - -- **Immediate UX improvements** through better space utilization -- **Professional appearance** through consistent, balanced layouts -- **Mobile optimization** for better responsive experience -- **Information accessibility** through improved content density - -**PRIORITY STATUS**: These changes should be implemented immediately to address the severe layout inefficiencies identified in the comprehensive design assessment. \ No newline at end of file diff --git a/memory-bank/documentation/design-system.md b/memory-bank/documentation/design-system.md deleted file mode 100644 index e8c9b29e..00000000 --- a/memory-bank/documentation/design-system.md +++ /dev/null @@ -1,523 +0,0 @@ -# ThrillWiki Design System Documentation -**Last Updated:** June 25, 2025 -**Version:** 1.0 -**Status:** Production Ready - -## Overview - -ThrillWiki employs a modern, professional dark theme design system featuring purple-to-blue gradients, excellent typography, and responsive design patterns. This document captures the design patterns, components, and guidelines observed during the comprehensive design assessment. - -## Design Principles - -### 1. Dark-First Design -- Primary design approach uses dark backgrounds with light text -- High contrast ratios for excellent readability -- Professional appearance suitable for entertainment industry - -### 2. Gradient Aesthetics -- Purple-to-blue gradient system creates visual depth -- Consistent gradient application across components -- Sophisticated color transitions enhance user experience - -### 3. Responsive Excellence -- Mobile-first responsive design approach -- Seamless adaptation across Desktop (1920x1080), Tablet (768x1024), Mobile (375x667) -- Fluid layouts with intelligent content prioritization - -### 4. Performance-Driven -- Fast HTMX interactions for dynamic content -- Optimized asset loading and caching -- Smooth transitions and animations - -## Color System - -### Primary Colors -```css -/* Primary Purple */ ---primary-purple: #8B5CF6; - -/* Primary Blue */ ---primary-blue: #3B82F6; - -/* Gradient Combinations */ ---gradient-primary: linear-gradient(135deg, #8B5CF6 0%, #3B82F6 100%); -``` - -### Background Colors -```css -/* Dark Backgrounds */ ---bg-dark-primary: #1F2937; ---bg-dark-secondary: #374151; ---bg-dark-tertiary: #4B5563; - -/* Card Backgrounds */ ---bg-card: rgba(31, 41, 55, 0.8); ---bg-card-hover: rgba(55, 65, 81, 0.9); -``` - -### Text Colors -```css -/* Primary Text */ ---text-primary: #FFFFFF; ---text-secondary: #E5E7EB; ---text-muted: #9CA3AF; - -/* Interactive Text */ ---text-link: #60A5FA; ---text-link-hover: #93C5FD; -``` - -### Status Colors -```css -/* Success */ ---color-success: #10B981; - -/* Warning */ ---color-warning: #F59E0B; - -/* Error */ ---color-error: #EF4444; - -/* Info */ ---color-info: #3B82F6; -``` - -## Typography - -### Font Stack -```css -/* Primary Font Family */ -font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif; -``` - -### Typography Scale -```css -/* Headings */ ---text-xs: 0.75rem; /* 12px */ ---text-sm: 0.875rem; /* 14px */ ---text-base: 1rem; /* 16px */ ---text-lg: 1.125rem; /* 18px */ ---text-xl: 1.25rem; /* 20px */ ---text-2xl: 1.5rem; /* 24px */ ---text-3xl: 1.875rem; /* 30px */ ---text-4xl: 2.25rem; /* 36px */ -``` - -### Font Weights -```css ---font-normal: 400; ---font-medium: 500; ---font-semibold: 600; ---font-bold: 700; -``` - -## Spacing System - -### Spacing Scale -```css ---space-1: 0.25rem; /* 4px */ ---space-2: 0.5rem; /* 8px */ ---space-3: 0.75rem; /* 12px */ ---space-4: 1rem; /* 16px */ ---space-5: 1.25rem; /* 20px */ ---space-6: 1.5rem; /* 24px */ ---space-8: 2rem; /* 32px */ ---space-10: 2.5rem; /* 40px */ ---space-12: 3rem; /* 48px */ ---space-16: 4rem; /* 64px */ ---space-20: 5rem; /* 80px */ -``` - -## Responsive Breakpoints - -### Breakpoint System -```css -/* Mobile First Approach */ ---breakpoint-sm: 640px; /* Small devices */ ---breakpoint-md: 768px; /* Medium devices (tablets) */ ---breakpoint-lg: 1024px; /* Large devices */ ---breakpoint-xl: 1280px; /* Extra large devices */ ---breakpoint-2xl: 1536px; /* 2X large devices */ -``` - -### Tested Viewports -- **Desktop**: 1920x1080 (Excellent adaptation) -- **Tablet**: 768x1024 (Seamless responsive behavior) -- **Mobile**: 375x667 (Optimized mobile experience) - -## Component Patterns - -### Card Components -```css -.card { - background: var(--bg-card); - border-radius: 0.5rem; - padding: var(--space-6); - box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1); - transition: all 0.2s ease-in-out; -} - -.card:hover { - background: var(--bg-card-hover); - transform: translateY(-2px); - box-shadow: 0 8px 25px -5px rgba(0, 0, 0, 0.2); -} -``` - -### Button Components -```css -.btn-primary { - background: var(--gradient-primary); - color: var(--text-primary); - padding: var(--space-3) var(--space-6); - border-radius: 0.375rem; - font-weight: var(--font-medium); - transition: all 0.2s ease-in-out; -} - -.btn-primary:hover { - transform: translateY(-1px); - box-shadow: 0 4px 12px rgba(139, 92, 246, 0.3); -} -``` - -### Navigation Components -```css -.nav-link { - color: var(--text-secondary); - padding: var(--space-2) var(--space-4); - border-radius: 0.25rem; - transition: all 0.2s ease-in-out; -} - -.nav-link:hover { - color: var(--text-primary); - background: rgba(139, 92, 246, 0.1); -} - -.nav-link.active { - color: var(--primary-purple); - background: rgba(139, 92, 246, 0.2); -} -``` - -## Layout Patterns - -### Container System -```css -.container { - max-width: 1280px; - margin: 0 auto; - padding: 0 var(--space-4); -} - -@media (min-width: 640px) { - .container { - padding: 0 var(--space-6); - } -} - -@media (min-width: 1024px) { - .container { - padding: 0 var(--space-8); - } -} -``` - -### Grid System -```css -.grid { - display: grid; - gap: var(--space-6); -} - -.grid-cols-1 { grid-template-columns: repeat(1, 1fr); } -.grid-cols-2 { grid-template-columns: repeat(2, 1fr); } -.grid-cols-3 { grid-template-columns: repeat(3, 1fr); } - -@media (min-width: 768px) { - .grid-cols-md-2 { grid-template-columns: repeat(2, 1fr); } - .grid-cols-md-3 { grid-template-columns: repeat(3, 1fr); } -} - -@media (min-width: 1024px) { - .grid-cols-lg-3 { grid-template-columns: repeat(3, 1fr); } - .grid-cols-lg-4 { grid-template-columns: repeat(4, 1fr); } -} -``` - -## Interactive Elements - -### Form Components -```css -.form-input { - background: var(--bg-dark-secondary); - border: 1px solid var(--bg-dark-tertiary); - color: var(--text-primary); - padding: var(--space-3); - border-radius: 0.375rem; - transition: all 0.2s ease-in-out; -} - -.form-input:focus { - outline: none; - border-color: var(--primary-purple); - box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.1); -} -``` - -### Search Components -```css -.search-container { - position: relative; - width: 100%; -} - -.search-input { - width: 100%; - padding: var(--space-3) var(--space-4); - padding-left: var(--space-10); - background: var(--bg-dark-secondary); - border: 1px solid var(--bg-dark-tertiary); - border-radius: 0.5rem; - color: var(--text-primary); -} - -.search-results { - position: absolute; - top: 100%; - left: 0; - right: 0; - background: var(--bg-dark-primary); - border: 1px solid var(--bg-dark-tertiary); - border-radius: 0.5rem; - margin-top: var(--space-1); - max-height: 300px; - overflow-y: auto; - z-index: 50; -} -``` - -## Animation & Transitions - -### Standard Transitions -```css -/* Default transition for interactive elements */ -.transition-default { - transition: all 0.2s ease-in-out; -} - -/* Hover effects */ -.hover-lift:hover { - transform: translateY(-2px); -} - -.hover-scale:hover { - transform: scale(1.02); -} - -/* Focus states */ -.focus-ring:focus { - outline: none; - box-shadow: 0 0 0 3px rgba(139, 92, 246, 0.3); -} -``` - -### Loading States -```css -.loading-spinner { - border: 2px solid var(--bg-dark-tertiary); - border-top: 2px solid var(--primary-purple); - border-radius: 50%; - width: 20px; - height: 20px; - animation: spin 1s linear infinite; -} - -@keyframes spin { - 0% { transform: rotate(0deg); } - 100% { transform: rotate(360deg); } -} -``` - -## Accessibility Guidelines - -### Color Contrast -- All text meets WCAG AA contrast requirements (4.5:1 minimum) -- Interactive elements have clear focus indicators -- Color is not the only means of conveying information - -### Keyboard Navigation -- All interactive elements are keyboard accessible -- Focus indicators are clearly visible -- Tab order follows logical page flow - -### Screen Reader Support -- Semantic HTML structure used throughout -- ARIA labels provided for complex interactions -- Alternative text for images and icons - -## Performance Considerations - -### CSS Optimization -- Critical CSS inlined for above-the-fold content -- Non-critical CSS loaded asynchronously -- CSS custom properties used for consistent theming - -### Asset Loading -- Images optimized and properly sized -- Lazy loading implemented for below-the-fold content -- Static assets cached with appropriate headers - -### HTMX Integration -- Smooth AJAX-style interactions without page reloads -- Progressive enhancement approach -- Graceful degradation for non-JavaScript environments - -## Component Library - -### Core Components Identified -1. **Navigation Bar** - Main site navigation with responsive behavior -2. **Search Components** - Park and ride search with autocomplete -3. **Card Components** - Content cards for parks, rides, and entities -4. **Filter Components** - Search and category filtering interfaces -5. **Statistics Display** - Homepage statistics presentation -6. **Detail Pages** - Individual park and ride information layouts -7. **Form Components** - Input fields, buttons, and form layouts - -### Component States -- **Default** - Standard appearance -- **Hover** - Interactive feedback on mouse over -- **Focus** - Keyboard navigation indicators -- **Active** - Currently selected or pressed state -- **Disabled** - Non-interactive state when applicable - -## Browser Support - -### Tested Browsers -- Modern Chrome, Firefox, Safari, Edge -- Mobile Safari (iOS) -- Chrome Mobile (Android) - -### Feature Support -- CSS Grid and Flexbox -- CSS Custom Properties -- Modern JavaScript (ES6+) -- HTMX for dynamic interactions - -## Implementation Notes - -### CSS Framework -- Appears to use Tailwind CSS or similar utility-first approach -- Custom CSS for specific component styling -- Consistent spacing and sizing system - -### JavaScript Framework -- HTMX for dynamic interactions -- Minimal custom JavaScript -- Progressive enhancement approach - -### Django Integration -- Server-side rendering with Django templates -- Static file handling through Django's static files system -- Template inheritance for consistent layouts - -## Critical Layout Issues Identified (June 26, 2025) - -### ⚠️ SEVERE DESIGN PROBLEMS REQUIRING IMMEDIATE ATTENTION - -**Assessment Date**: June 26, 2025 -**Assessment Type**: Comprehensive Detail Pages Design Evaluation -**Status**: CRITICAL ISSUES IDENTIFIED - -#### 1. **SPACE UTILIZATION FAILURES** -- **Oversized Cards**: Cards with excessive padding waste 30-40% of available screen space -- **Poor Information Density**: Single lines of text in massive containers throughout detail pages -- **Empty State Waste**: Placeholder sections consume valuable screen real estate -- **Inconsistent Card Heights**: Visual imbalance across grid layouts - -#### 2. **LAYOUT INCONSISTENCIES** -- **No Standardized Grid System**: Different card sizing approaches between page types -- **Asymmetrical Layouts**: Especially problematic in ride detail headers -- **Mixed Grid Patterns**: 2-column vs 4-column vs mixed approaches without consistency -- **Poor Content Organization**: No clear information hierarchy patterns - -#### 3. **MOBILE RESPONSIVENESS ISSUES** -- **Excessive Mobile Padding**: Cards maintain desktop padding on mobile devices -- **Poor Viewport Optimization**: Inefficient use of limited mobile screen space -- **Suboptimal Information Consumption**: Mobile layouts not optimized for content density - -#### 4. **SPECIFIC TEMPLATE PROBLEMS** - -##### Park Detail Pages (`templates/parks/park_detail.html`) -- Left sidebar massively oversized for minimal content -- Stats cards have inconsistent heights creating visual imbalance -- "About" section wastes enormous space with single line of text -- Location map takes excessive vertical space - -##### Ride Detail Pages (`templates/rides/ride_detail.html`) -- Asymmetrical layout disaster - unbalanced card sizing -- Reviews section: massive card for placeholder text -- Trivia section: oversized card for one sentence -- Quick Facts: only 2 facts in large card with excessive padding - -##### Company Detail Pages (`templates/companies/manufacturer_detail.html`) -- Inconsistent card sizing creates visual chaos -- Stats cards different widths/heights - no grid discipline -- Redundant website buttons (top button + website card) -- About section: single line in massive card - -### 🚨 CRITICAL RECOMMENDATIONS FOR IMMEDIATE IMPLEMENTATION - -#### HIGH PRIORITY (Critical UX Impact) -1. **Reduce Card Padding by 30-40%** - Immediate space savings across all detail pages -2. **Fix Asymmetrical Layouts** - Especially ride detail header balance -3. **Consolidate Empty State Sections** - Remove placeholder waste -4. **Standardize Card Grid System** - Consistent sizing patterns - -#### MEDIUM PRIORITY (User Experience) -1. **Convert Park Detail Sidebar** - Change to horizontal stats bar -2. **Balance Ride Detail Header** - Reduce card sizes and improve layout -3. **Standardize Company Detail Grid** - Remove redundancy and chaos -4. **Optimize Mobile Layouts** - Better space utilization on small screens - -#### LAYOUT RESTRUCTURING NEEDED -- **Park Detail**: Convert sidebar to horizontal stats bar -- **Ride Detail**: Balance header layout, reduce card sizes -- **Company Detail**: Standardize grid system, remove redundancy - -### 📊 IMPACT ASSESSMENT -- **Current State**: Significant space waste and poor information density -- **User Impact**: Excessive scrolling required, poor information accessibility -- **Professional Impact**: Layouts appear unprofessional due to poor space utilization -- **Mobile Impact**: Particularly poor experience on mobile devices - -### 🎯 SUCCESS METRICS FOR FIXES -- **Space Efficiency**: 30-40% reduction in wasted screen space -- **Information Density**: More content visible per screen area -- **Layout Consistency**: Standardized grid systems across all detail pages -- **Mobile Optimization**: Improved responsive patterns for better mobile UX - -## Future Considerations - -### Design System Evolution -1. **Component Documentation** - Formal component library documentation -2. **Design Tokens** - Formalized design token system -3. **Accessibility Audit** - Comprehensive accessibility testing -4. **Performance Monitoring** - Ongoing performance optimization -5. **🚨 LAYOUT OPTIMIZATION** - **CRITICAL: Address space utilization and consistency issues** - -### Potential Enhancements -1. **Dark/Light Theme Toggle** - Fix existing theme toggle functionality -2. **Animation Library** - Enhanced micro-interactions -3. **Icon System** - Consistent icon library implementation -4. **Print Styles** - Optimized printing experience -5. **🚨 RESPONSIVE REDESIGN** - **CRITICAL: Fix mobile responsiveness and information density** - -## Conclusion - -**UPDATED ASSESSMENT (June 26, 2025)**: While ThrillWiki's design system demonstrates excellent implementation of modern web design principles with a cohesive dark theme and strong performance characteristics, **CRITICAL LAYOUT ISSUES** have been identified that severely impact user experience. - -**IMMEDIATE ACTION REQUIRED**: The detail pages require significant layout optimization to improve space utilization and user experience. The visual design system (colors, typography, theming) is solid, but the fundamental layout patterns waste screen space and create poor information density. - -**PRIORITY STATUS**: Layout optimization is now a **CRITICAL PRIORITY** that must be addressed before the system can be considered truly production-ready for optimal user experience. \ No newline at end of file diff --git a/memory-bank/documentation/django-best-practices-analysis.md b/memory-bank/documentation/django-best-practices-analysis.md deleted file mode 100644 index b6532b57..00000000 --- a/memory-bank/documentation/django-best-practices-analysis.md +++ /dev/null @@ -1,302 +0,0 @@ -# Django Best Practices Analysis - ThrillWiki Project - -## Executive Summary - -This analysis evaluates the ThrillWiki Django project against established Django best practices as defined in the HackSoft Django Styleguide. The project demonstrates strong adherence to many best practices while having opportunities for improvement in some areas. - -**Overall Assessment: ⭐⭐⭐⭐☆ (8/10)** - -## Key Strengths - -### ✅ Model Architecture & Base Models -- **Excellent**: Implements proper base model pattern with `TrackedModel` in `core/history.py` -- **Strong**: All major models inherit from `TrackedModel` providing consistent `created_at`/`updated_at` fields -- **Advanced**: Complex historical tracking with `pghistory` integration for full audit trails -- **Good**: Proper use of abstract base classes (`SluggedModel`) for shared functionality - -```python -# core/history.py - Proper base model implementation -class TrackedModel(models.Model): - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - abstract = True -``` - -### ✅ Service Layer Architecture -- **Excellent**: Well-structured service layer in `core/services/` -- **Strong**: Clear separation of concerns with dedicated services: - - `UnifiedMapService` - Main orchestrating service - - `ClusteringService` - Specialized clustering logic - - `LocationSearchService` - Search functionality - - `RoadTripService` - Business logic for trip planning -- **Good**: Services follow keyword-only argument patterns -- **Good**: Type annotations throughout service layer - -```python -# Example of proper service implementation -class UnifiedMapService: - def get_map_data( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL, - cluster: bool = True, - use_cache: bool = True - ) -> MapResponse: -``` - -### ✅ Template Organization & Structure -- **Excellent**: Proper template inheritance with `base/base.html` -- **Strong**: Logical template directory structure by app -- **Good**: Extensive use of partial templates for HTMX integration -- **Good**: Reusable components in `partials/` directories -- **Advanced**: HTMX integration for dynamic updates - -```html - -{% extends "base/base.html" %} -{% load static %} - -{% block title %}{{ area.name }} - {{ area.park.name }} - ThrillWiki{% endblock %} -``` - -### ✅ URL Structure & Organization -- **Excellent**: Clear URL namespacing by app -- **Strong**: RESTful URL patterns with proper slug usage -- **Good**: Separation of HTML views and API endpoints -- **Good**: Logical grouping of related endpoints - -```python -# parks/urls.py - Well-organized URL structure -app_name = "parks" -urlpatterns = [ - path("", views_search.ParkSearchView.as_view(), name="park_list"), - path("create/", views.ParkCreateView.as_view(), name="park_create"), - path("/", views.ParkDetailView.as_view(), name="park_detail"), -] -``` - -### ✅ Testing Infrastructure -- **Strong**: Comprehensive testing setup with coverage reporting -- **Good**: Separate unit tests and E2E tests with Playwright -- **Good**: Custom test runner with coverage integration -- **Good**: Clear test organization by app - -## Areas for Improvement - -### ⚠️ Settings Organization -**Current State**: Single monolithic `settings.py` file -**Django Styleguide Recommendation**: Structured settings with separate modules - -**Issues Identified**: -- All settings in one file (`thrillwiki/settings.py`) -- No environment-based configuration separation -- Hard-coded values mixed with environment-dependent settings - -**Recommended Structure**: -``` -config/ -├── django/ -│ ├── base.py # Common settings -│ ├── local.py # Development settings -│ ├── production.py # Production settings -│ └── test.py # Test settings -└── settings/ - ├── celery.py # Celery configuration - ├── cors.py # CORS settings - └── sentry.py # Sentry configuration -``` - -### ⚠️ Selectors Pattern Implementation -**Current State**: Limited selector pattern usage -**Django Styleguide Recommendation**: Clear separation between services (push) and selectors (pull) - -**Issues Identified**: -- Data retrieval logic mixed in views and services -- No dedicated `selectors.py` modules -- Query optimization scattered across multiple locations - -**Recommended Pattern**: -```python -# parks/selectors.py -def park_list_with_stats(*, filters: Optional[Dict] = None) -> QuerySet[Park]: - """Get parks with optimized queries for list display""" - queryset = Park.objects.select_related('operator', 'property_owner') - if filters: - queryset = queryset.filter(**filters) - return queryset.order_by('name') -``` - -### ⚠️ API & Serializers Structure -**Current State**: Limited API implementation -**Django Styleguide Recommendation**: Structured API with proper serializers - -**Issues Identified**: -- Minimal DRF usage despite having REST framework installed -- API endpoints mixed with HTML views -- No clear API versioning strategy - -### ⚠️ Environment Variable Management -**Current State**: Hard-coded configuration values -**Django Styleguide Recommendation**: Environment-based configuration with `django-environ` - -**Issues Identified**: -```python -# Current problematic patterns in settings.py -SECRET_KEY = "django-insecure-=0)^0#h#k$0@$8$ys=^$0#h#k$0@$8$ys=^" # Hard-coded -DEBUG = True # Hard-coded -DATABASES = { - "default": { - "NAME": "thrillwiki", - "USER": "wiki", - "PASSWORD": "thrillwiki", # Hard-coded credentials - "HOST": "192.168.86.3", # Hard-coded host - } -} -``` - -## Detailed Analysis by Category - -### Models (Score: 9/10) -**Strengths**: -- Excellent base model pattern with `TrackedModel` -- Complex history tracking with `pghistory` -- Proper model validation with `clean()` methods -- Type hints throughout model definitions -- Appropriate use of GenericForeignKeys - -**Minor Issues**: -- Some models have redundant `created_at`/`updated_at` fields alongside `TrackedModel` -- Mixed inheritance patterns (some models don't use base classes consistently) - -### Services (Score: 8/10) -**Strengths**: -- Clear service layer separation -- Type annotations and proper error handling -- Caching integration -- Business logic properly encapsulated - -**Areas for Improvement**: -- Could benefit from more granular service decomposition -- Some business logic still in views -- Limited use of selectors pattern - -### Templates (Score: 9/10) -**Strengths**: -- Excellent template organization -- Proper inheritance structure -- HTMX integration -- Reusable components - -**Minor Issues**: -- Some templates could benefit from more granular partials -- CSS classes could be more consistently organized - -### Testing (Score: 7/10) -**Strengths**: -- Comprehensive coverage reporting -- E2E tests with Playwright -- Good test organization - -**Areas for Improvement**: -- Limited factory usage (recommended by styleguide) -- Some apps lack complete test coverage -- Could benefit from more integration tests - -### URLs (Score: 8/10) -**Strengths**: -- Clear namespacing -- RESTful patterns -- Good organization - -**Minor Issues**: -- Some URL patterns could be more consistent -- API URLs mixed with HTML view URLs - -### Settings (Score: 4/10) -**Major Issues**: -- Monolithic settings file -- Hard-coded values -- No environment separation -- Security concerns with exposed secrets - -## Security Assessment - -### ✅ Security Strengths -- CSRF protection enabled -- Proper authentication backends -- SSL redirect configuration -- Secure headers implementation - -### ⚠️ Security Concerns -- Hard-coded SECRET_KEY in settings -- Database credentials in source code -- DEBUG=True in production-destined code -- Hard-coded API keys (Turnstile keys) - -## Performance Considerations - -### ✅ Performance Strengths -- Query optimization with `select_related`/`prefetch_related` -- Caching implementation in services -- Efficient database queries in adapters -- HTMX for reduced page loads - -### ⚠️ Performance Areas -- Could benefit from more aggressive caching -- Some N+1 query patterns in views -- Large template rendering without fragments - -## Recommendations - -### High Priority -1. **Restructure Settings**: Implement environment-based settings structure -2. **Environment Variables**: Use `django-environ` for all configuration -3. **Security**: Remove hard-coded secrets and credentials -4. **Selectors**: Implement proper selectors pattern for data retrieval - -### Medium Priority -1. **API Structure**: Implement proper DRF API with versioning -2. **Testing**: Add factory_boy for test data generation -3. **Query Optimization**: Review and optimize database queries -4. **Documentation**: Add API documentation with DRF spectacular - -### Low Priority -1. **Template Fragments**: Break down large templates into smaller components -2. **Service Decomposition**: Further break down large services -3. **Caching Strategy**: Implement more comprehensive caching -4. **Type Hints**: Complete type annotation coverage - -## Conclusion - -The ThrillWiki project demonstrates strong understanding and implementation of Django best practices, particularly in model architecture, service layer design, and template organization. The project's use of advanced features like `pghistory` for audit trails and HTMX for dynamic updates shows sophisticated Django development. - -The main areas requiring attention are settings organization, environment configuration, and security hardening. These are common issues in Django projects and relatively straightforward to address. - -The project is well-positioned for production deployment with the recommended improvements, and already exceeds many Django projects in terms of architectural decisions and code organization. - -**Final Grade: B+ (85/100)** - -## Implementation Timeline - -### Phase 1 (Week 1): Critical Security & Settings -- [ ] Restructure settings into modular format -- [ ] Implement environment variable management -- [ ] Remove hard-coded secrets -- [ ] Add production-ready configuration - -### Phase 2 (Week 2): Architecture Improvements -- [ ] Implement selectors pattern -- [ ] Optimize database queries -- [ ] Enhance API structure -- [ ] Add comprehensive error handling - -### Phase 3 (Week 3): Testing & Documentation -- [ ] Add factory_boy integration -- [ ] Improve test coverage -- [ ] Add API documentation -- [ ] Performance optimization - -This analysis provides a roadmap for bringing the project to full Django best practices compliance while maintaining its current strengths. diff --git a/memory-bank/documentation/django-performance-enhancement-implementation-plan.md b/memory-bank/documentation/django-performance-enhancement-implementation-plan.md deleted file mode 100644 index a466ee8f..00000000 --- a/memory-bank/documentation/django-performance-enhancement-implementation-plan.md +++ /dev/null @@ -1,1512 +0,0 @@ -# Django Performance Enhancement Implementation Plan - -## Executive Summary - -This document provides a comprehensive implementation plan for enhancing the ThrillWiki Django application across three priority areas: **API Standardization**, **Performance Enhancement**, and **Monitoring & Observability**. The plan leverages existing Django modules and follows Django styleguide best practices while building upon the current project's solid architectural foundation. - -## Current Project Analysis - -### Existing Strengths -- ✅ **Django REST Framework Integration**: Comprehensive DRF setup with Input/Output serializer patterns -- ✅ **Service Layer Architecture**: Well-implemented service/selector pattern following Django styleguide -- ✅ **Custom Exception Handling**: Standardized error handling with structured logging -- ✅ **Performance Awareness**: Existing caching service and performance monitoring infrastructure -- ✅ **Modern Django Stack**: Current dependencies include `djangorestframework`, `django-redis`, `sentry-sdk` - -### Current Implementations -```python -# Existing API Pattern (parks/api/views.py) -class ParkApi(CreateApiMixin, UpdateApiMixin, ListApiMixin, RetrieveApiMixin, DestroyApiMixin, GenericViewSet): - InputSerializer = ParkCreateInputSerializer - OutputSerializer = ParkDetailOutputSerializer - FilterSerializer = ParkFilterInputSerializer -``` - -```python -# Existing Cache Service (core/services/map_cache_service.py) -class MapCacheService: - DEFAULT_TTL = 3600 # 1 hour - CLUSTER_TTL = 7200 # 2 hours - # Geographic partitioning with Redis -``` - -```python -# Existing Logging (core/logging.py) -def log_exception(logger, exception, *, context=None, request=None): - # Structured logging with context -``` - -## Priority 1: API Standardization - -### 1.1 Nested Serializers Enhancement - -**Current State**: Basic Input/Output serializer separation exists -**Goal**: Migrate to fully inline nested serializers - -#### Implementation Plan - -**Phase 1: Audit Current Serializers** -```bash -# Add to pyproject.toml dependencies (already exists) -"djangorestframework>=3.14.0" -``` - -**Phase 2: Enhance Nested Serializer Patterns** -```python -# Enhanced pattern for parks/api/serializers.py -class ParkCreateInputSerializer(serializers.Serializer): - class LocationInputSerializer(serializers.Serializer): - latitude = serializers.DecimalField(max_digits=9, decimal_places=6) - longitude = serializers.DecimalField(max_digits=9, decimal_places=6) - city = serializers.CharField(max_length=100) - state = serializers.CharField(max_length=100) - country = serializers.CharField(max_length=100) - - class OperatorInputSerializer(serializers.Serializer): - name = serializers.CharField(max_length=200) - website = serializers.URLField(required=False) - - name = serializers.CharField(max_length=200) - description = serializers.CharField(allow_blank=True) - location = LocationInputSerializer() - operator = OperatorInputSerializer(required=False) - opening_date = serializers.DateField(required=False) -``` - -**Implementation Tasks:** -1. **Enhance existing serializers** in `parks/api/serializers.py` and `rides/api/serializers.py` -2. **Create reusable nested serializers** for common patterns (Location, Company, etc.) -3. **Update API mixins** in `core/api/mixins.py` to handle nested validation -4. **Add serializer composition utilities** for complex nested structures - -### 1.2 OpenAPI Documentation Implementation - -**Recommended Module**: `drf-spectacular` (modern, actively maintained) - -#### Implementation Plan - -**Phase 1: Install and Configure** -```bash -# Add to pyproject.toml -"drf-spectacular>=0.27.0" -``` - -**Phase 2: Configuration** -```python -# config/django/base.py additions -INSTALLED_APPS = [ - # ... existing apps - 'drf_spectacular', -] - -REST_FRAMEWORK = { - # ... existing settings - 'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema', -} - -SPECTACULAR_SETTINGS = { - 'TITLE': 'ThrillWiki API', - 'DESCRIPTION': 'Comprehensive theme park and ride information API', - 'VERSION': '1.0.0', - 'SERVE_INCLUDE_SCHEMA': False, - 'COMPONENT_SPLIT_REQUEST': True, - 'TAGS': [ - {'name': 'parks', 'description': 'Theme park operations'}, - {'name': 'rides', 'description': 'Ride information and management'}, - {'name': 'locations', 'description': 'Geographic location services'}, - ] -} -``` - -**Phase 3: URL Configuration** -```python -# thrillwiki/urls.py additions -from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView, SpectacularRedocView - -urlpatterns = [ - # ... existing patterns - path('api/schema/', SpectacularAPIView.as_view(), name='schema'), - path('api/docs/', SpectacularSwaggerView.as_view(url_name='schema'), name='swagger-ui'), - path('api/redoc/', SpectacularRedocView.as_view(url_name='schema'), name='redoc'), -] -``` - -**Phase 4: Enhanced Documentation** -```python -# Enhanced API views with documentation -from drf_spectacular.utils import extend_schema, OpenApiParameter - -class ParkApi(CreateApiMixin, UpdateApiMixin, ListApiMixin, RetrieveApiMixin, DestroyApiMixin, GenericViewSet): - @extend_schema( - summary="Create a new theme park", - description="Creates a new theme park with location and operator information", - tags=['parks'], - responses={201: ParkDetailOutputSerializer} - ) - def create(self, request, *args, **kwargs): - return super().create(request, *args, **kwargs) - - @extend_schema( - summary="List theme parks", - description="Retrieve a paginated list of theme parks with filtering options", - parameters=[ - OpenApiParameter(name='search', description='Search parks by name', type=str), - OpenApiParameter(name='country', description='Filter by country', type=str), - ], - tags=['parks'] - ) - def list(self, request, *args, **kwargs): - return super().list(request, *args, **kwargs) -``` - -### 1.3 API Versioning Strategy Enhancement - -**Current State**: Basic URL-based routing exists -**Goal**: Comprehensive versioning with backward compatibility - -#### Implementation Plan - -**Phase 1: Configure DRF Versioning** -```python -# config/django/base.py -REST_FRAMEWORK = { - # ... existing settings - 'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.URLPathVersioning', - 'ALLOWED_VERSIONS': ['v1', 'v2'], - 'DEFAULT_VERSION': 'v1', - 'VERSION_PARAM': 'version' -} -``` - -**Phase 2: Versioned URL Structure** -```python -# New structure for API URLs -# thrillwiki/urls.py -urlpatterns = [ - # ... existing patterns - path('api/v1/', include('core.urls.api_v1', namespace='api-v1')), - path('api/v2/', include('core.urls.api_v2', namespace='api-v2')), # Future version -] - -# core/urls/api_v1.py -from django.urls import path, include - -urlpatterns = [ - path('parks/', include('parks.api.urls')), - path('rides/', include('rides.api.urls')), - path('locations/', include('location.api.urls')), -] -``` - -**Phase 3: Version-Aware Serializers** -```python -# Enhanced API mixins with versioning support -class VersionedApiMixin: - def get_serializer_class(self): - version = getattr(self.request, 'version', 'v1') - serializer_name = f"{self.__class__.__name__.replace('Api', '')}Serializer_v{version}" - - # Fallback to default if version-specific serializer doesn't exist - try: - return getattr(self, serializer_name, self.serializer_class) - except AttributeError: - return self.serializer_class -``` - -## Priority 2: Performance Enhancement - -### 2.1 Redis Caching Strategy Implementation - -**Current State**: `django-redis` already in dependencies, `MapCacheService` exists -**Goal**: Comprehensive multi-layer caching strategy - -#### Implementation Plan - -**Phase 1: Enhanced Redis Configuration** -```python -# config/django/base.py enhancement -CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': env('REDIS_URL', default='redis://127.0.0.1:6379/1'), - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - 'PARSER_CLASS': 'redis.connection.HiredisParser', - 'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool', - 'CONNECTION_POOL_CLASS_KWARGS': { - 'max_connections': 50, - 'timeout': 20, - }, - 'COMPRESSOR': 'django_redis.compressors.zlib.ZlibCompressor', - 'IGNORE_EXCEPTIONS': True, - }, - 'KEY_PREFIX': 'thrillwiki', - 'VERSION': 1, - }, - 'sessions': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': env('REDIS_URL', default='redis://127.0.0.1:6379/2'), - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - } - }, - 'api': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': env('REDIS_URL', default='redis://127.0.0.1:6379/3'), - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', - } - } -} - -# Use Redis for sessions -SESSION_ENGINE = 'django.contrib.sessions.backends.cache' -SESSION_CACHE_ALIAS = 'sessions' -SESSION_COOKIE_AGE = 86400 # 24 hours -``` - -**Phase 2: Enhanced Caching Service** -```python -# core/services/enhanced_cache_service.py -from typing import Optional, Any, Dict, List -from django.core.cache import caches -from django.core.cache.utils import make_template_fragment_key -import hashlib -import json - -class EnhancedCacheService: - """Comprehensive caching service with multiple cache backends""" - - def __init__(self): - self.default_cache = caches['default'] - self.api_cache = caches['api'] - - # L1: Query-level caching - def cache_queryset(self, cache_key: str, queryset_func, timeout: int = 3600, **kwargs): - """Cache expensive querysets""" - cached_result = self.default_cache.get(cache_key) - if cached_result is None: - result = queryset_func(**kwargs) - self.default_cache.set(cache_key, result, timeout) - return result - return cached_result - - # L2: API response caching - def cache_api_response(self, view_name: str, params: Dict, response_data: Any, timeout: int = 1800): - """Cache API responses based on view and parameters""" - cache_key = self._generate_api_cache_key(view_name, params) - self.api_cache.set(cache_key, response_data, timeout) - - def get_cached_api_response(self, view_name: str, params: Dict) -> Optional[Any]: - """Retrieve cached API response""" - cache_key = self._generate_api_cache_key(view_name, params) - return self.api_cache.get(cache_key) - - # L3: Geographic caching (building on existing MapCacheService) - def cache_geographic_data(self, bounds: 'GeoBounds', data: Any, zoom_level: int, timeout: int = 1800): - """Cache geographic data with spatial keys""" - # Leverage existing MapCacheService implementation - pass - - def _generate_api_cache_key(self, view_name: str, params: Dict) -> str: - """Generate consistent cache keys for API responses""" - params_str = json.dumps(params, sort_keys=True) - params_hash = hashlib.md5(params_str.encode()).hexdigest() - return f"api:{view_name}:{params_hash}" -``` - -**Phase 3: Caching Decorators and Mixins** -```python -# core/decorators/cache_decorators.py -from functools import wraps -from django.core.cache import cache - -def cache_api_response(timeout=1800, vary_on=None): - """Decorator for caching API responses""" - def decorator(view_func): - @wraps(view_func) - def wrapper(self, request, *args, **kwargs): - if request.method != 'GET': - return view_func(self, request, *args, **kwargs) - - # Generate cache key based on view, user, and parameters - cache_key_parts = [ - view_func.__name__, - str(request.user.id) if request.user.is_authenticated else 'anonymous', - str(hash(frozenset(request.GET.items()))) - ] - - if vary_on: - for field in vary_on: - cache_key_parts.append(str(getattr(request, field, ''))) - - cache_key = ':'.join(cache_key_parts) - - # Try to get from cache - cached_response = cache.get(cache_key) - if cached_response: - return cached_response - - # Execute view and cache result - response = view_func(self, request, *args, **kwargs) - if response.status_code == 200: - cache.set(cache_key, response, timeout) - - return response - return wrapper - return decorator - -# Usage in API views -class ParkApi(GenericViewSet): - @cache_api_response(timeout=3600, vary_on=['version']) - def list(self, request, *args, **kwargs): - return super().list(request, *args, **kwargs) -``` - -### 2.2 Database Optimization and Query Monitoring - -**Recommended Modules**: `django-silk` (comprehensive), `django-debug-toolbar` (development) - -#### Implementation Plan - -**Phase 1: Install Monitoring Tools** -```bash -# Add to pyproject.toml -"django-silk>=5.0.0" -"django-debug-toolbar>=4.0.0" # Development only -"nplusone>=1.0.0" # N+1 query detection -``` - -**Phase 2: Configuration** -```python -# config/django/local.py (development) -INSTALLED_APPS = [ - # ... existing apps - 'silk', - 'debug_toolbar', - 'nplusone.ext.django', -] - -MIDDLEWARE = [ - 'silk.middleware.SilkyMiddleware', - 'debug_toolbar.middleware.DebugToolbarMiddleware', - 'nplusone.ext.django.NPlusOneMiddleware', - # ... existing middleware -] - -# Silk configuration -SILKY_PYTHON_PROFILER = True -SILKY_PYTHON_PROFILER_BINARY = True -SILKY_PYTHON_PROFILER_RESULT_PATH = BASE_DIR / 'profiles' - -# Debug toolbar configuration -INTERNAL_IPS = ['127.0.0.1', '::1'] - -# NPlusOne configuration -NPLUSONE_LOGGER = logging.getLogger('nplusone') -NPLUSONE_LOG_LEVEL = logging.WARN -``` - -**Phase 3: Query Optimization Utilities** -```python -# core/utils/query_optimization.py -from django.db import connection -from django.conf import settings -import logging -import time -from contextlib import contextmanager - -logger = logging.getLogger('query_optimization') - -@contextmanager -def track_queries(operation_name: str): - """Context manager to track database queries for specific operations""" - if not settings.DEBUG: - yield - return - - initial_queries = len(connection.queries) - start_time = time.time() - - try: - yield - finally: - end_time = time.time() - total_queries = len(connection.queries) - initial_queries - execution_time = end_time - start_time - - if total_queries > 10 or execution_time > 1.0: - logger.warning( - f"Performance concern in {operation_name}: " - f"{total_queries} queries, {execution_time:.2f}s" - ) - -# Enhanced selector patterns with query optimization -def park_list_optimized(*, filters: Optional[Dict] = None) -> QuerySet: - """Optimized park list query with proper select_related and prefetch_related""" - queryset = Park.objects.select_related( - 'location', - 'operator', - 'created_by' - ).prefetch_related( - 'areas', - 'rides__manufacturer', - 'reviews__user' - ).annotate( - ride_count=Count('rides'), - average_rating=Avg('reviews__rating'), - latest_review_date=Max('reviews__created_at') - ) - - if filters: - queryset = queryset.filter(**filters) - - return queryset.order_by('name') -``` - -**Phase 4: Database Index Optimization** -```python -# Enhanced model indexes based on common queries -class Park(TimeStampedModel): - class Meta: - indexes = [ - models.Index(fields=['slug']), - models.Index(fields=['status', 'created_at']), - models.Index(fields=['location', 'status']), - models.Index(fields=['operator', 'status']), - models.Index(fields=['-average_rating', 'status']), # For top-rated parks - models.Index(fields=['opening_date', 'status']), # For chronological queries - ] - - # Add database-level constraints - constraints = [ - models.CheckConstraint( - check=models.Q(average_rating__gte=0) & models.Q(average_rating__lte=5), - name='valid_rating_range' - ), - ] -``` - -### 2.3 Cloudflare Images CDN Integration - -**Current State**: WhiteNoise for static files, local media storage -**Goal**: Cloudflare Images for media optimization and delivery, WhiteNoise for static files - -[Cloudflare Images](https://developers.cloudflare.com/images/) provides an end-to-end solution for image storage, transformation, and delivery on Cloudflare's global network. This is ideal for ThrillWiki's image-heavy content (park photos, ride images, user submissions). - -#### Implementation Plan - -**Phase 1: Enhanced Static File Configuration** -```python -# config/django/production.py -STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' - -# Enhanced WhiteNoise configuration for static files (CSS, JS) -WHITENOISE_USE_FINDERS = True -WHITENOISE_AUTOREFRESH = False -WHITENOISE_MAX_AGE = 31536000 # 1 year -WHITENOISE_SKIP_COMPRESS_EXTENSIONS = ['webp', 'avif'] - -# Static file optimization -STATICFILES_FINDERS = [ - 'django.contrib.staticfiles.finders.FileSystemFinder', - 'django.contrib.staticfiles.finders.AppDirectoriesFinder', -] -``` - -**Phase 2: Cloudflare Images Integration with django-cloudflare-images** -```bash -# Add to pyproject.toml - Use the official django-cloudflare-images package -"django-cloudflare-images>=0.6.0" # Latest version as of May 2024 -``` - -```python -# config/django/base.py - Cloudflare Images configuration -# Using django-cloudflare-images package for simplified integration - -# Storage configuration (Django 4.2+) -STORAGES = { - "default": { - "BACKEND": "cloudflare_images.storage.CloudflareImagesStorage" - }, - "staticfiles": { - "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage" - } -} - -# For Django < 4.2 (fallback) -DEFAULT_FILE_STORAGE = "cloudflare_images.storage.CloudflareImagesStorage" - -# Cloudflare Images configuration -CLOUDFLARE_IMAGES_ACCOUNT_ID = env('CLOUDFLARE_IMAGES_ACCOUNT_ID') -CLOUDFLARE_IMAGES_API_TOKEN = env('CLOUDFLARE_IMAGES_API_TOKEN') # Images:Edit permission -CLOUDFLARE_IMAGES_ACCOUNT_HASH = env('CLOUDFLARE_IMAGES_ACCOUNT_HASH') - -# Optional: Custom domain for image delivery -CLOUDFLARE_IMAGES_DOMAIN = env('CLOUDFLARE_IMAGES_DOMAIN', default=None) # e.g., "images.thrillwiki.com" - -# Optional: Default variant for serving images -CLOUDFLARE_IMAGES_VARIANT = env('CLOUDFLARE_IMAGES_VARIANT', default='public') - -# Optional: API timeout override -CLOUDFLARE_IMAGES_API_TIMEOUT = env('CLOUDFLARE_IMAGES_API_TIMEOUT', default=60, cast=int) -``` - -**Phase 3: Enhanced Model Fields with CloudflareImagesField** -```python -# parks/models/parks.py - Enhanced with CloudflareImagesField -from cloudflare_images.field import CloudflareImagesField -from django.db import models - -class Park(TimeStampedModel): - # ... existing fields ... - - # Replace ImageField with CloudflareImagesField for variant support - featured_image = CloudflareImagesField( - variant="hero", # Use 'hero' variant by default for park featured images - upload_to='parks/', - blank=True, - null=True, - help_text="Main park image displayed on detail pages" - ) - - # Additional image fields with specific variants - thumbnail_image = CloudflareImagesField( - variant="thumbnail", - upload_to='parks/thumbnails/', - blank=True, - null=True, - help_text="Thumbnail image for park listings" - ) - -# rides/models/rides.py - Enhanced ride images -class Ride(TimeStampedModel): - # ... existing fields ... - - main_image = CloudflareImagesField( - variant="large", - upload_to='rides/', - blank=True, - null=True, - help_text="Primary ride image" - ) - - gallery_images = models.ManyToManyField( - 'media.RideImage', - blank=True, - related_name='rides', - help_text="Additional ride photos" - ) - -# media/models.py - Gallery and user upload models -class RideImage(TimeStampedModel): - """Individual ride images for galleries""" - image = CloudflareImagesField( - variant="medium", - upload_to='rides/gallery/', - help_text="Ride gallery image" - ) - caption = models.CharField(max_length=200, blank=True) - photographer = models.CharField(max_length=100, blank=True) - is_approved = models.BooleanField(default=False) - -class UserSubmission(TimeStampedModel): - """User-submitted images for moderation""" - image = CloudflareImagesField( - variant="public", # Use public variant for moderation workflow - upload_to='submissions/', - help_text="User-submitted image awaiting moderation" - ) - submitted_by = models.ForeignKey('accounts.User', on_delete=models.CASCADE) - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey('content_type', 'object_id') - is_approved = models.BooleanField(default=False) - moderation_notes = models.TextField(blank=True) -``` - -**Phase 4: Enhanced Cloudflare Images Service Layer** -```python -# media/services/cloudflare_optimization.py -from django.conf import settings -from typing import Dict, List, Optional -import logging - -logger = logging.getLogger(__name__) - -class CloudflareImagesService: - """ - Enhanced service for Cloudflare Images operations - Works with django-cloudflare-images package - """ - - def __init__(self): - self.account_hash = settings.CLOUDFLARE_IMAGES_ACCOUNT_HASH - self.domain = getattr(settings, 'CLOUDFLARE_IMAGES_DOMAIN', None) - self.base_url = f"https://{self.domain}" if self.domain else "https://imagedelivery.net" - - def get_image_url(self, image_id: str, variant: str = 'public', **transforms) -> str: - """ - Generate Cloudflare Images URL with optional transformations - - Args: - image_id: Cloudflare image ID (from CloudflareImagesField) - variant: Predefined variant or 'public' for custom transforms - transforms: Custom transformation parameters (width, height, fit, format, etc.) - """ - if not image_id: - return '' - - if transforms: - # Build custom transformation string - transform_parts = [] - for key, value in transforms.items(): - transform_parts.append(f"{key}={value}") - variant = ','.join(transform_parts) - - return f"{self.base_url}/{self.account_hash}/{image_id}/{variant}" - - def get_responsive_urls(self, image_id: str) -> Dict[str, str]: - """ - Generate responsive image URLs for different screen sizes - Uses Cloudflare's automatic optimization and format selection - """ - if not image_id: - return {} - - return { - # Standard variants for different use cases - 'thumbnail': self.get_image_url(image_id, width=150, height=150, fit='cover'), - 'small': self.get_image_url(image_id, width=300, height=300, fit='cover'), - 'medium': self.get_image_url(image_id, width=600, height=600, fit='cover'), - 'large': self.get_image_url(image_id, width=1200, height=1200, fit='cover'), - 'hero': self.get_image_url(image_id, width=1920, height=1080, fit='cover'), - - # WebP variants for modern browsers - 'webp_small': self.get_image_url(image_id, width=300, height=300, fit='cover', format='webp'), - 'webp_medium': self.get_image_url(image_id, width=600, height=600, fit='cover', format='webp'), - 'webp_large': self.get_image_url(image_id, width=1200, height=1200, fit='cover', format='webp'), - - # AVIF for ultra-modern browsers - 'avif_medium': self.get_image_url(image_id, width=600, height=600, fit='cover', format='avif'), - - # Original (Cloudflare will still optimize based on request headers) - 'original': self.get_image_url(image_id, 'public'), - } - - def get_srcset_string(self, image_id: str, sizes: List[int] = None) -> str: - """ - Generate srcset string for responsive images - - Args: - image_id: Cloudflare image ID - sizes: List of widths for srcset (defaults to common breakpoints) - """ - if not image_id: - return '' - - sizes = sizes or [320, 640, 768, 1024, 1280, 1536, 1920] - srcset_parts = [] - - for width in sizes: - url = self.get_image_url(image_id, width=width, fit='cover') - srcset_parts.append(f"{url} {width}w") - - return ', '.join(srcset_parts) - - def optimize_for_context(self, image_id: str, context: str = 'default') -> str: - """ - Get optimized image URL based on usage context - - Args: - image_id: Cloudflare image ID - context: Usage context (hero, card, thumbnail, avatar, etc.) - """ - context_configs = { - 'hero': {'width': 1920, 'height': 1080, 'fit': 'cover', 'quality': 85}, - 'card': {'width': 400, 'height': 300, 'fit': 'cover', 'quality': 80}, - 'thumbnail': {'width': 150, 'height': 150, 'fit': 'cover', 'quality': 75}, - 'avatar': {'width': 100, 'height': 100, 'fit': 'cover', 'quality': 80}, - 'gallery': {'width': 800, 'height': 600, 'fit': 'cover', 'quality': 85}, - 'list_item': {'width': 300, 'height': 200, 'fit': 'cover', 'quality': 75}, - } - - config = context_configs.get(context, {'width': 600, 'height': 400, 'fit': 'cover'}) - return self.get_image_url(image_id, **config) - -# Template integration helpers -class CloudflareImagesTemplateService: - """Enhanced template integration for Cloudflare Images""" - - @staticmethod - def get_picture_element(image_id: str, alt_text: str = '', css_classes: str = '', - context: str = 'default') -> str: - """ - Generate modern picture element with format-based source selection - Provides AVIF, WebP, and fallback support - """ - if not image_id: - return f'
' - - service = CloudflareImagesService() - urls = service.get_responsive_urls(image_id) - srcset = service.get_srcset_string(image_id) - - return f""" - - - - - - - - - - - - - - - - {alt_text} - - """ - - @staticmethod - def get_responsive_img(image_id: str, alt_text: str = '', css_classes: str = '', - context: str = 'default') -> str: - """ - Generate responsive img element with srcset - Simpler alternative to picture element - """ - if not image_id: - return f'
' - - service = CloudflareImagesService() - srcset = service.get_srcset_string(image_id) - fallback_url = service.optimize_for_context(image_id, context) - - return f""" - {alt_text} - """ -``` - -**Phase 5: Enhanced Django Template Integration** -```python -# media/templatetags/cloudflare_images.py -from django import template -from django.utils.safestring import mark_safe -from media.services.cloudflare_optimization import CloudflareImagesService, CloudflareImagesTemplateService - -register = template.Library() - -@register.simple_tag -def cf_image_url(image_field, **transforms): - """ - Get Cloudflare Images URL with optional transformations - Works with CloudflareImagesField instances - """ - if not image_field: - return '' - - # Extract image ID from CloudflareImagesField - image_id = str(image_field) if image_field else '' - service = CloudflareImagesService() - - if transforms: - return service.get_image_url(image_id, **transforms) - else: - # Use the field's default variant if no transforms specified - variant = getattr(image_field.field, 'variant', 'public') - return service.get_image_url(image_id, variant) - -@register.simple_tag -def cf_responsive_image(image_field, alt_text='', css_classes='', context='default'): - """Generate responsive picture element with modern format support""" - if not image_field: - return mark_safe(f'
') - - image_id = str(image_field) if image_field else '' - return mark_safe(CloudflareImagesTemplateService.get_picture_element( - image_id, alt_text, css_classes, context - )) - -@register.simple_tag -def cf_img_responsive(image_field, alt_text='', css_classes='', context='default'): - """Generate responsive img element with srcset (simpler alternative)""" - if not image_field: - return mark_safe(f'
') - - image_id = str(image_field) if image_field else '' - return mark_safe(CloudflareImagesTemplateService.get_responsive_img( - image_id, alt_text, css_classes, context - )) - -@register.simple_tag -def cf_optimize(image_field, context='default'): - """Get context-optimized image URL""" - if not image_field: - return '' - - image_id = str(image_field) if image_field else '' - service = CloudflareImagesService() - return service.optimize_for_context(image_id, context) - -@register.simple_tag -def cf_srcset(image_field, sizes=None): - """Generate srcset string for responsive images""" - if not image_field: - return '' - - image_id = str(image_field) if image_field else '' - service = CloudflareImagesService() - - if sizes: - # Convert comma-separated string to list if needed - if isinstance(sizes, str): - sizes = [int(s.strip()) for s in sizes.split(',')] - return service.get_srcset_string(image_id, sizes) - else: - return service.get_srcset_string(image_id) - -@register.inclusion_tag('components/cloudflare_image.html') -def cf_image_component(image_field, alt_text='', css_classes='', context='default', - show_caption=False, caption=''): - """ - Render a complete image component with optional caption - Uses inclusion tag for complex HTML structure - """ - return { - 'image_field': image_field, - 'alt_text': alt_text, - 'css_classes': css_classes, - 'context': context, - 'show_caption': show_caption, - 'caption': caption, - } -``` - -**Template Component (components/cloudflare_image.html):** -```html - -{% load cloudflare_images %} - -
- {% if image_field %} - {% cf_responsive_image image_field alt_text "w-full h-auto" context %} - {% if show_caption and caption %} -
- {{ caption }} -
- {% endif %} - {% else %} -
- No image available -
- {% endif %} -
-``` - -**Enhanced Usage in Templates:** -```html - -{% load cloudflare_images %} - - -{{ park.name }} - - -{% cf_responsive_image park.featured_image park.name "w-full h-64 object-cover" "hero" %} - - -{% cf_img_responsive ride.main_image ride.name "rounded-lg" "card" %} - - -{{ park.name }} -User avatar - - -{% cf_image_component ride.main_image ride.name "gallery-image" "gallery" True "Photo taken in 2024" %} - - -{{ park.name }} -``` - -**Migration Script for Existing ImageFields:** -```python -# management/commands/migrate_to_cloudflare_images.py -from django.core.management.base import BaseCommand -from django.apps import apps -from parks.models import Park -from rides.models import Ride -import requests -import logging - -logger = logging.getLogger(__name__) - -class Command(BaseCommand): - help = 'Migrate existing ImageField files to Cloudflare Images' - - def add_arguments(self, parser): - parser.add_argument('--dry-run', action='store_true', help='Show what would be migrated without doing it') - parser.add_argument('--model', type=str, help='Specific model to migrate (e.g., parks.Park)') - - def handle(self, *args, **options): - dry_run = options['dry_run'] - specific_model = options.get('model') - - models_to_migrate = [] - - if specific_model: - app_label, model_name = specific_model.split('.') - models_to_migrate.append(apps.get_model(app_label, model_name)) - else: - models_to_migrate = [Park, Ride] # Add other models as needed - - for model in models_to_migrate: - self.migrate_model(model, dry_run) - - def migrate_model(self, model, dry_run=False): - """Migrate a specific model's ImageFields to CloudflareImagesFields""" - self.stdout.write(f"Processing {model.__name__}...") - - # Get all instances with images - instances = model.objects.exclude(featured_image='').exclude(featured_image=None) - - for instance in instances: - if instance.featured_image: - if dry_run: - self.stdout.write(f"Would migrate: {instance} - {instance.featured_image.url}") - else: - self.migrate_image_field(instance, 'featured_image') - - def migrate_image_field(self, instance, field_name): - """Migrate a specific image field to Cloudflare Images""" - try: - field = getattr(instance, field_name) - if field and hasattr(field, 'url'): - # The django-cloudflare-images package will handle the upload - # when you save the instance with the new CloudflareImagesField - self.stdout.write(f"Migrated: {instance} - {field_name}") - except Exception as e: - logger.error(f"Failed to migrate {instance} - {field_name}: {e}") -``` - -## Priority 3: Monitoring & Observability - -### 3.1 Error Tracking with Sentry Integration - -**Current State**: `sentry-sdk` already in dependencies, basic logging exists -**Goal**: Comprehensive error tracking with performance monitoring - -#### Implementation Plan - -**Phase 1: Enhanced Sentry Configuration** -```python -# config/django/base.py -import sentry_sdk -from sentry_sdk.integrations.django import DjangoIntegration -from sentry_sdk.integrations.redis import RedisIntegration -from sentry_sdk.integrations.logging import LoggingIntegration - -# Sentry logging integration -sentry_logging = LoggingIntegration( - level=logging.INFO, # Capture info and above as breadcrumbs - event_level=logging.ERROR # Send records as events -) - -sentry_sdk.init( - dsn=env('SENTRY_DSN', default=''), - integrations=[ - DjangoIntegration( - transaction_style='url', - middleware_spans=True, - signals_spans=True, - cache_spans=True, - ), - RedisIntegration(), - sentry_logging, - ], - traces_sample_rate=env('SENTRY_TRACES_SAMPLE_RATE', default=0.1, cast=float), - profiles_sample_rate=env('SENTRY_PROFILES_SAMPLE_RATE', default=0.1, cast=float), - send_default_pii=False, - environment=env('DJANGO_ENV', default='development'), - before_send=sentry_filter_errors, -) - -def sentry_filter_errors(event, hint): - """Filter out common non-critical errors""" - if 'exc_info' in hint: - exc_type, exc_value, tb = hint['exc_info'] - if isinstance(exc_value, (Http404, PermissionDenied)): - return None - return event -``` - -**Phase 2: Enhanced Error Context** -```python -# core/middleware/sentry_middleware.py -from sentry_sdk import set_user, set_tag, set_context - -class SentryContextMiddleware: - """Add context to Sentry errors""" - - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - # Set user context - if hasattr(request, 'user') and request.user.is_authenticated: - set_user({ - 'id': request.user.id, - 'username': request.user.username, - 'email': request.user.email, - }) - - # Set request context - set_context('request', { - 'url': request.build_absolute_uri(), - 'method': request.method, - 'headers': dict(request.headers), - }) - - # Set custom tags - set_tag('user_agent', request.META.get('HTTP_USER_AGENT', '')) - set_tag('ip_address', self._get_client_ip(request)) - - response = self.get_response(request) - return response - - def _get_client_ip(self, request): - x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') - if x_forwarded_for: - return x_forwarded_for.split(',')[0] - return request.META.get('REMOTE_ADDR') -``` - -**Phase 3: Custom Performance Monitoring** -```python -# core/services/performance_monitoring.py -import time -from contextlib import contextmanager -from sentry_sdk import start_transaction, capture_message -import logging - -logger = logging.getLogger(__name__) - -@contextmanager -def monitor_performance(operation_name: str, **tags): - """Context manager for monitoring operation performance""" - with start_transaction(op=operation_name, name=operation_name) as transaction: - # Set tags - for key, value in tags.items(): - transaction.set_tag(key, value) - - start_time = time.time() - try: - yield transaction - finally: - duration = time.time() - start_time - transaction.set_data('duration_seconds', duration) - - # Log slow operations - if duration > 2.0: # Log operations slower than 2 seconds - capture_message( - f"Slow operation detected: {operation_name}", - level='warning' - ) - -# Usage in services -class ParkService: - @classmethod - def create_park(cls, **park_data): - with monitor_performance('park_creation', category='parks'): - # Park creation logic - pass -``` - -### 3.2 Application Performance Monitoring (APM) Integration - -**Recommended Approach**: Enhance Sentry APM + Custom Metrics - -#### Implementation Plan - -**Phase 1: Enhanced Django Logging** -```python -# config/django/base.py - Enhanced logging configuration -LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}', - 'style': '{', - }, - 'json': { - '()': 'pythonjsonlogger.jsonlogger.JsonFormatter', - 'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s' - }, - }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'verbose', - }, - 'file': { - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': BASE_DIR / 'logs' / 'thrillwiki.log', - 'maxBytes': 1024*1024*10, # 10MB - 'backupCount': 5, - 'formatter': 'json', - }, - 'performance': { - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': BASE_DIR / 'logs' / 'performance.log', - 'maxBytes': 1024*1024*10, # 10MB - 'backupCount': 5, - 'formatter': 'json', - }, - }, - 'root': { - 'level': 'INFO', - 'handlers': ['console'], - }, - 'loggers': { - 'django': { - 'handlers': ['file'], - 'level': 'INFO', - 'propagate': False, - }, - 'thrillwiki': { - 'handlers': ['file'], - 'level': 'INFO', - 'propagate': False, - }, - 'performance': { - 'handlers': ['performance'], - 'level': 'INFO', - 'propagate': False, - }, - 'query_optimization': { - 'handlers': ['file'], - 'level': 'WARNING', - 'propagate': False, - }, - }, -} -``` - -**Phase 2: Performance Metrics Collection** -```python -# core/middleware/performance_middleware.py -import time -import logging -from django.db import connection - -performance_logger = logging.getLogger('performance') - -class PerformanceMiddleware: - """Middleware to collect performance metrics""" - - def __init__(self, get_response): - self.get_response = get_response - - def __call__(self, request): - start_time = time.time() - initial_queries = len(connection.queries) - - response = self.get_response(request) - - # Calculate metrics - duration = time.time() - start_time - queries_count = len(connection.queries) - initial_queries - - # Log performance data - performance_data = { - 'path': request.path, - 'method': request.method, - 'status_code': response.status_code, - 'duration_ms': round(duration * 1000, 2), - 'queries_count': queries_count, - 'content_length': len(response.content) if hasattr(response, 'content') else 0, - 'user_id': getattr(request.user, 'id', None) if hasattr(request, 'user') else None, - } - - performance_logger.info('request_performance', extra=performance_data) - - # Add performance headers for debugging - if hasattr(response, '__setitem__'): - response['X-Response-Time'] = f"{duration * 1000:.2f}ms" - response['X-Query-Count'] = str(queries_count) - - return response -``` - -### 3.3 Comprehensive Health Checks Implementation - -**Recommended Module**: `django-health-check` (already good foundation) - -#### Implementation Plan - -**Phase 1: Install and Configure Health Checks** -```bash -# Add to pyproject.toml -"django-health-check>=3.17.0" -``` - -**Phase 2: Comprehensive Health Check Configuration** -```python -# config/django/base.py -INSTALLED_APPS = [ - # ... existing apps - 'health_check', - 'health_check.db', - 'health_check.cache', - 'health_check.storage', - 'health_check.contrib.migrations', - 'health_check.contrib.redis', -] - -HEALTH_CHECK = { - 'DISK_USAGE_MAX': 90, # Fail if disk usage is over 90% - 'MEMORY_MIN': 100, # Fail if less than 100MB available memory -} -``` - -**Phase 3: Custom Health Checks** -```python -# core/health_checks/custom_checks.py -from health_check.backends import BaseHealthCheckBackend -from health_check.exceptions import ServiceUnavailable -from django.core.cache import cache -from django.db import connection -import redis - -class CacheHealthCheck(BaseHealthCheckBackend): - """Check Redis cache connectivity and performance""" - - critical_service = True - - def check_status(self): - try: - # Test cache write/read - test_key = 'health_check_test' - test_value = 'test_value' - - cache.set(test_key, test_value, timeout=30) - retrieved_value = cache.get(test_key) - - if retrieved_value != test_value: - self.add_error("Cache read/write test failed") - - cache.delete(test_key) - - except Exception as e: - self.add_error(f"Cache service unavailable: {e}") - -class DatabasePerformanceCheck(BaseHealthCheckBackend): - """Check database performance""" - - critical_service = False - - def check_status(self): - try: - import time - start_time = time.time() - - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - result = cursor.fetchone() - - query_time = time.time() - start_time - - if query_time > 1.0: # Warn if query takes more than 1 second - self.add_error(f"Database responding slowly: {query_time:.2f}s") - - except Exception as e: - self.add_error(f"Database performance check failed: {e}") - -class ExternalServiceHealthCheck(BaseHealthCheckBackend): - """Check external services (APIs, etc.)""" - - critical_service = False - - def check_status(self): - # Check external dependencies - # (e.g., geocoding services, email services) - pass - -# Register custom health checks -# config/django/base.py -HEALTH_CHECK_BACKENDS = [ - 'health_check.db', - 'health_check.cache', - 'health_check.storage', - 'core.health_checks.custom_checks.CacheHealthCheck', - 'core.health_checks.custom_checks.DatabasePerformanceCheck', - 'core.health_checks.custom_checks.ExternalServiceHealthCheck', -] -``` - -**Phase 4: Health Check Endpoints** -```python -# thrillwiki/urls.py additions -urlpatterns = [ - # ... existing patterns - path('health/', include('health_check.urls')), - path('health/api/', HealthCheckAPIView.as_view(), name='health-api'), -] - -# core/views/health_views.py -from rest_framework.views import APIView -from rest_framework.response import Response -from health_check.views import MainView -import json - -class HealthCheckAPIView(APIView): - """API endpoint for health checks with JSON response""" - - permission_classes = [] # Public endpoint - - def get(self, request): - # Get health check results - main_view = MainView() - main_view.request = request - - plugins = main_view.plugins - errors = main_view.errors - - # Format response - health_data = { - 'status': 'healthy' if not errors else 'unhealthy', - 'timestamp': timezone.now().isoformat(), - 'checks': {} - } - - for plugin in plugins: - plugin_errors = errors.get(plugin.__class__.__name__, []) - health_data['checks'][plugin.identifier()] = { - 'status': 'healthy' if not plugin_errors else 'unhealthy', - 'errors': [str(error) for error in plugin_errors] - } - - status_code = 200 if not errors else 503 - return Response(health_data, status=status_code) -``` - -## Implementation Timeline and Phases - -### Phase 1: Foundation (Weeks 1-2) -1. **API Documentation Setup** - - Install and configure `drf-spectacular` - - Add basic OpenAPI documentation to existing APIs - - Set up API versioning structure - -2. **Monitoring Foundation** - - Enhance Sentry configuration - - Set up basic health checks - - Configure enhanced logging - -### Phase 2: Performance Core (Weeks 3-4) -1. **Caching Enhancement** - - Implement multi-layer Redis caching - - Add caching decorators and mixins - - Optimize existing cache service - -2. **Database Monitoring** - - Install and configure `django-silk` - - Add query optimization utilities - - Implement database indexes - -### Phase 3: Advanced Features (Weeks 5-6) -1. **Nested Serializers Migration** - - Refactor existing serializers to inline patterns - - Add validation enhancements - - Update API documentation - -2. **CDN Integration** - - Implement media optimization - - Set up responsive image serving - - Configure CDN fallbacks - -### Phase 4: Monitoring & Observability (Weeks 7-8) -1. **Comprehensive Monitoring** - - Custom performance monitoring - - Advanced error tracking - - Health check expansion - -2. **Testing and Optimization** - - Performance testing - - Load testing - - Final optimizations - -## Success Metrics - -### API Standardization -- ✅ 100% API endpoints documented with OpenAPI -- ✅ Consistent nested serializer patterns across all APIs -- ✅ Versioning strategy supporting backward compatibility - -### Performance Enhancement -- 🎯 **Response Times**: API responses < 200ms (95th percentile) -- 🎯 **Cache Hit Rate**: > 80% for frequently accessed data -- 🎯 **Database Query Optimization**: < 10 queries per page load - -### Monitoring & Observability -- 🎯 **Error Tracking**: 100% error capture with context -- 🎯 **Performance Monitoring**: Real-time performance metrics -- 🎯 **Health Checks**: Comprehensive system monitoring - -## Risk Mitigation - -### Technical Risks -1. **Cache Invalidation Complexity** - - Mitigation: Implement cache versioning and TTL strategies - - Fallback: Graceful degradation without cache - -2. **CDN Configuration Issues** - - Mitigation: Local file serving fallback - - Testing: Comprehensive testing in staging environment - -3. **Performance Monitoring Overhead** - - Mitigation: Configurable sampling rates - - Monitoring: Track monitoring overhead itself - -### Operational Risks -1. **Deployment Complexity** - - Mitigation: Phased rollout with feature flags - - Rollback: Maintain ability to quickly revert changes - -2. **Third-party Service Dependencies** - - Mitigation: Implement circuit breakers and fallbacks - - Monitoring: Health checks for external dependencies - -## Conclusion - -This comprehensive implementation plan leverages Django's robust ecosystem to enhance the ThrillWiki application across all three priority areas. The plan builds upon existing strengths while addressing current gaps, ensuring a scalable, observable, and high-performance application. - -The phased approach allows for incremental improvements with immediate benefits, while the comprehensive monitoring ensures that performance gains are measurable and sustainable. Each enhancement is designed to work synergistically with others, creating a robust foundation for future development. - -**Key Benefits:** -- 📈 **Improved Performance**: Multi-layer caching and database optimization -- 🔍 **Enhanced Observability**: Comprehensive monitoring and error tracking -- 📚 **Better Developer Experience**: Complete API documentation and tooling -- 🚀 **Scalability**: CDN integration and performance optimization -- 🛡️ **Reliability**: Health checks and error handling - -This plan positions ThrillWiki for continued growth while maintaining code quality and operational excellence. diff --git a/memory-bank/documentation/django-styleguide-adherence-comprehensive-analysis.md b/memory-bank/documentation/django-styleguide-adherence-comprehensive-analysis.md deleted file mode 100644 index 5ba8fec3..00000000 --- a/memory-bank/documentation/django-styleguide-adherence-comprehensive-analysis.md +++ /dev/null @@ -1,317 +0,0 @@ -# ThrillWiki Django Styleguide Adherence - Comprehensive Analysis - -## Executive Summary - -This comprehensive analysis evaluates the ThrillWiki Django project against the HackSoft Django Styleguide best practices. The project demonstrates **strong architectural foundations** with excellent service layer patterns, robust base models, and comprehensive testing infrastructure, while having specific areas for improvement in API standardization and some testing conventions. - -**Overall Assessment: ⭐⭐⭐⭐⭐ (9.2/10)** - ---- - -## 🏆 Exceptional Strengths - -### 1. ✅ **OUTSTANDING: Base Model & History Architecture** (Score: 10/10) - -The project demonstrates **exemplary** implementation of Django styleguide base model patterns: - -```python -# core/history.py - Perfect base model implementation -class TrackedModel(models.Model): - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - abstract = True -``` - -**Advanced Features:** -- ✅ **Perfect**: All models inherit from `TrackedModel` -- ✅ **Advanced**: Complex historical tracking with `pghistory` integration -- ✅ **Sophisticated**: `SluggedModel` with automated slug history management -- ✅ **Professional**: `DiffMixin` for change tracking capabilities - -### 2. ✅ **EXCELLENT: Service Layer Architecture** (Score: 9.5/10) - -The service layer implementation **exceeds** Django styleguide expectations: - -**Core Strengths:** -- ✅ **Perfect Structure**: Well-organized services in `core/services/` -- ✅ **Separation of Concerns**: Specialized services with clear responsibilities -- ✅ **Type Annotations**: Comprehensive type hints throughout -- ✅ **Keyword-only Arguments**: Proper function signatures - -**Service Examples:** -```python -# core/services/map_service.py - Exemplary service implementation -class UnifiedMapService: - def get_map_data( - self, - *, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL, - cluster: bool = True, - use_cache: bool = True - ) -> MapResponse: -``` - -**Service Catalog:** -- `UnifiedMapService` - Main orchestrating service -- `ClusteringService` - Specialized clustering logic -- `LocationSearchService` - Search functionality -- `RoadTripService` - Business logic for trip planning -- `ParkService` - Park management operations -- `ModerationService` - Content moderation workflow - -### 3. ✅ **EXCELLENT: Selector Pattern Implementation** (Score: 9/10) - -**Perfect adherence** to Django styleguide selector patterns: - -```python -# parks/selectors.py - Proper selector implementation -def park_list_with_stats(*, filters: Optional[Dict[str, Any]] = None) -> QuerySet[Park]: - """Get parks optimized for list display with basic stats.""" - queryset = Park.objects.select_related( - 'operator', - 'property_owner' - ).prefetch_related( - 'location' - ).annotate( - ride_count_calculated=Count('rides', distinct=True), - average_rating_calculated=Avg('reviews__rating') - ) - # ... filtering logic - return queryset.order_by('name') -``` - -**Selector Coverage:** -- ✅ `core/selectors.py` - Map and analytics selectors -- ✅ `parks/selectors.py` - Park data retrieval -- ✅ `rides/selectors.py` - Ride data retrieval -- ✅ `moderation/selectors.py` - Moderation workflow -- ✅ `accounts/selectors.py` - User profile optimization - -### 4. ✅ **OUTSTANDING: Testing Infrastructure** (Score: 9.5/10) - -**Exemplary** implementation of Django testing best practices: - -**Factory Pattern Excellence:** -```python -# tests/factories.py - Perfect factory implementation -class ParkFactory(DjangoModelFactory): - class Meta: - model = 'parks.Park' - django_get_or_create = ('slug',) - - name = factory.Sequence(lambda n: f"Test Park {n}") - slug = factory.LazyAttribute(lambda obj: slugify(obj.name)) - # ... comprehensive field definitions - - @factory.post_generation - def create_location(obj, create, extracted, **kwargs): - """Create a location for the park.""" - if create: - LocationFactory(content_object=obj, name=obj.name) -``` - -**Testing Capabilities:** -- ✅ **Comprehensive Factories**: 15+ specialized factories for all models -- ✅ **Trait Mixins**: Reusable traits for common scenarios -- ✅ **Test Scenarios**: Pre-configured complex test data -- ✅ **API Test Utilities**: Standardized API testing patterns -- ✅ **E2E Coverage**: Playwright-based end-to-end tests - -### 5. ✅ **EXCELLENT: Settings & Configuration** (Score: 9/10) - -**Professional** settings organization following Django best practices: - -```python -# config/django/base.py - Proper settings structure -DJANGO_APPS = [ - "django.contrib.admin", - # ... standard Django apps -] - -THIRD_PARTY_APPS = [ - "rest_framework", - "corsheaders", - # ... third party dependencies -] - -LOCAL_APPS = [ - "core", - "accounts", - "parks", - # ... project apps -] - -INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS -``` - -**Configuration Strengths:** -- ✅ **Environment Separation**: Proper base/local/production split -- ✅ **Environment Variables**: Using `django-environ` correctly -- ✅ **App Organization**: Clear separation of Django/third-party/local apps -- ✅ **Security**: Proper secret key and security settings management - ---- - -## 🎯 Areas for Enhancement - -### 1. ⚠️ **API Serialization Patterns** (Score: 7/10) - -**Current Implementation vs. Styleguide Requirements:** - -The project has **good API patterns** but could better align with styleguide specifications: - -**Strengths:** -- ✅ Proper API mixins with standardized response patterns -- ✅ Input/Output serializer separation in newer APIs -- ✅ Correct use of keyword-only arguments - -**Enhancement Opportunities:** -```python -# Current: Good but can be improved -class ParkApi(CreateApiMixin, ListApiMixin, GenericViewSet): - InputSerializer = ParkCreateInputSerializer - OutputSerializer = ParkDetailOutputSerializer - -# Styleguide preference: Nested serializers -class ParkCreateApi(APIView): - class InputSerializer(serializers.Serializer): - name = serializers.CharField() - # ... fields - - class OutputSerializer(serializers.Serializer): - id = serializers.IntegerField() - # ... fields -``` - -**Recommendations:** -- Migrate to nested Input/Output serializers within API classes -- Standardize API naming to `ClassNameApi` pattern consistently -- Enhance serializer reuse patterns - -### 2. ⚠️ **Exception Handling Enhancement** (Score: 8/10) - -**Current State:** Good foundation with room for styleguide alignment - -**Existing Strengths:** -- ✅ Custom exception handler implemented -- ✅ Proper error response standardization -- ✅ Comprehensive logging integration - -**Enhancement Opportunities:** -```python -# Current: Good custom exceptions -class ThrillWikiException(Exception): - def to_dict(self) -> Dict[str, Any]: - return {'error_code': self.error_code, 'message': self.message} - -# Styleguide alignment: More specific exceptions -class ParkNotFoundError(ApplicationError): - message = "Park not found" - status_code = 404 - -class InvalidParkDataError(ValidationError): - message = "Invalid park data provided" -``` - ---- - -## 📊 Detailed Compliance Analysis - -### **Model Patterns**: 10/10 ⭐⭐⭐⭐⭐ -- **Perfect**: Base model implementation with `TrackedModel` -- **Advanced**: Historical tracking with `pghistory` -- **Excellent**: Abstract base classes and mixins -- **Professional**: Proper field definitions and relationships - -### **Service Layer**: 9.5/10 ⭐⭐⭐⭐⭐ -- **Outstanding**: Well-structured service architecture -- **Excellent**: Clear separation of concerns -- **Strong**: Type annotations and documentation -- **Good**: Keyword-only argument patterns - -### **Selector Patterns**: 9/10 ⭐⭐⭐⭐⭐ -- **Perfect**: Proper selector implementation across apps -- **Excellent**: Query optimization with select_related/prefetch_related -- **Strong**: Filtering and search capabilities -- **Good**: Consistent naming conventions - -### **API Design**: 7/10 ⭐⭐⭐⭐☆ -- **Good**: API mixins and standardized responses -- **Decent**: Input/Output serializer separation -- **Enhancement**: Move to nested serializers -- **Improvement**: Full DRF standardization - -### **Testing**: 9.5/10 ⭐⭐⭐⭐⭐ -- **Outstanding**: Comprehensive factory pattern implementation -- **Excellent**: Factory traits and scenarios -- **Perfect**: API testing utilities -- **Advanced**: E2E test coverage - -### **Settings & Configuration**: 9/10 ⭐⭐⭐⭐⭐ -- **Excellent**: Proper environment separation -- **Strong**: Environment variable usage -- **Professional**: App organization -- **Good**: Security configuration - -### **Error Handling**: 8/10 ⭐⭐⭐⭐☆ -- **Good**: Custom exception handling -- **Decent**: Error response standardization -- **Enhancement**: More specific exception classes -- **Improvement**: Better error code organization - ---- - -## 🚀 Recommendations for Excellence - -### **Priority 1: API Standardization** -1. **Migrate to Nested Serializers**: Convert existing APIs to use nested Input/Output serializers -2. **API Naming Consistency**: Ensure all APIs follow `ClassNameApi` pattern -3. **Serializer Reuse Strategy**: Implement better serializer inheritance patterns - -### **Priority 2: Exception Handling Enhancement** -1. **Domain-Specific Exceptions**: Create more granular exception classes -2. **Error Code Standardization**: Implement consistent error code patterns -3. **Exception Documentation**: Add comprehensive error handling documentation - -### **Priority 3: Documentation Enhancement** -1. **Service Documentation**: Add comprehensive service layer documentation -2. **API Documentation**: Implement OpenAPI/Swagger documentation -3. **Selector Patterns**: Document selector usage patterns and conventions - ---- - -## 🎯 Conclusion - -The ThrillWiki project demonstrates **exceptional adherence** to Django styleguide best practices, particularly excelling in: - -- **Model Architecture**: Perfect base model patterns with advanced features -- **Service Layer**: Outstanding implementation exceeding styleguide expectations -- **Testing**: Exemplary factory patterns and comprehensive coverage -- **Project Structure**: Professional organization and configuration - -The project represents a **high-quality Django codebase** that not only follows best practices but often exceeds them with sophisticated patterns like historical tracking, unified services, and comprehensive testing infrastructure. - -**This is a model Django project** that other teams can learn from, with only minor areas for enhancement to achieve perfect styleguide alignment. - ---- - -## 📈 Metrics Summary - -| Category | Score | Status | -|----------|-------|--------| -| Model Patterns | 10/10 | ⭐⭐⭐⭐⭐ Perfect | -| Service Layer | 9.5/10 | ⭐⭐⭐⭐⭐ Outstanding | -| Selector Patterns | 9/10 | ⭐⭐⭐⭐⭐ Excellent | -| Testing | 9.5/10 | ⭐⭐⭐⭐⭐ Outstanding | -| Settings | 9/10 | ⭐⭐⭐⭐⭐ Excellent | -| Error Handling | 8/10 | ⭐⭐⭐⭐☆ Good | -| API Design | 7/10 | ⭐⭐⭐⭐☆ Good | -| **Overall** | **9.2/10** | **⭐⭐⭐⭐⭐ Outstanding** | - -**Date**: January 2025 -**Reviewer**: AI Analysis using HackSoft Django Styleguide Standards -**Next Review**: Quarterly (April 2025) diff --git a/memory-bank/documentation/django-styleguide-comprehensive-audit.md b/memory-bank/documentation/django-styleguide-comprehensive-audit.md deleted file mode 100644 index 69fdb1eb..00000000 --- a/memory-bank/documentation/django-styleguide-comprehensive-audit.md +++ /dev/null @@ -1,504 +0,0 @@ -# 🔍 COMPREHENSIVE DJANGO STYLEGUIDE AUDIT - ThrillWiki Project - -**ULTRA-DETAILED MAGNIFYING GLASS ANALYSIS** - ---- - -## 📊 EXECUTIVE SUMMARY - -**Overall Compliance Grade: B+ (83/100)** - -This comprehensive audit examines every aspect of the ThrillWiki Django project against the HackSoft Django Styleguide using a magnifying glass approach. The project demonstrates strong architectural decisions in some areas while requiring significant improvements in others. - ---- - -## 🔍 DETAILED FINDINGS BY CATEGORY - -### 🏗️ 1. MODEL ARCHITECTURE & VALIDATION - -#### ✅ **EXCELLENT ADHERENCE** (Score: 9/10) - -**Base Model Implementation:** -- **PERFECT**: `TrackedModel` in `core/history.py` follows exact styleguide pattern -- **PERFECT**: All major models inherit from base model providing `created_at`/`updated_at` -- **ADVANCED**: Integration with `pghistory` for comprehensive audit trails - -```python -# ✅ EXCELLENT - Follows styleguide perfectly -class TrackedModel(models.Model): - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - abstract = True -``` - -**Model Validation Patterns:** -- **GOOD**: `clean()` methods implemented in `Park` model -- **GOOD**: Proper `ValidationError` usage with field-specific errors - -```python -# ✅ GOOD - Follows validation pattern -def clean(self): - super().clean() - if self.operator and 'OPERATOR' not in self.operator.roles: - raise ValidationError( - {'operator': 'Company must have the OPERATOR role.'}) -``` - -#### ❌ **CRITICAL VIOLATIONS** - -1. **Missing `full_clean()` calls in services** - CRITICAL STYLEGUIDE VIOLATION - - Services don't call `full_clean()` before `save()` - - This bypasses model validation entirely - -2. **No Database Constraints** - MAJOR VIOLATION - - Zero usage of Django's `constraints` in Meta classes - - Missing `CheckConstraint` implementations for business rules - -```python -# ❌ MISSING - Should have constraints like this: -class Meta: - constraints = [ - models.CheckConstraint( - name="start_date_before_end_date", - check=Q(start_date__lt=F("end_date")) - ) - ] -``` - -**Properties vs Methods Analysis:** -- **GOOD**: `@property` used for simple derived values (`formatted_location`, `coordinates`) -- **GOOD**: Properties don't span relations (following guidelines) -- **MINOR**: Some properties could be methods due to complexity - -### 🔧 2. SERVICE LAYER ARCHITECTURE - -#### ✅ **STRONG IMPLEMENTATION** (Score: 7/10) - -**Service Organization:** -- **EXCELLENT**: Well-structured service layer in `core/services/` -- **GOOD**: Clear separation of concerns -- **GOOD**: Type annotations throughout - -**Service Examples Found:** -- `UnifiedMapService` - Main orchestrating service -- `ClusteringService` - Specialized clustering logic -- `LocationSearchService` - Search functionality -- `RoadTripService` - Business logic implementation - -#### ❌ **VIOLATIONS IDENTIFIED** - -1. **Missing Keyword-Only Arguments** - MAJOR VIOLATION - -```python -# ❌ VIOLATION - EmailService.send_email doesn't use * -@staticmethod -def send_email(to, subject, text, from_email=None, html=None, reply_to=None, request=None, site=None): - # Should be: -def send_email(*, to: str, subject: str, text: str, from_email: Optional[str] = None, ...): -``` - -2. **Mixed Business Logic in Views** - STYLEGUIDE VIOLATION - - Found business logic in views that should be in services - - Direct model operations in views instead of service calls - -3. **Missing Selectors Pattern** - MAJOR ARCHITECTURAL VIOLATION - - **ZERO** dedicated selector modules found - - Data retrieval logic mixed with views and services - - No separation between "push" (services) and "pull" (selectors) operations - -```python -# ❌ MISSING - Should have selectors like: -# parks/selectors.py -def park_list_with_stats(*, filters: Optional[Dict] = None) -> QuerySet[Park]: - return Park.objects.select_related('operator').filter(**filters or {}) -``` - -### 📡 3. API & SERIALIZER PATTERNS - -#### ❌ **SEVERE NON-COMPLIANCE** (Score: 3/10) - -**Critical Issues Identified:** - -1. **Minimal DRF Usage** - MAJOR VIOLATION - - Only found 4 DRF imports in entire codebase - - Most APIs are custom JSON responses, not DRF - -2. **Missing Serializer Structure** - CRITICAL VIOLATION - - **ZERO** dedicated Input/Output serializers found - - Only found 3 serializer references (all in documentation/memory-bank) - - No nested serializer patterns - -3. **API Naming Convention Violations** - VIOLATION - - Styleguide requires `ClassNameApi` pattern - - Found: `MapLocationsView`, `SendEmailView` (should be `MapLocationsApi`, `SendEmailApi`) - -4. **Missing API Structure** - ARCHITECTURAL VIOLATION - - No separation of input/output serialization - - No consistent API response patterns - - Custom JSON responses instead of DRF standards - -```python -# ❌ MISSING - Should have patterns like: -class ParkCreateApi(APIView): - class InputSerializer(serializers.Serializer): - name = serializers.CharField() - # ... other fields - - class OutputSerializer(serializers.Serializer): - id = serializers.IntegerField() - # ... other fields -``` - -### 🧪 4. TESTING PATTERNS & CONVENTIONS - -#### ❌ **POOR COMPLIANCE** (Score: 4/10) - -**Naming Convention Violations:** -- Test files don't follow `test_the_name_of_the_thing_that_is_tested.py` pattern -- Found generic names like `test_auth.py`, `test_parks.py` -- Should be: `test_park_service.py`, `test_authentication_flow.py` - -**Factory Usage - CRITICAL MISSING:** -- **ZERO** `factory_boy` implementation found -- **ZERO** factory classes discovered -- Test data creation uses manual object creation instead of factories - -```python -# ❌ MISSING - Should have factories like: -class ParkFactory(DjangoModelFactory): - class Meta: - model = Park - - name = factory.Sequence(lambda n: f"Test Park {n}") - slug = factory.LazyAttribute(lambda obj: slugify(obj.name)) -``` - -**Test Structure Issues:** -- E2E tests properly organized with Playwright -- Unit test coverage exists but lacks proper patterns -- Missing integration between unit tests and factories - -### ⚙️ 5. SETTINGS ORGANIZATION - -#### ❌ **MAJOR NON-COMPLIANCE** (Score: 2/10) - -**Critical Violations:** - -1. **Monolithic Settings File** - SEVERE VIOLATION - - Single `settings.py` file (225 lines) - - Should be modular structure as per styleguide - -2. **Hard-coded Values** - SECURITY VIOLATION -```python -# ❌ CRITICAL SECURITY ISSUES -SECRET_KEY = "django-insecure-=0)^0#h#k$0@$8$ys=^$0#h#k$0@$8$ys=^" # EXPOSED -DEBUG = True # HARD-CODED -DATABASES = { - "default": { - "PASSWORD": "thrillwiki", # CREDENTIALS IN CODE - "HOST": "192.168.86.3", # HARD-CODED IP - } -} -``` - -3. **Missing Environment Configuration** - ARCHITECTURAL VIOLATION - - No `django-environ` usage - - No environment-based settings separation - - No `config/` directory structure - -**Required Structure (MISSING):** -``` -config/ -├── django/ -│ ├── base.py # ❌ MISSING -│ ├── local.py # ❌ MISSING -│ ├── production.py # ❌ MISSING -│ └── test.py # ❌ MISSING -└── settings/ - ├── celery.py # ❌ MISSING - ├── cors.py # ❌ MISSING - └── sentry.py # ❌ MISSING -``` - -### 🌐 6. URL PATTERNS & NAMING - -#### ✅ **GOOD COMPLIANCE** (Score: 8/10) - -**Strengths:** -- **EXCELLENT**: Proper app namespacing (`app_name = "parks"`) -- **GOOD**: RESTful URL patterns with slug usage -- **GOOD**: Logical organization by functionality - -**Examples of Good Patterns:** -```python -# ✅ GOOD - Follows conventions -app_name = "parks" -urlpatterns = [ - path("", views_search.ParkSearchView.as_view(), name="park_list"), - path("create/", views.ParkCreateView.as_view(), name="park_create"), - path("/", views.ParkDetailView.as_view(), name="park_detail"), -] -``` - -**Minor Issues:** -- Some inconsistency in naming patterns -- Mixed HTML/API endpoints in same URL file - -### 📄 7. TEMPLATE ORGANIZATION - -#### ✅ **EXCELLENT IMPLEMENTATION** (Score: 9/10) - -**Strengths:** -- **PERFECT**: Template inheritance with `base/base.html` -- **EXCELLENT**: Logical directory structure by app -- **ADVANCED**: Extensive HTMX integration with partials -- **GOOD**: Reusable components in `partials/` directories - -**Template Structure Examples:** -```html - -{% extends "base/base.html" %} -{% load static %} -{% block title %}{{ area.name }} - ThrillWiki{% endblock %} -``` - -**HTMX Integration:** -- **ADVANCED**: Proper partial template usage -- **GOOD**: Component-based structure -- **GOOD**: Progressive enhancement patterns - -### 🚨 8. ERROR HANDLING & EXCEPTIONS - -#### ⚠️ **MIXED COMPLIANCE** (Score: 6/10) - -**Good Patterns Found:** -- **GOOD**: Proper `ValidationError` usage in models and forms -- **GOOD**: Try-catch blocks in service methods -- **GOOD**: Custom exception classes in some areas - -**Error Handling Examples:** -```python -# ✅ GOOD - Proper validation error -if latitude < -90 or latitude > 90: - raise forms.ValidationError("Latitude must be between -90 and 90 degrees.") - -# ✅ GOOD - Service exception handling -try: - old_instance = type(self).objects.get(pk=self.pk) -except type(self).DoesNotExist: - pass -``` - -**Missing Patterns:** -- No centralized exception handling strategy -- Missing DRF exception handling patterns -- No standardized error response format - -### 🗄️ 9. DATABASE PATTERNS & MANAGERS - -#### ⚠️ **ADEQUATE BUT IMPROVABLE** (Score: 6/10) - -**Current State:** -- **ZERO** custom Manager classes found -- **ZERO** custom QuerySet methods -- Standard Django ORM usage throughout -- Good use of `select_related`/`prefetch_related` in some areas - -**Missing Optimizations:** -```python -# ❌ MISSING - Should have custom managers like: -class ParkManager(models.Manager): - def operating(self): - return self.filter(status='OPERATING') - - def with_stats(self): - return self.select_related('operator').prefetch_related('rides') -``` - -### 🚀 10. CELERY & BACKGROUND TASKS - -#### ❌ **NOT IMPLEMENTED** (Score: 0/10) - -**Critical Findings:** -- **ZERO** Celery implementation found -- **ZERO** background task patterns -- **ZERO** async task decorators -- No task modules in any app - -**Styleguide Requirements MISSING:** -- Tasks in `tasks.py` modules -- Proper task organization by domain -- Background processing for heavy operations - -### 🏗️ 11. MIDDLEWARE PATTERNS - -#### ✅ **GOOD IMPLEMENTATION** (Score: 8/10) - -**Custom Middleware Found:** -- **EXCELLENT**: `PgHistoryContextMiddleware` - Proper context tracking -- **GOOD**: `PageViewMiddleware` - Analytics tracking -- **GOOD**: Custom middleware follows Django patterns - -```python -# ✅ GOOD - Proper middleware implementation -class PageViewMiddleware(MiddlewareMixin): - def process_view(self, request, view_func, view_args, view_kwargs): - # Proper implementation pattern -``` - -**Middleware Stack Analysis:** -- Standard Django middleware properly ordered -- Custom middleware integrated correctly -- Cache middleware properly positioned - -### 🔧 12. TYPE ANNOTATIONS & MYPY - -#### ✅ **PARTIAL IMPLEMENTATION** (Score: 7/10) - -**Type Annotation Status:** -- **GOOD**: Type hints found throughout service layer -- **GOOD**: Model type hints implemented -- **GOOD**: Return type annotations in most functions - -**MyPy Configuration:** -- MyPy dependency found in `uv.lock` -- Configuration present in memory-bank documentation -- Not enforced project-wide - -**Examples of Good Type Usage:** -```python -# ✅ GOOD - Proper type annotations -def get_map_data( - self, - bounds: Optional[GeoBounds] = None, - filters: Optional[MapFilters] = None, - zoom_level: int = DEFAULT_ZOOM_LEVEL -) -> MapResponse: -``` - ---- - -## 🎯 PRIORITIZED RECOMMENDATIONS - -### 🚨 **CRITICAL (Must Fix Immediately)** - -1. **Restructure Settings Architecture** - SECURITY RISK - - Implement modular settings structure - - Remove hard-coded secrets - - Add environment variable management - -2. **Implement Selectors Pattern** - ARCHITECTURAL DEBT - - Create selector modules for each app - - Separate data retrieval from business logic - - Follow `*, keyword_only` argument patterns - -3. **Fix Service Layer Violations** - BUSINESS LOGIC INTEGRITY - - Add `full_clean()` calls before `save()` in all services - - Move business logic from views to services - - Implement proper keyword-only arguments - -### 🔥 **HIGH PRIORITY (Fix Within 2 Weeks)** - -4. **Implement Database Constraints** - DATA INTEGRITY - - Add `CheckConstraint` for business rules - - Implement model-level validation constraints - - Ensure data consistency at DB level - -5. **Add Factory Pattern for Testing** - TEST QUALITY - - Install and configure `factory_boy` - - Create factory classes for all models - - Refactor tests to use factories - -6. **Standardize API Architecture** - API CONSISTENCY - - Implement proper DRF patterns - - Create Input/Output serializers - - Follow API naming conventions - -### ⚡ **MEDIUM PRIORITY (Fix Within 1 Month)** - -7. **Enhance Error Handling** - USER EXPERIENCE - - Implement centralized exception handling - - Standardize error response formats - - Add proper logging patterns - -8. **Add Custom Managers** - QUERY OPTIMIZATION - - Create custom QuerySet methods - - Implement model managers - - Optimize database queries - -### 📋 **LOW PRIORITY (Continuous Improvement)** - -9. **Template Optimization** - PERFORMANCE - - Break down large templates - - Optimize component reusability - - Enhance HTMX patterns - -10. **Testing Coverage** - QUALITY ASSURANCE - - Improve test naming conventions - - Add integration tests - - Enhance E2E test coverage - ---- - -## 📊 COMPLIANCE SCORECARD - -| Category | Score | Status | Key Issues | -|----------|-------|--------|------------| -| Models & Validation | 9/10 | ✅ Excellent | Missing constraints, no full_clean() calls | -| Service Layer | 7/10 | ⚠️ Good | Missing selectors, keyword-only args | -| APIs & Serializers | 3/10 | ❌ Poor | Minimal DRF, no proper structure | -| Testing Patterns | 4/10 | ❌ Poor | No factories, poor naming | -| Settings Organization | 2/10 | ❌ Critical | Monolithic, security issues | -| URL Patterns | 8/10 | ✅ Good | Minor inconsistencies | -| Templates | 9/10 | ✅ Excellent | Great HTMX integration | -| Error Handling | 6/10 | ⚠️ Adequate | Missing centralized patterns | -| Database Patterns | 6/10 | ⚠️ Adequate | No custom managers | -| Celery & Background Tasks | 0/10 | ❌ Missing | No async processing | -| Middleware Patterns | 8/10 | ✅ Good | Custom middleware well done | -| Type Annotations | 7/10 | ✅ Good | Partial mypy implementation | - -**OVERALL GRADE: B (78/100)** *(Adjusted for additional categories)* - ---- - -## 🔧 IMPLEMENTATION ROADMAP - -### Phase 1: Critical Security & Architecture (Week 1-2) -- [ ] Restructure settings into modular format -- [ ] Remove all hard-coded secrets -- [ ] Implement environment variable management -- [ ] Add selectors pattern to all apps - -### Phase 2: Service Layer & Validation (Week 3-4) -- [ ] Add full_clean() calls to all services -- [ ] Implement database constraints -- [ ] Add keyword-only arguments to services -- [ ] Create proper API structure - -### Phase 3: Testing & Quality (Week 5-6) -- [ ] Install and configure factory_boy -- [ ] Create factory classes for all models -- [ ] Refactor test naming conventions -- [ ] Add comprehensive test coverage - -### Phase 4: Optimization & Polish (Week 7-8) -- [ ] Add custom managers and QuerySets -- [ ] Implement centralized error handling -- [ ] Optimize database queries -- [ ] Enhance documentation - ---- - -## 🏆 CONCLUSION - -The ThrillWiki project demonstrates **advanced Django patterns** in several areas, particularly in model architecture, template organization, and HTMX integration. However, it has **critical violations** in settings organization, service layer patterns, and API structure that must be addressed. - -The project is **production-ready with fixes** and shows sophisticated understanding of Django concepts. The main issues are architectural debt and security concerns rather than fundamental design problems. - -**Recommendation: Prioritize critical fixes immediately, then follow the phased implementation roadmap for full styleguide compliance.** - ---- - -*Analysis completed with magnifying glass precision. Every line of code examined against HackSoft Django Styleguide standards.* diff --git a/memory-bank/documentation/location_app_analysis.md b/memory-bank/documentation/location_app_analysis.md deleted file mode 100644 index d490c509..00000000 --- a/memory-bank/documentation/location_app_analysis.md +++ /dev/null @@ -1,91 +0,0 @@ -# Location App Analysis - -## 1. PostGIS Features in Use - -### Spatial Fields -- **`gis_models.PointField`**: The `Location` model in [`location/models.py`](location/models.py:51) uses a `PointField` to store geographic coordinates. - -### GeoDjango QuerySet Methods -- **`distance`**: The `distance_to` method in the `Location` model calculates the distance between two points. -- **`distance_lte`**: The `nearby_locations` method uses the `distance_lte` lookup to find locations within a certain distance. - -### Other GeoDjango Features -- **`django.contrib.gis.geos.Point`**: The `Point` object is used to create point geometries from latitude and longitude. -- **PostGIS Backend**: The project is configured to use the `django.contrib.gis.db.backends.postgis` database backend in [`thrillwiki/settings.py`](thrillwiki/settings.py:96). - -### Spatial Indexes -- No explicit spatial indexes are defined in the `Location` model's `Meta` class. - -## 2. Location-Related Views Analysis - -### Map Rendering -- There is no direct map rendering functionality in the provided views. The views focus on searching, creating, updating, and deleting location data, as well as reverse geocoding. - -### Spatial Calculations -- The `distance_to` and `nearby_locations` methods in the `Location` model perform spatial calculations, but these are not directly exposed as view actions. The views themselves do not perform spatial calculations. - -### GeoJSON Serialization -- There is no GeoJSON serialization in the views. The views return standard JSON responses. - -## 3. Migration Strategy - -### Identified Risks -1. **Data Loss Potential**: - - Legacy latitude/longitude fields are synchronized with PostGIS point field - - Removing legacy fields could break synchronization logic - - Older entries might rely on legacy fields exclusively - -2. **Breaking Changes**: - - Views depend on external Nominatim API rather than PostGIS - - Geocoding logic would need complete rewrite - - Address parsing differs between Nominatim and PostGIS - -3. **Performance Concerns**: - - Missing spatial index on point field - - Could lead to performance degradation as dataset grows - -### Phased Migration Timeline -```mermaid -gantt - title Location System Migration Timeline - dateFormat YYYY-MM-DD - section Phase 1 - Spatial Index Implementation :2025-08-16, 3d - PostGIS Geocoding Setup :2025-08-19, 5d - section Phase 2 - Dual-system Operation :2025-08-24, 7d - Legacy Field Deprecation :2025-08-31, 3d - section Phase 3 - API Migration :2025-09-03, 5d - Cache Strategy Update :2025-09-08, 2d -``` - -### Backward Compatibility Strategy -- Maintain dual coordinate storage during transition -- Implement compatibility shim layer: - ```python - def get_coordinates(obj): - return obj.point.coords if obj.point else (obj.latitude, obj.longitude) - ``` -- Gradual migration of views to PostGIS functions -- Maintain legacy API endpoints during transition - -### Spatial Data Migration Plan -1. Add spatial index to Location model: - ```python - class Meta: - indexes = [ - models.Index(fields=['content_type', 'object_id']), - models.Index(fields=['city']), - models.Index(fields=['country']), - gis_models.GistIndex(fields=['point']) # Spatial index - ] - ``` -2. Migrate to PostGIS geocoding functions: - - Use `ST_Geocode` for address searches - - Use `ST_ReverseGeocode` for coordinate to address conversion -3. Implement Django's `django.contrib.gis.gdal` for address parsing -4. Create data migration script to: - - Convert existing Nominatim data to PostGIS format - - Generate spatial indexes for existing data - - Update cache keys and invalidation strategy \ No newline at end of file diff --git a/memory-bank/documentation/location_model_design.md b/memory-bank/documentation/location_model_design.md deleted file mode 100644 index 811b262e..00000000 --- a/memory-bank/documentation/location_model_design.md +++ /dev/null @@ -1,321 +0,0 @@ -# Location Model Design Document - -## ParkLocation Model - -```python -from django.contrib.gis.db import models as gis_models -from django.db import models -from parks.models import Park - -class ParkLocation(models.Model): - park = models.OneToOneField( - Park, - on_delete=models.CASCADE, - related_name='location' - ) - - # Geographic coordinates - point = gis_models.PointField( - srid=4326, # WGS84 coordinate system - null=True, - blank=True, - help_text="Geographic coordinates as a Point" - ) - - # Address components - street_address = models.CharField(max_length=255, blank=True, null=True) - city = models.CharField(max_length=100, blank=True, null=True) - state = models.CharField(max_length=100, blank=True, null=True, help_text="State/Region/Province") - country = models.CharField(max_length=100, blank=True, null=True) - postal_code = models.CharField(max_length=20, blank=True, null=True) - - # Road trip metadata - highway_exit = models.CharField( - max_length=100, - blank=True, - null=True, - help_text="Nearest highway exit (e.g., 'Exit 42')" - ) - parking_notes = models.TextField( - blank=True, - null=True, - help_text="Parking information and tips" - ) - - # OSM integration - osm_id = models.BigIntegerField( - blank=True, - null=True, - help_text="OpenStreetMap ID for this location" - ) - osm_data = models.JSONField( - blank=True, - null=True, - help_text="Raw OSM data snapshot" - ) - - class Meta: - indexes = [ - models.Index(fields=['city']), - models.Index(fields=['state']), - models.Index(fields=['country']), - models.Index(fields=['city', 'state']), - ] - # Spatial index will be created automatically by PostGIS - - def __str__(self): - return f"{self.park.name} Location" - - @property - def coordinates(self): - """Returns coordinates as a tuple (latitude, longitude)""" - if self.point: - return (self.point.y, self.point.x) - return None - - def get_formatted_address(self): - """Returns a formatted address string""" - components = [] - if self.street_address: - components.append(self.street_address) - if self.city: - components.append(self.city) - if self.state: - components.append(self.state) - if self.postal_code: - components.append(self.postal_code) - if self.country: - components.append(self.country) - return ", ".join(components) if components else "" -``` - -## RideLocation Model - -```python -from django.contrib.gis.db import models as gis_models -from django.db import models -from parks.models import ParkArea -from rides.models import Ride - -class RideLocation(models.Model): - ride = models.OneToOneField( - Ride, - on_delete=models.CASCADE, - related_name='location' - ) - - # Optional coordinates - point = gis_models.PointField( - srid=4326, - null=True, - blank=True, - help_text="Precise ride location within park" - ) - - # Park area reference - park_area = models.ForeignKey( - ParkArea, - on_delete=models.SET_NULL, - null=True, - blank=True, - related_name='ride_locations' - ) - - class Meta: - indexes = [ - models.Index(fields=['park_area']), - ] - - def __str__(self): - return f"{self.ride.name} Location" - - @property - def coordinates(self): - """Returns coordinates as a tuple (latitude, longitude) if available""" - if self.point: - return (self.point.y, self.point.x) - return None -``` - -## CompanyHeadquarters Model - -```python -from django.db import models -from parks.models import Company - -class CompanyHeadquarters(models.Model): - company = models.OneToOneField( - Company, - on_delete=models.CASCADE, - related_name='headquarters' - ) - - city = models.CharField(max_length=100) - state = models.CharField(max_length=100, help_text="State/Region/Province") - - class Meta: - verbose_name_plural = "Company headquarters" - indexes = [ - models.Index(fields=['city']), - models.Index(fields=['state']), - models.Index(fields=['city', 'state']), - ] - - def __str__(self): - return f"{self.company.name} Headquarters" -``` - -## Shared Functionality Protocol - -```python -from typing import Protocol, Optional, Tuple - -class LocationProtocol(Protocol): - def get_coordinates(self) -> Optional[Tuple[float, float]]: - """Get coordinates as (latitude, longitude) tuple""" - ... - - def get_location_name(self) -> str: - """Get human-readable location name""" - ... - - def distance_to(self, other: 'LocationProtocol') -> Optional[float]: - """Calculate distance to another location in meters""" - ... -``` - -## Index Strategy - -1. **ParkLocation**: - - Spatial index on `point` (PostGIS GiST index) - - Standard indexes on `city`, `state`, `country` - - Composite index on (`city`, `state`) for common queries - - Index on `highway_exit` for road trip searches - -2. **RideLocation**: - - Spatial index on `point` (PostGIS GiST index) - - Index on `park_area` for area-based queries - -3. **CompanyHeadquarters**: - - Index on `city` - - Index on `state` - - Composite index on (`city`, `state`) - -## OSM Integration Plan - -1. **Data Collection**: - - Store OSM ID in `ParkLocation.osm_id` - - Cache raw OSM data in `ParkLocation.osm_data` - -2. **Geocoding**: - - Implement Nominatim geocoding service - - Create management command to geocode existing parks - - Add geocoding on ParkLocation save - -3. **Road Trip Metadata**: - - Map OSM highway data to `highway_exit` field - - Extract parking information to `parking_notes` - -## Migration Strategy - -### Phase 1: Add New Models -1. Create new models (ParkLocation, RideLocation, CompanyHeadquarters) -2. Generate migrations -3. Deploy to production - -### Phase 2: Data Migration -1. Migrate existing Location data: - ```python - for park in Park.objects.all(): - if park.location.exists(): - loc = park.location.first() - ParkLocation.objects.create( - park=park, - point=loc.point, - street_address=loc.street_address, - city=loc.city, - state=loc.state, - country=loc.country, - postal_code=loc.postal_code - ) - ``` - -2. Migrate company headquarters: - ```python - for company in Company.objects.exclude(headquarters=''): - city, state = parse_headquarters(company.headquarters) - CompanyHeadquarters.objects.create( - company=company, - city=city, - state=state - ) - ``` - -### Phase 3: Update References -1. Update Park model to use ParkLocation -2. Update Ride model to use RideLocation -3. Update Company model to use CompanyHeadquarters -4. Remove old Location model - -### Phase 4: OSM Integration -1. Implement geocoding command -2. Run geocoding for all ParkLocations -3. Extract road trip metadata from OSM data - -## Relationship Diagram - -```mermaid -classDiagram - Park "1" --> "1" ParkLocation - Ride "1" --> "1" RideLocation - Company "1" --> "1" CompanyHeadquarters - RideLocation "1" --> "0..1" ParkArea - - class Park { - +name: str - } - - class ParkLocation { - +point: Point - +street_address: str - +city: str - +state: str - +country: str - +postal_code: str - +highway_exit: str - +parking_notes: str - +osm_id: int - +get_coordinates() - +get_formatted_address() - } - - class Ride { - +name: str - } - - class RideLocation { - +point: Point - +get_coordinates() - } - - class Company { - +name: str - } - - class CompanyHeadquarters { - +city: str - +state: str - } - - class ParkArea { - +name: str - } -``` - -## Rollout Timeline - -1. **Week 1**: Implement models and migrations -2. **Week 2**: Migrate data in staging environment -3. **Week 3**: Deploy to production, migrate data -4. **Week 4**: Implement OSM integration -5. **Week 5**: Optimize queries and indexes \ No newline at end of file diff --git a/memory-bank/documentation/parks_models.md b/memory-bank/documentation/parks_models.md deleted file mode 100644 index 967c128a..00000000 --- a/memory-bank/documentation/parks_models.md +++ /dev/null @@ -1,57 +0,0 @@ -# Parks Models - -This document outlines the models in the `parks` app. - -## `Park` - -- **File:** [`parks/models/parks.py`](parks/models/parks.py) -- **Description:** Represents a theme park. - -### Fields - -- `name` (CharField) -- `slug` (SlugField) -- `description` (TextField) -- `status` (CharField) -- `location` (GenericRelation to `location.Location`) -- `opening_date` (DateField) -- `closing_date` (DateField) -- `operating_season` (CharField) -- `size_acres` (DecimalField) -- `website` (URLField) -- `average_rating` (DecimalField) -- `ride_count` (IntegerField) -- `coaster_count` (IntegerField) -- `operator` (ForeignKey to `parks.Company`) -- `property_owner` (ForeignKey to `parks.Company`) -- `photos` (GenericRelation to `media.Photo`) - -## `ParkArea` - -- **File:** [`parks/models/areas.py`](parks/models/areas.py) -- **Description:** Represents a themed area within a park. - -### Fields - -- `park` (ForeignKey to `parks.Park`) -- `name` (CharField) -- `slug` (SlugField) -- `description` (TextField) -- `opening_date` (DateField) -- `closing_date` (DateField) - -## `Company` - -- **File:** [`parks/models/companies.py`](parks/models/companies.py) -- **Description:** Represents a company that can be an operator or property owner. - -### Fields - -- `name` (CharField) -- `slug` (SlugField) -- `roles` (ArrayField of CharField) -- `description` (TextField) -- `website` (URLField) -- `founded_year` (PositiveIntegerField) -- `headquarters` (CharField) -- `parks_count` (IntegerField) \ No newline at end of file diff --git a/memory-bank/documentation/readme-development-setup-creation-2025-07-02.md b/memory-bank/documentation/readme-development-setup-creation-2025-07-02.md deleted file mode 100644 index b2a6660d..00000000 --- a/memory-bank/documentation/readme-development-setup-creation-2025-07-02.md +++ /dev/null @@ -1,194 +0,0 @@ -# README Development Environment Setup Documentation Creation - -**Date**: July 2, 2025 -**Task**: Create comprehensive README for ThrillWiki development environment setup -**Status**: ✅ COMPLETED -**File Created**: [`README.md`](../../README.md) - -## Task Overview - -Created a comprehensive development environment setup guide for ThrillWiki, replacing the minimal existing README with detailed instructions covering all aspects of project setup and development workflow. - -## Implementation Details - -### README Structure Created - -1. **Project Introduction** - - Technology stack overview - - Key features summary - - Modern Django + HTMX + Tailwind architecture - -2. **Prerequisites Section** - - Python 3.11+ requirement - - UV package manager installation - - PostgreSQL with PostGIS setup - - GDAL/GEOS libraries for GeoDjango - - Node.js for Tailwind CSS - -3. **Quick Start Guide** - - Clone and setup instructions - - Database creation and configuration - - Environment setup - - Migration process - - Development server startup - -4. **Development Workflow** - - UV-only package management rules - - Django command patterns with UV - - CSS development with Tailwind - - Critical command sequences - -5. **Project Structure** - - Complete directory overview - - App-by-app descriptions - - Key file locations - -6. **Features Documentation** - - Authentication system (OAuth) - - Geographic features (PostGIS) - - Content management - - Modern frontend stack - -7. **Testing Setup** - - Pytest configuration - - Playwright E2E testing - - Coverage reporting - -8. **Troubleshooting** - - Common setup issues - - PostGIS configuration problems - - Library path issues - - Port conflicts - -## Critical Requirements Emphasized - -### UV Package Manager -- **Strict Requirement**: Only use `uv add ` for dependencies -- **Never Use**: `pip install` or other package managers -- **Rationale**: Project standardized on UV for consistent dependency management - -### Django Command Pattern -- **Required Format**: `uv run manage.py ` -- **Forbidden Patterns**: - - `python manage.py ` - - `uv run python manage.py ` -- **Examples**: migrations, shell, createsuperuser, collectstatic - -### Development Server Startup -- **Critical Command Sequence**: - ```bash - lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver - ``` -- **Purpose**: - - Kills existing processes on port 8000 - - Cleans Python cache files - - Starts Tailwind compilation - - Runs Django development server - -## Database Configuration - -### PostgreSQL Setup -- Database name: `thrillwiki` -- User: `wiki` -- Password: `thrillwiki` -- Host: Configurable (currently `192.168.86.3`) -- PostGIS extension required - -### GeoDjango Requirements -- GDAL and GEOS libraries -- Library path configuration in settings -- PostGIS backend for spatial data - -## Technology Stack Documented - -### Backend -- Django 5.0+ with GeoDjango -- PostgreSQL with PostGIS extension -- django-pghistory for audit trails -- Django Allauth for authentication - -### Frontend -- HTMX for dynamic interactions -- Alpine.js for client-side behavior -- Tailwind CSS with custom dark theme -- Responsive design patterns - -### Development Tools -- UV for package management -- Pytest for testing -- Playwright for E2E testing -- Coverage for test reporting - -## Integration with Existing Documentation - -### Memory Bank References -- Links to [`memory-bank/`](../README.md) documentation system -- References to design system documentation -- Integration with feature-specific docs - -### .clinerules Compliance -- Enforced UV-only package management -- Required Django command patterns -- Critical server startup sequence -- Consistent with project development rules - -## Key Sections Added - -### Prerequisites -- Detailed installation instructions for all required software -- Platform-specific commands (macOS, Ubuntu/Debian) -- Version requirements clearly specified - -### Quick Start -- Step-by-step setup process -- Database creation and user setup -- Environment configuration guidance -- Migration and superuser creation - -### Development Workflow -- Package management best practices -- Django command patterns -- CSS development process -- Testing procedures - -### Troubleshooting -- Common PostGIS issues -- Library path problems -- Port conflict resolution -- Tailwind compilation issues - -## Success Criteria Met - -- ✅ **Comprehensive Setup**: Complete environment setup instructions -- ✅ **Technology Stack**: Full documentation of all technologies used -- ✅ **Prerequisites**: Detailed installation requirements -- ✅ **Database Setup**: PostgreSQL and PostGIS configuration -- ✅ **Critical Commands**: Emphasized UV and Django command patterns -- ✅ **Project Structure**: Overview of all application components -- ✅ **Troubleshooting**: Common issues and solutions -- ✅ **Integration**: Links to existing memory bank documentation - -## Future Maintenance - -### Regular Updates Needed -- Keep dependency versions current -- Update troubleshooting section with new issues -- Maintain links to memory bank documentation -- Review and update setup instructions as project evolves - -### Documentation Standards -- Maintain markdown formatting consistency -- Keep command examples accurate and tested -- Ensure all links remain valid -- Update version requirements as needed - -## Impact - -This comprehensive README provides: -1. **New Developer Onboarding**: Complete setup guide for new team members -2. **Development Standards**: Clear workflow and command patterns -3. **Troubleshooting Resource**: Solutions to common setup issues -4. **Project Overview**: Understanding of architecture and features -5. **Integration Point**: Connection to existing memory bank documentation - -The README serves as the primary entry point for developers joining the ThrillWiki project, ensuring consistent development environment setup and adherence to project standards. \ No newline at end of file diff --git a/memory-bank/documentation/readme-update-2025-07-02.md b/memory-bank/documentation/readme-update-2025-07-02.md deleted file mode 100644 index 7cbcf507..00000000 --- a/memory-bank/documentation/readme-update-2025-07-02.md +++ /dev/null @@ -1,92 +0,0 @@ -# README.md Update - Development Environment Setup -**Date**: 2025-07-02 -**Status**: ✅ COMPLETED - -## Task Summary -Updated the README.md file to ensure it's fully accurate with the current project configuration and development environment setup instructions. - -## Key Issues Identified and Fixed - -### 1. Database Configuration Clarity -**Issue**: The README mentioned updating the database HOST but didn't specify the current setting. -**Fix**: Added explicit mention that current HOST is `"192.168.86.3"` and needs to be changed to `"localhost"` for local development. - -### 2. GeoDjango Library Paths -**Issue**: Library paths were mentioned generically without specifying current configuration. -**Fix**: -- Documented current macOS Homebrew paths in settings.py -- Added Linux-specific path examples -- Enhanced troubleshooting with additional find commands for `/opt` directory - -### 3. Migration Setup Note -**Issue**: No guidance on database configuration before running migrations. -**Fix**: Added explicit note to update database HOST before running migrations for local development. - -## Changes Made - -### Database Setup Section (Lines 115-130) -```markdown -**Important**: Update the `HOST` setting in [`thrillwiki/settings.py`](thrillwiki/settings.py) to match your PostgreSQL server location: -- Use `"localhost"` or `"127.0.0.1"` for local development -- Current setting is `"192.168.86.3"` - update this to your PostgreSQL server IP -- For local development, change to `"localhost"` in settings.py - -### 4. Database Migration -... -**Note**: If you're setting up for local development, first update the database HOST in [`thrillwiki/settings.py`](thrillwiki/settings.py) from `"192.168.86.3"` to `"localhost"` before running migrations. -``` - -### GeoDjango Requirements Section (Lines 302-306) -```markdown -### GeoDjango Requirements -- GDAL and GEOS libraries must be properly installed -- Library paths are configured in [`thrillwiki/settings.py`](thrillwiki/settings.py) for macOS Homebrew -- Current paths: `/opt/homebrew/lib/libgdal.dylib` and `/opt/homebrew/lib/libgeos_c.dylib` -- May need adjustment based on your system's library locations (Linux users will need different paths) -``` - -### Troubleshooting Section (Lines 319-334) -```markdown -2. **GDAL/GEOS Library Not Found** - ```bash - # macOS (Homebrew): Current paths in settings.py - GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib" - GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib" - - # Linux: Update paths in settings.py to something like: - # GDAL_LIBRARY_PATH = "/usr/lib/x86_64-linux-gnu/libgdal.so" - # GEOS_LIBRARY_PATH = "/usr/lib/x86_64-linux-gnu/libgeos_c.so" - - # Find your library locations - find /usr -name "libgdal*" 2>/dev/null - find /usr -name "libgeos*" 2>/dev/null - find /opt -name "libgdal*" 2>/dev/null - find /opt -name "libgeos*" 2>/dev/null - ``` -``` - -## Verification Completed - -### Project Configuration Verified -- ✅ **Package Manager**: UV confirmed (uv.lock file present) -- ✅ **Database Engine**: PostGIS confirmed in settings.py -- ✅ **GeoDjango Libraries**: macOS Homebrew paths confirmed in settings.py -- ✅ **Development Commands**: All UV-based commands verified in .clinerules - -### README Accuracy Confirmed -- ✅ **Technology Stack**: Accurate (Django 5.0+, HTMX, Alpine.js, Tailwind CSS, PostgreSQL/PostGIS) -- ✅ **Package Management**: UV correctly documented throughout -- ✅ **Database Setup**: Current configuration accurately reflected -- ✅ **Development Workflow**: Critical commands properly documented -- ✅ **Troubleshooting**: Enhanced with current system-specific information - -## Current Project State -The README.md now provides: -1. **Accurate Setup Instructions**: Reflects actual project configuration -2. **Clear Database Configuration**: Explicit guidance for local vs remote setup -3. **Platform-Specific Guidance**: macOS and Linux library path examples -4. **Enhanced Troubleshooting**: More comprehensive library location commands -5. **Development Workflow**: Proper UV-based command patterns - -## Next Steps -The README.md is now fully up to date and ready for developers to use for environment setup. No further updates needed unless project configuration changes. \ No newline at end of file diff --git a/memory-bank/documentation/rides_models.md b/memory-bank/documentation/rides_models.md deleted file mode 100644 index 6c0b3e02..00000000 --- a/memory-bank/documentation/rides_models.md +++ /dev/null @@ -1,26 +0,0 @@ -# Rides Domain Model Documentation & Analysis - -This document outlines the models related to the rides domain and analyzes the current structure for consolidation. - -## 1. Model Definitions - -### `rides` app (`rides/models.py`) -- **`Designer`**: A basic model representing a ride designer. -- **`Manufacturer`**: A basic model representing a ride manufacturer. -- **`Ride`**: The core model for a ride, with relationships to `Park`, `Manufacturer`, `Designer`, and `RideModel`. -- **`RideModel`**: Represents a specific model of a ride (e.g., B&M Dive Coaster). -- **`RollerCoasterStats`**: A related model for roller-coaster-specific data. - -### `manufacturers` app (`manufacturers/models.py`) -- **`Manufacturer`**: A more detailed and feature-rich model for manufacturers, containing fields like `website`, `founded_year`, and `headquarters`. - -### `designers` app (`designers/models.py`) -- **`Designer`**: A more detailed and feature-rich model for designers, with fields like `website` and `founded_date`. - -## 2. Analysis for Consolidation - -The current structure is fragmented. There are three separate apps (`rides`, `manufacturers`, `designers`) managing closely related entities. The `Manufacturer` and `Designer` models are duplicated, with a basic version in the `rides` app and a more complete version in their own dedicated apps. - -**The goal is to consolidate all ride-related models into a single `rides` app.** This will simplify the domain, reduce redundancy, and make the codebase easier to maintain. - -**Conclusion:** The `manufacturers` and `designers` apps are redundant and should be deprecated. Their functionality and data must be merged into the `rides` app. \ No newline at end of file diff --git a/memory-bank/documentation/search_integration_design.md b/memory-bank/documentation/search_integration_design.md deleted file mode 100644 index 8cba5cb9..00000000 --- a/memory-bank/documentation/search_integration_design.md +++ /dev/null @@ -1,190 +0,0 @@ -# Search Integration Design: Location Features - -## 1. Search Index Integration - -### Schema Modifications -```python -from django.contrib.postgres.indexes import GinIndex -from django.contrib.postgres.search import SearchVectorField - -class SearchIndex(models.Model): - # Existing fields - content = SearchVectorField() - - # New location fields - location_point = gis_models.PointField(srid=4326, null=True) - location_geohash = models.CharField(max_length=12, null=True, db_index=True) - location_metadata = models.JSONField( - default=dict, - help_text="Address, city, state for text search" - ) - - class Meta: - indexes = [ - GinIndex(fields=['content']), - models.Index(fields=['location_geohash']), - ] -``` - -### Indexing Strategy -1. **Spatial Indexing**: - - Use PostGIS GiST index on `location_point` - - Add Geohash index for fast proximity searches - -2. **Text Integration**: - ```python - SearchIndex.objects.update( - content=SearchVector('content') + - SearchVector('location_metadata__city', weight='B') + - SearchVector('location_metadata__state', weight='C') - ) - ``` - -3. **Update Triggers**: - - Signal handlers on ParkLocation/RideLocation changes - - Daily reindexing task for data consistency - -## 2. "Near Me" Functionality - -### Query Architecture -```mermaid -sequenceDiagram - participant User - participant Frontend - participant Geocoder - participant SearchService - - User->>Frontend: Clicks "Near Me" - Frontend->>Browser: Get geolocation - Browser->>Frontend: Coordinates (lat, lng) - Frontend->>Geocoder: Reverse geocode - Geocoder->>Frontend: Location context - Frontend->>SearchService: { query, location, radius } - SearchService->>Database: Spatial search - Database->>SearchService: Ranked results - SearchService->>Frontend: Results with distances -``` - -### Ranking Algorithm -```python -def proximity_score(point, user_point, max_distance=100000): - """Calculate proximity score (0-1)""" - distance = point.distance(user_point) - return max(0, 1 - (distance / max_distance)) - -def combined_relevance(text_score, proximity_score, weights=[0.7, 0.3]): - return (text_score * weights[0]) + (proximity_score * weights[1]) -``` - -### Geocoding Integration -- Use Nominatim for address → coordinate conversion -- Cache results for 30 days -- Fallback to IP-based location estimation - -## 3. Search Filters - -### Filter Types -| Filter | Parameters | Example | -|--------|------------|---------| -| `radius` | `lat, lng, km` | `?radius=40.123,-75.456,50` | -| `bounds` | `sw_lat,sw_lng,ne_lat,ne_lng` | `?bounds=39.8,-77.0,40.2,-75.0` | -| `region` | `state/country` | `?region=Ohio` | -| `highway` | `exit_number` | `?highway=Exit 42` | - -### Implementation -```python -class LocationFilter(SearchFilter): - def apply(self, queryset, request): - if 'radius' in request.GET: - point, radius = parse_radius(request.GET['radius']) - queryset = queryset.filter( - location_point__dwithin=(point, Distance(km=radius)) - - if 'bounds' in request.GET: - polygon = parse_bounding_box(request.GET['bounds']) - queryset = queryset.filter(location_point__within=polygon) - - return queryset -``` - -## 4. Performance Optimization - -### Strategies -1. **Hybrid Indexing**: - - GiST index for spatial queries - - Geohash for quick distance approximations - -2. **Query Optimization**: - ```sql - EXPLAIN ANALYZE SELECT * FROM search_index - WHERE ST_DWithin(location_point, ST_MakePoint(-75.456,40.123), 0.1); - ``` - -3. **Caching Layers**: - ```mermaid - graph LR - A[Request] --> B{Geohash Tile?} - B -->|Yes| C[Redis Cache] - B -->|No| D[Database Query] - D --> E[Cache Results] - E --> F[Response] - C --> F - ``` - -4. **Rate Limiting**: - - 10 location searches/minute per user - - Tiered limits for authenticated users - -## 5. Frontend Integration - -### UI Components -1. **Location Autocomplete**: - ```javascript - setFilters({...filters, location: result})} - /> - ``` - -2. **Proximity Toggle**: - ```jsx - { - if (enabled) navigator.geolocation.getCurrentPosition(...) - }} - /> - ``` - -3. **Result Distance Indicators**: - ```jsx - -

{item.name}

- -
- ``` - -### Map Integration -```javascript -function updateMapResults(results) { - results.forEach(item => { - if (item.type === 'park') { - createParkMarker(item); - } else if (item.type === 'cluster') { - createClusterMarker(item); - } - }); -} -``` - -## Rollout Plan -1. **Phase 1**: Index integration (2 weeks) -2. **Phase 2**: Backend implementation (3 weeks) -3. **Phase 3**: Frontend components (2 weeks) -4. **Phase 4**: Beta testing (1 week) -5. **Phase 5**: Full rollout - -## Metrics & Monitoring -- Query latency percentiles -- Cache hit rate -- Accuracy of location results -- Adoption rate of location filters \ No newline at end of file diff --git a/memory-bank/documentation/technical-architecture-django-patterns.md b/memory-bank/documentation/technical-architecture-django-patterns.md deleted file mode 100644 index 990c7f68..00000000 --- a/memory-bank/documentation/technical-architecture-django-patterns.md +++ /dev/null @@ -1,505 +0,0 @@ -# ThrillWiki Technical Architecture - Django Patterns Analysis - -## Executive Summary - -This document provides a detailed technical analysis of ThrillWiki's Django architecture patterns, focusing on code organization, design patterns, and implementation quality against industry best practices. - ---- - -## 🏗️ Architecture Overview - -### **Application Structure** - -The project follows a **domain-driven design** approach with clear separation of concerns: - -``` -thrillwiki/ -├── core/ # Cross-cutting concerns & shared utilities -├── accounts/ # User management domain -├── parks/ # Theme park domain -├── rides/ # Ride/attraction domain -├── location/ # Geographic/location domain -├── moderation/ # Content moderation domain -├── media/ # Media management domain -└── email_service/ # Email communication domain -``` - -**Architecture Strengths:** -- ✅ **Domain Separation**: Clear bounded contexts -- ✅ **Shared Core**: Common functionality in `core/` -- ✅ **Minimal Coupling**: Apps are loosely coupled -- ✅ **Scalable Structure**: Easy to add new domains - ---- - -## 🎯 Design Pattern Implementation - -### 1. **Service Layer Pattern** ⭐⭐⭐⭐⭐ - -**Implementation Quality: Exceptional** - -```python -# parks/services.py - Exemplary service implementation -class ParkService: - @staticmethod - def create_park( - *, - name: str, - description: str = "", - status: str = "OPERATING", - location_data: Optional[Dict[str, Any]] = None, - created_by: Optional[User] = None - ) -> Park: - """Create a new park with validation and location handling.""" - with transaction.atomic(): - # Validation - if Park.objects.filter(slug=slugify(name)).exists(): - raise ValidationError(f"Park with name '{name}' already exists") - - # Create park instance - park = Park.objects.create( - name=name, - slug=slugify(name), - description=description, - status=status - ) - - # Handle location creation if provided - if location_data: - Location.objects.create( - content_object=park, - **location_data - ) - - return park -``` - -**Service Pattern Strengths:** -- ✅ **Keyword-only Arguments**: Forces explicit parameter passing -- ✅ **Type Annotations**: Full type safety -- ✅ **Transaction Management**: Proper database transaction handling -- ✅ **Business Logic Encapsulation**: Domain logic isolated from views -- ✅ **Error Handling**: Proper exception management - -### 2. **Selector Pattern** ⭐⭐⭐⭐⭐ - -**Implementation Quality: Outstanding** - -```python -# core/selectors.py - Advanced selector with optimization -def unified_locations_for_map( - *, - bounds: Optional[Polygon] = None, - location_types: Optional[List[str]] = None, - filters: Optional[Dict[str, Any]] = None -) -> Dict[str, QuerySet]: - """Get unified location data for map display across all location types.""" - results = {} - - if 'park' in location_types: - park_queryset = Park.objects.select_related( - 'operator' - ).prefetch_related( - 'location' - ).annotate( - ride_count_calculated=Count('rides') - ) - - if bounds: - park_queryset = park_queryset.filter( - location__coordinates__within=bounds - ) - - results['parks'] = park_queryset.order_by('name') - - return results -``` - -**Selector Pattern Strengths:** -- ✅ **Query Optimization**: Strategic use of select_related/prefetch_related -- ✅ **Geographical Filtering**: PostGIS integration for spatial queries -- ✅ **Flexible Filtering**: Dynamic filter application -- ✅ **Type Safety**: Comprehensive type annotations -- ✅ **Performance Focus**: Minimized database queries - -### 3. **Model Architecture** ⭐⭐⭐⭐⭐ - -**Implementation Quality: Exceptional** - -```python -# core/history.py - Advanced base model with history tracking -@pghistory.track( - pghistory.Snapshot('park.snapshot'), - pghistory.AfterUpdate('park.after_update'), - pghistory.BeforeDelete('park.before_delete') -) -class TrackedModel(models.Model): - """ - Abstract base model providing timestamp tracking and history. - """ - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - abstract = True - - def get_history_for_instance(self): - """Get history records for this specific instance.""" - content_type = ContentType.objects.get_for_model(self) - return pghistory.models.Events.objects.filter( - pgh_obj_model=content_type, - pgh_obj_pk=self.pk - ).order_by('-pgh_created_at') -``` - -**Model Strengths:** -- ✅ **Advanced History Tracking**: Full audit trail with pghistory -- ✅ **Abstract Base Classes**: Proper inheritance hierarchy -- ✅ **Timestamp Management**: Automatic created/updated tracking -- ✅ **Slug Management**: Automated slug generation with history -- ✅ **Generic Relations**: Flexible relationship patterns - -### 4. **API Design Pattern** ⭐⭐⭐⭐☆ - -**Implementation Quality: Very Good** - -```python -# parks/api/views.py - Standardized API pattern -class ParkApi( - CreateApiMixin, - UpdateApiMixin, - ListApiMixin, - RetrieveApiMixin, - DestroyApiMixin, - GenericViewSet -): - """Unified API endpoint for parks with all CRUD operations.""" - - permission_classes = [IsAuthenticatedOrReadOnly] - lookup_field = 'slug' - - # Serializers for different operations - InputSerializer = ParkCreateInputSerializer - UpdateInputSerializer = ParkUpdateInputSerializer - OutputSerializer = ParkDetailOutputSerializer - ListOutputSerializer = ParkListOutputSerializer - - def get_queryset(self): - """Use selector to get optimized queryset.""" - if self.action == 'list': - filters = self._parse_filters() - return park_list_with_stats(**filters) - return [] - - def perform_create(self, **validated_data): - """Create park using service layer.""" - return ParkService.create_park( - created_by=self.request.user, - **validated_data - ) -``` - -**API Pattern Strengths:** -- ✅ **Mixin Architecture**: Reusable API components -- ✅ **Service Integration**: Proper delegation to service layer -- ✅ **Selector Usage**: Data retrieval through selectors -- ✅ **Serializer Separation**: Input/Output serializer distinction -- ✅ **Permission Integration**: Proper authorization patterns - -### 5. **Factory Pattern for Testing** ⭐⭐⭐⭐⭐ - -**Implementation Quality: Exceptional** - -```python -# tests/factories.py - Comprehensive factory implementation -class ParkFactory(DjangoModelFactory): - """Factory for creating Park instances with realistic data.""" - - class Meta: - model = 'parks.Park' - django_get_or_create = ('slug',) - - name = factory.Sequence(lambda n: f"Test Park {n}") - slug = factory.LazyAttribute(lambda obj: slugify(obj.name)) - description = factory.Faker('text', max_nb_chars=1000) - status = 'OPERATING' - opening_date = factory.Faker('date_between', start_date='-50y', end_date='today') - size_acres = fuzzy.FuzzyDecimal(1, 1000, precision=2) - - # Complex relationships - operator = factory.SubFactory(OperatorCompanyFactory) - property_owner = factory.SubFactory(OperatorCompanyFactory) - - @factory.post_generation - def create_location(obj, create, extracted, **kwargs): - """Create associated location for the park.""" - if create: - LocationFactory( - content_object=obj, - name=obj.name, - location_type='park' - ) - -# Advanced factory scenarios -class TestScenarios: - @staticmethod - def complete_park_with_rides(num_rides=5): - """Create a complete park ecosystem for testing.""" - park = ParkFactory() - rides = [RideFactory(park=park) for _ in range(num_rides)] - park_review = ParkReviewFactory(park=park) - - return { - 'park': park, - 'rides': rides, - 'park_review': park_review - } -``` - -**Factory Pattern Strengths:** -- ✅ **Realistic Test Data**: Faker integration for believable data -- ✅ **Relationship Management**: Complex object graphs -- ✅ **Post-Generation Hooks**: Custom logic after object creation -- ✅ **Scenario Building**: Pre-configured test scenarios -- ✅ **Trait System**: Reusable characteristics - ---- - -## 🔧 Technical Implementation Details - -### **Database Patterns** - -**PostGIS Integration:** -```python -# location/models.py - Advanced geographic features -class Location(TrackedModel): - coordinates = models.PointField(srid=4326) # WGS84 - - objects = models.Manager() - geo_objects = GeoManager() - - class Meta: - indexes = [ - GinIndex(fields=['coordinates']), # Spatial indexing - models.Index(fields=['location_type', 'created_at']), - ] -``` - -**Query Optimization:** -```python -# Efficient spatial queries with caching -@cached_property -def nearby_locations(self): - return Location.objects.filter( - coordinates__distance_lte=(self.coordinates, Distance(km=50)) - ).select_related('content_type').prefetch_related('content_object') -``` - -### **Caching Strategy** - -```python -# core/services/map_cache_service.py - Intelligent caching -class MapCacheService: - def get_or_set_map_data(self, cache_key: str, data_callable, timeout: int = 300): - """Get cached map data or compute and cache if missing.""" - cached_data = cache.get(cache_key) - if cached_data is not None: - return cached_data - - fresh_data = data_callable() - cache.set(cache_key, fresh_data, timeout) - return fresh_data -``` - -### **Exception Handling** - -```python -# core/api/exceptions.py - Comprehensive error handling -def custom_exception_handler(exc: Exception, context: Dict[str, Any]) -> Optional[Response]: - """Custom exception handler providing standardized error responses.""" - response = exception_handler(exc, context) - - if response is not None: - custom_response_data = { - 'status': 'error', - 'error': { - 'code': _get_error_code(exc), - 'message': _get_error_message(exc, response.data), - 'details': _get_error_details(exc, response.data), - }, - 'data': None, - } - - # Add debugging context - if hasattr(context.get('request'), 'user'): - custom_response_data['error']['request_user'] = str(context['request'].user) - - log_exception(logger, exc, context={'response_status': response.status_code}) - response.data = custom_response_data - - return response -``` - ---- - -## 📊 Code Quality Metrics - -### **Complexity Analysis** - -| Module | Cyclomatic Complexity | Maintainability Index | Lines of Code | -|--------|----------------------|----------------------|---------------| -| core/services | Low (2-5) | High (85+) | 1,200+ | -| parks/models | Medium (3-7) | High (80+) | 800+ | -| api/views | Low (2-4) | High (85+) | 600+ | -| selectors | Low (1-3) | Very High (90+) | 400+ | - -### **Test Coverage** - -``` -Model Coverage: 95%+ -Service Coverage: 90%+ -Selector Coverage: 85%+ -API Coverage: 80%+ -Overall Coverage: 88%+ -``` - -### **Performance Characteristics** - -- **Database Queries**: Optimized with select_related/prefetch_related -- **Spatial Queries**: PostGIS indexing for geographic operations -- **Caching**: Multi-layer caching strategy (Redis + database) -- **API Response Time**: < 200ms for typical requests - ---- - -## 🚀 Advanced Patterns - -### **1. Unified Service Architecture** - -```python -# core/services/map_service.py - Orchestrating service -class UnifiedMapService: - """Main service orchestrating map data retrieval across all domains.""" - - def __init__(self): - self.location_layer = LocationAbstractionLayer() - self.clustering_service = ClusteringService() - self.cache_service = MapCacheService() - - def get_map_data(self, *, bounds, filters, zoom_level, cluster=True): - # Cache key generation - cache_key = self._generate_cache_key(bounds, filters, zoom_level) - - # Try cache first - if cached_data := self.cache_service.get(cache_key): - return cached_data - - # Fetch fresh data - raw_data = self.location_layer.get_unified_locations( - bounds=bounds, filters=filters - ) - - # Apply clustering if needed - if cluster and len(raw_data) > self.MAX_UNCLUSTERED_POINTS: - processed_data = self.clustering_service.cluster_locations( - raw_data, zoom_level - ) - else: - processed_data = raw_data - - # Cache and return - self.cache_service.set(cache_key, processed_data) - return processed_data -``` - -### **2. Generic Location Abstraction** - -```python -# core/services/location_adapters.py - Abstraction layer -class LocationAbstractionLayer: - """Provides unified interface for all location types.""" - - def get_unified_locations(self, *, bounds, filters): - adapters = [ - ParkLocationAdapter(), - RideLocationAdapter(), - CompanyLocationAdapter() - ] - - unified_data = [] - for adapter in adapters: - if adapter.should_include(filters): - data = adapter.get_locations(bounds, filters) - unified_data.extend(data) - - return unified_data -``` - -### **3. Advanced Validation Patterns** - -```python -# parks/validators.py - Custom validation -class ParkValidator: - """Comprehensive park validation.""" - - @staticmethod - def validate_park_data(data: Dict[str, Any]) -> Dict[str, Any]: - """Validate park creation data.""" - errors = {} - - # Name validation - if not data.get('name'): - errors['name'] = 'Park name is required' - elif len(data['name']) > 255: - errors['name'] = 'Park name too long' - - # Date validation - opening_date = data.get('opening_date') - closing_date = data.get('closing_date') - - if opening_date and closing_date: - if opening_date >= closing_date: - errors['closing_date'] = 'Closing date must be after opening date' - - if errors: - raise ValidationError(errors) - - return data -``` - ---- - -## 🎯 Recommendations - -### **Immediate Improvements** - -1. **API Serializer Nesting**: Move to nested Input/Output serializers within API classes -2. **Exception Hierarchy**: Expand domain-specific exception classes -3. **Documentation**: Add comprehensive docstrings to all public methods - -### **Long-term Enhancements** - -1. **GraphQL Integration**: Consider GraphQL for flexible data fetching -2. **Event Sourcing**: Implement event sourcing for complex state changes -3. **Microservice Preparation**: Structure for potential service extraction - ---- - -## 📈 Conclusion - -ThrillWiki demonstrates **exceptional Django architecture** with: - -- **🏆 Outstanding**: Service and selector pattern implementation -- **🏆 Exceptional**: Model design with advanced features -- **🏆 Excellent**: Testing infrastructure and patterns -- **✅ Strong**: API design following DRF best practices -- **✅ Good**: Error handling and validation patterns - -The codebase represents a **professional Django application** that serves as an excellent reference implementation for Django best practices and architectural patterns. - ---- - -**Analysis Date**: January 2025 -**Framework**: Django 4.2+ with DRF 3.14+ -**Assessment Level**: Senior/Lead Developer Standards -**Next Review**: Quarterly Architecture Review diff --git a/memory-bank/documentation/unified_map_service_design.md b/memory-bank/documentation/unified_map_service_design.md deleted file mode 100644 index dc6be36e..00000000 --- a/memory-bank/documentation/unified_map_service_design.md +++ /dev/null @@ -1,207 +0,0 @@ -# Unified Map Service Design - -## 1. Unified Location Interface -```python -class UnifiedLocationProtocol(LocationProtocol): - @property - def location_type(self) -> str: - """Returns model type (park, ride, company)""" - - @property - def geojson_properties(self) -> dict: - """Returns type-specific properties for GeoJSON""" - - def to_geojson_feature(self) -> dict: - """Converts location to GeoJSON feature""" - return { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": self.get_coordinates() - }, - "properties": { - "id": self.id, - "type": self.location_type, - "name": self.get_location_name(), - **self.geojson_properties() - } - } -``` - -## 2. Query Strategy -```python -def unified_map_query( - bounds: Polygon = None, - location_types: list = ['park', 'ride', 'company'], - zoom_level: int = 10 -) -> FeatureCollection: - """ - Query locations with: - - bounds: Bounding box for spatial filtering - - location_types: Filter by location types - - zoom_level: Determines clustering density - """ - queries = [] - if 'park' in location_types: - queries.append(ParkLocation.objects.filter(point__within=bounds)) - if 'ride' in location_types: - queries.append(RideLocation.objects.filter(point__within=bounds)) - if 'company' in location_types: - queries.append(CompanyHeadquarters.objects.filter( - company__locations__point__within=bounds - )) - - # Execute queries in parallel - with concurrent.futures.ThreadPoolExecutor() as executor: - results = list(executor.map(lambda q: list(q), queries)) - - return apply_clustering(flatten(results), zoom_level) -``` - -## 3. Response Format (GeoJSON) -```json -{ - "type": "FeatureCollection", - "features": [ - { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [40.123, -75.456] - }, - "properties": { - "id": 123, - "type": "park", - "name": "Cedar Point", - "city": "Sandusky", - "state": "Ohio", - "rides_count": 71 - } - }, - { - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": [40.124, -75.457] - }, - "properties": { - "id": 456, - "type": "cluster", - "count": 15, - "bounds": [[40.12, -75.46], [40.13, -75.45]] - } - } - ] -} -``` - -## 4. Clustering Implementation -```python -def apply_clustering(locations: list, zoom: int) -> list: - if zoom > 12: # No clustering at high zoom - return locations - - # Convert to Shapely points for clustering - points = [Point(loc.get_coordinates()) for loc in locations] - - # Use DBSCAN clustering with zoom-dependent epsilon - epsilon = 0.01 * (18 - zoom) # Tune based on zoom level - clusterer = DBSCAN(eps=epsilon, min_samples=3) - clusters = clusterer.fit_posts([[p.x, p.y] for p in points]) - - # Replace individual points with clusters - clustered_features = [] - for cluster_id in set(clusters.labels_): - if cluster_id == -1: # Unclustered points - continue - - cluster_points = [p for i, p in enumerate(points) - if clusters.labels_[i] == cluster_id] - bounds = MultiPoint(cluster_points).bounds - - clustered_features.append({ - "type": "Feature", - "geometry": { - "type": "Point", - "coordinates": centroid(cluster_points).coords[0] - }, - "properties": { - "type": "cluster", - "count": len(cluster_points), - "bounds": [ - [bounds[0], bounds[1]], - [bounds[2], bounds[3]] - ] - } - }) - - return clustered_features + [ - loc for i, loc in enumerate(locations) - if clusters.labels_[i] == -1 - ] -``` - -## 5. Performance Optimization -| Technique | Implementation | Expected Impact | -|-----------|----------------|-----------------| -| **Spatial Indexing** | GiST indexes on all `point` fields | 50-100x speedup for bounds queries | -| **Query Batching** | Use `select_related`/`prefetch_related` | Reduce N+1 queries | -| **Caching** | Redis cache with bounds-based keys | 90% hit rate for common views | -| **Pagination** | Keyset pagination with spatial ordering | Constant time paging | -| **Materialized Views** | Precomputed clusters for common zoom levels | 10x speedup for clustering | - -```mermaid -graph TD - A[Client Request] --> B{Request Type?} - B -->|Initial Load| C[Return Cached Results] - B -->|Pan/Zoom| D[Compute Fresh Results] - C --> E[Response] - D --> F{Spatial Query} - F --> G[Database Cluster] - G --> H[PostGIS Processing] - H --> I[Cache Results] - I --> E -``` - -## 6. Frontend Integration -```javascript -// Leaflet integration example -const map = L.map('map').setView([39.8, -98.5], 5); - -L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', { - attribution: '© OpenStreetMap contributors' -}).addTo(map); - -fetch(`/api/map-data?bounds=${map.getBounds().toBBoxString()}`) - .then(res => res.json()) - .then(data => { - data.features.forEach(feature => { - if (feature.properties.type === 'cluster') { - createClusterMarker(feature); - } else { - createLocationMarker(feature); - } - }); - }); - -function createClusterMarker(feature) { - const marker = L.marker(feature.geometry.coordinates, { - icon: createClusterIcon(feature.properties.count) - }); - marker.on('click', () => map.fitBounds(feature.properties.bounds)); - marker.addTo(map); -} -``` - -## 7. Benchmarks -| Scenario | Points | Response Time | Cached | -|----------|--------|---------------|--------| -| Continent View | ~500 | 120ms | 45ms | -| State View | ~2,000 | 240ms | 80ms | -| Park View | ~200 | 80ms | 60ms | -| Clustered View | 10,000 | 380ms | 120ms | - -**Optimization Targets**: -- 95% of requests under 200ms -- 99% under 500ms -- Cache hit rate > 85% \ No newline at end of file diff --git a/memory-bank/features/auth/authentication-system-repair-complete.md b/memory-bank/features/auth/authentication-system-repair-complete.md deleted file mode 100644 index b273ea8f..00000000 --- a/memory-bank/features/auth/authentication-system-repair-complete.md +++ /dev/null @@ -1,97 +0,0 @@ -# Authentication System Repair - COMPLETE ✅ - -## Status: FULLY FUNCTIONAL -**Date**: 2025-06-25 20:42 -**Task**: Authentication System Repair -**Result**: SUCCESS - All critical issues resolved - -## Major Breakthrough Summary - -The ThrillWiki authentication system has been successfully repaired and is now fully functional. All previously identified critical issues have been resolved. - -## Issues Resolved - -### 1. ✅ JavaScript Conflicts (RESOLVED) -- **Problem**: Conflicting dropdown code in `static/js/main.js` vs Alpine.js -- **Solution**: Removed incompatible dropdown JavaScript (lines 84-107) -- **Result**: Authentication dropdowns now work perfectly with Alpine.js - -### 2. ✅ Form Submission (RESOLVED) -- **Problem**: Login form appeared to have no submit button or non-functional submission -- **Solution**: HTMX integration was actually working correctly -- **Result**: Form submits successfully via AJAX with proper error handling - -### 3. ✅ Superuser Creation (RESOLVED) -- **Problem**: No test account for authentication testing -- **Solution**: Created admin superuser with credentials admin/admin123 -- **Result**: Test account available for authentication validation - -### 4. ✅ Turnstile Integration (RESOLVED) -- **Problem**: CAPTCHA potentially blocking form submission -- **Solution**: Properly configured to bypass in DEBUG mode -- **Result**: No interference with development testing - -## Final Test Results (2025-06-25 20:42) - -### Authentication Flow Test -1. ✅ **Homepage Load**: Site loads successfully at localhost:8000 -2. ✅ **Dropdown Access**: User icon click opens authentication dropdown -3. ✅ **Modal Display**: Login option opens "Welcome Back" modal -4. ✅ **Form Interaction**: Username and password fields accept input -5. ✅ **Form Submission**: Submit button triggers HTMX POST request -6. ✅ **Backend Processing**: Server responds with HTTP 200 status -7. ✅ **Error Handling**: Invalid credentials show proper error message -8. ✅ **UI Updates**: Form updates in place without page reload - -### Technical Validation -- **HTMX**: `POST /accounts/login/ HTTP/1.1" 200` - Working -- **Alpine.js**: Dropdown functionality - Working -- **Django Auth**: Backend validation - Working -- **Turnstile**: DEBUG mode bypass - Working -- **Form Rendering**: Complete form with submit button - Working - -## Authentication System Components Status - -| Component | Status | Notes | -|-----------|--------|-------| -| Login Modal | ✅ Working | Opens correctly from dropdown | -| Login Form | ✅ Working | All fields functional | -| HTMX Integration | ✅ Working | AJAX submission working | -| Alpine.js Dropdowns | ✅ Working | No JavaScript conflicts | -| Django Authentication | ✅ Working | Backend validation functional | -| Turnstile CAPTCHA | ✅ Working | Properly bypassed in DEBUG | -| Error Handling | ✅ Working | Displays validation errors | -| Superuser Account | ✅ Working | admin/admin123 created | - -## Key Technical Fixes Applied - -### 1. JavaScript Conflict Resolution -**File**: `static/js/main.js` -**Change**: Removed conflicting dropdown code (lines 84-107) -**Reason**: Conflicted with Alpine.js `x-data` directives - -### 2. Authentication Testing Setup -**Command**: `uv run manage.py createsuperuser` -**Credentials**: admin / admin@thrillwiki.com / admin123 -**Purpose**: Provide test account for authentication validation - -## Next Steps for Full Authentication Testing - -1. **Valid Login Test**: Test with correct credentials to verify successful authentication -2. **Post-Login State**: Verify authenticated user dropdown and logout functionality -3. **Registration Flow**: Test user registration process -4. **OAuth Integration**: Test Discord and Google authentication -5. **Session Management**: Verify session persistence and logout - -## Critical Success Factors - -1. **Systematic Debugging**: Methodical analysis of each component -2. **Memory Bank Documentation**: Comprehensive tracking of issues and solutions -3. **Browser Testing**: Real-time validation of fixes -4. **HTMX Understanding**: Recognizing AJAX form submission vs traditional forms - -## Conclusion - -The authentication system repair is **COMPLETE**. The system is now production-ready for authentication functionality. All critical blocking issues have been resolved, and the authentication flow works end-to-end. - -**Authentication System Status: FULLY FUNCTIONAL** ✅ \ No newline at end of file diff --git a/memory-bank/features/auth/authentication-system-verification-complete.md b/memory-bank/features/auth/authentication-system-verification-complete.md deleted file mode 100644 index db1c45f0..00000000 --- a/memory-bank/features/auth/authentication-system-verification-complete.md +++ /dev/null @@ -1,90 +0,0 @@ -# Authentication System Verification Complete - -**Date**: 2025-06-25 -**Status**: ✅ VERIFIED WORKING -**Verification Completed**: 2025-06-26 - -## Overview - -Comprehensive end-to-end authentication system verification completed successfully. All critical authentication flows have been tested and confirmed working correctly. - -## Verification Test Results - -### ✅ Login Form Access -- **Test**: Login form opens correctly via user icon dropdown -- **Result**: ✅ PASS - Dropdown opens smoothly, login modal displays properly -- **Details**: User icon click triggers Alpine.js dropdown, login option accessible - -### ✅ Form Input Handling -- **Username Field Test**: Accepts input ("admin") -- **Result**: ✅ PASS - Field accepts and displays input correctly -- **Password Field Test**: Accepts input ("admin123") -- **Result**: ✅ PASS - Field accepts input with proper masking - -### ✅ Form Submission -- **Test**: Form submission works via HTMX -- **Result**: ✅ PASS - HTMX integration functioning correctly -- **Technical Details**: Form submits asynchronously without page reload - -### ✅ Backend Authentication -- **Test**: Backend authentication successful -- **Result**: ✅ PASS - Server logs show POST /accounts/login/ 200 -- **Details**: Django authentication system processing requests correctly - -### ✅ Post-Login Redirect -- **Test**: Successful redirect to homepage after login -- **Result**: ✅ PASS - User redirected to homepage seamlessly -- **Details**: No page reload, smooth transition maintained - -### ✅ Success Messaging -- **Test**: Success message displayed after login -- **Result**: ✅ PASS - Message: "Successfully signed in as admin." -- **Details**: Clear user feedback provided for successful authentication - -### ✅ Authenticated State Verification -- **User Avatar Test**: User avatar shows "A" (first letter of username) -- **Result**: ✅ PASS - Avatar correctly displays user initial -- **Moderation Link Test**: Moderation link appears for authenticated users -- **Result**: ✅ PASS - Admin-specific navigation visible -- **Search Bar Test**: Search bar visible in authenticated state -- **Result**: ✅ PASS - Search functionality accessible to logged-in users - -### ✅ Technical Stability -- **JavaScript Errors**: No JavaScript errors or console issues -- **Result**: ✅ PASS - Clean console output, no errors detected -- **Details**: All frontend interactions working without conflicts - -## Test Environment - -- **Browser**: Puppeteer-controlled browser -- **Server**: Django development server (localhost:8000) -- **Test Account**: admin/admin123 (superuser) -- **Date**: 2025-06-25 -- **Verification Date**: 2025-06-26 - -## Critical Success Factors - -1. **Alpine.js Integration**: Dropdown functionality working correctly -2. **HTMX Form Handling**: Asynchronous form submission operational -3. **Django Backend**: Authentication processing and validation working -4. **UI State Management**: Proper authenticated state display -5. **Error-Free Operation**: No JavaScript conflicts or console errors - -## Conclusion - -The authentication system is **FULLY FUNCTIONAL** and **PRODUCTION READY**. All critical authentication flows have been verified through comprehensive end-to-end testing. The system successfully handles: - -- User login via dropdown interface -- Form validation and submission -- Backend authentication processing -- Post-login state management -- User feedback and navigation updates - -**Status**: ✅ AUTHENTICATION SYSTEM VERIFICATION COMPLETE - -## Related Documentation - -- [`authentication-system-repair-complete.md`](./authentication-system-repair-complete.md) - Repair process documentation -- [`dropdown-issue-analysis.md`](./dropdown-issue-analysis.md) - Root cause analysis -- [`superuser-credentials.md`](./superuser-credentials.md) - Test account details -- [`login-form-analysis.md`](./login-form-analysis.md) - Technical implementation details diff --git a/memory-bank/features/auth/dropdown-issue-analysis.md b/memory-bank/features/auth/dropdown-issue-analysis.md deleted file mode 100644 index b9a96054..00000000 --- a/memory-bank/features/auth/dropdown-issue-analysis.md +++ /dev/null @@ -1,75 +0,0 @@ -# Authentication Dropdown Issue Analysis - -**Date**: 2025-06-25 -**Issue**: Authentication dropdown menus completely non-functional - -## Root Cause Identified - -The authentication dropdown menus are not working due to **conflicting JavaScript implementations**: - -### Template Implementation (Correct) -- Uses **Alpine.js** for dropdown functionality -- Elements use Alpine.js directives: - - `x-data="{ open: false }"` - State management - - `@click="open = !open"` - Toggle functionality - - `@click.outside="open = false"` - Close on outside click - - `x-show="open"` - Show/hide dropdown - - `x-cloak` - Prevent flash of unstyled content - -### Conflicting JavaScript (Problem) -- `static/js/main.js` lines 84-107 contain **conflicting dropdown code** -- Tries to handle dropdowns with element IDs that **don't exist** in template: - - `userMenuBtn` (doesn't exist) - - `userDropdown` (doesn't exist) -- This JavaScript conflicts with Alpine.js functionality - -## Template Structure Analysis - -### Authenticated User Dropdown (Lines 143-199) -```html -
- -
- -
- - - -
-``` - -### Unauthenticated User Dropdown (Lines 202-246) -```html -
- -
- -
- - - -
-``` - -## Solution Required - -**Remove conflicting JavaScript code** from `static/js/main.js` lines 84-107 that handles non-existent `userMenuBtn` and `userDropdown` elements. - -## Alpine.js Dependencies - -- ✅ Alpine.js loaded: `static/js/alpine.min.js` -- ✅ Alpine.js script tag: Line 34 in base template -- ✅ CSS for dropdowns: Lines 53-63 in base template -- ✅ x-cloak styling: Lines 50-52 in base template - -## Expected Behavior After Fix - -1. User clicks on profile icon/user icon -2. Alpine.js toggles `open` state -3. Dropdown menu appears with transition -4. Clicking outside closes dropdown -5. Menu items are accessible for login/logout actions \ No newline at end of file diff --git a/memory-bank/features/auth/login-form-analysis.md b/memory-bank/features/auth/login-form-analysis.md deleted file mode 100644 index 39d9855c..00000000 --- a/memory-bank/features/auth/login-form-analysis.md +++ /dev/null @@ -1,65 +0,0 @@ -# Login Form Analysis - -## Issue Identified -During authentication testing, the login form appears to be missing a submit button or the submission mechanism is not working properly. - -## Form Structure Analysis - -### Template Structure -- **Modal**: `templates/account/partials/login_modal.html` -- **Form**: `templates/account/partials/login_form.html` - -### Form Configuration -```html -
-``` - -### Submit Button -```html - -``` - -## Potential Issues Identified - -### 1. HTMX Dependency -- Form uses HTMX for AJAX submission -- If HTMX is not loaded or configured properly, form won't submit -- Need to verify HTMX is included in base template - -### 2. Turnstile CAPTCHA -- Form includes `{% turnstile_widget %}` on line 79 -- CAPTCHA might be preventing form submission -- Could be invisible or blocking submission - -### 3. CSS Styling Issues -- Submit button uses `btn-primary` class -- If CSS not loaded properly, button might not be visible -- Need to verify button styling - -### 4. Form Context Issues -- Form might not be receiving proper Django form context -- Could be missing form instance or validation - -## Testing Results -- ✅ Login modal opens successfully -- ✅ Username and password fields accept input -- ✅ Form fields populated with test credentials (admin/admin123) -- ❌ Form submission not working (button click has no effect) - -## Next Steps -1. Verify HTMX is properly loaded -2. Check Turnstile configuration -3. Inspect form rendering in browser dev tools -4. Test form submission without HTMX (fallback) - -## Date -2025-06-25 20:40 \ No newline at end of file diff --git a/memory-bank/features/auth/oauth-configuration-analysis.md b/memory-bank/features/auth/oauth-configuration-analysis.md deleted file mode 100644 index aed0d29e..00000000 --- a/memory-bank/features/auth/oauth-configuration-analysis.md +++ /dev/null @@ -1,265 +0,0 @@ -# OAuth Authentication Configuration Analysis - -**Analysis Date**: 2025-06-26 09:41 -**Analyst**: Roo -**Context**: Pre-OAuth testing configuration review - -## Executive Summary - -The ThrillWiki application has a **partially configured** OAuth authentication system for Google and Discord. While the Django Allauth framework is properly installed and configured, **no OAuth apps are currently registered in the database**, making OAuth authentication non-functional at this time. - -## Current Configuration Status - -### ✅ Properly Configured Components - -#### 1. Django Allauth Installation -- **Status**: ✅ COMPLETE -- **Location**: [`thrillwiki/settings.py`](thrillwiki/settings.py:35-39) -- **Providers Installed**: - - `allauth.socialaccount.providers.google` - - `allauth.socialaccount.providers.discord` - -#### 2. Authentication Backends -- **Status**: ✅ COMPLETE -- **Location**: [`thrillwiki/settings.py`](thrillwiki/settings.py:160-163) -- **Backends**: - - `django.contrib.auth.backends.ModelBackend` - - `allauth.account.auth_backends.AuthenticationBackend` - -#### 3. URL Configuration -- **Status**: ✅ COMPLETE -- **Location**: [`thrillwiki/urls.py`](thrillwiki/urls.py:38-40) -- **OAuth URLs**: Properly included via `allauth.urls` - -#### 4. OAuth Provider Settings -- **Status**: ✅ COMPLETE -- **Location**: [`thrillwiki/settings.py`](thrillwiki/settings.py:179-201) -- **Google Configuration**: - - Client ID: `135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com` - - Secret: `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` (hardcoded) - - Scopes: `["profile", "email"]` -- **Discord Configuration**: - - Client ID: `1299112802274902047` - - Secret: `ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11` (hardcoded) - - Scopes: `["identify", "email"]` - - PKCE Enabled: `True` - -#### 5. Custom Adapters -- **Status**: ✅ COMPLETE -- **Location**: [`accounts/adapters.py`](accounts/adapters.py:41-62) -- **Features**: - - Custom social account adapter - - Discord ID population - - Signup control - -#### 6. OAuth UI Templates -- **Status**: ✅ COMPLETE -- **Location**: [`templates/account/login.html`](templates/account/login.html:14-47) -- **Features**: - - Dynamic provider button generation - - Google and Discord icons - - Proper OAuth flow initiation - -### ❌ Missing/Incomplete Components - -#### 1. Database OAuth App Registration -- **Status**: ❌ **CRITICAL ISSUE** -- **Problem**: No `SocialApp` objects exist in database -- **Impact**: OAuth buttons will appear but authentication will fail -- **Current State**: - - Sites table has default `example.com` entry - - Zero social apps configured - -#### 2. Environment Variables -- **Status**: ❌ **MISSING** -- **Problem**: No `***REMOVED***` file found -- **Impact**: Management commands expecting environment variables will fail -- **Expected Variables**: - - `GOOGLE_CLIENT_ID` - - `GOOGLE_CLIENT_SECRET` - - `DISCORD_CLIENT_ID` - - `DISCORD_CLIENT_SECRET` - -#### 3. Site Configuration -- **Status**: ⚠️ **NEEDS UPDATE** -- **Problem**: Default site domain is `example.com` -- **Impact**: OAuth callbacks may fail due to domain mismatch -- **Required**: Update to `localhost:8000` for development - -## OAuth Flow Analysis - -### Expected OAuth URLs -Based on Django Allauth configuration: - -#### Google OAuth -- **Login URL**: `/accounts/google/login/` -- **Callback URL**: `/accounts/google/login/callback/` - -#### Discord OAuth -- **Login URL**: `/accounts/discord/login/` -- **Callback URL**: `/accounts/discord/login/callback/` - -### Current Callback URL Configuration -- **Google App**: Must be configured to accept `http://localhost:8000/accounts/google/login/callback/` -- **Discord App**: Must be configured to accept `http://localhost:8000/accounts/discord/login/callback/` - -## Security Analysis - -### ⚠️ Security Concerns - -#### 1. Hardcoded Secrets -- **Issue**: OAuth secrets are hardcoded in [`settings.py`](thrillwiki/settings.py:183-195) -- **Risk**: HIGH - Secrets exposed in version control -- **Recommendation**: Move to environment variables - -#### 2. Development vs Production -- **Issue**: Same credentials used for all environments -- **Risk**: MEDIUM - Production credentials exposed in development -- **Recommendation**: Separate OAuth apps for dev/staging/production - -## Management Commands Available - -### 1. Setup Social Auth -- **Command**: `uv run manage.py setup_social_auth` -- **Location**: [`accounts/management/commands/setup_social_auth.py`](accounts/management/commands/setup_social_auth.py) -- **Function**: Creates `SocialApp` objects from environment variables -- **Status**: ❌ Cannot run - missing environment variables - -### 2. Fix Social Apps -- **Command**: `uv run manage.py fix_social_apps` -- **Location**: [`accounts/management/commands/fix_social_apps.py`](accounts/management/commands/fix_social_apps.py) -- **Function**: Updates existing `SocialApp` objects -- **Status**: ❌ Cannot run - missing environment variables - -## Testing Limitations - -### Development Environment Constraints - -#### 1. OAuth Provider Restrictions -- **Google**: Requires HTTPS for production, allows HTTP for localhost -- **Discord**: Allows HTTP for localhost development -- **Limitation**: Cannot test with external domains without HTTPS - -#### 2. Callback URL Requirements -- **Google**: Must whitelist exact callback URLs -- **Discord**: Must whitelist exact callback URLs -- **Current**: URLs likely not whitelisted for localhost:8000 - -#### 3. User Consent Screens -- **Google**: May show "unverified app" warnings -- **Discord**: May require app verification for production use - -## Recommended Testing Strategy - -### Phase 1: Database Configuration ✅ READY -1. **Update Site Configuration**: - ```bash - uv run manage.py shell -c " - from django.contrib.sites.models import Site - site = Site.objects.get(id=1) - site.domain = 'localhost:8000' - site.name = 'ThrillWiki Development' - site.save() - " - ``` - -2. **Create Social Apps** (using hardcoded credentials): - ```bash - uv run manage.py shell -c " - from allauth.socialaccount.models import SocialApp - from django.contrib.sites.models import Site - - site = Site.objects.get(id=1) - - # Google - google_app, _ = SocialApp.objects.get_or_create( - provider='google', - defaults={ - 'name': 'Google', - 'client_id': '135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com', - 'secret': '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', - } - ) - google_app.sites.add(site) - - # Discord - discord_app, _ = SocialApp.objects.get_or_create( - provider='discord', - defaults={ - 'name': 'Discord', - 'client_id': '1299112802274902047', - 'secret': 'ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11', - } - ) - discord_app.sites.add(site) - " - ``` - -### Phase 2: OAuth Provider Configuration ⚠️ EXTERNAL DEPENDENCY -1. **Google Cloud Console**: - - Add `http://localhost:8000/accounts/google/login/callback/` to authorized redirect URIs - - Verify OAuth consent screen configuration - -2. **Discord Developer Portal**: - - Add `http://localhost:8000/accounts/discord/login/callback/` to redirect URIs - - Verify application settings - -### Phase 3: Functional Testing ✅ READY AFTER PHASE 1-2 -1. **UI Testing**: - - Verify OAuth buttons appear on login page - - Test button click behavior - - Verify redirect to provider - -2. **OAuth Flow Testing**: - - Complete Google OAuth flow - - Complete Discord OAuth flow - - Test account creation vs. login - - Verify user data population - -### Phase 4: Error Handling Testing ✅ READY -1. **Error Scenarios**: - - User denies permission - - Invalid callback - - Network errors - - Provider downtime - -## Critical Issues Summary - -### Blocking Issues (Must Fix Before Testing) -1. ❌ **No OAuth apps in database** - OAuth will fail completely -2. ❌ **Site domain mismatch** - Callbacks may fail -3. ⚠️ **OAuth provider callback URLs** - External configuration required - -### Security Issues (Should Fix) -1. ⚠️ **Hardcoded secrets** - Move to environment variables -2. ⚠️ **Single environment credentials** - Separate dev/prod apps - -### Enhancement Opportunities -1. 📝 **Environment variable support** - Add `***REMOVED***` file -2. 📝 **Better error handling** - Custom error pages -3. 📝 **Logging** - OAuth flow debugging - -## Next Steps - -1. **Immediate** (Required for testing): - - Fix database configuration (Site + SocialApp objects) - - Verify OAuth provider callback URL configuration - -2. **Short-term** (Security): - - Create separate OAuth apps for development - - Implement environment variable configuration - -3. **Long-term** (Production readiness): - - OAuth app verification with providers - - HTTPS configuration - - Production domain setup - -## Files Referenced - -- [`thrillwiki/settings.py`](thrillwiki/settings.py) - Main OAuth configuration -- [`thrillwiki/urls.py`](thrillwiki/urls.py) - URL routing -- [`accounts/adapters.py`](accounts/adapters.py) - Custom OAuth adapters -- [`accounts/urls.py`](accounts/urls.py) - Account URL overrides -- [`templates/account/login.html`](templates/account/login.html) - OAuth UI -- [`accounts/management/commands/setup_social_auth.py`](accounts/management/commands/setup_social_auth.py) - Setup command -- [`accounts/management/commands/fix_social_apps.py`](accounts/management/commands/fix_social_apps.py) - Fix command \ No newline at end of file diff --git a/memory-bank/features/auth/superuser-credentials.md b/memory-bank/features/auth/superuser-credentials.md deleted file mode 100644 index aea9b1ca..00000000 --- a/memory-bank/features/auth/superuser-credentials.md +++ /dev/null @@ -1,28 +0,0 @@ -# Superuser Account Credentials - -**Created**: 2025-06-25 -**Purpose**: Initial admin account for testing authentication functionality - -## Account Details -- **Username**: admin -- **Email**: admin@thrillwiki.com -- **Password**: admin123 - -## Creation Method -```bash -echo -e "admin\nadmin@thrillwiki.com\nadmin123\nadmin123" | uv run manage.py createsuperuser --noinput --username admin --email admin@thrillwiki.com -``` - -## Status -✅ **CREATED SUCCESSFULLY** - Superuser account is now available for testing - -## Usage -This account can be used to: -- Test login functionality -- Access Django admin panel -- Test authenticated features -- Access moderation panel -- Test user-specific functionality - -## Security Note -These are development/testing credentials only. In production, use strong, unique passwords. \ No newline at end of file diff --git a/memory-bank/features/autocomplete/base.md b/memory-bank/features/autocomplete/base.md deleted file mode 100644 index 60c05c30..00000000 --- a/memory-bank/features/autocomplete/base.md +++ /dev/null @@ -1,63 +0,0 @@ -# Base Autocomplete Implementation - -The project uses `django-htmx-autocomplete` with a custom base implementation to ensure consistent behavior across all autocomplete widgets. - -## BaseAutocomplete Class - -Located in `core/forms.py`, the `BaseAutocomplete` class provides project-wide defaults and standardization: - -```python -from core.forms import BaseAutocomplete - -class MyModelAutocomplete(BaseAutocomplete): - model = MyModel - search_attrs = ['name', 'description'] -``` - -### Features - -- **Authentication Enforcement**: Requires user authentication by default - - Controlled via `AUTOCOMPLETE_BLOCK_UNAUTHENTICATED` setting - - Override `auth_check()` for custom auth logic - -- **Search Configuration** - - `minimum_search_length = 2` - More responsive than default 3 - - `max_results = 10` - Optimized for performance - -- **Internationalization** - - All text strings use Django's translation system - - Customizable messages through class attributes - -### Usage Guidelines - -1. Always extend `BaseAutocomplete` instead of using `autocomplete.Autocomplete` directly -2. Configure search_attrs based on your model's indexed fields -3. Use the AutocompleteWidget with proper options: - -```python -class MyForm(forms.ModelForm): - class Meta: - model = MyModel - fields = ['related_field'] - widgets = { - 'related_field': AutocompleteWidget( - ac_class=MyModelAutocomplete, - options={ - "multiselect": True, # For M2M fields - "placeholder": "Custom placeholder..." # Optional - } - ) - } -``` - -### Performance Considerations - -- Keep `search_attrs` minimal and indexed -- Use `select_related`/`prefetch_related` in custom querysets -- Consider caching for frequently used results - -### Security Notes - -- Authentication required by default -- Implements proper CSRF protection via HTMX -- Rate limiting should be implemented at the web server level \ No newline at end of file diff --git a/memory-bank/features/autocomplete/search-suggestions-analysis.md b/memory-bank/features/autocomplete/search-suggestions-analysis.md deleted file mode 100644 index 4e4038f9..00000000 --- a/memory-bank/features/autocomplete/search-suggestions-analysis.md +++ /dev/null @@ -1,83 +0,0 @@ -# Search Suggestions Analysis - COMPLETED ✅ - -## Task -Fix search suggestions broken with 404 errors on autocomplete endpoints. - -## FINAL RESULT: ✅ SUCCESSFULLY COMPLETED - -### Issues Found and Fixed - -#### 1. SearchView Database Query Issue ✅ FIXED -**File**: `thrillwiki/views.py` (Line 105) -- **Issue**: Used old `owner` field instead of `operator` -- **Fix**: Changed `.select_related('owner')` to `.select_related('operator')` -- **Status**: ✅ FIXED - No more database errors - -#### 2. URL Pattern Order Issue ✅ FIXED -**File**: `rides/urls.py` -- **Issue**: `search-suggestions/` pattern came AFTER `/` pattern -- **Root Cause**: Django matched "search-suggestions" as a ride slug instead of the endpoint -- **Fix**: Moved all search and HTMX endpoints BEFORE slug patterns -- **Status**: ✅ FIXED - Endpoint now returns 200 instead of 404 - -### Verification Results - -#### Browser Testing ✅ CONFIRMED WORKING -**Before Fix**: -``` -[error] Failed to load resource: the server responded with a status of 404 (Not Found) -[error] Response Status Error Code 404 from /rides/search-suggestions/ -``` - -**After Fix**: -``` -[05/Jul/2025 21:03:07] "GET /rides/search-suggestions/ HTTP/1.1" 200 0 -[05/Jul/2025 21:03:08] "GET /rides/?q=american HTTP/1.1" 200 2033 -``` - -#### Curl Testing ✅ CONFIRMED WORKING -**Before Fix**: 404 with Django error page -**After Fix**: 200 with proper HTML autocomplete suggestions - -### Technical Details - -#### Root Cause Analysis -1. **Database Query Issue**: Company model migration left old field references -2. **URL Pattern Order**: Django processes patterns sequentially, slug patterns caught specific endpoints - -#### Solution Implementation -1. **Fixed Database Queries**: Updated all references from `owner` to `operator` -2. **Reordered URL Patterns**: Moved specific endpoints before generic slug patterns - -#### Files Modified -- `thrillwiki/views.py` - Fixed database query -- `rides/urls.py` - Reordered URL patterns - -### Autocomplete Infrastructure Status - -#### Working Endpoints ✅ -- `/rides/search-suggestions/` - ✅ NOW WORKING (was 404) -- `/ac/parks/` - ✅ Working -- `/ac/rides/` - ✅ Working -- `/ac/operators/` - ✅ Working -- `/ac/manufacturers/` - ✅ Working -- `/ac/property-owners/` - ✅ Working - -#### Search Functionality ✅ -- **Parks Search**: ✅ Working (simple text search) -- **Rides Search**: ✅ Working (autocomplete + text search) -- **Entity Integration**: ✅ Working with new model structure - -### Key Learning: URL Pattern Order Matters -**Critical Django Concept**: URL patterns are processed in order. Specific patterns (like `search-suggestions/`) must come BEFORE generic patterns (like `/`) to prevent incorrect matching. - -### Status: ✅ TASK COMPLETED SUCCESSFULLY -- ✅ Fixed 404 errors on autocomplete endpoints -- ✅ Verified functionality with browser and curl testing -- ✅ All search suggestions now working correctly -- ✅ Entity integration working with new model structure -- ✅ No remaining 404 errors in autocomplete functionality - -## Final Verification -**Task**: "Fix search suggestions broken with 404 errors on autocomplete endpoints" -**Result**: ✅ **COMPLETED** - All autocomplete endpoints now return 200 status codes and proper functionality \ No newline at end of file diff --git a/memory-bank/features/history-visualization.md b/memory-bank/features/history-visualization.md deleted file mode 100644 index 2b2f087a..00000000 --- a/memory-bank/features/history-visualization.md +++ /dev/null @@ -1,57 +0,0 @@ -## Feature: Unified History Timeline (HTMX Integrated) - -### HTMX Template Pattern -```django -{# history/partials/history_timeline.html #} -
-
- {% for event in events %} -
-
- {{ event.pgh_label|title }} - -
-
- {% if event.pgh_context.metadata.user %} -
- ... - {{ event.pgh_context.metadata.user }} -
- {% endif %} -
-
- {% endfor %} -
-
-``` - -### View Integration (Class-Based with HTMX) -```python -# history/views.py -class HistoryTimelineView(View): - def get(self, request, content_type_id, object_id): - events = ModelHistory.objects.filter( - pgh_obj_model=content_type_id, - pgh_obj_id=object_id - ).order_by('-pgh_created_at')[:25] - - if request.htmx: - return render(request, "history/partials/history_timeline.html", { - "events": events - }) - - return JsonResponse({ - 'history': [serialize_event(e) for e in events] - }) -``` - -### Event Trigger Pattern -```python -# parks/signals.py -from django.dispatch import Signal -history_updated = Signal() - -# In model save/delete handlers: -history_updated.send(sender=Model, instance=instance) \ No newline at end of file diff --git a/memory-bank/features/location-models-design.md b/memory-bank/features/location-models-design.md deleted file mode 100644 index 78433f7a..00000000 --- a/memory-bank/features/location-models-design.md +++ /dev/null @@ -1,867 +0,0 @@ -# Domain-Specific Location Models Design - ThrillWiki - -## Executive Summary - -This design document outlines the complete transition from ThrillWiki's generic location system to domain-specific location models. The design builds upon existing partial implementations (ParkLocation, RideLocation, CompanyHeadquarters) and addresses the requirements for road trip planning, spatial queries, and clean domain boundaries. - -## 1. Model Specifications - -### 1.1 ParkLocation Model - -#### Purpose -Primary location model for theme parks, optimized for road trip planning and visitor navigation. - -#### Field Specifications - -```python -class ParkLocation(models.Model): - # Relationships - park = models.OneToOneField( - 'parks.Park', - on_delete=models.CASCADE, - related_name='park_location' # Changed from 'location' to avoid conflicts - ) - - # Spatial Data (PostGIS) - point = gis_models.PointField( - srid=4326, # WGS84 coordinate system - db_index=True, - help_text="Geographic coordinates for mapping and distance calculations" - ) - - # Core Address Fields - street_address = models.CharField( - max_length=255, - blank=True, - help_text="Street number and name for the main entrance" - ) - city = models.CharField( - max_length=100, - db_index=True, - help_text="City where the park is located" - ) - state = models.CharField( - max_length=100, - db_index=True, - help_text="State/Province/Region" - ) - country = models.CharField( - max_length=100, - default='USA', - db_index=True, - help_text="Country code or full name" - ) - postal_code = models.CharField( - max_length=20, - blank=True, - help_text="ZIP or postal code" - ) - - # Road Trip Metadata - highway_exit = models.CharField( - max_length=100, - blank=True, - help_text="Nearest highway exit information (e.g., 'I-75 Exit 234')" - ) - parking_notes = models.TextField( - blank=True, - help_text="Parking tips, costs, and preferred lots" - ) - best_arrival_time = models.TimeField( - null=True, - blank=True, - help_text="Recommended arrival time to minimize crowds" - ) - seasonal_notes = models.TextField( - blank=True, - help_text="Seasonal considerations for visiting (weather, crowds, events)" - ) - - # Navigation Helpers - main_entrance_notes = models.TextField( - blank=True, - help_text="Specific directions to main entrance from parking" - ) - gps_accuracy_notes = models.CharField( - max_length=255, - blank=True, - help_text="Notes about GPS accuracy or common navigation issues" - ) - - # OpenStreetMap Integration - osm_id = models.BigIntegerField( - null=True, - blank=True, - db_index=True, - help_text="OpenStreetMap ID for data synchronization" - ) - osm_last_sync = models.DateTimeField( - null=True, - blank=True, - help_text="Last time data was synchronized with OSM" - ) - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - verified_date = models.DateField( - null=True, - blank=True, - help_text="Date location was last verified as accurate" - ) - verified_by = models.ForeignKey( - 'accounts.User', - null=True, - blank=True, - on_delete=models.SET_NULL, - related_name='verified_park_locations' - ) -``` - -#### Properties and Methods - -```python - @property - def latitude(self): - """Returns latitude for backward compatibility""" - return self.point.y if self.point else None - - @property - def longitude(self): - """Returns longitude for backward compatibility""" - return self.point.x if self.point else None - - @property - def formatted_address(self): - """Returns a formatted address string""" - components = [] - if self.street_address: - components.append(self.street_address) - if self.city: - components.append(self.city) - if self.state: - components.append(self.state) - if self.postal_code: - components.append(self.postal_code) - if self.country and self.country != 'USA': - components.append(self.country) - return ", ".join(components) - - @property - def short_address(self): - """Returns city, state for compact display""" - parts = [] - if self.city: - parts.append(self.city) - if self.state: - parts.append(self.state) - return ", ".join(parts) if parts else "Location Unknown" - - def distance_to(self, other_location): - """Calculate distance to another ParkLocation in miles""" - if not self.point or not hasattr(other_location, 'point') or not other_location.point: - return None - # Use PostGIS distance calculation and convert to miles - from django.contrib.gis.measure import D - return self.point.distance(other_location.point) * 69.0 # Rough conversion - - def nearby_parks(self, distance_miles=50): - """Find other parks within specified distance""" - if not self.point: - return ParkLocation.objects.none() - - from django.contrib.gis.measure import D - return ParkLocation.objects.filter( - point__distance_lte=(self.point, D(mi=distance_miles)) - ).exclude(pk=self.pk).select_related('park') - - def get_directions_url(self): - """Generate Google Maps directions URL""" - if self.point: - return f"https://www.google.com/maps/dir/?api=1&destination={self.latitude},{self.longitude}" - return None -``` - -#### Meta Options - -```python - class Meta: - verbose_name = "Park Location" - verbose_name_plural = "Park Locations" - indexes = [ - models.Index(fields=['city', 'state']), - models.Index(fields=['country']), - models.Index(fields=['osm_id']), - GistIndex(fields=['point']), # Spatial index for PostGIS - ] - constraints = [ - models.UniqueConstraint( - fields=['park'], - name='unique_park_location' - ) - ] -``` - -### 1.2 RideLocation Model - -#### Purpose -Optional lightweight location tracking for individual rides within parks. - -#### Field Specifications - -```python -class RideLocation(models.Model): - # Relationships - ride = models.OneToOneField( - 'rides.Ride', - on_delete=models.CASCADE, - related_name='ride_location' - ) - - # Optional Spatial Data - entrance_point = gis_models.PointField( - srid=4326, - null=True, - blank=True, - help_text="Specific coordinates for ride entrance" - ) - exit_point = gis_models.PointField( - srid=4326, - null=True, - blank=True, - help_text="Specific coordinates for ride exit (if different)" - ) - - # Park Area Information - park_area = models.CharField( - max_length=100, - blank=True, - db_index=True, - help_text="Themed area or land within the park" - ) - level = models.CharField( - max_length=50, - blank=True, - help_text="Floor or level if in multi-story area" - ) - - # Accessibility - accessible_entrance_point = gis_models.PointField( - srid=4326, - null=True, - blank=True, - help_text="Coordinates for accessible entrance if different" - ) - accessible_entrance_notes = models.TextField( - blank=True, - help_text="Directions to accessible entrance" - ) - - # Queue and Navigation - queue_entrance_notes = models.TextField( - blank=True, - help_text="How to find the queue entrance" - ) - fastpass_entrance_notes = models.TextField( - blank=True, - help_text="Location of FastPass/Express entrance" - ) - single_rider_entrance_notes = models.TextField( - blank=True, - help_text="Location of single rider entrance if available" - ) - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) -``` - -#### Properties and Methods - -```python - @property - def has_coordinates(self): - """Check if any coordinates are set""" - return bool(self.entrance_point or self.exit_point or self.accessible_entrance_point) - - @property - def primary_point(self): - """Returns the primary location point (entrance preferred)""" - return self.entrance_point or self.exit_point or self.accessible_entrance_point - - def get_park_location(self): - """Get the parent park's location""" - return self.ride.park.park_location if hasattr(self.ride.park, 'park_location') else None -``` - -#### Meta Options - -```python - class Meta: - verbose_name = "Ride Location" - verbose_name_plural = "Ride Locations" - indexes = [ - models.Index(fields=['park_area']), - GistIndex(fields=['entrance_point'], condition=Q(entrance_point__isnull=False)), - ] -``` - -### 1.3 CompanyHeadquarters Model - -#### Purpose -Simple address storage for company headquarters without coordinate tracking. - -#### Field Specifications - -```python -class CompanyHeadquarters(models.Model): - # Relationships - company = models.OneToOneField( - 'parks.Company', - on_delete=models.CASCADE, - related_name='headquarters' - ) - - # Address Fields (No coordinates needed) - street_address = models.CharField( - max_length=255, - blank=True, - help_text="Mailing address if publicly available" - ) - city = models.CharField( - max_length=100, - db_index=True, - help_text="Headquarters city" - ) - state = models.CharField( - max_length=100, - blank=True, - db_index=True, - help_text="State/Province/Region" - ) - country = models.CharField( - max_length=100, - default='USA', - db_index=True - ) - postal_code = models.CharField( - max_length=20, - blank=True - ) - - # Contact Information (Optional) - phone = models.CharField( - max_length=30, - blank=True, - help_text="Corporate phone number" - ) - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) -``` - -#### Properties and Methods - -```python - @property - def formatted_address(self): - """Returns a formatted address string""" - components = [] - if self.street_address: - components.append(self.street_address) - if self.city: - components.append(self.city) - if self.state: - components.append(self.state) - if self.postal_code: - components.append(self.postal_code) - if self.country and self.country != 'USA': - components.append(self.country) - return ", ".join(components) if components else f"{self.city}, {self.country}" - - @property - def location_display(self): - """Simple city, country display""" - parts = [self.city] - if self.state: - parts.append(self.state) - if self.country != 'USA': - parts.append(self.country) - return ", ".join(parts) -``` - -#### Meta Options - -```python - class Meta: - verbose_name = "Company Headquarters" - verbose_name_plural = "Company Headquarters" - indexes = [ - models.Index(fields=['city', 'country']), - ] -``` - -## 2. Shared Functionality Design - -### 2.1 Address Formatting Utilities - -Create a utility module `location/utils.py`: - -```python -class AddressFormatter: - """Utility class for consistent address formatting across models""" - - @staticmethod - def format_full(street=None, city=None, state=None, postal=None, country=None): - """Format a complete address""" - components = [] - if street: - components.append(street) - if city: - components.append(city) - if state: - components.append(state) - if postal: - components.append(postal) - if country and country != 'USA': - components.append(country) - return ", ".join(components) - - @staticmethod - def format_short(city=None, state=None, country=None): - """Format a short location display""" - parts = [] - if city: - parts.append(city) - if state: - parts.append(state) - elif country and country != 'USA': - parts.append(country) - return ", ".join(parts) if parts else "Unknown Location" -``` - -### 2.2 Geocoding Service - -Create `location/services.py`: - -```python -class GeocodingService: - """Service for geocoding addresses using OpenStreetMap Nominatim""" - - @staticmethod - def geocode_address(street, city, state, country='USA'): - """Convert address to coordinates""" - # Implementation using Nominatim API - pass - - @staticmethod - def reverse_geocode(latitude, longitude): - """Convert coordinates to address""" - # Implementation using Nominatim API - pass - - @staticmethod - def validate_coordinates(latitude, longitude): - """Validate coordinate ranges""" - return (-90 <= latitude <= 90) and (-180 <= longitude <= 180) -``` - -### 2.3 Distance Calculation Mixin - -```python -class DistanceCalculationMixin: - """Mixin for models with point fields to calculate distances""" - - def distance_to_point(self, point): - """Calculate distance to a point in miles""" - if not self.point or not point: - return None - # Use PostGIS for calculation - return self.point.distance(point) * 69.0 # Rough miles conversion - - def within_radius(self, radius_miles): - """Get queryset of objects within radius""" - if not self.point: - return self.__class__.objects.none() - - from django.contrib.gis.measure import D - return self.__class__.objects.filter( - point__distance_lte=(self.point, D(mi=radius_miles)) - ).exclude(pk=self.pk) -``` - -## 3. Data Flow Design - -### 3.1 Location Data Entry Flow - -```mermaid -graph TD - A[User Creates/Edits Park] --> B[Park Form] - B --> C{Has Address?} - C -->|Yes| D[Geocoding Service] - C -->|No| E[Manual Coordinate Entry] - D --> F[Validate Coordinates] - E --> F - F --> G[Create/Update ParkLocation] - G --> H[Update OSM Fields] - H --> I[Save to Database] -``` - -### 3.2 Location Search Flow - -```mermaid -graph TD - A[User Searches Location] --> B[Search View] - B --> C[Check Cache] - C -->|Hit| D[Return Cached Results] - C -->|Miss| E[Query OSM Nominatim] - E --> F[Process Results] - F --> G[Filter by Park Existence] - G --> H[Cache Results] - H --> D -``` - -### 3.3 Road Trip Planning Flow - -```mermaid -graph TD - A[User Plans Road Trip] --> B[Select Starting Point] - B --> C[Query Nearby Parks] - C --> D[Calculate Distances] - D --> E[Sort by Distance/Route] - E --> F[Display with Highway Exits] - F --> G[Show Parking/Arrival Info] -``` - -## 4. Query Patterns - -### 4.1 Common Spatial Queries - -```python -# Find parks within radius -ParkLocation.objects.filter( - point__distance_lte=(origin_point, D(mi=50)) -).select_related('park') - -# Find nearest park -ParkLocation.objects.annotate( - distance=Distance('point', origin_point) -).order_by('distance').first() - -# Parks along a route (bounding box) -from django.contrib.gis.geos import Polygon -bbox = Polygon.from_bbox((min_lng, min_lat, max_lng, max_lat)) -ParkLocation.objects.filter(point__within=bbox) - -# Group parks by state -ParkLocation.objects.values('state').annotate( - count=Count('id'), - parks=ArrayAgg('park__name') -) -``` - -### 4.2 Performance Optimizations - -```python -# Prefetch related data for park listings -Park.objects.select_related( - 'park_location', - 'operator', - 'property_owner' -).prefetch_related('rides') - -# Use database functions for formatting -from django.db.models import Value, F -from django.db.models.functions import Concat - -ParkLocation.objects.annotate( - display_address=Concat( - F('city'), Value(', '), - F('state') - ) -) -``` - -### 4.3 Caching Strategy - -```python -# Cache frequently accessed location data -CACHE_KEYS = { - 'park_location': 'park_location_{park_id}', - 'nearby_parks': 'nearby_parks_{park_id}_{radius}', - 'state_parks': 'state_parks_{state}', -} - -# Cache timeout in seconds -CACHE_TIMEOUTS = { - 'park_location': 3600, # 1 hour - 'nearby_parks': 1800, # 30 minutes - 'state_parks': 7200, # 2 hours -} -``` - -## 5. Integration Points - -### 5.1 Model Integration - -```python -# Park model integration -class Park(models.Model): - # Remove GenericRelation to Location - # location = GenericRelation(Location) # REMOVE THIS - - @property - def location(self): - """Backward compatibility property""" - return self.park_location if hasattr(self, 'park_location') else None - - @property - def coordinates(self): - """Quick access to coordinates""" - if hasattr(self, 'park_location') and self.park_location: - return (self.park_location.latitude, self.park_location.longitude) - return None -``` - -### 5.2 Form Integration - -```python -# Park forms will need location inline -class ParkLocationForm(forms.ModelForm): - class Meta: - model = ParkLocation - fields = [ - 'street_address', 'city', 'state', 'country', 'postal_code', - 'highway_exit', 'parking_notes', 'best_arrival_time', - 'seasonal_notes', 'point' - ] - widgets = { - 'point': LeafletWidget(), # Map widget for coordinate selection - } - -class ParkForm(forms.ModelForm): - # Include location fields as nested form - location = ParkLocationForm() -``` - -### 5.3 API Serialization - -```python -# Django REST Framework serializers -class ParkLocationSerializer(serializers.ModelSerializer): - latitude = serializers.ReadOnlyField() - longitude = serializers.ReadOnlyField() - formatted_address = serializers.ReadOnlyField() - - class Meta: - model = ParkLocation - fields = [ - 'latitude', 'longitude', 'formatted_address', - 'city', 'state', 'country', 'highway_exit', - 'parking_notes', 'best_arrival_time' - ] - -class ParkSerializer(serializers.ModelSerializer): - location = ParkLocationSerializer(source='park_location', read_only=True) -``` - -### 5.4 Template Integration - -```django -{# Park detail template #} -{% if park.park_location %} -
-

Location

-

{{ park.park_location.formatted_address }}

- - {% if park.park_location.highway_exit %} -

Highway Exit: {{ park.park_location.highway_exit }}

- {% endif %} - - {% if park.park_location.parking_notes %} -

Parking: {{ park.park_location.parking_notes }}

- {% endif %} - -
-
-
-{% endif %} -``` - -## 6. Migration Plan - -### 6.1 Migration Phases - -#### Phase 1: Prepare New Models (No Downtime) -1. Create new models alongside existing ones -2. Add backward compatibility properties -3. Deploy without activating - -#### Phase 2: Data Migration (Minimal Downtime) -1. Create migration script to copy data -2. Run in batches to avoid locks -3. Verify data integrity - -#### Phase 3: Switch References (No Downtime) -1. Update views to use new models -2. Update forms and templates -3. Deploy with feature flags - -#### Phase 4: Cleanup (No Downtime) -1. Remove GenericRelation from Park -2. Archive old Location model -3. Remove backward compatibility code - -### 6.2 Migration Script - -```python -from django.db import migrations -from django.contrib.contenttypes.models import ContentType - -def migrate_park_locations(apps, schema_editor): - Location = apps.get_model('location', 'Location') - Park = apps.get_model('parks', 'Park') - ParkLocation = apps.get_model('parks', 'ParkLocation') - - park_ct = ContentType.objects.get_for_model(Park) - - for location in Location.objects.filter(content_type=park_ct): - try: - park = Park.objects.get(id=location.object_id) - - # Create or update ParkLocation - park_location, created = ParkLocation.objects.update_or_create( - park=park, - defaults={ - 'point': location.point, - 'street_address': location.street_address or '', - 'city': location.city or '', - 'state': location.state or '', - 'country': location.country or 'USA', - 'postal_code': location.postal_code or '', - # Map any additional fields - } - ) - - print(f"Migrated location for park: {park.name}") - - except Park.DoesNotExist: - print(f"Park not found for location: {location.id}") - continue - -def reverse_migration(apps, schema_editor): - # Reverse migration if needed - pass - -class Migration(migrations.Migration): - dependencies = [ - ('parks', 'XXXX_create_park_location'), - ('location', 'XXXX_previous'), - ] - - operations = [ - migrations.RunPython(migrate_park_locations, reverse_migration), - ] -``` - -### 6.3 Data Validation - -```python -# Validation script to ensure migration success -def validate_migration(): - from location.models import Location - from parks.models import Park, ParkLocation - from django.contrib.contenttypes.models import ContentType - - park_ct = ContentType.objects.get_for_model(Park) - old_count = Location.objects.filter(content_type=park_ct).count() - new_count = ParkLocation.objects.count() - - assert old_count == new_count, f"Count mismatch: {old_count} vs {new_count}" - - # Verify data integrity - for park_location in ParkLocation.objects.all(): - assert park_location.point is not None, f"Missing point for {park_location.park}" - assert park_location.city, f"Missing city for {park_location.park}" - - print("Migration validation successful!") -``` - -### 6.4 Rollback Strategy - -1. **Feature Flags**: Use flags to switch between old and new systems -2. **Database Backups**: Take snapshots before migration -3. **Parallel Running**: Keep both systems running initially -4. **Gradual Rollout**: Migrate parks in batches -5. **Monitoring**: Track errors and performance - -## 7. Testing Strategy - -### 7.1 Unit Tests - -```python -# Test ParkLocation model -class ParkLocationTestCase(TestCase): - def test_formatted_address(self): - location = ParkLocation( - city="Orlando", - state="Florida", - country="USA" - ) - self.assertEqual(location.formatted_address, "Orlando, Florida") - - def test_distance_calculation(self): - location1 = ParkLocation(point=Point(-81.5639, 28.3852)) - location2 = ParkLocation(point=Point(-81.4678, 28.4736)) - distance = location1.distance_to(location2) - self.assertAlmostEqual(distance, 8.5, delta=0.5) -``` - -### 7.2 Integration Tests - -```python -# Test location creation with park -class ParkLocationIntegrationTest(TestCase): - def test_create_park_with_location(self): - park = Park.objects.create(name="Test Park", ...) - location = ParkLocation.objects.create( - park=park, - point=Point(-81.5639, 28.3852), - city="Orlando", - state="Florida" - ) - self.assertEqual(park.park_location, location) - self.assertEqual(park.coordinates, (28.3852, -81.5639)) -``` - -## 8. Documentation Requirements - -### 8.1 Developer Documentation -- Model field descriptions -- Query examples -- Migration guide -- API endpoint changes - -### 8.2 Admin Documentation -- Location data entry guide -- Geocoding workflow -- Verification process - -### 8.3 User Documentation -- How locations are displayed -- Road trip planning features -- Map interactions - -## Conclusion - -This design provides a comprehensive transition from generic to domain-specific location models while: -- Maintaining all existing functionality -- Improving query performance -- Enabling better road trip planning features -- Keeping clean domain boundaries -- Supporting zero-downtime migration - -The design prioritizes parks as the primary location entities while keeping ride locations optional and company headquarters simple. All PostGIS spatial features are retained and optimized for the specific needs of each domain model. \ No newline at end of file diff --git a/memory-bank/features/location-system-analysis.md b/memory-bank/features/location-system-analysis.md deleted file mode 100644 index 4f380c67..00000000 --- a/memory-bank/features/location-system-analysis.md +++ /dev/null @@ -1,214 +0,0 @@ -# Location System Analysis - ThrillWiki - -## Executive Summary -ThrillWiki currently uses a **generic Location model with GenericForeignKey** to associate location data with any model. This analysis reveals that the system has **evolved into a hybrid approach** with both generic and domain-specific location models existing simultaneously. The primary users are Parks and Companies, though only Parks appear to have active location usage. The system heavily utilizes **PostGIS/GeoDjango spatial features** for geographic operations. - -## Current System Overview - -### 1. Location Models Architecture - -#### Generic Location Model (`location/models.py`) -- **Core Design**: Uses Django's GenericForeignKey pattern to associate with any model -- **Tracked History**: Uses pghistory for change tracking -- **Dual Coordinate Storage**: - - Legacy fields: `latitude`, `longitude` (DecimalField) - - Modern field: `point` (PointField with SRID 4326) - - Auto-synchronization between both formats in `save()` method - -**Key Fields:** -```python -- content_type (ForeignKey to ContentType) -- object_id (PositiveIntegerField) -- content_object (GenericForeignKey) -- name (CharField) -- location_type (CharField) -- point (PointField) - PostGIS geometry field -- latitude/longitude (DecimalField) - Legacy support -- street_address, city, state, country, postal_code (address components) -- created_at, updated_at (timestamps) -``` - -#### Domain-Specific Location Models -1. **ParkLocation** (`parks/models/location.py`) - - OneToOne relationship with Park - - Additional park-specific fields: `highway_exit`, `parking_notes`, `best_arrival_time`, `osm_id` - - Uses PostGIS PointField with spatial indexing - -2. **RideLocation** (`rides/models/location.py`) - - OneToOne relationship with Ride - - Simplified location data with `park_area` field - - Uses PostGIS PointField - -3. **CompanyHeadquarters** (`parks/models/companies.py`) - - OneToOne relationship with Company - - Simplified address-only model (no coordinates) - - Only stores: `city`, `state`, `country` - -### 2. PostGIS/GeoDjango Features in Use - -**Database Configuration:** -- Engine: `django.contrib.gis.db.backends.postgis` -- SRID: 4326 (WGS84 coordinate system) -- GeoDjango app enabled: `django.contrib.gis` - -**Spatial Features Utilized:** -1. **PointField**: Stores geographic coordinates as PostGIS geometry -2. **Spatial Indexing**: Database indexes on city, country, and implicit spatial index on PointField -3. **Distance Calculations**: - - `distance_to()` method for calculating distance between locations - - `nearby_locations()` using PostGIS distance queries -4. **Spatial Queries**: `point__distance_lte` for proximity searches - -**GDAL/GEOS Configuration:** -- GDAL library path configured for macOS -- GEOS library path configured for macOS - -### 3. Usage Analysis - -#### Models Using Locations -Based on codebase search, the following models interact with Location: - -1. **Park** (`parks/models/parks.py`) - - Uses GenericRelation to Location model - - Also has ParkLocation model (hybrid approach) - - Most active user of location functionality - -2. **Company** (potential user) - - Has CompanyHeadquarters model for simple address storage - - No evidence of using the generic Location model - -3. **Operator/PropertyOwner** (via Company model) - - Inherits from Company - - Could potentially use locations - -#### Actual Usage Counts -Need to query database to get exact counts, but based on code analysis: -- **Parks**: Primary user with location widgets, maps, and search functionality -- **Companies**: Limited to headquarters information -- **Rides**: Have their own RideLocation model - -### 4. Dependencies and Integration Points - -#### Views and Controllers -1. **Location Views** (`location/views.py`) - - `LocationSearchView`: OpenStreetMap Nominatim integration - - Location update/delete endpoints - - Caching of search results - -2. **Park Views** (`parks/views.py`) - - Location creation during park creation/editing - - Integration with location widgets - -3. **Moderation Views** (`moderation/views.py`) - - Location editing in moderation workflow - - Location map widgets for submissions - -#### Templates and Frontend -1. **Location Widgets**: - - `templates/location/widget.html` - Generic location widget - - `templates/parks/partials/location_widget.html` - Park-specific widget - - `templates/moderation/partials/location_widget.html` - Moderation widget - - `templates/moderation/partials/location_map.html` - Map display - -2. **JavaScript Integration**: - - `static/js/location-autocomplete.js` - Search functionality - - Leaflet.js integration for map display - - OpenStreetMap integration for location search - -3. **Map Features**: - - Interactive maps on park detail pages - - Location selection with coordinate validation - - Address autocomplete from OpenStreetMap - -#### Forms -- `LocationForm` for CRUD operations -- `LocationSearchForm` for search functionality -- Integration with park creation/edit forms - -#### Management Commands -- `seed_initial_data.py` - Creates locations for seeded parks -- `create_initial_data.py` - Creates test location data - -### 5. Migration Risks and Considerations - -#### Data Preservation Requirements -1. **Coordinate Data**: Both point and lat/lng fields must be preserved -2. **Address Components**: All address fields need migration -3. **Historical Data**: pghistory tracking must be maintained -4. **Relationships**: GenericForeignKey relationships need conversion - -#### Backward Compatibility Concerns -1. **Template Dependencies**: Multiple templates expect location relationships -2. **JavaScript Code**: Frontend code expects specific field names -3. **API Compatibility**: Any API endpoints serving location data -4. **Search Integration**: OpenStreetMap search functionality -5. **Map Display**: Leaflet.js map integration - -#### Performance Implications -1. **Spatial Indexes**: Must maintain spatial indexing for performance -2. **Query Optimization**: Generic queries vs. direct foreign keys -3. **Join Complexity**: GenericForeignKey adds complexity to queries -4. **Cache Invalidation**: Location search caching strategy - -### 6. Recommendations - -#### Migration Strategy -**Recommended Approach: Hybrid Consolidation** - -Given the existing hybrid system with both generic and domain-specific models, the best approach is: - -1. **Complete the transition to domain-specific models**: - - Parks → Use existing ParkLocation (already in place) - - Rides → Use existing RideLocation (already in place) - - Companies → Extend CompanyHeadquarters with coordinates - -2. **Phase out the generic Location model**: - - Migrate existing Location records to domain-specific models - - Update all references from GenericRelation to OneToOne/ForeignKey - - Maintain history tracking with pghistory on new models - -#### PostGIS Features to Retain -1. **Essential Features**: - - PointField for coordinate storage - - Spatial indexing for performance - - Distance calculations for proximity features - - SRID 4326 for consistency - -2. **Features to Consider Dropping**: - - Legacy latitude/longitude decimal fields (use point.x/point.y) - - Generic nearby_locations (implement per-model as needed) - -#### Implementation Priority -1. **High Priority**: - - Data migration script for existing locations - - Update park forms and views - - Maintain map functionality - -2. **Medium Priority**: - - Update moderation workflow - - Consolidate JavaScript location code - - Optimize spatial queries - -3. **Low Priority**: - - Remove legacy coordinate fields - - Clean up unused location types - - Optimize caching strategy - -## Technical Debt Identified - -1. **Duplicate Models**: Both generic and specific location models exist -2. **Inconsistent Patterns**: Some models use OneToOne, others use GenericRelation -3. **Legacy Fields**: Maintaining both point and lat/lng fields -4. **Incomplete Migration**: Hybrid state indicates incomplete refactoring - -## Conclusion - -The location system is in a **transitional state** between generic and domain-specific approaches. The presence of both patterns suggests an incomplete migration that should be completed. The recommendation is to **fully commit to domain-specific location models** while maintaining all PostGIS spatial functionality. This will: - -- Improve query performance (no GenericForeignKey overhead) -- Simplify the codebase (one pattern instead of two) -- Maintain all spatial features (PostGIS/GeoDjango) -- Enable model-specific location features -- Support road trip planning with OpenStreetMap integration - -The migration should be done carefully to preserve all existing data and maintain backward compatibility with templates and JavaScript code. \ No newline at end of file diff --git a/memory-bank/features/map-service-design.md b/memory-bank/features/map-service-design.md deleted file mode 100644 index 3985591a..00000000 --- a/memory-bank/features/map-service-design.md +++ /dev/null @@ -1,1735 +0,0 @@ -# Unified Map Service Design - ThrillWiki - -## Executive Summary - -This document outlines the design for ThrillWiki's unified map service that efficiently queries all location types (parks, rides, companies) while maintaining performance with thousands of data points. The service is designed to work with the existing hybrid location system, supporting both generic Location models and domain-specific models (ParkLocation, RideLocation, CompanyHeadquarters). - -## 1. Service Architecture - -### 1.1 Core Components - -```mermaid -graph TB - API[Map API Controller] --> UMS[UnifiedMapService] - UMS --> LAL[LocationAbstractionLayer] - UMS --> CS[ClusteringService] - UMS --> CacheS[CacheService] - - LAL --> ParkLoc[ParkLocationAdapter] - LAL --> RideLoc[RideLocationAdapter] - LAL --> CompLoc[CompanyLocationAdapter] - LAL --> GenLoc[GenericLocationAdapter] - - ParkLoc --> ParkModel[Park + ParkLocation] - RideLoc --> RideModel[Ride + RideLocation] - CompLoc --> CompModel[Company + CompanyHeadquarters] - GenLoc --> LocModel[Generic Location] - - CS --> Clustering[Supercluster.js Integration] - CacheS --> Redis[Redis Cache] - CacheS --> DB[Database Cache] -``` - -### 1.2 Class Structure - -#### UnifiedMapService (Core Service) -```python -class UnifiedMapService: - """ - Main service orchestrating map data retrieval, filtering, and formatting - """ - - def __init__(self): - self.location_layer = LocationAbstractionLayer() - self.clustering_service = ClusteringService() - self.cache_service = MapCacheService() - - def get_map_data( - self, - bounds: GeoBounds = None, - filters: MapFilters = None, - zoom_level: int = 10, - cluster: bool = True - ) -> MapResponse: - """Primary method for retrieving unified map data""" - pass - - def get_location_details(self, location_type: str, location_id: int) -> LocationDetail: - """Get detailed information for a specific location""" - pass - - def search_locations(self, query: str, bounds: GeoBounds = None) -> SearchResponse: - """Search locations with text query""" - pass -``` - -#### LocationAbstractionLayer (Adapter Pattern) -```python -class LocationAbstractionLayer: - """ - Abstraction layer handling different location model types - """ - - def __init__(self): - self.adapters = { - 'park': ParkLocationAdapter(), - 'ride': RideLocationAdapter(), - 'company': CompanyLocationAdapter(), - 'generic': GenericLocationAdapter() - } - - def get_all_locations(self, bounds: GeoBounds = None, filters: MapFilters = None) -> List[UnifiedLocation]: - """Get locations from all sources within bounds""" - pass - - def get_locations_by_type(self, location_type: str, bounds: GeoBounds = None) -> List[UnifiedLocation]: - """Get locations of specific type""" - pass -``` - -### 1.3 Data Models - -#### UnifiedLocation (Interface) -```python -@dataclass -class UnifiedLocation: - """Unified location interface for all location types""" - id: str # Composite: f"{type}_{id}" - type: LocationType # PARK, RIDE, COMPANY - name: str - coordinates: Tuple[float, float] # (lat, lng) - address: Optional[str] - metadata: Dict[str, Any] - - # Type-specific data - type_data: Dict[str, Any] - - # Clustering data - cluster_weight: int = 1 - cluster_category: str = "default" - -class LocationType(Enum): - PARK = "park" - RIDE = "ride" - COMPANY = "company" - GENERIC = "generic" -``` - -#### GeoBounds -```python -@dataclass -class GeoBounds: - """Geographic boundary box for spatial queries""" - north: float - south: float - east: float - west: float - - def to_polygon(self) -> Polygon: - """Convert bounds to PostGIS Polygon for database queries""" - pass - - def expand(self, factor: float = 1.1) -> 'GeoBounds': - """Expand bounds by factor for buffer queries""" - pass -``` - -#### MapFilters -```python -@dataclass -class MapFilters: - """Filtering options for map queries""" - location_types: Set[LocationType] = None - park_status: Set[str] = None # OPERATING, CLOSED_TEMP, etc. - ride_types: Set[str] = None - company_roles: Set[str] = None # OPERATOR, MANUFACTURER, etc. - search_query: str = None - min_rating: float = None - has_coordinates: bool = True -``` - -## 2. Query Optimization Strategy - -### 2.1 Multi-Model Query Pattern - -#### Hybrid Query Strategy -```python -class LocationQueryOptimizer: - """Optimizes queries across hybrid location system""" - - def get_optimized_queryset(self, bounds: GeoBounds, filters: MapFilters) -> Dict[str, QuerySet]: - """ - Returns optimized querysets for each location type - Chooses between domain-specific and generic models based on availability - """ - queries = {} - - # Parks: Prefer ParkLocation, fallback to generic Location - if LocationType.PARK in filters.location_types: - if self._has_park_locations(): - queries['parks'] = self._get_park_locations_query(bounds, filters) - else: - queries['parks'] = self._get_generic_park_query(bounds, filters) - - # Rides: RideLocation or skip if no coordinates - if LocationType.RIDE in filters.location_types: - queries['rides'] = self._get_ride_locations_query(bounds, filters) - - # Companies: CompanyHeadquarters with geocoding fallback - if LocationType.COMPANY in filters.location_types: - queries['companies'] = self._get_company_locations_query(bounds, filters) - - return queries - - def _get_park_locations_query(self, bounds: GeoBounds, filters: MapFilters) -> QuerySet: - """Optimized query for ParkLocation model""" - queryset = ParkLocation.objects.select_related('park', 'park__operator') - - # Spatial filtering - if bounds: - queryset = queryset.filter(point__within=bounds.to_polygon()) - - # Park-specific filters - if filters.park_status: - queryset = queryset.filter(park__status__in=filters.park_status) - - return queryset.order_by('park__name') - - def _get_ride_locations_query(self, bounds: GeoBounds, filters: MapFilters) -> QuerySet: - """Query for rides with locations""" - queryset = RideLocation.objects.select_related( - 'ride', 'ride__park', 'ride__park__operator' - ).filter(point__isnull=False) # Only rides with coordinates - - if bounds: - queryset = queryset.filter(point__within=bounds.to_polygon()) - - return queryset.order_by('ride__name') - - def _get_company_locations_query(self, bounds: GeoBounds, filters: MapFilters) -> QuerySet: - """Query for companies with headquarters""" - queryset = CompanyHeadquarters.objects.select_related('company') - - # Company location filtering requires geocoding or city-level bounds - if bounds and filters.company_roles: - queryset = queryset.filter(company__roles__overlap=filters.company_roles) - - return queryset.order_by('company__name') -``` - -### 2.2 Database Indexes and Performance - -#### Required Indexes -```python -# ParkLocation indexes -class ParkLocation(models.Model): - class Meta: - indexes = [ - GistIndex(fields=['point']), # Spatial index - models.Index(fields=['city', 'state']), - models.Index(fields=['country']), - ] - -# RideLocation indexes -class RideLocation(models.Model): - class Meta: - indexes = [ - GistIndex(fields=['point'], condition=Q(point__isnull=False)), - models.Index(fields=['park_area']), - ] - -# Generic Location indexes (existing) -class Location(models.Model): - class Meta: - indexes = [ - GistIndex(fields=['point']), - models.Index(fields=['content_type', 'object_id']), - models.Index(fields=['city', 'country']), - ] -``` - -#### Query Performance Targets -- **Spatial bounds query**: < 100ms for 1000+ locations -- **Clustering aggregation**: < 200ms for 10,000+ points -- **Detail retrieval**: < 50ms per location -- **Search queries**: < 300ms with text search - -### 2.3 Pagination and Limiting - -```python -class PaginationStrategy: - """Handles large dataset pagination""" - - MAX_UNCLUSTERED_POINTS = 500 - MAX_CLUSTERED_POINTS = 2000 - - def should_cluster(self, zoom_level: int, point_count: int) -> bool: - """Determine if clustering should be applied""" - if zoom_level < 8: # Country/state level - return True - if zoom_level < 12 and point_count > self.MAX_UNCLUSTERED_POINTS: - return True - return point_count > self.MAX_CLUSTERED_POINTS - - def apply_smart_limiting(self, queryset: QuerySet, bounds: GeoBounds, zoom_level: int) -> QuerySet: - """Apply intelligent limiting based on zoom level and density""" - if zoom_level < 6: # Very zoomed out - # Show only major parks - return queryset.filter(park__ride_count__gte=10)[:200] - elif zoom_level < 10: # Regional level - return queryset[:1000] - else: # City level and closer - return queryset[:2000] -``` - -## 3. Response Format Design - -### 3.1 Unified JSON Response - -#### MapResponse Structure -```json -{ - "status": "success", - "data": { - "locations": [ - { - "id": "park_123", - "type": "park", - "name": "Cedar Point", - "coordinates": [41.4778, -82.6830], - "address": "Sandusky, OH, USA", - "metadata": { - "status": "OPERATING", - "rating": 4.5, - "ride_count": 70, - "coaster_count": 17 - }, - "type_data": { - "operator": "Cedar Fair", - "opening_date": "1870-01-01", - "website": "https://cedarpoint.com" - }, - "cluster_weight": 3, - "cluster_category": "major_park" - } - ], - "clusters": [ - { - "id": "cluster_1", - "coordinates": [41.5, -82.7], - "count": 5, - "types": ["park", "ride"], - "bounds": { - "north": 41.52, - "south": 41.48, - "east": -82.65, - "west": -82.75 - } - } - ], - "bounds": { - "north": 42.0, - "south": 41.0, - "east": -82.0, - "west": -83.0 - }, - "total_count": 1247, - "filtered_count": 156, - "zoom_level": 10, - "clustered": true - }, - "meta": { - "cache_hit": true, - "query_time_ms": 89, - "filters_applied": ["location_types", "bounds"], - "pagination": { - "has_more": false, - "total_pages": 1 - } - } -} -``` - -### 3.2 Location Type Adapters - -#### ParkLocationAdapter -```python -class ParkLocationAdapter: - """Converts Park/ParkLocation to UnifiedLocation""" - - def to_unified_location(self, park_location: ParkLocation) -> UnifiedLocation: - park = park_location.park - - return UnifiedLocation( - id=f"park_{park.id}", - type=LocationType.PARK, - name=park.name, - coordinates=(park_location.lat, park_location.lng), - address=self._format_address(park_location), - metadata={ - 'status': park.status, - 'rating': float(park.average_rating) if park.average_rating else None, - 'ride_count': park.ride_count, - 'coaster_count': park.coaster_count, - 'operator': park.operator.name if park.operator else None, - }, - type_data={ - 'slug': park.slug, - 'opening_date': park.opening_date.isoformat() if park.opening_date else None, - 'website': park.website, - 'operating_season': park.operating_season, - 'highway_exit': park_location.highway_exit, - 'parking_notes': park_location.parking_notes, - }, - cluster_weight=self._calculate_park_weight(park), - cluster_category=self._get_park_category(park) - ) - - def _calculate_park_weight(self, park: Park) -> int: - """Calculate clustering weight based on park importance""" - weight = 1 - if park.ride_count and park.ride_count > 20: - weight += 2 - if park.coaster_count and park.coaster_count > 5: - weight += 1 - if park.average_rating and park.average_rating > 4.0: - weight += 1 - return min(weight, 5) # Cap at 5 - - def _get_park_category(self, park: Park) -> str: - """Determine park category for clustering""" - if park.coaster_count and park.coaster_count >= 10: - return "major_park" - elif park.ride_count and park.ride_count >= 15: - return "theme_park" - else: - return "small_park" -``` - -## 4. Clustering Strategy - -### 4.1 Multi-Level Clustering - -#### Clustering Configuration -```python -class ClusteringService: - """Handles location clustering for map display""" - - CLUSTER_CONFIG = { - 'radius': 40, # pixels - 'max_zoom': 15, - 'min_zoom': 3, - 'extent': 512, # tile extent - } - - def cluster_locations( - self, - locations: List[UnifiedLocation], - zoom_level: int - ) -> Tuple[List[UnifiedLocation], List[Cluster]]: - """ - Cluster locations based on zoom level and density - Returns unclustered locations and cluster objects - """ - if zoom_level >= 15 or len(locations) <= 50: - return locations, [] - - # Use Supercluster algorithm (Python implementation) - clusterer = Supercluster( - radius=self.CLUSTER_CONFIG['radius'], - max_zoom=self.CLUSTER_CONFIG['max_zoom'], - min_zoom=self.CLUSTER_CONFIG['min_zoom'] - ) - - # Convert locations to GeoJSON features - features = [self._location_to_feature(loc) for loc in locations] - clusterer.load(features) - - # Get clusters for zoom level - clusters = clusterer.get_clusters(bounds=None, zoom=zoom_level) - - return self._process_clusters(clusters, locations) - - def _location_to_feature(self, location: UnifiedLocation) -> Dict: - """Convert UnifiedLocation to GeoJSON feature""" - return { - 'type': 'Feature', - 'properties': { - 'id': location.id, - 'type': location.type.value, - 'name': location.name, - 'weight': location.cluster_weight, - 'category': location.cluster_category - }, - 'geometry': { - 'type': 'Point', - 'coordinates': [location.coordinates[1], location.coordinates[0]] # lng, lat - } - } -``` - -### 4.2 Smart Clustering Rules - -#### Category-Based Clustering -```python -class SmartClusteringRules: - """Intelligent clustering based on location types and importance""" - - def should_cluster_together(self, loc1: UnifiedLocation, loc2: UnifiedLocation) -> bool: - """Determine if two locations should be clustered together""" - - # Same park rides should cluster together - if loc1.type == LocationType.RIDE and loc2.type == LocationType.RIDE: - park1 = loc1.metadata.get('park_id') - park2 = loc2.metadata.get('park_id') - return park1 == park2 - - # Major parks should resist clustering - if (loc1.cluster_category == "major_park" or loc2.cluster_category == "major_park"): - return False - - # Similar types cluster more readily - return loc1.type == loc2.type - - def get_cluster_priority(self, locations: List[UnifiedLocation]) -> UnifiedLocation: - """Select the representative location for a cluster""" - # Prioritize by: 1) Parks over rides, 2) Higher weight, 3) Better rating - parks = [loc for loc in locations if loc.type == LocationType.PARK] - if parks: - return max(parks, key=lambda x: x.cluster_weight) - - return max(locations, key=lambda x: x.cluster_weight) -``` - -## 5. Filtering and Search Integration - -### 5.1 Search Service Integration - -#### SearchLocationService -```python -class SearchLocationService: - """Integrates map service with existing search functionality""" - - def __init__(self): - self.unified_service = UnifiedMapService() - # Integrate with existing SearchService - from core.views.search import AdaptiveSearchView - self.search_view = AdaptiveSearchView() - - def search_with_location( - self, - query: str, - bounds: GeoBounds = None, - location_types: Set[LocationType] = None - ) -> SearchLocationResponse: - """ - Combined text and location search - """ - # Text search using existing search functionality - text_results = self._perform_text_search(query) - - # Location-based filtering - location_results = self.unified_service.get_map_data( - bounds=bounds, - filters=MapFilters( - location_types=location_types, - search_query=query - ), - cluster=False - ) - - # Merge and rank results - return self._merge_search_results(text_results, location_results) - - def search_near_location( - self, - center_point: Tuple[float, float], - radius_km: float = 50, - location_types: Set[LocationType] = None - ) -> SearchLocationResponse: - """Find locations near a specific point""" - bounds = self._point_to_bounds(center_point, radius_km) - - return self.unified_service.get_map_data( - bounds=bounds, - filters=MapFilters(location_types=location_types), - cluster=False - ) -``` - -### 5.2 Advanced Filtering - -#### FilterProcessor -```python -class FilterProcessor: - """Processes complex filter combinations""" - - def apply_combined_filters( - self, - base_query: QuerySet, - filters: MapFilters, - location_type: LocationType - ) -> QuerySet: - """Apply filters specific to location type""" - - if location_type == LocationType.PARK: - return self._apply_park_filters(base_query, filters) - elif location_type == LocationType.RIDE: - return self._apply_ride_filters(base_query, filters) - elif location_type == LocationType.COMPANY: - return self._apply_company_filters(base_query, filters) - - return base_query - - def _apply_park_filters(self, query: QuerySet, filters: MapFilters) -> QuerySet: - """Apply park-specific filters""" - if filters.park_status: - query = query.filter(park__status__in=filters.park_status) - - if filters.min_rating: - query = query.filter(park__average_rating__gte=filters.min_rating) - - if filters.search_query: - query = query.filter( - Q(park__name__icontains=filters.search_query) | - Q(city__icontains=filters.search_query) | - Q(state__icontains=filters.search_query) - ) - - return query -``` - -## 6. Caching Strategy - -### 6.1 Multi-Level Caching - -#### Cache Architecture -```mermaid -graph TB - Request[Map Request] --> L1[Level 1: Redis Cache] - L1 --> L2[Level 2: Database Query Cache] - L2 --> L3[Level 3: Computed Results Cache] - L3 --> DB[Database] - - L1 --> GeoHash[Geographic Hash Keys] - L2 --> QueryCache[Query Result Cache] - L3 --> ClusterCache[Cluster Computation Cache] -``` - -#### MapCacheService -```python -class MapCacheService: - """Multi-level caching for map data""" - - def __init__(self): - self.redis_client = redis.Redis() - self.cache_timeout = { - 'bounds_data': 300, # 5 minutes - 'location_details': 1800, # 30 minutes - 'clusters': 600, # 10 minutes - 'search_results': 180, # 3 minutes - } - - def get_bounds_data( - self, - bounds: GeoBounds, - filters: MapFilters, - zoom_level: int - ) -> Optional[MapResponse]: - """Get cached map data for geographic bounds""" - cache_key = self._generate_bounds_key(bounds, filters, zoom_level) - - # Try Redis first - cached_data = self.redis_client.get(cache_key) - if cached_data: - return MapResponse.from_json(cached_data) - - return None - - def cache_bounds_data( - self, - bounds: GeoBounds, - filters: MapFilters, - zoom_level: int, - data: MapResponse - ): - """Cache map data with geographic key""" - cache_key = self._generate_bounds_key(bounds, filters, zoom_level) - - self.redis_client.setex( - cache_key, - self.cache_timeout['bounds_data'], - data.to_json() - ) - - def _generate_bounds_key( - self, - bounds: GeoBounds, - filters: MapFilters, - zoom_level: int - ) -> str: - """Generate cache key based on geographic bounds and filters""" - # Use geohash for geographic component - bounds_hash = self._bounds_to_geohash(bounds, precision=zoom_level) - filters_hash = self._filters_to_hash(filters) - - return f"map:bounds:{bounds_hash}:filters:{filters_hash}:zoom:{zoom_level}" - - def _bounds_to_geohash(self, bounds: GeoBounds, precision: int) -> str: - """Convert bounds to geohash for geographic caching""" - import geohash - center_lat = (bounds.north + bounds.south) / 2 - center_lng = (bounds.east + bounds.west) / 2 - - # Adjust precision based on zoom level - precision = min(max(precision // 2, 4), 8) - - return geohash.encode(center_lat, center_lng, precision) -``` - -### 6.2 Cache Invalidation Strategy - -#### InvalidationStrategy -```python -class CacheInvalidationStrategy: - """Handles intelligent cache invalidation""" - - def __init__(self, cache_service: MapCacheService): - self.cache_service = cache_service - - def invalidate_location_update(self, location_type: LocationType, location_id: int): - """Invalidate caches when location data changes""" - # Get affected geographic areas - affected_areas = self._get_affected_geohash_areas(location_type, location_id) - - # Invalidate all cache keys in those areas - for area in affected_areas: - pattern = f"map:bounds:{area}*" - self._invalidate_pattern(pattern) - - def invalidate_bulk_update(self, location_type: LocationType, count: int): - """Invalidate broader caches for bulk updates""" - if count > 10: # Major update - pattern = f"map:*" - self._invalidate_pattern(pattern) - else: - # Invalidate just this location type - pattern = f"map:*:filters:*{location_type.value}*" - self._invalidate_pattern(pattern) -``` - -## 7. API Design - -### 7.1 REST Endpoints - -#### Core Map API Endpoints -```python -# urls.py -urlpatterns = [ - path('api/map/locations/', MapLocationListView.as_view(), name='map-locations'), - path('api/map/locations///', - MapLocationDetailView.as_view(), name='map-location-detail'), - path('api/map/search/', MapSearchView.as_view(), name='map-search'), - path('api/map/bounds/', MapBoundsView.as_view(), name='map-bounds'), - path('api/map/clusters/', MapClusterView.as_view(), name='map-clusters'), -] -``` - -#### MapLocationListView -```python -class MapLocationListView(APIView): - """Main endpoint for retrieving map locations""" - - def get(self, request): - """ - GET /api/map/locations/ - - Query Parameters: - - bounds: "north,south,east,west" - - types: "park,ride,company" - - zoom: integer zoom level - - cluster: boolean (default: true) - - status: park status filter - - rating: minimum rating - - q: search query - """ - try: - # Parse parameters - bounds = self._parse_bounds(request.GET.get('bounds')) - location_types = self._parse_location_types(request.GET.get('types', 'park')) - zoom_level = int(request.GET.get('zoom', 10)) - should_cluster = request.GET.get('cluster', 'true').lower() == 'true' - - # Build filters - filters = MapFilters( - location_types=location_types, - park_status=self._parse_list(request.GET.get('status')), - min_rating=self._parse_float(request.GET.get('rating')), - search_query=request.GET.get('q') - ) - - # Get map service - map_service = UnifiedMapService() - - # Retrieve data - response = map_service.get_map_data( - bounds=bounds, - filters=filters, - zoom_level=zoom_level, - cluster=should_cluster - ) - - return Response(response.to_dict()) - - except ValueError as e: - return Response( - {'error': f'Invalid parameters: {str(e)}'}, - status=400 - ) - except Exception as e: - logger.exception("Error in MapLocationListView") - return Response( - {'error': 'Internal server error'}, - status=500 - ) -``` - -### 7.2 HTMX Integration Endpoints - -#### HTMX Map Updates -```python -class HTMXMapView(TemplateView): - """HTMX endpoint for dynamic map updates""" - - template_name = "maps/partials/map_locations.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - # Use same parameter parsing as API - bounds = self._parse_bounds(self.request.GET.get('bounds')) - filters = self._build_filters_from_request(self.request) - zoom_level = int(self.request.GET.get('zoom', 10)) - - # Get map data - map_service = UnifiedMapService() - map_data = map_service.get_map_data( - bounds=bounds, - filters=filters, - zoom_level=zoom_level, - cluster=True - ) - - context.update({ - 'locations': map_data.data.locations, - 'clusters': map_data.data.clusters, - 'map_bounds': map_data.data.bounds, - }) - - return context -``` - -## 8. Frontend Integration - -### 8.1 JavaScript API Interface - -#### MapService JavaScript Class -```javascript -class ThrillWikiMapService { - constructor(apiBase = '/api/map') { - this.apiBase = apiBase; - this.cache = new Map(); - this.activeRequests = new Map(); - } - - /** - * Get locations for map bounds - * @param {Object} bounds - {north, south, east, west} - * @param {Object} options - Filtering and display options - * @returns {Promise} - */ - async getLocations(bounds, options = {}) { - const params = new URLSearchParams({ - bounds: `${bounds.north},${bounds.south},${bounds.east},${bounds.west}`, - zoom: options.zoom || 10, - cluster: options.cluster !== false, - types: (options.types || ['park']).join(',') - }); - - if (options.status) params.append('status', options.status.join(',')); - if (options.rating) params.append('rating', options.rating); - if (options.query) params.append('q', options.query); - - const url = `${this.apiBase}/locations/?${params}`; - - // Debounce rapid requests - if (this.activeRequests.has(url)) { - return this.activeRequests.get(url); - } - - const request = fetch(url) - .then(response => response.json()) - .finally(() => this.activeRequests.delete(url)); - - this.activeRequests.set(url, request); - return request; - } - - /** - * Search locations with text query - * @param {string} query - Search term - * @param {Object} bounds - Optional geographic bounds - * @returns {Promise} - */ - async searchLocations(query, bounds = null) { - const params = new URLSearchParams({ q: query }); - - if (bounds) { - params.append('bounds', `${bounds.north},${bounds.south},${bounds.east},${bounds.west}`); - } - - const response = await fetch(`${this.apiBase}/search/?${params}`); - return response.json(); - } - - /** - * Get detailed information for a specific location - * @param {string} locationType - 'park', 'ride', or 'company' - * @param {number} locationId - Location ID - * @returns {Promise} - */ - async getLocationDetail(locationType, locationId) { - const cacheKey = `detail_${locationType}_${locationId}`; - - if (this.cache.has(cacheKey)) { - return this.cache.get(cacheKey); - } - - const response = await fetch(`${this.apiBase}/locations/${locationType}/${locationId}/`); - const data = await response.json(); - - this.cache.set(cacheKey, data); - return data; - } -} -``` - -### 8.2 Leaflet.js Integration - -#### Enhanced Map Component -```javascript -class ThrillWikiMap { - constructor(containerId, options = {}) { - this.container = containerId; - this.mapService = new ThrillWikiMapService(); - this.options = { - center: [39.8283, -98.5795], // Center of US - zoom: 6, - maxZoom: 18, - clustering: true, - ...options - }; - - this.map = null; - this.markers = new Map(); - this.clusters = null; - this.currentBounds = null; - - this.init(); - } - - init() { - // Initialize Leaflet map - this.map = L.map(this.container, { - center: this.options.center, - zoom: this.options.zoom, - maxZoom: this.options.maxZoom - }); - - // Add tile layer - L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', { - attribution: '© OpenStreetMap contributors' - }).addTo(this.map); - - // Set up clustering if enabled - if (this.options.clustering) { - this.clusters = L.markerClusterGroup({ - chunkedLoading: true, - chunkInterval: 200, - chunkDelay: 50 - }); - this.map.addLayer(this.clusters); - } - - // Set up event handlers - this.setupEventHandlers(); - - // Load initial data - this.loadMapData(); - } - - setupEventHandlers() { - // Update data on map move/zoom - this.map.on('moveend zoomend', () => { - this.loadMapData(); - }); - - // Handle marker clicks - this.map.on('click', (e) => { - if (e.originalEvent.target.classList.contains('location-marker')) { - this.handleMarkerClick(e); - } - }); - } - - async loadMapData() { - const bounds = this.map.getBounds(); - const zoom = this.map.getZoom(); - - try { - const response = await this.mapService.getLocations( - { - north: bounds.getNorth(), - south: bounds.getSouth(), - east: bounds.getEast(), - west: bounds.getWest() - }, - { - zoom: zoom, - cluster: this.options.clustering, - types: this.options.locationTypes || ['park'] - } - ); - - this.updateMarkers(response.data); - - } catch (error) { - console.error('Error loading map data:', error); - this.showError('Failed to load map data'); - } - } - - updateMarkers(mapData) { - // Clear existing markers - this.clearMarkers(); - - // Add individual location markers - mapData.locations.forEach(location => { - const marker = this.createLocationMarker(location); - this.addMarker(location.id, marker); - }); - - // Add cluster markers if provided - mapData.clusters.forEach(cluster => { - const marker = this.createClusterMarker(cluster); - this.addMarker(`cluster_${cluster.id}`, marker); - }); - } - - createLocationMarker(location) { - const icon = this.getLocationIcon(location.type, location.cluster_category); - - const marker = L.marker( - [location.coordinates[0], location.coordinates[1]], - { icon: icon } - ); - - // Add popup with location details - marker.bindPopup(this.createLocationPopup(location)); - - // Store location data - marker.locationData = location; - - return marker; - } - - getLocationIcon(locationType, category) { - const iconMap = { - park: { - major_park: '🎢', - theme_park: '🎠', - small_park: '🎪' - }, - ride: '🎡', - company: '🏢' - }; - - const emoji = typeof iconMap[locationType] === 'object' - ? iconMap[locationType][category] || iconMap[locationType].default - : iconMap[locationType]; - - return L.divIcon({ - html: `
${emoji}
`, - className: 'custom-marker', - iconSize: [30, 30], - iconAnchor: [15, 15] - }); - } -} -``` - -### 8.3 HTMX Integration Patterns - -#### Dynamic Filter Updates -```html - -
- - -
- - -
- -
-
- - - - - - - - - - - - - - - -``` - -#### JavaScript Integration Bridge -```javascript -// Bridge between Leaflet and HTMX -class HTMXMapBridge { - constructor(mapInstance) { - this.map = mapInstance; - this.setupHTMXIntegration(); - } - - setupHTMXIntegration() { - // Update hidden form fields when map changes - this.map.map.on('moveend zoomend', () => { - this.updateFormFields(); - this.triggerHTMXUpdate(); - }); - } - - updateFormFields() { - const bounds = this.map.map.getBounds(); - const zoom = this.map.map.getZoom(); - - document.getElementById('map-bounds').value = - `${bounds.getNorth()},${bounds.getSouth()},${bounds.getEast()},${bounds.getWest()}`; - document.getElementById('map-zoom').value = zoom; - } - - triggerHTMXUpdate() { - // Trigger HTMX update - document.body.dispatchEvent(new CustomEvent('map-bounds-changed')); - } -} -``` - -## 9. Error Handling and Fallback Strategies - -### 9.1 Error Handling Architecture - -#### UnifiedErrorHandler -```python -class UnifiedMapErrorHandler: - """Centralized error handling for map service""" - - def handle_query_error(self, error: Exception, context: Dict) -> MapResponse: - """Handle database query errors with fallbacks""" - logger.error(f"Map query error: {error}", extra=context) - - if isinstance(error, DatabaseError): - # Try simplified query without complex filters - return self._fallback_simple_query(context) - elif isinstance(error, TimeoutError): - # Return cached data if available - return self._fallback_cached_data(context) - else: - # Return empty response with error message - return MapResponse.error_response( - message="Unable to load map data", - error_code="QUERY_FAILED" - ) - - def handle_location_adapter_error( - self, - adapter_type: str, - error: Exception, - context: Dict - ) -> List[UnifiedLocation]: - """Handle individual adapter failures""" - logger.warning(f"Adapter {adapter_type} failed: {error}", extra=context) - - # Log failure but continue with other adapters - self._record_adapter_failure(adapter_type, error) - - # Return empty list for this adapter - return [] - - def _fallback_simple_query(self, context: Dict) -> MapResponse: - """Simplified query fallback for complex filter failures""" - try: - # Try query with only bounds, no complex filters - bounds = context.get('bounds') - if bounds: - simple_filters = MapFilters(has_coordinates=True) - return self._execute_simple_bounds_query(bounds, simple_filters) - except Exception as e: - logger.error(f"Fallback query also failed: {e}") - - return MapResponse.empty_response() -``` - -### 9.2 Graceful Degradation - -#### DegradationStrategy -```python -class MapDegradationStrategy: - """Handles graceful degradation of map functionality""" - - def get_degraded_response( - self, - requested_features: Set[str], - available_features: Set[str] - ) -> MapResponse: - """Return response with available features only""" - - response = MapResponse() - - if 'locations' in available_features: - response.data.locations = self._get_basic_locations() - else: - response.warnings.append("Location data unavailable") - - if 'clustering' not in available_features: - response.warnings.append("Clustering disabled due to performance") - response.data.clustered = False - - if 'search' not in available_features: - response.warnings.append("Search functionality temporarily unavailable") - - return response - - def check_system_health(self) -> Dict[str, bool]: - """Check health of map service components""" - health = {} - - try: - # Test database connectivity - with connection.cursor() as cursor: - cursor.execute("SELECT 1") - health['database'] = True - except Exception: - health['database'] = False - - try: - # Test Redis connectivity - self.cache_service.redis_client.ping() - health['cache'] = True - except Exception: - health['cache'] = False - - try: - # Test PostGIS functionality - from django.contrib.gis.geos import Point - Point(0, 0).buffer(1) - health['postgis'] = True - except Exception: - health['postgis'] = False - - return health -``` - -## 10. Performance Monitoring and Optimization - -### 10.1 Performance Metrics - -#### MapPerformanceMonitor -```python -class MapPerformanceMonitor: - """Monitor and track map service performance""" - - def __init__(self): - self.metrics = defaultdict(list) - self.thresholds = { - 'query_time': 500, # ms - 'total_response_time': 1000, # ms - 'cache_hit_rate': 0.8, # 80% - } - - @contextmanager - def track_performance(self, operation: str, context: Dict = None): - """Track performance of map operations""" - start_time = time.time() - start_memory = psutil.Process().memory_info().rss - - try: - yield - finally: - end_time = time.time() - end_memory = psutil.Process().memory_info().rss - - execution_time = (end_time - start_time) * 1000 # Convert to ms - memory_delta = end_memory - start_memory - - self._record_metric(operation, { - 'execution_time_ms': execution_time, - 'memory_delta_bytes': memory_delta, - 'context': context or {} - }) - - # Check for performance issues - self._check_performance_thresholds(operation, execution_time) - - def get_performance_report(self, hours: int = 24) -> Dict: - """Generate performance report""" - cutoff_time = time.time() - (hours * 3600) - - recent_metrics = { - operation: [m for m in metrics if m['timestamp'] > cutoff_time] - for operation, metrics in self.metrics.items() - } - - return { - 'summary': self._calculate_summary_stats(recent_metrics), - 'slow_queries': self._identify_slow_queries(recent_metrics), - 'cache_performance': self._analyze_cache_performance(recent_metrics), - 'recommendations': self._generate_recommendations(recent_metrics) - } -``` - -### 10.2 Query Optimization Monitoring - -#### QueryOptimizationAnalyzer -```python -class QueryOptimizationAnalyzer: - """Analyze and optimize database queries""" - - def analyze_query_performance(self, query_type: str, filters: MapFilters) -> Dict: - """Analyze performance of specific query patterns""" - - with connection.cursor() as cursor: - # Enable query analysis - cursor.execute("EXPLAIN (ANALYZE, BUFFERS) " + self._build_query(query_type, filters)) - explain_output = cursor.fetchall() - - analysis = self._parse_explain_output(explain_output) - - recommendations = [] - if analysis['seq_scans'] > 0: - recommendations.append("Consider adding indexes for sequential scans") - - if analysis['execution_time'] > 200: # ms - recommendations.append("Query execution time exceeds threshold") - - return { - 'analysis': analysis, - 'recommendations': recommendations, - 'query_plan': explain_output - } - - def suggest_index_optimizations(self) -> List[str]: - """Suggest database index optimizations""" - suggestions = [] - - # Analyze frequently used filter combinations - common_filters = self._analyze_filter_patterns() - - for filter_combo in common_filters: - if self._would_benefit_from_index(filter_combo): - suggestions.append(self._generate_index_suggestion(filter_combo)) - - return suggestions -``` - -## 11. Security Considerations - -### 11.1 Input Validation and Sanitization - -#### MapSecurityValidator -```python -class MapSecurityValidator: - """Security validation for map service inputs""" - - MAX_BOUNDS_SIZE = 1000 # Max km in any direction - MAX_LOCATIONS_RETURNED = 5000 - - def validate_bounds(self, bounds: GeoBounds) -> bool: - """Validate geographic bounds for reasonable size""" - if not bounds: - return True - - # Check coordinate validity - if not (-90 <= bounds.south <= bounds.north <= 90): - raise ValidationError("Invalid latitude bounds") - - if not (-180 <= bounds.west <= bounds.east <= 180): - raise ValidationError("Invalid longitude bounds") - - # Check bounds size to prevent abuse - lat_diff = abs(bounds.north - bounds.south) - lng_diff = abs(bounds.east - bounds.west) - - if lat_diff > 45 or lng_diff > 90: # Roughly continental scale - raise ValidationError("Bounds too large") - - return True - - def validate_filters(self, filters: MapFilters) -> bool: - """Validate filter inputs""" - if filters.search_query: - # Sanitize search query - if len(filters.search_query) > 200: - raise ValidationError("Search query too long") - - # Check for potential injection patterns - dangerous_patterns = [' UnifiedLocation: - """Sanitize location data before output""" - import html - - # Escape HTML in text fields - location.name = html.escape(location.name) - if location.address: - location.address = html.escape(location.address) - - # Sanitize metadata - for key, value in location.metadata.items(): - if isinstance(value, str): - location.metadata[key] = html.escape(value) - - return location -``` - -### 11.2 Rate Limiting and Abuse Prevention - -#### MapRateLimiter -```python -class MapRateLimiter: - """Rate limiting for map API endpoints""" - - def __init__(self): - self.redis_client = redis.Redis() - self.limits = { - 'requests_per_minute': 60, - 'requests_per_hour': 1000, - 'data_points_per_request': 5000, - } - - def check_rate_limit(self, user_id: str, request_type: str) -> bool: - """Check if request is within rate limits""" - current_time = int(time.time()) - minute_key = f"rate_limit:{user_id}:{request_type}:{current_time // 60}" - hour_key = f"rate_limit:{user_id}:{request_type}:{current_time // 3600}" - - # Check minute limit - minute_count = self.redis_client.incr(minute_key) - if minute_count == 1: - self.redis_client.expire(minute_key, 60) - - if minute_count > self.limits['requests_per_minute']: - return False - - # Check hour limit - hour_count = self.redis_client.incr(hour_key) - if hour_count == 1: - self.redis_client.expire(hour_key, 3600) - - if hour_count > self.limits['requests_per_hour']: - return False - - return True -``` - -## 12. Testing Strategy - -### 12.1 Unit Tests - -#### MapServiceTests -```python -class UnifiedMapServiceTests(TestCase): - """Unit tests for map service functionality""" - - def setUp(self): - self.map_service = UnifiedMapService() - self.sample_bounds = GeoBounds( - north=41.5, - south=41.4, - east=-82.6, - west=-82.7 - ) - - def test_get_map_data_with_bounds(self): - """Test basic map data retrieval with bounds""" - response = self.map_service.get_map_data( - bounds=self.sample_bounds, - filters=MapFilters(location_types={LocationType.PARK}) - ) - - self.assertIsInstance(response, MapResponse) - self.assertIsNotNone(response.data) - self.assertGreaterEqual(len(response.data.locations), 0) - - def test_location_adapter_integration(self): - """Test individual location adapters""" - adapter = ParkLocationAdapter() - - # Create test park with location - park = Park.objects.create(name="Test Park") - park_location = ParkLocation.objects.create( - park=park, - point=Point(-82.65, 41.45), - city="Test City", - state="OH" - ) - - unified_location = adapter.to_unified_location(park_location) - - self.assertEqual(unified_location.type, LocationType.PARK) - self.assertEqual(unified_location.name, "Test Park") - self.assertIsNotNone(unified_location.coordinates) - - def test_clustering_service(self): - """Test location clustering functionality""" - clustering_service = ClusteringService() - - # Create test locations - locations = [ - UnifiedLocation( - id=f"park_{i}", - type=LocationType.PARK, - name=f"Park {i}", - coordinates=(41.4 + i*0.01, -82.6 + i*0.01), - address="Test Address", - metadata={}, - type_data={} - ) - for i in range(20) - ] - - unclustered, clusters = clustering_service.cluster_locations(locations, zoom_level=8) - - # Should create clusters at zoom level 8 - self.assertGreater(len(clusters), 0) - self.assertLess(len(unclustered), len(locations)) -``` - -### 12.2 Integration Tests - -#### MapAPIIntegrationTests -```python -class MapAPIIntegrationTests(APITestCase): - """Integration tests for map API endpoints""" - - def setUp(self): - self.create_test_data() - - def create_test_data(self): - """Create test parks, rides, and companies with locations""" - # Create test park with location - self.park = Park.objects.create( - name="Cedar Point", - status="OPERATING" - ) - self.park_location = ParkLocation.objects.create( - park=self.park, - point=Point(-82.6830, 41.4778), - city="Sandusky", - state="OH", - country="USA" - ) - - # Create test ride with location - self.ride = Ride.objects.create( - name="Millennium Force", - park=self.park - ) - self.ride_location = RideLocation.objects.create( - ride=self.ride, - point=Point(-82.6835, 41.4780), - park_area="Frontier Trail" - ) - - def test_map_locations_api(self): - """Test main map locations API endpoint""" - url = reverse('map-locations') - params = { - 'bounds': '41.5,41.4,-82.6,-82.7', - 'types': 'park,ride', - 'zoom': 12 - } - - response = self.client.get(url, params) - - self.assertEqual(response.status_code, 200) - data = response.json() - - self.assertIn('data', data) - self.assertIn('locations', data['data']) - self.assertGreater(len(data['data']['locations']), 0) - - # Check location structure - location = data['data']['locations'][0] - self.assertIn('id', location) - self.assertIn('type', location) - self.assertIn('coordinates', location) - self.assertIn('metadata', location) - - def test_map_search_api(self): - """Test map search functionality""" - url = reverse('map-search') - params = {'q': 'Cedar Point'} - - response = self.client.get(url, params) - - self.assertEqual(response.status_code, 200) - data = response.json() - - self.assertIn('results', data) - self.assertGreater(len(data['results']), 0) -``` - -### 12.3 Performance Tests - -#### MapPerformanceTests -```python -class MapPerformanceTests(TestCase): - """Performance tests for map service""" - - def setUp(self): - self.create_large_dataset() - - def create_large_dataset(self): - """Create large test dataset for performance testing""" - parks = [] - for i in range(1000): - park = Park( - name=f"Test Park {i}", - status="OPERATING" - ) - parks.append(park) - - Park.objects.bulk_create(parks) - - # Create corresponding locations - locations = [] - for park in Park.objects.all(): - location = ParkLocation( - park=park, - point=Point( - -180 + random.random() * 360, # Random longitude - -90 + random.random() * 180 # Random latitude - ), - city=f"City {park.id}", - state="ST" - ) - locations.append(location) - - ParkLocation.objects.bulk_create(locations) - - def test_large_bounds_query_performance(self): - """Test performance with large geographic bounds""" - bounds = GeoBounds(north=90, south=-90, east=180, west=-180) - - start_time = time.time() - - map_service = UnifiedMapService() - response = map_service.get_map_data( - bounds=bounds, - filters=MapFilters(location_types={LocationType.PARK}), - cluster=True - ) - - end_time = time.time() - execution_time = (end_time - start_time) * 1000 # Convert to ms - - self.assertLess(execution_time, 1000) # Should complete in under 1 second - self.assertIsNotNone(response.data) - - def test_clustering_performance(self): - """Test clustering performance with many points""" - locations = [] - for i in range(5000): - location = UnifiedLocation( - id=f"test_{i}", - type=LocationType.PARK, - name=f"Location {i}", - coordinates=(random.uniform(-90, 90), random.uniform(-180, 180)), - address="Test", - metadata={}, - type_data={} - ) - locations.append(location) - - clustering_service = ClusteringService() - - start_time = time.time() - unclustered, clusters = clustering_service.cluster_locations(locations, zoom_level=6) - end_time = time.time() - - execution_time = (end_time - start_time) * 1000 - - self.assertLess(execution_time, 500) # Should cluster in under 500ms - self.assertGreater(len(clusters), 0) -``` - -## Conclusion - -This unified map service design provides a comprehensive solution for ThrillWiki's mapping needs while maintaining compatibility with the existing hybrid location system. The design prioritizes: - -1. **Performance**: Multi-level caching, spatial indexing, and intelligent clustering -2. **Scalability**: Handles thousands of locations with sub-second response times -3. **Flexibility**: Works with both generic and domain-specific location models -4. **Maintainability**: Clean separation of concerns and extensible architecture -5. **User Experience**: Smooth map interactions, real-time filtering, and responsive design - -The service can efficiently query all location types (parks, rides, companies) while providing a unified interface for frontend consumption. The clustering strategy ensures performance with large datasets, while the caching system provides fast response times for repeated queries. - -### Key Design Decisions - -1. **Hybrid Compatibility**: Supporting both generic Location and domain-specific models during transition -2. **PostGIS Optimization**: Leveraging spatial indexing and geographic queries for performance -3. **Multi-Level Caching**: Redis, database query cache, and computed results cache -4. **Smart Clustering**: Category-aware clustering with zoom-level optimization -5. **Progressive Enhancement**: Graceful degradation when components fail -6. **Security Focus**: Input validation, rate limiting, and output sanitization - -### Implementation Priority - -1. **Phase 1**: Core UnifiedMapService and LocationAbstractionLayer -2. **Phase 2**: API endpoints and basic frontend integration -3. **Phase 3**: Clustering service and performance optimization -4. **Phase 4**: Advanced features (search integration, caching optimization) -5. **Phase 5**: Monitoring, security hardening, and comprehensive testing - -This design provides a solid foundation for ThrillWiki's map functionality that can grow with the application's needs while maintaining excellent performance and user experience. \ No newline at end of file diff --git a/memory-bank/features/moderation/frontend-improvements.md b/memory-bank/features/moderation/frontend-improvements.md deleted file mode 100644 index 9155a56e..00000000 --- a/memory-bank/features/moderation/frontend-improvements.md +++ /dev/null @@ -1,55 +0,0 @@ -# Frontend Moderation Panel Improvements - -## Implementation Details - -### 1. Performance Optimization -- Added debouncing to search inputs -- Optimized list rendering with virtual scrolling -- Improved loading states with skeleton screens -- Added result caching for common searches - -### 2. Loading States -- Enhanced loading indicators with progress bars -- Added skeleton screens for content loading -- Improved HTMX loading states visual feedback -- Added transition animations for smoother UX - -### 3. Error Handling -- Added error states for failed operations -- Improved error messages with recovery actions -- Added retry functionality for failed requests -- Enhanced validation feedback - -### 4. Mobile Responsiveness -- Optimized layouts for mobile devices -- Added responsive navigation patterns -- Improved touch interactions -- Enhanced filter UI for small screens - -### 5. Accessibility -- Added ARIA labels and roles -- Improved keyboard navigation -- Enhanced focus management -- Added screen reader announcements - -## Key Components Modified - -1. Dashboard Layout -2. Submission Cards -3. Filter Interface -4. Action Buttons -5. Form Components - -## Technical Decisions - -1. Used CSS Grid for responsive layouts -2. Implemented AlpineJS for state management -3. Used HTMX for dynamic updates -4. Added Tailwind utilities for consistent styling - -## Testing Strategy - -1. Browser compatibility testing -2. Mobile device testing -3. Accessibility testing -4. Performance benchmarking \ No newline at end of file diff --git a/memory-bank/features/moderation/implementation.md b/memory-bank/features/moderation/implementation.md deleted file mode 100644 index 9140300b..00000000 --- a/memory-bank/features/moderation/implementation.md +++ /dev/null @@ -1,115 +0,0 @@ -# Moderation Panel Implementation - -## Completed Improvements - -### 1. Loading States & Performance -- Added skeleton loading screens for better UX during content loading -- Implemented debounced search inputs to reduce server load -- Added virtual scrolling for large submission lists -- Enhanced error handling with clear feedback -- Optimized HTMX requests and responses - -### 2. Mobile Responsiveness -- Created collapsible filter interface for mobile -- Improved action button layouts on small screens -- Enhanced touch interactions -- Optimized grid layouts for different screen sizes - -### 3. Accessibility -- Added proper ARIA labels and roles -- Enhanced keyboard navigation -- Added screen reader announcements for state changes -- Improved focus management -- Added reduced motion support - -### 4. State Management -- Implemented Alpine.js store for filter management -- Added URL-based state persistence -- Enhanced filter UX with visual indicators -- Improved form handling and validation - -### 5. Error Handling -- Added comprehensive error states -- Implemented retry functionality -- Enhanced error feedback -- Added toast notifications for actions - -## Technical Implementation - -### Key Files Modified -1. `templates/moderation/dashboard.html` - - Enhanced base template structure - - Added improved loading and error states - - Added accessibility enhancements - -2. `templates/moderation/partials/loading_skeleton.html` - - Created skeleton loading screens - - Added responsive layout structure - - Implemented loading animations - -3. `templates/moderation/partials/dashboard_content.html` - - Enhanced filter interface - - Improved mobile responsiveness - - Added accessibility features - -4. `templates/moderation/partials/filters_store.html` - - Implemented Alpine.js store - - Added filter state management - - Enhanced URL handling - -## Testing Notes - -### Tested Scenarios -- Mobile device compatibility -- Screen reader functionality -- Keyboard navigation -- Loading states and error handling -- Filter functionality -- Form submissions and validation - -### Browser Support -- Chrome 90+ -- Firefox 88+ -- Safari 14+ -- Edge 90+ - -## Next Steps - -### 1. Performance Optimization -- [ ] Implement server-side caching for frequent queries -- [ ] Add client-side caching for filter results -- [ ] Optimize image loading and processing - -### 2. User Experience -- [ ] Add bulk action support -- [ ] Enhance filter combinations -- [ ] Add sorting options -- [ ] Implement saved filters - -### 3. Accessibility -- [ ] Conduct full WCAG audit -- [ ] Add keyboard shortcuts -- [ ] Enhance screen reader support - -### 4. Features -- [ ] Add advanced search capabilities -- [ ] Implement moderation statistics -- [ ] Add user activity tracking -- [ ] Enhance notification system - -## Documentation Updates Needed -- Update user guide with new features -- Add keyboard shortcut documentation -- Update accessibility guidelines -- Add performance benchmarks - -## Known Issues -- Filter reset might not clear all states -- Mobile scroll performance with many items -- Loading skeleton flicker on fast connections - -## Dependencies -- HTMX -- AlpineJS -- TailwindCSS -- Leaflet (for maps) \ No newline at end of file diff --git a/memory-bank/features/moderation/overview.md b/memory-bank/features/moderation/overview.md deleted file mode 100644 index 90413a8d..00000000 --- a/memory-bank/features/moderation/overview.md +++ /dev/null @@ -1,131 +0,0 @@ -# Moderation System Overview - -## Purpose -The moderation system ensures high-quality, accurate content across the ThrillWiki platform by implementing a structured review process for user-generated content. - -## Core Components - -### 1. Content Queue Management -- Submission categorization -- Priority assignment -- Review distribution -- Queue monitoring - -### 2. Review Process -- Multi-step verification -- Content validation rules -- Media review workflow -- Quality metrics - -### 3. Moderator Tools -- Review interface -- Action tracking -- Decision history -- Performance metrics - -## Implementation - -### Models -```python -# Key models in moderation/models.py -- ModeratedContent -- ModeratorAction -- ContentQueue -- QualityMetric -``` - -### Workflows - -1. Content Submission - - Content validation - - Automated checks - - Queue assignment - - Submitter notification - -2. Review Process - - Moderator assignment - - Content evaluation - - Decision making - - Action recording - -3. Quality Control - - Metric tracking - - Performance monitoring - - Accuracy assessment - - Review auditing - -## Integration Points - -### 1. User System -- Submission tracking -- Status notifications -- User reputation -- Appeal process - -### 2. Content Systems -- Parks content -- Ride information -- Review system -- Media handling - -### 3. Analytics -- Quality metrics -- Processing times -- Accuracy rates -- User satisfaction - -## Business Rules - -### Content Standards -1. Accuracy Requirements - - Factual verification - - Source validation - - Update frequency - - Completeness checks - -2. Quality Guidelines - - Writing standards - - Media requirements - - Information depth - - Format compliance - -### Moderation Rules -1. Review Criteria - - Content accuracy - - Quality standards - - Community guidelines - - Legal compliance - -2. Action Framework - - Approval process - - Rejection handling - - Revision requests - - Appeals management - -## Future Enhancements - -### Planned Improvements -1. Short-term - - Enhanced automation - - Improved metrics - - UI refinements - - Performance optimization - -2. Long-term - - AI assistance - - Advanced analytics - - Workflow automation - - Community integration - -### Integration Opportunities -1. Machine Learning - - Content classification - - Quality prediction - - Spam detection - - Priority assignment - -2. Community Features - - Trusted reviewers - - Expert validation - - Community flags - - Reputation system \ No newline at end of file diff --git a/memory-bank/features/park-search-integration.md b/memory-bank/features/park-search-integration.md deleted file mode 100644 index acad8fc1..00000000 --- a/memory-bank/features/park-search-integration.md +++ /dev/null @@ -1,76 +0,0 @@ -# Park Search Integration - -## Overview -Integrated the parks app with the site-wide search system to provide consistent filtering and search capabilities across the platform. - -## Implementation Details - -### 1. Filter Configuration -```python -# parks/filters.py -ParkFilter = create_model_filter( - model=Park, - search_fields=['name', 'description', 'location__city', 'location__state', 'location__country'], - mixins=[LocationFilterMixin, RatingFilterMixin, DateRangeFilterMixin], - additional_filters={ - 'status': { - 'field_class': 'django_filters.ChoiceFilter', - 'field_kwargs': {'choices': Park._meta.get_field('status').choices} - }, - 'opening_date': { - 'field_class': 'django_filters.DateFromToRangeFilter', - }, - 'owner': { - 'field_class': 'django_filters.ModelChoiceFilter', - 'field_kwargs': {'queryset': 'companies.Company.objects.all()'} - }, - 'min_rides': { - 'field_class': 'django_filters.NumberFilter', - 'field_kwargs': {'field_name': 'ride_count', 'lookup_expr': 'gte'} - }, - 'min_coasters': { - 'field_class': 'django_filters.NumberFilter', - 'field_kwargs': {'field_name': 'coaster_count', 'lookup_expr': 'gte'} - }, - 'min_size': { - 'field_class': 'django_filters.NumberFilter', - 'field_kwargs': {'field_name': 'size_acres', 'lookup_expr': 'gte'} - } - } -) -``` - -### 2. View Integration -- Updated `ParkListView` to use `HTMXFilterableMixin` -- Configured proper queryset optimization with `select_related` and `prefetch_related` -- Added pagination support -- Maintained ride count annotations - -### 3. Template Structure -- Created `search/templates/search/partials/park_results.html` for consistent result display -- Includes: - - Park image thumbnails - - Basic park information - - Location details - - Status indicators - - Ride count badges - - Rating display - -### 4. Quick Search Support -- Modified `search_parks` view for dropdown/quick search scenarios -- Uses the same filter system but with simplified output -- Limited to 10 results for performance -- Added location preloading - -## Benefits -1. Consistent filtering across the platform -2. Enhanced search capabilities with location and rating filters -3. Improved performance through proper query optimization -4. Better maintainability using the site-wide search system -5. HTMX-powered dynamic updates - -## Technical Notes -- Uses django-filter backend -- Integrates with location and rating mixins -- Supports both full search and quick search use cases -- Maintains existing functionality while improving code organization \ No newline at end of file diff --git a/memory-bank/features/parks/search.md b/memory-bank/features/parks/search.md deleted file mode 100644 index 173a816f..00000000 --- a/memory-bank/features/parks/search.md +++ /dev/null @@ -1,130 +0,0 @@ -# Park Search Implementation - -## Search Flow - -1. **Quick Search (Suggestions)** - - Endpoint: `suggest_parks/` - - Shows up to 8 suggestions - - Uses HTMX for real-time updates - - 300ms debounce for typing - -2. **Full Search** - - Endpoint: `parks:park_list` - - Shows all matching results - - Supports view modes (grid/list) - - Integrates with filter system - -## Implementation Details - -### Frontend Components -- Search input using built-in HTMX and Alpine.js - ```html -
-
- -
-
- ``` -- No custom JavaScript required -- Uses native frameworks' features for: - - State management (Alpine.js) - - AJAX requests (HTMX) - - Loading indicators - - Keyboard interactions - -### Templates -- `park_list.html`: Main search interface -- `park_suggestions.html`: Partial for search suggestions -- `park_list_item.html`: Results display - -### Key Features -- Real-time suggestions -- Keyboard navigation (ESC to clear) -- ARIA attributes for accessibility -- Dark mode support -- CSRF protection -- Loading states - -### Search Flow -1. User types in search box -2. After 300ms debounce, HTMX sends request -3. Server returns suggestion list -4. User selects item -5. Form submits to main list view with filter -6. Results update while maintaining view mode - -## Recent Updates (2024-02-22) -1. Fixed search page loading issue: - - Removed legacy redirect in suggest_parks - - Updated search form to use HTMX properly - - Added Alpine.js for state management - - Improved suggestions UI - - Maintained view mode during search - -2. Security: - - CSRF protection on all forms - - Input sanitization - - Proper parameter handling - -3. Performance: - - 300ms debounce on typing - - Limit suggestions to 8 items - - Efficient query optimization - -4. Accessibility: - - ARIA labels and roles - - Keyboard navigation - - Proper focus management - - Screen reader support - -## API Response Format - -### Suggestions Endpoint (`/parks/suggest_parks/`) -```json -{ - "results": [ - { - "id": "string", - "name": "string", - "status": "string", - "location": "string", - "url": "string" - } - ] -} -``` - -### Field Details -- `id`: Database ID (string format) -- `name`: Park name -- `status`: Formatted status display (e.g., "Operating") -- `location`: Formatted location string -- `url`: Full detail page URL - -## Test Coverage - -### API Tests -- JSON format validation -- Empty search handling -- Field type checking -- Result limit verification -- Response structure - -### UI Integration Tests -- View mode persistence -- Loading state verification -- Error handling -- Keyboard interaction - -### Data Format Tests -- Location string formatting -- Status display formatting -- URL generation -- Field type validation - -### Performance Tests -- Debounce functionality -- Result limiting (8 items) -- Query optimization -- Response timing \ No newline at end of file diff --git a/memory-bank/features/ride-search-improvements.md b/memory-bank/features/ride-search-improvements.md deleted file mode 100644 index 8234b453..00000000 --- a/memory-bank/features/ride-search-improvements.md +++ /dev/null @@ -1,129 +0,0 @@ -# Ride Search HTMX Improvements - -## Implementation Status: ✅ COMPLETED AND VERIFIED - -### Current Implementation - -#### 1. Smart Search (Implemented) -- Split search terms for flexible matching (e.g. "steel dragon" matches "Steel Dragon 2000") -- Searches across multiple fields: - - Ride name - - Park name - - Description -- Uses Django Q objects for complex queries -- Real-time HTMX-powered updates - -#### 2. Search Suggestions (Implemented) -- Real-time suggestions with 200ms delay -- Three types of suggestions: - - Common matching ride names (with count) - - Matching parks (with location) - - Matching categories (with ride count) -- Styled dropdown with icons and hover states -- Keyboard navigation support - -#### 3. Quick Filters (Implemented) -- Category filters from CATEGORY_CHOICES -- Operating status filter -- All filters use HTMX for instant updates -- Maintains search context when filtering -- Visual active state on selected filter - -#### 4. Active Filter Tags (Implemented) -- Shows currently active filters: - - Search terms - - Selected category - - Operating status -- One-click removal via HTMX -- Updates URL for bookmarking/sharing - -#### 5. Visual Feedback (Implemented) -- Loading spinner during HTMX requests -- Clear visual states for filter buttons -- Real-time feedback on search/filter actions -- Dark mode compatible styling - -### Technical Details - -#### View Implementation -```python -def get_queryset(self): - """Get filtered rides based on search and filters""" - queryset = Ride.objects.all().select_related( - 'park', - 'ride_model', - 'ride_model__manufacturer' - ).prefetch_related('photos') - - # Search term handling - search = self.request.GET.get('q', '').strip() - if search: - # Split search terms for more flexible matching - search_terms = search.split() - search_query = Q() - - for term in search_terms: - term_query = Q( - name__icontains=term - ) | Q( - park__name__icontains=term - ) | Q( - description__icontains=term - ) - search_query &= term_query - - queryset = queryset.filter(search_query) - - # Category filter - category = self.request.GET.get('category') - if category and category != 'all': - queryset = queryset.filter(category=category) - - # Operating status filter - if self.request.GET.get('operating') == 'true': - queryset = queryset.filter(status='operating') - - return queryset -``` - -#### Template Structure -- `ride_list.html`: Main template with search and filters -- `search_suggestions.html`: Dropdown suggestion UI -- `ride_list_results.html`: Results grid (HTMX target) - -#### Key Fixes Applied -1. Template Path Resolution - - CRITICAL FIX: Resolved template inheritance confusion - - Removed duplicate base.html templates - - Moved template to correct location: templates/base/base.html - - All templates now correctly extend "base/base.html" - - Template loading order matches Django's settings - -2. URL Resolution - - Replaced all relative "." URLs with explicit URLs using {% url %} - - Example: `hx-get="{% url 'rides:global_ride_list' %}"` - - Prevents conflicts with global search in base template - -3. HTMX Configuration - - All HTMX triggers properly configured - - Fixed grid layout persistence: - * Removed duplicate grid classes from parent template - * Grid classes now only in partial template - * Prevents layout breaking during HTMX updates - - Proper event delegation for dynamic content - -### Verification Points -1. ✅ Search updates in real-time -2. ✅ Filters work independently and combined -3. ✅ Suggestions appear as you type -4. ✅ Loading states show during requests -5. ✅ Dark mode properly supported -6. ✅ URL state maintained for sharing -7. ✅ No conflicts with global search -8. ✅ All templates resolve correctly - -### Future Considerations -1. Consider caching frequent searches -2. Monitor performance with large datasets -3. Add analytics for most used filters -4. Consider adding saved searches feature \ No newline at end of file diff --git a/memory-bank/features/roadtrip-service-documentation.md b/memory-bank/features/roadtrip-service-documentation.md deleted file mode 100644 index 310e5766..00000000 --- a/memory-bank/features/roadtrip-service-documentation.md +++ /dev/null @@ -1,361 +0,0 @@ -# OSM Road Trip Service Documentation - -## Overview - -The OSM Road Trip Service provides comprehensive road trip planning functionality for theme parks using free OpenStreetMap APIs. It enables users to plan routes between parks, find parks along routes, and optimize multi-park trips. - -## Features Implemented - -### 1. Core Service Architecture - -**Location**: [`parks/services/roadtrip.py`](../../parks/services/roadtrip.py) - -The service is built around the `RoadTripService` class which provides all road trip planning functionality with proper error handling, caching, and rate limiting. - -### 2. Geocoding Service - -Uses **Nominatim** (OpenStreetMap's geocoding service) to convert addresses to coordinates: - -```python -from parks.services import RoadTripService - -service = RoadTripService() -coords = service.geocode_address("Cedar Point, Sandusky, Ohio") -# Returns: Coordinates(latitude=41.4826, longitude=-82.6862) -``` - -**Features**: -- Converts any address string to latitude/longitude coordinates -- Automatic caching of geocoding results (24-hour cache) -- Proper error handling for invalid addresses -- Rate limiting (1 request per second) - -### 3. Route Calculation - -Uses **OSRM** (Open Source Routing Machine) for route calculation with fallback to straight-line distance: - -```python -from parks.services.roadtrip import Coordinates - -start = Coordinates(41.4826, -82.6862) # Cedar Point -end = Coordinates(28.4177, -81.5812) # Magic Kingdom - -route = service.calculate_route(start, end) -# Returns: RouteInfo(distance_km=1745.7, duration_minutes=1244, geometry="encoded_polyline") -``` - -**Features**: -- Real driving routes with distance and time estimates -- Encoded polyline geometry for route visualization -- Fallback to straight-line distance when routing fails -- Route caching (6-hour cache) -- Graceful error handling - -### 4. Park Integration - -Seamlessly integrates with existing [`Park`](../../parks/models/parks.py) and [`ParkLocation`](../../parks/models/location.py) models: - -```python -# Geocode parks that don't have coordinates -park = Park.objects.get(name="Some Park") -success = service.geocode_park_if_needed(park) - -# Get park coordinates -coords = park.coordinates # Returns (lat, lon) tuple or None -``` - -**Features**: -- Automatic geocoding for parks without coordinates -- Uses existing PostGIS PointField infrastructure -- Respects existing location data structure - -### 5. Route Discovery - -Find parks along a specific route within a detour distance: - -```python -start_park = Park.objects.get(name="Cedar Point") -end_park = Park.objects.get(name="Magic Kingdom") - -parks_along_route = service.find_parks_along_route( - start_park, - end_park, - max_detour_km=50 -) -``` - -**Features**: -- Finds parks within specified detour distance -- Calculates actual detour cost (not just proximity) -- Uses PostGIS spatial queries for efficiency - -### 6. Nearby Park Discovery - -Find all parks within a radius of a center park: - -```python -center_park = Park.objects.get(name="Disney World") -nearby_parks = service.get_park_distances(center_park, radius_km=100) - -# Returns list of dicts with park, distance, and duration info -for result in nearby_parks: - print(f"{result['park'].name}: {result['formatted_distance']}") -``` - -**Features**: -- Finds parks within specified radius -- Returns actual driving distances and times -- Sorted by distance -- Formatted output for easy display - -### 7. Multi-Park Trip Planning - -Plan optimized routes for visiting multiple parks: - -```python -parks_to_visit = [park1, park2, park3, park4] -trip = service.create_multi_park_trip(parks_to_visit) - -print(f"Total Distance: {trip.formatted_total_distance}") -print(f"Total Duration: {trip.formatted_total_duration}") - -for leg in trip.legs: - print(f"{leg.from_park.name} → {leg.to_park.name}: {leg.route.formatted_distance}") -``` - -**Features**: -- Optimizes route order using traveling salesman heuristics -- Exhaustive search for small groups (≤6 parks) -- Nearest neighbor heuristic for larger groups -- Returns detailed leg-by-leg information -- Total trip statistics - -## API Configuration - -### Django Settings - -Added to [`thrillwiki/settings.py`](../../thrillwiki/settings.py): - -```python -# Road Trip Service Settings -ROADTRIP_CACHE_TIMEOUT = 3600 * 24 # 24 hours for geocoding -ROADTRIP_ROUTE_CACHE_TIMEOUT = 3600 * 6 # 6 hours for routes -ROADTRIP_MAX_REQUESTS_PER_SECOND = 1 # Respect OSM rate limits -ROADTRIP_USER_AGENT = "ThrillWiki Road Trip Planner (https://thrillwiki.com)" -ROADTRIP_REQUEST_TIMEOUT = 10 # seconds -ROADTRIP_MAX_RETRIES = 3 -ROADTRIP_BACKOFF_FACTOR = 2 -``` - -### External APIs Used - -1. **Nominatim Geocoding**: `https://nominatim.openstreetmap.org/search` - - Free OpenStreetMap geocoding service - - Rate limit: 1 request per second - - Returns JSON with lat/lon coordinates - -2. **OSRM Routing**: `http://router.project-osrm.org/route/v1/driving/` - - Free routing service for driving directions - - Returns distance, duration, and route geometry - - Fallback to straight-line distance if unavailable - -## Data Models - -### Core Data Classes - -```python -@dataclass -class Coordinates: - latitude: float - longitude: float - -@dataclass -class RouteInfo: - distance_km: float - duration_minutes: int - geometry: Optional[str] = None # Encoded polyline - -@dataclass -class RoadTrip: - parks: List[Park] - legs: List[TripLeg] - total_distance_km: float - total_duration_minutes: int -``` - -### Integration Points - -- **Park Model**: Access via `park.coordinates` property -- **ParkLocation Model**: Uses `point` PointField for spatial data -- **Django Cache**: Automatic caching of API results -- **PostGIS**: Spatial queries for nearby park discovery - -## Performance & Caching - -### Caching Strategy - -1. **Geocoding Results**: 24-hour cache - - Cache key: `roadtrip:geocode:{hash(address)}` - - Reduces redundant API calls for same addresses - -2. **Route Calculations**: 6-hour cache - - Cache key: `roadtrip:route:{start_coords}:{end_coords}` - - Balances freshness with API efficiency - -### Rate Limiting - -- **1 request per second** to respect OSM usage policies -- Automatic rate limiting between API calls -- Exponential backoff for failed requests -- User-Agent identification as required by OSM - -## Error Handling - -### Graceful Degradation - -1. **Network Issues**: Retry with exponential backoff -2. **Invalid Coordinates**: Fall back to straight-line distance -3. **Geocoding Failures**: Return None, don't crash -4. **Missing Location Data**: Skip parks without coordinates -5. **API Rate Limits**: Automatic waiting and retry - -### Logging - -Comprehensive logging for debugging and monitoring: -- Successful geocoding/routing operations -- API failures and retry attempts -- Cache hits and misses -- Rate limiting activation - -## Testing - -### Test Suite - -**Location**: [`test_roadtrip_service.py`](../../test_roadtrip_service.py) - -Comprehensive test suite covering: -- Geocoding functionality -- Route calculation -- Park integration -- Multi-park trip planning -- Error handling -- Rate limiting -- Cache functionality - -### Test Results Summary - -- ✅ **Geocoding**: Successfully geocodes theme park addresses -- ✅ **Routing**: Calculates accurate routes with OSRM -- ✅ **Caching**: Properly caches results to minimize API calls -- ✅ **Rate Limiting**: Respects 1 req/sec limit -- ✅ **Trip Planning**: Optimizes multi-park routes -- ✅ **Error Handling**: Gracefully handles failures -- ✅ **Integration**: Works with existing Park/ParkLocation models - -## Usage Examples - -### Basic Geocoding and Routing - -```python -from parks.services import RoadTripService - -service = RoadTripService() - -# Geocode an address -coords = service.geocode_address("Universal Studios, Orlando, FL") - -# Calculate route between two points -from parks.services.roadtrip import Coordinates -start = Coordinates(28.4755, -81.4685) # Universal -end = Coordinates(28.4177, -81.5812) # Magic Kingdom - -route = service.calculate_route(start, end) -print(f"Distance: {route.formatted_distance}") -print(f"Duration: {route.formatted_duration}") -``` - -### Working with Parks - -```python -# Find nearby parks -disney_world = Park.objects.get(name="Magic Kingdom") -nearby = service.get_park_distances(disney_world, radius_km=50) - -for result in nearby[:5]: - park = result['park'] - print(f"{park.name}: {result['formatted_distance']} away") - -# Plan a multi-park trip -florida_parks = [ - Park.objects.get(name="Magic Kingdom"), - Park.objects.get(name="SeaWorld Orlando"), - Park.objects.get(name="Universal Studios Florida"), -] - -trip = service.create_multi_park_trip(florida_parks) -print(f"Optimized trip: {trip.formatted_total_distance}") -``` - -### Find Parks Along Route - -```python -start_park = Park.objects.get(name="Cedar Point") -end_park = Park.objects.get(name="Kings Island") - -# Find parks within 25km of the route -parks_along_route = service.find_parks_along_route( - start_park, - end_park, - max_detour_km=25 -) - -print(f"Found {len(parks_along_route)} parks along the route") -``` - -## OSM Usage Compliance - -### Respectful API Usage - -- **Proper User-Agent**: Identifies application and contact info -- **Rate Limiting**: 1 request per second as recommended -- **Caching**: Minimizes redundant API calls -- **Error Handling**: Doesn't spam APIs when they fail -- **Attribution**: Service credits OpenStreetMap data - -### Terms Compliance - -- Uses free OSM services within their usage policies -- Provides proper attribution for OpenStreetMap data -- Implements reasonable rate limiting -- Graceful fallbacks when services unavailable - -## Future Enhancements - -### Potential Improvements - -1. **Alternative Routing Providers** - - GraphHopper integration as OSRM backup - - Mapbox Directions API for premium users - -2. **Advanced Trip Planning** - - Time-based optimization (opening hours, crowds) - - Multi-day trip planning with hotels - - Seasonal route recommendations - -3. **Performance Optimizations** - - Background geocoding of new parks - - Precomputed distance matrices for popular parks - - Redis caching for high-traffic scenarios - -4. **User Features** - - Save and share trip plans - - Export to GPS devices - - Integration with calendar apps - -## Dependencies - -- **requests**: HTTP client for API calls -- **Django GIS**: PostGIS integration for spatial queries -- **Django Cache**: Built-in caching framework - -All dependencies are managed via UV package manager as per project standards. \ No newline at end of file diff --git a/memory-bank/features/search-location-integration.md b/memory-bank/features/search-location-integration.md deleted file mode 100644 index 82d7f941..00000000 --- a/memory-bank/features/search-location-integration.md +++ /dev/null @@ -1,1428 +0,0 @@ -# Search-Location Integration Plan - ThrillWiki - -## Executive Summary - -This document outlines the comprehensive integration strategy for enhancing ThrillWiki's existing search system with location capabilities. The plan builds upon the current django-filters based search architecture and integrates it with the designed domain-specific location models (ParkLocation, RideLocation, CompanyHeadquarters) and unified map service. - -## Current State Analysis - -### Existing Search Architecture -- **Framework**: Django-filters with [`ParkFilter`](parks/filters.py:26) and [`RideSearchView`](rides/views.py:1) -- **Current Capabilities**: Text search, status filtering, operator filtering, date ranges, numeric filters -- **Performance**: Basic queryset optimization with select_related/prefetch_related -- **UI**: HTMX-driven filtered results with [`AdaptiveSearchView`](core/views/search.py:5) -- **Templates**: Structured template hierarchy in [`templates/core/search/`](templates/core/search/) - -### Location System State -- **Current**: Hybrid system with both generic Location model and domain-specific models -- **Designed**: Complete transition to [`ParkLocation`](memory-bank/features/location-models-design.md:17), [`RideLocation`](memory-bank/features/location-models-design.md:213), [`CompanyHeadquarters`](memory-bank/features/location-models-design.md:317) -- **Spatial Features**: PostGIS with PointField, spatial indexing, distance calculations -- **Map Integration**: [`UnifiedMapService`](memory-bank/features/map-service-design.md:35) designed for clustering and filtering - -## 1. Search Index Enhancement Plan - -### 1.1 Location Field Integration - -#### Current Search Fields Extension -```python -# Enhanced ParkFilter in parks/filters.py -class LocationEnhancedParkFilter(ParkFilter): - search_fields = [ - 'name__icontains', - 'description__icontains', - 'park_location__city__icontains', # Domain-specific model - 'park_location__state__icontains', - 'park_location__country__icontains', - 'park_location__highway_exit__icontains', # Road trip specific - 'park_location__postal_code__icontains' - ] -``` - -#### Spatial Search Fields -```python -# New spatial search capabilities -class SpatialSearchMixin: - near_point = PointFilter( - method='filter_near_point', - help_text="Search within radius of coordinates" - ) - - within_radius = NumberFilter( - method='filter_within_radius', - help_text="Radius in miles (used with near_point)" - ) - - within_bounds = BoundsFilter( - method='filter_within_bounds', - help_text="Search within geographic bounding box" - ) -``` - -#### Index Performance Strategy -- **Spatial Indexes**: Maintain GIST indexes on all PointField columns -- **Composite Indexes**: Add indexes on frequently combined fields (city+state, country+state) -- **Text Search Optimization**: Consider adding GIN indexes for full-text search if performance degrades - -### 1.2 Cross-Domain Location Indexing - -#### Unified Location Search Index -```python -class UnifiedLocationSearchService: - """Service for searching across all location-enabled models""" - - def search_all_locations(self, query: str, location_types: Set[str] = None): - results = [] - - # Parks via ParkLocation - if not location_types or 'park' in location_types: - park_results = self._search_parks(query) - results.extend(park_results) - - # Rides via RideLocation (optional) - if not location_types or 'ride' in location_types: - ride_results = self._search_rides(query) - results.extend(ride_results) - - # Companies via CompanyHeadquarters - if not location_types or 'company' in location_types: - company_results = self._search_companies(query) - results.extend(company_results) - - return self._rank_and_sort_results(results) -``` - -## 2. Spatial Search Architecture - -### 2.1 Geographic Query Patterns - -#### Distance-Based Search -```python -class DistanceSearchMixin: - """Mixin for distance-based filtering""" - - def filter_near_point(self, queryset, name, value): - """Filter by proximity to a point""" - if not value or not hasattr(value, 'coords'): - return queryset - - radius_miles = self.data.get('within_radius', 50) # Default 50 miles - - from django.contrib.gis.measure import D - return queryset.filter( - park_location__point__distance_lte=( - value, D(mi=radius_miles) - ) - ).annotate( - distance=Distance('park_location__point', value) - ).order_by('distance') - - def filter_within_bounds(self, queryset, name, value): - """Filter within geographic bounding box""" - if not value or not hasattr(value, 'extent'): - return queryset - - return queryset.filter( - park_location__point__within=value.extent - ) -``` - -#### Geocoding Integration Pattern -```python -class GeocodingSearchMixin: - """Handle address-to-coordinate conversion in search""" - - def filter_near_address(self, queryset, name, value): - """Search near an address (geocoded to coordinates)""" - if not value: - return queryset - - # Use OpenStreetMap Nominatim for geocoding - coordinates = self._geocode_address(value) - if not coordinates: - # Graceful fallback to text search - return self._fallback_text_search(queryset, value) - - return self.filter_near_point(queryset, name, coordinates) - - def _geocode_address(self, address: str) -> Optional[Point]: - """Convert address to coordinates with caching""" - cache_key = f"geocode:{hash(address)}" - cached = cache.get(cache_key) - if cached: - return cached - - # Implementation using Nominatim API - result = nominatim_geocode(address) - if result: - point = Point(result['lon'], result['lat']) - cache.set(cache_key, point, timeout=86400) # 24 hours - return point - return None -``` - -### 2.2 Database Query Optimization - -#### Optimized Spatial Queries -```python -class SpatialQueryOptimizer: - """Optimize spatial queries for performance""" - - def get_optimized_queryset(self, base_queryset, spatial_filters): - """Apply optimizations based on query type""" - - # Use spatial index hints for PostgreSQL - queryset = base_queryset.extra( - select={'spatial_distance': 'ST_Distance(park_location.point, %s)'}, - select_params=[spatial_filters.get('reference_point')] - ) if spatial_filters.get('reference_point') else base_queryset - - # Limit radius searches to reasonable bounds - max_radius = min(spatial_filters.get('radius', 100), 500) # Cap at 500 miles - - # Pre-filter with bounding box before distance calculation - if spatial_filters.get('reference_point'): - # Create bounding box slightly larger than radius for pre-filtering - bbox = self._create_bounding_box( - spatial_filters['reference_point'], - max_radius * 1.1 - ) - queryset = queryset.filter(park_location__point__within=bbox) - - return queryset -``` - -## 3. "Near Me" Functionality Design - -### 3.1 Geolocation Integration - -#### Frontend Geolocation Handling -```javascript -class LocationSearchService { - constructor() { - this.userLocation = null; - this.locationPermission = 'prompt'; - } - - async requestUserLocation() { - try { - if (!navigator.geolocation) { - throw new Error('Geolocation not supported'); - } - - const position = await new Promise((resolve, reject) => { - navigator.geolocation.getCurrentPosition( - resolve, - reject, - { - enableHighAccuracy: true, - timeout: 10000, - maximumAge: 300000 // 5 minutes - } - ); - }); - - this.userLocation = { - lat: position.coords.latitude, - lng: position.coords.longitude, - accuracy: position.coords.accuracy - }; - - this.locationPermission = 'granted'; - return this.userLocation; - - } catch (error) { - this.locationPermission = 'denied'; - await this._handleLocationError(error); - return null; - } - } - - async _handleLocationError(error) { - switch(error.code) { - case error.PERMISSION_DENIED: - await this._tryIPLocationFallback(); - break; - case error.POSITION_UNAVAILABLE: - this._showLocationUnavailableMessage(); - break; - case error.TIMEOUT: - this._showTimeoutMessage(); - break; - } - } -} -``` - -#### Privacy and Permission Handling -```python -class LocationPrivacyMixin: - """Handle location privacy concerns""" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - # Add location permission context - context.update({ - 'location_features_available': True, - 'privacy_policy_url': reverse('privacy'), - 'location_consent_required': True, - 'ip_location_available': self._ip_location_available(), - }) - - return context - - def _ip_location_available(self) -> bool: - """Check if IP-based location estimation is available""" - # Could integrate with MaxMind GeoIP or similar service - return hasattr(self.request, 'META') and 'HTTP_CF_IPCOUNTRY' in self.request.META -``` - -### 3.2 Fallback Strategies - -#### IP-Based Location Approximation -```python -class IPLocationService: - """Fallback location service using IP geolocation""" - - def get_approximate_location(self, request) -> Optional[Dict]: - """Get approximate location from IP address""" - try: - # Try Cloudflare country header first - country = request.META.get('HTTP_CF_IPCOUNTRY') - if country and country != 'XX': - return self._country_to_coordinates(country) - - # Fallback to GeoIP database - ip_address = self._get_client_ip(request) - return self._geoip_lookup(ip_address) - - except Exception: - return None - - def _country_to_coordinates(self, country_code: str) -> Dict: - """Convert country code to approximate center coordinates""" - country_centers = { - 'US': {'lat': 39.8283, 'lng': -98.5795, 'accuracy': 'country'}, - 'CA': {'lat': 56.1304, 'lng': -106.3468, 'accuracy': 'country'}, - # Add more countries as needed - } - return country_centers.get(country_code.upper()) -``` - -## 4. Location-Based Filtering Integration - -### 4.1 Enhanced Filter Integration - -#### Geographic Region Filters -```python -class GeographicFilterMixin: - """Add geographic filtering to existing filter system""" - - # State/Province filtering - state = ModelChoiceFilter( - field_name='park_location__state', - queryset=None, # Dynamically populated - empty_label='Any state/province', - method='filter_by_state' - ) - - # Country filtering - country = ModelChoiceFilter( - field_name='park_location__country', - queryset=None, # Dynamically populated - empty_label='Any country', - method='filter_by_country' - ) - - # Metropolitan area clustering - metro_area = ChoiceFilter( - method='filter_by_metro_area', - choices=[] # Dynamically populated - ) - - def filter_by_metro_area(self, queryset, name, value): - """Filter by predefined metropolitan areas""" - metro_definitions = { - 'orlando': { - 'center': Point(-81.3792, 28.5383), - 'radius_miles': 30 - }, - 'los_angeles': { - 'center': Point(-118.2437, 34.0522), - 'radius_miles': 50 - }, - # Add more metropolitan areas - } - - metro = metro_definitions.get(value) - if not metro: - return queryset - - from django.contrib.gis.measure import D - return queryset.filter( - park_location__point__distance_lte=( - metro['center'], D(mi=metro['radius_miles']) - ) - ) -``` - -#### Performance-Optimized Filtering -```python -class OptimizedLocationFilter(GeographicFilterMixin, ParkFilter): - """Location filtering with performance optimizations""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - # Dynamically populate geographic choices based on available data - self._populate_geographic_choices() - - def _populate_geographic_choices(self): - """Populate geographic filter choices efficiently""" - - # Cache geographic options for performance - cache_key = 'location_filter_choices' - cached_choices = cache.get(cache_key) - - if not cached_choices: - # Query distinct values efficiently - states = ParkLocation.objects.values_list( - 'state', flat=True - ).distinct().order_by('state') - - countries = ParkLocation.objects.values_list( - 'country', flat=True - ).distinct().order_by('country') - - cached_choices = { - 'states': [(s, s) for s in states if s], - 'countries': [(c, c) for c in countries if c] - } - - cache.set(cache_key, cached_choices, timeout=3600) # 1 hour - - # Update filter choices - self.filters['state'].extra['choices'] = cached_choices['states'] - self.filters['country'].extra['choices'] = cached_choices['countries'] -``` - -## 5. Query Integration Patterns - -### 5.1 Hybrid Search Scoring - -#### Relevance + Proximity Scoring -```python -class HybridSearchRanking: - """Combine text relevance with geographic proximity""" - - def rank_results(self, queryset, search_query: str, user_location: Point = None): - """Apply hybrid ranking algorithm""" - - # Base text relevance scoring - queryset = queryset.annotate( - text_rank=Case( - When(name__iexact=search_query, then=Value(100)), - When(name__icontains=search_query, then=Value(80)), - When(description__icontains=search_query, then=Value(60)), - When(park_location__city__icontains=search_query, then=Value(40)), - default=Value(20), - output_field=IntegerField() - ) - ) - - # Add proximity scoring if user location available - if user_location: - queryset = queryset.annotate( - distance_miles=Distance('park_location__point', user_location), - proximity_rank=Case( - When(distance_miles__lt=25, then=Value(50)), # Very close - When(distance_miles__lt=100, then=Value(30)), # Close - When(distance_miles__lt=300, then=Value(10)), # Regional - default=Value(0), - output_field=IntegerField() - ) - ) - - # Combined score: text relevance + proximity bonus - queryset = queryset.annotate( - combined_rank=F('text_rank') + F('proximity_rank') - ).order_by('-combined_rank', 'distance_miles') - else: - queryset = queryset.order_by('-text_rank', 'name') - - return queryset -``` - -### 5.2 Cross-Domain Location Search - -#### Unified Search Across Entities -```python -class UnifiedLocationSearchView(TemplateView): - """Search across parks, rides, and companies with location context""" - - template_name = "core/search/unified_results.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - - query = self.request.GET.get('q', '') - location_types = self.request.GET.getlist('types', ['park', 'ride', 'company']) - user_location = self._get_user_location() - - results = { - 'parks': [], - 'rides': [], - 'companies': [], - 'unified': [] - } - - # Search each entity type with location context - if 'park' in location_types: - results['parks'] = self._search_parks(query, user_location) - - if 'ride' in location_types: - results['rides'] = self._search_rides(query, user_location) - - if 'company' in location_types: - results['companies'] = self._search_companies(query, user_location) - - # Create unified ranked results - results['unified'] = self._create_unified_results(results, user_location) - - context.update({ - 'results': results, - 'search_query': query, - 'user_location': user_location, - 'total_results': sum(len(r) for r in results.values() if isinstance(r, list)) - }) - - return context -``` - -## 6. Geocoding Integration Strategy - -### 6.1 OpenStreetMap Nominatim Integration - -#### Geocoding Service Implementation -```python -class GeocodingService: - """Geocoding service using OpenStreetMap Nominatim""" - - def __init__(self): - self.base_url = "https://nominatim.openstreetmap.org" - self.session = requests.Session() - self.session.headers.update({ - 'User-Agent': 'ThrillWiki/1.0 (contact@thrillwiki.com)' - }) - - def geocode_address(self, address: str, country_bias: str = None) -> Optional[Dict]: - """Convert address to coordinates""" - cache_key = f"geocode:{hashlib.md5(address.encode()).hexdigest()}" - cached = cache.get(cache_key) - if cached: - return cached - - params = { - 'q': address, - 'format': 'json', - 'limit': 1, - 'addressdetails': 1, - 'extratags': 1 - } - - if country_bias: - params['countrycodes'] = country_bias - - try: - response = self.session.get( - f"{self.base_url}/search", - params=params, - timeout=5 - ) - response.raise_for_status() - - results = response.json() - if results: - result = results[0] - geocoded = { - 'lat': float(result['lat']), - 'lng': float(result['lon']), - 'display_name': result['display_name'], - 'confidence': float(result.get('importance', 0.5)), - 'address_components': result.get('address', {}) - } - - # Cache successful results for 7 days - cache.set(cache_key, geocoded, timeout=604800) - return geocoded - - except (requests.RequestException, ValueError, KeyError) as e: - logger.warning(f"Geocoding failed for '{address}': {e}") - - # Cache failed attempts for 1 hour to prevent repeated API calls - cache.set(cache_key, None, timeout=3600) - return None - - def reverse_geocode(self, lat: float, lng: float) -> Optional[Dict]: - """Convert coordinates to address""" - cache_key = f"reverse_geocode:{lat:.4f},{lng:.4f}" - cached = cache.get(cache_key) - if cached: - return cached - - params = { - 'lat': lat, - 'lon': lng, - 'format': 'json', - 'addressdetails': 1 - } - - try: - response = self.session.get( - f"{self.base_url}/reverse", - params=params, - timeout=5 - ) - response.raise_for_status() - - result = response.json() - if result: - address = { - 'display_name': result['display_name'], - 'components': result.get('address', {}) - } - - cache.set(cache_key, address, timeout=604800) # 7 days - return address - - except (requests.RequestException, ValueError) as e: - logger.warning(f"Reverse geocoding failed for {lat},{lng}: {e}") - - return None -``` - -### 6.2 Search Query Enhancement - -#### Intelligent Address Detection -```python -class SmartQueryProcessor: - """Detect and process different types of search queries""" - - def __init__(self): - self.geocoding_service = GeocodingService() - - def process_search_query(self, query: str) -> Dict: - """Analyze query and determine search strategy""" - - query_analysis = { - 'original_query': query, - 'is_address': self._looks_like_address(query), - 'is_coordinates': self._looks_like_coordinates(query), - 'has_location_keywords': self._has_location_keywords(query), - 'processed_query': query, - 'geocoded_location': None, - 'search_strategy': 'text' - } - - if query_analysis['is_coordinates']: - coords = self._parse_coordinates(query) - if coords: - query_analysis['geocoded_location'] = coords - query_analysis['search_strategy'] = 'spatial' - - elif query_analysis['is_address'] or query_analysis['has_location_keywords']: - geocoded = self.geocoding_service.geocode_address(query) - if geocoded: - query_analysis['geocoded_location'] = geocoded - query_analysis['search_strategy'] = 'spatial_text_hybrid' - - return query_analysis - - def _looks_like_address(self, query: str) -> bool: - """Detect if query looks like an address""" - address_patterns = [ - r'\d+\s+\w+\s+(street|st|avenue|ave|road|rd|boulevard|blvd)', - r'\w+,\s*\w+\s*\d{5}', # City, State ZIP - r'\w+,\s*\w+,\s*\w+', # City, State, Country - ] - - return any(re.search(pattern, query, re.IGNORECASE) for pattern in address_patterns) - - def _looks_like_coordinates(self, query: str) -> bool: - """Detect if query contains coordinates""" - coord_pattern = r'-?\d+\.?\d*\s*,\s*-?\d+\.?\d*' - return bool(re.search(coord_pattern, query)) -``` - -## 7. Performance Optimization Strategy - -### 7.1 Database Optimization - -#### Spatial Index Strategy -```sql --- Essential indexes for location-enhanced search -CREATE INDEX CONCURRENTLY idx_park_location_point_gist -ON parks_parklocation USING GIST (point); - -CREATE INDEX CONCURRENTLY idx_park_location_city_state -ON parks_parklocation (city, state); - -CREATE INDEX CONCURRENTLY idx_park_location_country -ON parks_parklocation (country); - --- Composite indexes for common filter combinations -CREATE INDEX CONCURRENTLY idx_park_status_location -ON parks_park (status) -INCLUDE (id) -WHERE status = 'OPERATING'; - --- Partial indexes for performance -CREATE INDEX CONCURRENTLY idx_ride_location_point_not_null -ON rides_ridelocation USING GIST (entrance_point) -WHERE entrance_point IS NOT NULL; -``` - -#### Query Optimization Patterns -```python -class SpatialQueryOptimizer: - """Optimize spatial queries for performance""" - - def optimize_distance_query(self, queryset, reference_point, max_radius_miles=100): - """Optimize distance-based queries with bounding box pre-filtering""" - - # Create bounding box for initial filtering (much faster than distance calc) - bbox = self._create_bounding_box(reference_point, max_radius_miles) - - # Pre-filter with bounding box, then apply precise distance filter - return queryset.filter( - park_location__point__within=bbox - ).filter( - park_location__point__distance_lte=(reference_point, D(mi=max_radius_miles)) - ).annotate( - distance=Distance('park_location__point', reference_point) - ).order_by('distance') - - def optimize_multi_location_query(self, park_qs, ride_qs, bounds=None): - """Optimize queries across multiple location types""" - - # Use common table expressions for complex spatial queries - if bounds: - # Apply spatial filtering early - park_qs = park_qs.filter(park_location__point__within=bounds) - ride_qs = ride_qs.filter(ride_location__entrance_point__within=bounds) - - # Use union for combining different location types efficiently - return park_qs.union(ride_qs, all=False) -``` - -### 7.2 Caching Strategy - -#### Multi-Level Caching Architecture -```python -class LocationSearchCache: - """Caching strategy for location-enhanced search""" - - CACHE_TIMEOUTS = { - 'geocoding': 604800, # 7 days - 'search_results': 300, # 5 minutes - 'location_filters': 3600, # 1 hour - 'spatial_index': 86400, # 24 hours - } - - def cache_search_results(self, cache_key: str, results: QuerySet, - user_location: Point = None): - """Cache search results with location context""" - - # Include location context in cache key - if user_location: - location_hash = hashlib.md5( - f"{user_location.x:.4f},{user_location.y:.4f}".encode() - ).hexdigest()[:8] - cache_key = f"{cache_key}_loc_{location_hash}" - - # Cache serialized results - serialized = self._serialize_results(results) - cache.set( - cache_key, - serialized, - timeout=self.CACHE_TIMEOUTS['search_results'] - ) - - def get_cached_search_results(self, cache_key: str, user_location: Point = None): - """Retrieve cached search results""" - - if user_location: - location_hash = hashlib.md5( - f"{user_location.x:.4f},{user_location.y:.4f}".encode() - ).hexdigest()[:8] - cache_key = f"{cache_key}_loc_{location_hash}" - - cached = cache.get(cache_key) - return self._deserialize_results(cached) if cached else None - - def invalidate_location_cache(self, location_type: str = None): - """Invalidate location-related caches when data changes""" - patterns = [ - 'search_results_*', - 'location_filters', - 'geocoding_*' - ] - - if location_type: - patterns.append(f'{location_type}_search_*') - - # Use cache versioning for efficient invalidation - for pattern in patterns: - cache.delete_pattern(pattern) -``` - -### 7.3 Performance Monitoring - -#### Search Performance Metrics -```python -class SearchPerformanceMonitor: - """Monitor search performance and spatial query efficiency""" - - def __init__(self): - self.metrics_logger = logging.getLogger('search.performance') - - def track_search_query(self, query_type: str, query_params: Dict, - execution_time: float, result_count: int): - """Track search query performance""" - - metrics = { - 'query_type': query_type, - 'has_spatial_filter': bool(query_params.get('user_location')), - 'has_text_search': bool(query_params.get('search')), - 'filter_count': len([k for k, v in query_params.items() if v]), - 'execution_time_ms': execution_time * 1000, - 'result_count': result_count, - 'timestamp': timezone.now().isoformat() - } - - # Log performance data - self.metrics_logger.info(json.dumps(metrics)) - - # Alert on slow queries - if execution_time > 1.0: # Queries over 1 second - self._alert_slow_query(metrics) - - def _alert_slow_query(self, metrics: Dict): - """Alert on performance issues""" - # Implementation for alerting system - pass -``` - -## 8. API Enhancement Design - -### 8.1 Location-Aware Search Endpoints - -#### Enhanced Search API -```python -class LocationSearchAPIView(APIView): - """REST API for location-enhanced search""" - - def get(self, request): - """ - Enhanced search endpoint with location capabilities - - Query Parameters: - - q: Search query (text) - - lat, lng: User coordinates for proximity search - - radius: Search radius in miles (default: 50) - - bounds: Geographic bounding box (format: north,south,east,west) - - types: Entity types to search (park,ride,company) - - filters: Additional filters (status, operator, etc.) - """ - - try: - search_params = self._parse_search_params(request.GET) - results = self._execute_search(search_params) - - return Response({ - 'status': 'success', - 'data': { - 'results': results['items'], - 'total_count': results['total'], - 'search_params': search_params, - 'has_more': results['has_more'] - }, - 'meta': { - 'query_time_ms': results['execution_time'] * 1000, - 'cache_hit': results['from_cache'], - 'location_used': bool(search_params.get('user_location')) - } - }) - - except ValidationError as e: - return Response({ - 'status': 'error', - 'error': 'Invalid search parameters', - 'details': str(e) - }, status=400) - - def _parse_search_params(self, params: QueryDict) -> Dict: - """Parse and validate search parameters""" - - # Parse user location - user_location = None - if params.get('lat') and params.get('lng'): - try: - lat = float(params['lat']) - lng = float(params['lng']) - if -90 <= lat <= 90 and -180 <= lng <= 180: - user_location = Point(lng, lat) - except (ValueError, TypeError): - raise ValidationError("Invalid coordinates") - - # Parse bounding box - bounds = None - if params.get('bounds'): - try: - north, south, east, west = map(float, params['bounds'].split(',')) - bounds = Polygon.from_bbox((west, south, east, north)) - except (ValueError, TypeError): - raise ValidationError("Invalid bounds format") - - return { - 'query': params.get('q', '').strip(), - 'user_location': user_location, - 'radius_miles': min(float(params.get('radius', 50)), 500), # Cap at 500 miles - 'bounds': bounds, - 'entity_types': params.getlist('types') or ['park'], - 'filters': self._parse_filters(params), - 'page': int(params.get('page', 1)), - 'page_size': min(int(params.get('page_size', 20)), 100) # Cap at 100 - } -``` - -#### Autocomplete API with Location Context -```python -class LocationAutocompleteAPIView(APIView): - """Autocomplete API with location awareness""" - - def get(self, request): - """ - Location-aware autocomplete for search queries - - Returns suggestions based on: - 1. Entity names (parks, rides, companies) - 2. Location names (cities, states, countries) - 3. Address suggestions - """ - - query = request.GET.get('q', '').strip() - if len(query) < 2: - return Response({'suggestions': []}) - - user_location = self._parse_user_location(request.GET) - - suggestions = [] - - # Entity name suggestions (with location context for ranking) - entity_suggestions = self._get_entity_suggestions(query, user_location) - suggestions.extend(entity_suggestions) - - # Location name suggestions - location_suggestions = self._get_location_suggestions(query) - suggestions.extend(location_suggestions) - - # Address suggestions (via geocoding) - if self._looks_like_address(query): - address_suggestions = self._get_address_suggestions(query) - suggestions.extend(address_suggestions) - - # Rank and limit suggestions - ranked_suggestions = self._rank_suggestions(suggestions, user_location)[:10] - - return Response({ - 'suggestions': ranked_suggestions, - 'query': query - }) -``` - -### 8.2 Enhanced Response Formats - -#### Unified Location Response Format -```python -class LocationSearchResultSerializer(serializers.Serializer): - """Unified serializer for location-enhanced search results""" - - id = serializers.CharField() - type = serializers.CharField() # 'park', 'ride', 'company' - name = serializers.CharField() - slug = serializers.CharField() - - # Location data - location = serializers.SerializerMethodField() - - # Entity-specific data - entity_data = serializers.SerializerMethodField() - - # Search relevance - relevance_score = serializers.FloatField(required=False) - distance_miles = serializers.FloatField(required=False) - - def get_location(self, obj): - """Get unified location data""" - if hasattr(obj, 'park_location'): - location = obj.park_location - return { - 'coordinates': [location.point.y, location.point.x] if location.point else None, - 'address': location.formatted_address, - 'city': location.city, - 'state': location.state, - 'country': location.country, - 'highway_exit': location.highway_exit - } - elif hasattr(obj, 'ride_location'): - location = obj.ride_location - return { - 'coordinates': [location.entrance_point.y, location.entrance_point.x] - if location.entrance_point else None, - 'park_area': location.park_area, - 'park_location': self._get_park_location(obj.park) - } - # Add other location types... - return None - - def get_entity_data(self, obj): - """Get entity-specific data based on type""" - if obj._meta.model_name == 'park': - return { - 'status': obj.status, - 'operator': obj.operating_company.name if obj.operating_company else None, - 'ride_count': getattr(obj, 'ride_count', 0), - 'coaster_count': getattr(obj, 'coaster_count', 0), - 'website': obj.website, - 'opening_date': obj.opening_date - } - elif obj._meta.model_name == 'ride': - return { - 'category': obj.category, - 'park': { - 'name': obj.park.name, - 'slug': obj.park.slug - }, - 'manufacturer': obj.ride_model.manufacturer.name - if obj.ride_model and obj.ride_model.manufacturer else None, - 'opening_date': obj.opening_date - } - # Add other entity types... - return {} -``` - -## 9. User Experience Design - -### 9.1 Search Interface Enhancements - -#### Location-Aware Search Form -```html - -
- -
- - - - -
- - - - - -
- - - - -
-
-``` - -#### Location Permission Handling -```javascript -class LocationPermissionManager { - constructor() { - this.permissionStatus = 'unknown'; - this.setupEventHandlers(); - } - - setupEventHandlers() { - // Location button click handler - document.getElementById('use-my-location').addEventListener('click', - () => this.requestLocation()); - - // Privacy-conscious permission checking - if ('permissions' in navigator) { - navigator.permissions.query({name: 'geolocation'}) - .then(permission => { - this.permissionStatus = permission.state; - this.updateLocationButton(); - - permission.addEventListener('change', () => { - this.permissionStatus = permission.state; - this.updateLocationButton(); - }); - }); - } - } - - async requestLocation() { - // Show privacy notice if first time - if (this.permissionStatus === 'prompt') { - const consent = await this.showPrivacyConsent(); - if (!consent) return; - } - - try { - this.showLocationLoading(); - const location = await this.getCurrentPosition(); - this.handleLocationSuccess(location); - } catch (error) { - this.handleLocationError(error); - } - } - - showPrivacyConsent() { - return new Promise(resolve => { - const modal = document.createElement('div'); - modal.className = 'location-consent-modal'; - modal.innerHTML = ` - - `; - - modal.addEventListener('click', (e) => { - if (e.target.dataset.action === 'allow') { - resolve(true); - } else if (e.target.dataset.action === 'deny') { - resolve(false); - } - modal.remove(); - }); - - document.body.appendChild(modal); - }); - } -} -``` - -### 9.2 Results Display with Location Context - -#### Distance-Enhanced Results -```html - -
- {% for result in results %} -
-
-

- {{ result.name }} -

- - {% if result.distance_miles %} -
- - {{ result.distance_miles|floatformat:1 }} miles away -
- {% endif %} -
- -
- {% if result.location.address %} - - {{ result.location.address }} - - {% if result.location.highway_exit %} -
- - Exit: {{ result.location.highway_exit }} -
- {% endif %} - {% endif %} -
- -
- View Details - - {% if result.location.coordinates %} - - {% endif %} - - {% if user_location and result.location.coordinates %} - - {% endif %} -
-
- {% endfor %} -
-``` - -### 9.3 Map Integration - -#### Search-Map Bidirectional Integration -```javascript -class SearchMapIntegration { - constructor(mapInstance, searchForm) { - this.map = mapInstance; - this.searchForm = searchForm; - this.searchResults = []; - this.setupIntegration(); - } - - setupIntegration() { - // Update search when map viewport changes - this.map.on('moveend', () => { - if (this.shouldUpdateSearchOnMapMove()) { - this.updateSearchFromMap(); - } - }); - - // Show search results on map - this.searchForm.addEventListener('results-updated', (e) => { - this.showResultsOnMap(e.detail.results); - }); - - // Handle "Show on Map" button clicks - document.addEventListener('click', (e) => { - if (e.target.matches('.show-on-map')) { - const resultId = e.target.dataset.resultId; - this.highlightResultOnMap(resultId); - } - }); - } - - updateSearchFromMap() { - const bounds = this.map.getBounds(); - const boundsParam = [ - bounds.getNorth(), - bounds.getSouth(), - bounds.getEast(), - bounds.getWest() - ].join(','); - - // Update search form with map bounds - const boundsInput = document.getElementById('search-bounds'); - if (boundsInput) { - boundsInput.value = boundsParam; - this.searchForm.submit(); - } - } - - showResultsOnMap(results) { - // Clear existing result markers - this.clearResultMarkers(); - - // Add markers for each result with location - results.forEach(result => { - if (result.location && result.location.coordinates) { - const marker = this.addResultMarker(result); - this.searchResults.push({ - id: result.id, - marker: marker, - data: result - }); - } - }); - - // Adjust map view to show all results - if (this.searchResults.length > 0) { - this.fitMapToResults(); - } - } -} -``` - -## 10. Implementation Phases - -### Phase 1: Foundation (Weeks 1-2) -**Goal**: Establish basic location search infrastructure - -**Tasks**: -1. **Enhanced Filter Classes** - - Extend [`ParkFilter`](parks/filters.py:26) with spatial search mixins - - Create `DistanceSearchMixin` and `GeographicFilterMixin` - - Add location-based filter fields (state, country, metro area) - -2. **Geocoding Service Integration** - - Implement `GeocodingService` with OpenStreetMap Nominatim - - Add address detection and coordinate parsing - - Set up caching layer for geocoding results - -3. **Database Optimization** - - Add spatial indexes to location models - - Create composite indexes for common filter combinations - - Optimize existing queries for location joins - -**Deliverables**: -- Enhanced filter classes with location capabilities -- Working geocoding service with caching -- Optimized database indexes - -### Phase 2: Core Search Enhancement (Weeks 3-4) -**Goal**: Integrate location capabilities into existing search - -**Tasks**: -1. **Search View Enhancement** - - Extend [`AdaptiveSearchView`](core/views/search.py:5) with location processing - - Add user location detection and handling - - Implement hybrid text + proximity ranking - -2. **API Development** - - Create location-aware search API endpoints - - Implement autocomplete with location context - - Add proper error handling and validation - -3. **Query Optimization** - - Implement spatial query optimization patterns - - Add performance monitoring for search queries - - Create caching strategies for search results - -**Deliverables**: -- Location-enhanced search views and APIs -- Optimized spatial query patterns -- Performance monitoring infrastructure - -### Phase 3: User Experience (Weeks 5-6) -**Goal**: Create intuitive location search features - -**Tasks**: -1. **Frontend Enhancement** - - Implement "near me" functionality with geolocation - - Add location permission handling and privacy controls - - Create enhanced search form with location context - -2. **Results Display** - - Add distance information to search results - - Implement "get directions" functionality - - Create map integration for result visualization - -3. **Progressive Enhancement** - - Ensure graceful fallback for users without location access - - Add IP-based location approximation - - Implement accessibility improvements - -**Deliverables**: -- Enhanced search interface with location features -- Map integration for search results -- Accessibility-compliant location features - -### Phase 4: Advanced Features (Weeks 7-8) -**Goal**: Implement advanced location search capabilities - -**Tasks**: -1. **Cross-Domain Search** - - Implement unified search across parks, rides, companies - - Create location-aware ranking algorithms - - Add entity-specific location features - -2. **Advanced Filtering** - - Implement metropolitan area filtering - - Add route-based search (search along a path) - - Create clustering for dense geographic areas - -3. **Performance Optimization** - - Implement advanced caching strategies - - Add query result pagination for large datasets - - Optimize for mobile and low-bandwidth scenarios - -**Deliverables**: -- Unified cross-domain location search -- Advanced geographic filtering options -- Production-ready performance optimizations - -## Performance Benchmarks and Success Criteria - -### Performance Targets -- **Text + Location Search**: < 200ms for 90th percentile queries -- **Spatial Queries**: < 300ms for radius searches up to 100 miles -- **Geocoding**: < 100ms cache hit rate > 85% -- **API Response**: < 150ms for location-enhanced autocomplete - -### Success Metrics -- **User Adoption**: 40% of searches use location features within 3 months -- **Search Improvement**: 25% increase in search result relevance scores -- **Performance**: No degradation in non-location search performance -- **Coverage**: Location data available for 95% of parks in database - -### Monitoring and Alerting -- Query performance tracking with detailed metrics -- Geocoding service health monitoring -- User location permission grant rates -- Search abandonment rate analysis - -## Risk Mitigation - -### Technical Risks -1. **Performance Degradation**: Comprehensive testing and gradual rollout -2. **Geocoding Service Reliability**: Multiple fallback providers and caching -3. **Privacy Compliance**: Clear consent flows and data minimization - -### User Experience Risks -1. **Location Permission Denial**: Graceful fallbacks and alternative experiences -2. **Accuracy Issues**: Clear accuracy indicators and user feedback mechanisms -3. **Complexity Overload**: Progressive disclosure and intuitive defaults - -## Conclusion - -This integration plan provides a comprehensive roadmap for enhancing ThrillWiki's search system with sophisticated location capabilities while maintaining performance and user experience. The phased approach ensures manageable implementation complexity and allows for iterative improvement based on user feedback and performance metrics. - -The integration leverages existing Django-filters architecture while adding powerful spatial search capabilities that will significantly enhance the user experience for theme park enthusiasts planning visits and exploring new destinations. \ No newline at end of file diff --git a/memory-bank/features/search-system.md b/memory-bank/features/search-system.md deleted file mode 100644 index cec3ace4..00000000 --- a/memory-bank/features/search-system.md +++ /dev/null @@ -1,121 +0,0 @@ -# Site-Wide Search System Architecture - -## 1. Architectural Overview -- **Filter-First Approach**: Utilizes django-filter for robust filtering capabilities -- **Modular Design**: - ```python - # filters.py - class ParkFilter(django_filters.FilterSet): - search = django_filters.CharFilter(method='filter_search') - - class Meta: - model = Park - fields = { - 'state': ['exact', 'in'], - 'rating': ['gte', 'lte'], - } - - def filter_search(self, queryset, name, value): - return queryset.filter( - Q(name__icontains=value) | - Q(description__icontains=value) - ) - ``` - -## 2. Enhanced Backend Components - -### Search Endpoint (`/search/`) -```python -# views.py -class AdaptiveSearchView(TemplateView): - template_name = "search/results.html" - - def get_queryset(self): - return Park.objects.all() - - def get_filterset(self): - return ParkFilter(self.request.GET, queryset=self.get_queryset()) - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - filterset = self.get_filterset() - context['results'] = filterset.qs - context['filters'] = filterset.form - return context -``` - -## 3. Plugin Integration - -### Recommended django-filter Extensions -```python -# settings.py -INSTALLED_APPS += [ - 'django_filters', - 'django_filters_addons', # For custom widgets - 'rangefilter', # For date/number ranges -] - -# filters.py -class EnhancedParkFilter(ParkFilter): - rating_range = django_filters.RangeFilter(field_name='rating') - features = django_filters.MultipleChoiceFilter( - field_name='features__slug', - widget=HorizontalCheckboxSelectMultiple, - lookup_expr='contains' - ) - - class Meta(ParkFilter.Meta): - fields = ParkFilter.Meta.fields + ['rating_range', 'features'] -``` - -## 4. Frontend Filter Rendering -```html - -
- {{ filters.form.as_p }} - -
- - -
-
-``` - -## 5. Benefits of django-filter Integration -- Built-in validation for filter parameters -- Automatic form generation -- Complex lookup expressions -- Reusable filter components -- Plugin ecosystem support - -## 6. Security Considerations -- Input sanitization using django's built-in escaping -- Query parameter whitelisting via FilterSet definitions -- Rate limiting on autocomplete endpoint (using django-ratelimit) -- Permission-aware queryset filtering - -## 7. Performance Optimization -- Select related/prefetch_related in FilterSet querysets -- Caching filter configurations -- Indexing recommendations for filtered fields -- Pagination integration with django-filter - -## 8. Testing Strategy -- FilterSet validation tests -- HTMX interaction tests -- Cross-browser filter UI tests -- Performance load testing - -## 9. Style Integration -- Custom filter form templates matching Tailwind design -- Responsive filter controls grid -- Accessible form labels and error messages -- Dark mode support - -## 10. Expansion Framework -- Registry pattern for adding new FilterSets -- Dynamic filter discovery system -- Plugin configuration templates -- Analytics integration points \ No newline at end of file diff --git a/memory-bank/features/search/park-search.md b/memory-bank/features/search/park-search.md deleted file mode 100644 index 7a04816c..00000000 --- a/memory-bank/features/search/park-search.md +++ /dev/null @@ -1,170 +0,0 @@ -# Park Search Implementation - -## Overview -Integration of the parks app with the site-wide search system, providing both full search functionality and quick search for dropdowns. - -## Components - -### 1. Filter Configuration (parks/filters.py) -```python -ParkFilter = create_model_filter( - model=Park, - search_fields=['name', 'description', 'location__city', 'location__state', 'location__country'], - mixins=[LocationFilterMixin, RatingFilterMixin, DateRangeFilterMixin], - additional_filters={ - 'status': { - 'field_class': 'django_filters.ChoiceFilter', - 'field_kwargs': { - 'choices': Park._meta.get_field('status').choices, - 'empty_label': 'Any status', - 'null_label': 'Unknown' - } - }, - 'opening_date': { - 'field_class': 'django_filters.DateFromToRangeFilter', - 'field_kwargs': { - 'label': 'Opening date range', - 'help_text': 'Enter dates in YYYY-MM-DD format' - } - }, - # Additional filters for rides, size, etc. - } -) -``` - -### 2. View Implementation (parks/views.py) - -#### Full Search (ParkListView) -```python -class ParkListView(HTMXFilterableMixin, ListView): - model = Park - filter_class = ParkFilter - paginate_by = 20 - - def get_queryset(self): - try: - return ( - super() - .get_queryset() - .select_related("owner") - .prefetch_related( - "photos", - "location", - "rides", - "rides__manufacturer" - ) - .annotate( - total_rides=Count("rides"), - total_coasters=Count("rides", filter=Q(rides__category="RC")), - ) - ) - except Exception as e: - messages.error(self.request, f"Error loading parks: {str(e)}") - return Park.objects.none() -``` - -#### Quick Search -```python -def search_parks(request): - try: - queryset = ( - Park.objects.prefetch_related('location', 'photos') - .order_by('name') - ) - filter_params = {'search': request.GET.get('q', '').strip()} - park_filter = ParkFilter(filter_params, queryset=queryset) - parks = park_filter.qs[:10] - - return render(request, "parks/partials/park_search_results.html", { - "parks": parks, - "is_quick_search": True - }) - except Exception as e: - return render(..., {"error": str(e)}) -``` - -### 3. Template Structure - -#### Main Search Page (parks/templates/parks/park_list.html) -- Extends: search/layouts/filtered_list.html -- Blocks: - * filter_errors: Validation error display - * list_header: Park list header + actions - * filter_section: Filter form with clear option - * results_section: Park results with pagination - -#### Results Display (search/templates/search/partials/park_results.html) -- Full park information -- Status indicators -- Ride statistics -- Location details -- Error state handling - -#### Quick Search Results (parks/partials/park_search_results.html) -- Simplified park display -- Basic location info -- Fallback for missing images -- Error handling - -### 4. Error Handling - -#### View Level -- Try/except blocks around queryset operations -- Filter validation errors captured -- Generic error states handled -- User-friendly error messages - -#### Template Level -- Error states in both quick and full search -- Safe data access (using with and conditionals) -- Fallback content for missing data -- Clear error messaging - -### 5. Query Optimization - -#### Full Search -- select_related: owner -- prefetch_related: photos, location, rides, rides__manufacturer -- Proper annotations for counts -- Pagination for large results - -#### Quick Search -- Limited to 10 results -- Minimal related data loading -- Basic ordering optimization - -### 6. Known Limitations - -1. Testing Coverage - - Need unit tests for filters - - Need integration tests for error cases - - Need performance testing - -2. Performance - - Large dataset behavior unknown - - Complex filter combinations untested - -3. Security - - SQL injection prevention needs review - - Permission checks need audit - -4. Accessibility - - ARIA labels needed - - Color contrast validation needed - -### 7. Next Steps - -1. Testing - - Implement comprehensive test suite - - Add performance benchmarks - - Test edge cases - -2. Monitoring - - Add error logging - - Implement performance tracking - - Add usage analytics - -3. Optimization - - Profile query performance - - Optimize filter combinations - - Consider caching strategies \ No newline at end of file diff --git a/memory-bank/features/search/rides.md b/memory-bank/features/search/rides.md deleted file mode 100644 index abbc71f1..00000000 --- a/memory-bank/features/search/rides.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -# Ride Search Feature Specification - -## Overview -Extend the existing park search infrastructure to support searching rides. This follows the established: -- Authentication-first -- BaseAutocomplete pattern -- HTMX + AlpineJS frontend - -Rides are related to parks via a ForeignKey. Search results must reference both ride and parent park. - -## Technical Specification - -### Models & Filters -- Model: `Ride` in [`rides/models.py`](rides/models.py:1) with fields `name`, `park` (ForeignKey → Park), `duration`, `thrill_rating`, etc. -- Filter: `RideFilter` in [`search/filters.py`](search/filters.py:1) (create if missing) supporting `min_thrill`, `max_duration`, and `park__id`. - -### Autocomplete -- Class [`RideAutocomplete`](search/mixins.py:1) extends [`BaseAutocomplete`](core/forms.py:1). -- Query: `Ride.objects.filter(name__icontains=query)` limited to 10 results. - -### Search Form -- Class [`RideSearchForm`](search/forms.py:1) uses autocomplete widget bound to [`RideAutocomplete`](search/mixins.py:1). -- Fields: `query` (CharField), `park` (HiddenField or Select), `min_thrill`, `max_duration`. - -### Views & Templates -- View [`RideSearchView`](rides/views.py:1) decorated with `@login_required`. -- URL route `'search/rides/'` in [`search/urls.py`](search/urls.py:1). -- Partial template [`search/templates/search/partials/_ride_search.html`](search/templates/search/partials/_ride_search.html:1) with HTMX attributes (`hx-get`, `hx-trigger="input changed delay:300ms"`). - -## File & Component Structure -- memory-bank/features/search/rides.md -- search/mixins.py – add [`RideAutocomplete`](search/mixins.py:1) -- search/forms.py – add [`RideSearchForm`](search/forms.py:1) -- search/urls.py – register ride endpoints (`autocomplete/`, `results/`) -- rides/views.py – add [`RideSearchView`](rides/views.py:1) -- search/templates/search/partials/_ride_search.html -- rides/templates/rides/partials/ride_results.html - -## Integration Points -- Combined search component toggles between park and ride modes. -- Ride result links to [`ParkDetailView`](parks/views.py:1) for context. -- Shared styles and layout from [`search/templates/search/layouts/base.html`](search/templates/search/layouts/base.html:1). - -## Database Query Optimization -- Add DB index on `Ride.name` and `Ride.park_id`. -- Use `select_related('park')` in view/queryset. -- Limit autocomplete to top 10 for responsiveness. - -## Frontend Component Design -- HTMX: `` with `hx-get="/search/rides/autocomplete/"`, update target container. -- AlpineJS: manage local state for selection, clearing on blur. -- Reuse CSS classes from park search for unified UX. - -## Testing Strategy -- Unit tests for [`RideAutocomplete`](search/tests/test_autocomplete.py). -- Form tests for [`RideSearchForm`](search/tests/test_forms.py). -- View tests (`login_required`, filter logic) in [`rides/tests/test_search_view.py`]. -- HTMX integration: AJAX responses include expected HTML using pytest-django + django-htmx. -- Performance: benchmark large resultset to ensure truncation and quick response. \ No newline at end of file diff --git a/memory-bank/features/search/testing-implementation.md b/memory-bank/features/search/testing-implementation.md deleted file mode 100644 index 0df47b97..00000000 --- a/memory-bank/features/search/testing-implementation.md +++ /dev/null @@ -1,142 +0,0 @@ -# Park Search Testing Implementation - -## Test Structure - -### 1. Model Tests (parks/tests/test_models.py) - -#### Park Model Tests -- Basic CRUD Operations - * Creation with required fields - * Update operations - * Deletion and cascading - * Validation rules - -- Slug Operations - * Auto-generation on creation - * Historical slug tracking and lookup (via HistoricalSlug model) - * pghistory integration for model tracking - * Uniqueness constraints - * Fallback lookup strategies - -- Location Integration - * Formatted location string - * Coordinates retrieval - * Location relationship integrity - -- Status Management - * Default status - * Status color mapping - * Status transitions - -- Property Methods - * formatted_location - * coordinates - * get_status_color - -### 2. Filter Tests (parks/tests/test_filters.py) - -#### Search Functionality -- Text Search Fields - * Name searching - * Description searching - * Location field searching (city, state, country) - * Combined field searching - -#### Filter Operations -- Status Filtering - * Each status value - * Empty/null handling - * Invalid status values - -- Date Range Filtering - * Opening date ranges - * Invalid date formats - * Edge cases (future dates, very old dates) - -- Company/Owner Filtering - * Existing company - * No owner (null) - * Invalid company IDs - -- Numeric Filtering - * Minimum rides count - * Minimum coasters count - * Minimum size validation - * Negative value handling - -#### Mixin Integration -- LocationFilterMixin - * Distance-based filtering - * Location search functionality - -- RatingFilterMixin - * Rating range filtering - * Invalid rating values - -- DateRangeFilterMixin - * Date range application - * Invalid date handling - -## Implementation Status - -### Completed -1. ✓ Created test directory structure -2. ✓ Set up test fixtures in both test files -3. ✓ Implemented Park model tests - - Basic CRUD operations - - Advanced slug functionality: - * Automatic slug generation from name - * Historical slug tracking with HistoricalSlug model - * Dual tracking with pghistory integration - * Comprehensive lookup system with fallbacks - - Status color mapping with complete coverage - - Location integration with error handling - - Property methods with null safety -4. ✓ Implemented ParkFilter tests - - Text search with multiple field support - - Status filtering with validation and choice handling - - Date range filtering with format validation - - Company/owner filtering with comprehensive null handling - - Numeric filtering with integer validation and bounds checking - - Empty value handling across all filters - - Test coverage for edge cases and invalid inputs - - Performance validation for complex filter combinations - -### Next Steps - -1. Performance Optimization - - [ ] Add query count assertions to tests - - [ ] Profile filter combinations impact - - [ ] Implement caching for common filters - - [ ] Add database indexes for frequently filtered fields - -2. Monitoring and Analytics - - [ ] Add filter usage tracking - - [ ] Implement performance monitoring - - [ ] Track common filter combinations - - [ ] Monitor query execution times - -3. Documentation and Maintenance - - [ ] Add filter example documentation - - [ ] Document filter combinations and best practices - - [ ] Create performance troubleshooting guide - - [ ] Add test coverage reports and analysis - -4. Future Enhancements - - [ ] Add saved filter support - - [ ] Implement filter presets - - [ ] Add advanced combination operators (AND/OR) - - [ ] Support dynamic field filtering - -### Running the Tests - -To run the test suite: -```bash -python manage.py test parks.tests -``` - -To run specific test classes: -```bash -python manage.py test parks.tests.test_models.ParkModelTests -python manage.py test parks.tests.test_filters.ParkFilterTests -``` \ No newline at end of file diff --git a/memory-bank/features/search_improvements.md b/memory-bank/features/search_improvements.md deleted file mode 100644 index 13f5acf1..00000000 --- a/memory-bank/features/search_improvements.md +++ /dev/null @@ -1,119 +0,0 @@ -# Search Functionality Improvement Plan - -## Technical Implementation Details - -### 1. Database Optimization -```python -# parks/models.py -from django.contrib.postgres.indexes import GinIndex - -class Park(models.Model): - class Meta: - indexes = [ - GinIndex(fields=['name', 'description'], - name='search_gin_idx', - opclasses=['gin_trgm_ops', 'gin_trgm_ops']), - Index(fields=['location__address_text'], name='location_addr_idx') - ] - -# search/services.py -from django.db.models import F, Func -from analytics.models import SearchMetric - -class SearchEngine: - @classmethod - def execute_search(cls, request, filterset_class): - with timeit() as timer: - filterset = filterset_class(request.GET, queryset=cls.base_queryset()) - qs = filterset.qs - results = qs.annotate( - search_rank=Func(F('name'), F('description'), - function='ts_rank') - ).order_by('-search_rank') - - SearchMetric.record( - query_params=dict(request.GET), - result_count=qs.count(), - duration=timer.elapsed - ) - return results -``` - -### 2. Architectural Changes -```python -# search/filters.py (simplified explicit filter) -class ParkFilter(SearchableFilterMixin, django_filters.FilterSet): - search_fields = ['name', 'description', 'location__address_text'] - - class Meta: - model = Park - fields = { - 'ride_count': ['gte', 'lte'], - 'coaster_count': ['gte', 'lte'], - 'average_rating': ['gte', 'lte'] - } - -# search/views.py (updated) -class AdaptiveSearchView(TemplateView): - def get_queryset(self): - return SearchEngine.base_queryset() - - def get_filterset(self): - return ParkFilter(self.request.GET, queryset=self.get_queryset()) -``` - -### 3. Frontend Enhancements -```javascript -// static/js/search.js -const searchInput = document.getElementById('search-input'); -let timeoutId; - -searchInput.addEventListener('input', () => { - clearTimeout(timeoutId); - timeoutId = setTimeout(() => { - fetchResults(searchInput.value); - }, 300); -}); - -async function fetchResults(query) { - try { - const response = await fetch(`/search/?search=${encodeURIComponent(query)}`); - if (!response.ok) throw new Error(response.statusText); - const html = await response.text(); - updateResults(html); - } catch (error) { - showError(`Search failed: ${error.message}`); - } -} -``` - -## Implementation Roadmap - -1. Database Migrations -```bash -uv run manage.py makemigrations parks --name add_search_indexes -uv run manage.py migrate -``` - -2. Service Layer Integration -- Create search/services.py with query instrumentation -- Update all views to use SearchEngine class - -3. Frontend Updates -- Add debouncing to search inputs -- Implement error handling UI components -- Add loading spinner component - -4. Monitoring Setup -```python -# analytics/models.py -class SearchMetric(models.Model): - query_params = models.JSONField() - result_count = models.IntegerField() - duration = models.FloatField() - created_at = models.DateTimeField(auto_now_add=True) -``` - -5. Performance Testing -- Use django-debug-toolbar for query analysis -- Generate load tests with locust.io \ No newline at end of file diff --git a/memory-bank/productContext.md b/memory-bank/productContext.md deleted file mode 100644 index ab01fa92..00000000 --- a/memory-bank/productContext.md +++ /dev/null @@ -1,85 +0,0 @@ -# Product Context - -## Overview -ThrillWiki is a comprehensive platform for theme park enthusiasts to discover parks, share experiences, and access verified information through a moderated knowledge base. - -## User Types and Needs - -### Park Enthusiasts -- **Problem**: Difficulty finding accurate, comprehensive theme park information -- **Solution**: Centralized, moderated platform with verified park/ride data -- **Key Features**: Park discovery, ride details, location services - -### Reviewers -- **Problem**: No dedicated platform for sharing detailed ride experiences -- **Solution**: Structured review system with rich media support -- **Key Features**: Media uploads, rating system, review workflow - -### Park Operators -- **Problem**: Limited channels for authentic presence and information -- **Solution**: Verified company profiles and official park information -- **Key Features**: Company verification, official updates, park management - -## Core Workflows - -1. Park Discovery & Information - - Geographic search and browsing - - Detailed park profiles - - Operating hours and details - -2. Ride Management - - Comprehensive ride database - - Technical specifications - - Historical information - - Designer attribution - -3. Review System - - User-generated content - - Media integration - - Rating framework - - Moderation workflow - -4. Content Moderation - - Submission review - - Quality control - - Content verification - - User management - -5. Location Services - - Geographic search - - Proximity features - - Regional categorization - -## Strategic Direction - -### Current Focus -1. Content Quality - - Robust moderation - - Information verification - - Rich media support - -2. User Trust - - Review authenticity - - Company verification - - Expert contributions - -3. Data Completeness - - Park coverage - - Ride information - - Historical records - -### Future Roadmap -1. Community Features - - Enhanced profiles - - Contribution recognition - - Expert designation - -2. Analytics - - Usage patterns - - Quality metrics - - Engagement tracking - -3. Media - - Image improvements - - Video support - - Virtual tours \ No newline at end of file diff --git a/memory-bank/project-status-2025-01-05.md b/memory-bank/project-status-2025-01-05.md deleted file mode 100644 index 3930d27b..00000000 --- a/memory-bank/project-status-2025-01-05.md +++ /dev/null @@ -1,373 +0,0 @@ -# ThrillWiki Django Project - Complete Status Report -**Date**: January 5, 2025 -**Report Type**: Comprehensive Project Snapshot -**Status**: ✅ COMPANY MIGRATION SUCCESSFULLY COMPLETED - ---- - -## Executive Summary - -The ThrillWiki Django project has successfully completed a major architectural transformation - the **Company Migration Project**. This high-risk, high-impact migration replaced a single Company entity with a specialized relationship structure (Operators, PropertyOwners, Manufacturers, Designers) affecting 300+ references across the entire codebase. The project is currently in a **stable, production-ready state** with all core functionality operational. - -### Key Achievements -- ✅ **Complete Company Migration**: Successfully migrated from single Company model to specialized entities -- ✅ **Entity Relationship Modernization**: Implemented proper separation of concerns for business entities -- ✅ **Test Suite Stability**: All tests updated and passing with new entity structure -- ✅ **Development Environment**: Fully operational with UV package management and Tailwind CSS -- ✅ **Search & Autocomplete**: Fully functional search system with HTMX-powered autocomplete - ---- - -## Current Project State - -### Development Status: ✅ STABLE & OPERATIONAL -- **Development Server**: Running successfully on port 8000 -- **Database**: PostgreSQL with proper entity relationships -- **Frontend**: Server-side rendering with HTMX and AlpineJS -- **Styling**: Tailwind CSS with dark mode support -- **Package Management**: UV (strictly enforced) - -### Last Completed Work -**Task**: Update parks tests to fix field mismatches from owner → operator migration -**Completed**: July 5, 2025 -**Result**: All owner → operator migration issues resolved in test suite - ---- - -## Company Migration Project - COMPLETED ✅ - -### Migration Overview -The project successfully executed a 4-phase migration strategy to replace the Company entity: - -#### Phase 1: Create New Entities ✅ COMPLETED -- **Operators**: Companies that operate theme parks (replaces Company.owner) -- **PropertyOwners**: Companies that own park property (new concept, optional) -- **Manufacturers**: Companies that manufacture rides (replaces Company for rides) -- **Designers**: Companies/individuals that design rides (existing, enhanced) - -#### Phase 2: Data Migration ✅ COMPLETED -- Successfully migrated all company data to appropriate new entities -- Preserved historical data integrity with pghistory tracking -- Maintained foreign key relationships throughout migration - -#### Phase 3: Update Dependencies ✅ COMPLETED -- **Models**: Updated parks/rides models with new relationships -- **Views**: Modified query logic for new entity structure -- **Templates**: Updated all company-related templates -- **Tests**: Fixed 429 lines of test code for new structure -- **Admin**: Updated Django admin interfaces - -#### Phase 4: Cleanup ✅ COMPLETED -- Removed companies app completely -- Cleaned up all company references -- Updated documentation and imports - -### Migration Impact Assessment -- **300+ Company References**: All successfully updated -- **Critical Dependencies**: Resolved in core models (parks, rides) -- **pghistory Integration**: Historical data preserved and migrated -- **Template System**: 6+ templates updated with new relationships -- **Test Coverage**: Complete test suite updated and passing -- **URL Patterns**: 22 endpoints updated or removed - ---- - -## Current Entity Relationship Architecture - -### Core Entity Structure -``` -Parks → Operators (required, replaces Company.owner) -Parks → PropertyOwners (optional, usually same as Operators) -Rides → Parks (required, existing) -Rides → Manufacturers (optional, replaces Company) -Rides → Designers (optional, existing) -``` - -### Entity Definitions -- **Operators**: Companies that operate theme parks - - Required relationship for parks - - Includes: name, slug, description, website, founded_year, headquarters - - Tracking: parks_count, rides_count - -- **PropertyOwners**: Companies that own park property - - Optional relationship for parks - - Usually same as Operator but can be different - - Includes: name, slug, description, website - -- **Manufacturers**: Companies that manufacture rides - - Optional relationship for rides - - Includes: name, slug, description, website, founded_year, headquarters - - Tracking: rides_count, coasters_count - -- **Designers**: Companies/individuals that design rides - - Optional relationship for rides - - Existing entity, enhanced during migration - -### Relationship Constraints ✅ ENFORCED -- Parks MUST have an Operator (required relationship) -- Parks MAY have a PropertyOwner (optional, usually same as Operator) -- Parks CANNOT directly reference Company entities -- Rides MUST belong to a Park (required relationship) -- Rides MAY have a Manufacturer (optional relationship) -- Rides MAY have a Designer (optional relationship) -- Rides CANNOT directly reference Company entities - ---- - -## Django Apps Status - -### Core Apps ✅ OPERATIONAL -- **core**: Base functionality and shared components -- **accounts**: User management with django-allauth integration -- **parks**: Park management with Operator/PropertyOwner relationships -- **rides**: Ride management with Manufacturer/Designer relationships -- **reviews**: User review system with media support -- **search**: Full-text search with HTMX autocomplete - -### Entity Apps ✅ OPERATIONAL -- **operators**: Park operator management (NEW - replaces Company.owner) -- **property_owners**: Property ownership management (NEW - optional concept) -- **manufacturers**: Ride manufacturer management (NEW - replaces Company for rides) -- **designers**: Ride designer management (ENHANCED - existing) - -### Supporting Apps ✅ OPERATIONAL -- **moderation**: Content moderation workflow -- **media**: File upload and management system -- **history_tracking**: pghistory integration for change tracking -- **analytics**: Usage and performance tracking -- **location**: Geographic services and location management -- **email_service**: Email notification system - -### Infrastructure Apps ✅ OPERATIONAL -- **django_htmx**: HTMX integration for dynamic interactions -- **django_tailwind_cli**: Tailwind CSS compilation -- **pghistory/pgtrigger**: Historical data tracking -- **django_cleanup**: Automatic file cleanup -- **django_filters**: Advanced filtering capabilities - ---- - -## Technical Architecture - -### Framework & Technology Stack -- **Django**: 5.1.4 (Latest stable) -- **Database**: PostgreSQL with GeoDjango (GIS support) -- **Frontend**: Server-side rendering with HTMX and AlpineJS -- **Styling**: Tailwind CSS with dark mode support -- **Package Management**: UV (strictly enforced) -- **Authentication**: django-allauth with Google/Discord providers -- **File Storage**: django-cleanup with media management -- **History Tracking**: django-pghistory for audit trails - -### Development Environment -- **Package Manager**: UV (mandatory for all operations) -- **Server Command**: `lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver` -- **Management Commands**: Always use `uv run manage.py ` -- **Migrations**: `uv run manage.py makemigrations` / `uv run manage.py migrate` - -### Code Quality & Standards -- **Type Hints**: Comprehensive typing throughout codebase -- **Model Patterns**: Consistent use of TrackedModel base class -- **Slug Management**: Automatic slug generation with historical tracking -- **URL Patterns**: RESTful design with proper namespacing -- **Admin Integration**: Comprehensive Django admin interfaces - ---- - -## Feature Implementation Status - -### Search & Discovery ✅ FULLY OPERATIONAL -- **Full-text Search**: PostgreSQL-based search across parks and rides -- **HTMX Autocomplete**: Real-time search suggestions -- **Geographic Search**: Location-based park discovery -- **Advanced Filtering**: Multi-criteria filtering system -- **Search Results**: Comprehensive result pages with pagination - -### Content Management ✅ FULLY OPERATIONAL -- **Park Management**: Complete CRUD operations with new entity relationships -- **Ride Management**: Full ride database with manufacturer/designer attribution -- **Media System**: File upload and management with automatic cleanup -- **Review System**: User-generated content with moderation workflow -- **History Tracking**: Complete audit trail with pghistory - -### User Experience ✅ FULLY OPERATIONAL -- **Authentication**: Social login with Google/Discord -- **Responsive Design**: Mobile-first Tailwind CSS implementation -- **Dark Mode**: Full dark mode support -- **Dynamic Interactions**: HTMX-powered dynamic content loading -- **Form Handling**: Advanced form processing with validation - -### Moderation & Quality ✅ FULLY OPERATIONAL -- **Content Moderation**: Comprehensive moderation workflow -- **Quality Control**: Review and approval processes -- **User Management**: Account management and permissions -- **Analytics**: Usage tracking and performance monitoring - ---- - -## Database Schema Status - -### Migration Status ✅ ALL MIGRATIONS APPLIED -- **Entity Models**: All new entity models created and operational -- **Relationship Updates**: Parks/Rides models updated with new relationships -- **Data Migration**: All company data successfully migrated to new entities -- **Historical Data**: pghistory tables updated and preserved -- **Foreign Keys**: All relationships properly constrained - -### Data Integrity ✅ VERIFIED -- **No Data Loss**: All company records successfully migrated -- **Relationship Integrity**: Foreign key constraints maintained -- **Historical Preservation**: pghistory data preserved through migration -- **Search Indexing**: All entities properly indexed for search - ---- - -## Testing Status - -### Test Suite ✅ ALL TESTS PASSING -- **Model Tests**: All entity models tested with new relationships -- **View Tests**: Updated for new entity structure -- **Form Tests**: Validated with new relationship fields -- **Integration Tests**: Cross-app functionality verified -- **Migration Tests**: Data migration integrity confirmed - -### Test Coverage Areas -- **Entity Relationships**: Foreign key integrity and validation -- **Data Migration**: Historical data preservation -- **Search Functionality**: Full-text search and autocomplete -- **Admin Interface**: CRUD operations and permissions -- **Template Rendering**: No broken references or missing data - ---- - -## Performance & Monitoring - -### Current Performance ✅ OPTIMAL -- **Database Queries**: Optimized with proper indexing -- **Page Load Times**: Fast server-side rendering -- **Search Performance**: Efficient PostgreSQL full-text search -- **Media Handling**: Optimized file serving and cleanup -- **Memory Usage**: Stable with no memory leaks - -### Monitoring Systems ✅ ACTIVE -- **Analytics App**: Usage tracking and performance monitoring -- **Error Tracking**: Comprehensive error logging -- **Database Monitoring**: Query performance tracking -- **User Activity**: Engagement and usage patterns - ---- - -## Security & Compliance - -### Security Measures ✅ IMPLEMENTED -- **Authentication**: Secure social login with django-allauth -- **Authorization**: Proper permission systems -- **Data Protection**: Secure handling of user data -- **File Upload Security**: Validated file uploads with cleanup -- **SQL Injection Protection**: Django ORM protection - -### Compliance Features ✅ ACTIVE -- **Audit Trails**: Complete change tracking with pghistory -- **Data Retention**: Proper historical data management -- **User Privacy**: Secure account management -- **Content Moderation**: Quality control and safety measures - ---- - -## Active Development Areas - -### Recently Completed ✅ -1. **Company Migration Project**: Complete 4-phase migration successfully executed -2. **Test Suite Updates**: All tests updated for new entity structure -3. **Search System**: Fully operational autocomplete and search functionality -4. **Entity Relationships**: Proper separation of business entity concerns - -### Current Focus Areas -1. **Performance Optimization**: Ongoing query optimization and caching -2. **User Experience**: Enhanced responsive design and interactions -3. **Content Quality**: Improved moderation workflows -4. **Feature Enhancement**: Additional search and discovery features - ---- - -## Next Steps & Roadmap - -### Immediate Priorities (Next 30 Days) -1. **Performance Monitoring**: Establish baseline metrics for new entity structure -2. **User Feedback**: Gather feedback on new entity relationships -3. **Documentation Updates**: Update user-facing documentation for new structure -4. **Feature Polish**: Minor UX improvements and bug fixes - -### Medium-term Goals (Next 90 Days) -1. **Community Features**: Enhanced user profiles and contribution recognition -2. **Advanced Analytics**: Detailed usage patterns and quality metrics -3. **Media Enhancements**: Improved image handling and video support -4. **API Development**: RESTful API for external integrations - -### Long-term Vision (Next 6 Months) -1. **Mobile Application**: Native mobile app development -2. **Advanced Search**: AI-powered search and recommendations -3. **Virtual Tours**: Interactive park and ride experiences -4. **Community Platform**: Enhanced social features and expert designation - ---- - -## Technical Debt & Issues - -### Current Technical Debt: 🟡 LOW -- **Legacy Code**: Minimal legacy code remaining after migration -- **Performance**: Some query optimization opportunities -- **Documentation**: Minor documentation updates needed -- **Testing**: Additional edge case testing could be beneficial - -### Known Issues: 🟢 NONE CRITICAL -- No critical issues identified -- All major functionality operational -- Test suite passing completely -- Development environment stable - ---- - -## Risk Assessment - -### Current Risk Level: 🟢 LOW -- **Data Integrity**: ✅ Verified and stable -- **Performance**: ✅ Optimal and monitored -- **Security**: ✅ Comprehensive protection -- **Scalability**: ✅ Architecture supports growth -- **Maintainability**: ✅ Clean, well-documented code - -### Risk Mitigation -- **Backup Procedures**: Regular database backups -- **Monitoring Systems**: Comprehensive error tracking -- **Testing Coverage**: Extensive test suite -- **Documentation**: Complete technical documentation -- **Version Control**: Proper git workflow and branching - ---- - -## Conclusion - -The ThrillWiki Django project stands as a **successful example of large-scale architectural migration** in a production Django application. The Company Migration Project, which affected 300+ references across the entire codebase, was executed flawlessly with zero data loss and complete preservation of functionality. - -### Key Success Factors -1. **Meticulous Planning**: Comprehensive analysis and 4-phase migration strategy -2. **Risk Management**: Extensive backup and rollback procedures -3. **Testing Discipline**: Complete test coverage throughout migration -4. **Documentation**: Thorough documentation of all changes and decisions -5. **Incremental Approach**: Phase-by-phase execution with validation at each step - -### Current State Summary -- ✅ **Stable Production Environment**: All systems operational -- ✅ **Modern Architecture**: Clean entity separation and relationships -- ✅ **Comprehensive Testing**: Full test coverage with passing suite -- ✅ **Performance Optimized**: Fast, efficient database operations -- ✅ **Future-Ready**: Scalable architecture supporting growth - -The project is **ready for continued development** with a solid foundation for future enhancements and features. The successful completion of the Company Migration Project demonstrates the team's capability to execute complex architectural changes while maintaining system stability and data integrity. - ---- - -**Report Generated**: January 5, 2025 -**Next Review**: February 5, 2025 -**Status**: ✅ STABLE & OPERATIONAL \ No newline at end of file diff --git a/memory-bank/projects/always-even-grid-implementation-2025-06-28.md b/memory-bank/projects/always-even-grid-implementation-2025-06-28.md deleted file mode 100644 index 118251ca..00000000 --- a/memory-bank/projects/always-even-grid-implementation-2025-06-28.md +++ /dev/null @@ -1,118 +0,0 @@ -# Always Even Grid Implementation - Complete - -**Date**: 2025-06-28 -**Status**: ✅ COMPLETED -**User Request**: "I want the grid to always be even" - -## Project Overview -Successfully implemented "always even" grid layout system that ensures balanced card distributions across all screen sizes, eliminating isolated single cards and maintaining visual harmony. - -## Problem Statement -The user requested that grids always display in even arrangements to avoid unbalanced layouts with isolated single cards on separate rows. - -## Solution Implemented - -### CSS Grid Strategy -Modified the `.grid-stats` class to use explicit column definitions instead of `auto-fit` to ensure predictable, even layouts: - -**Key Changes Made:** - -1. **Base Grid (Default/Small Screens)**: - ```css - .grid-stats { - @apply grid gap-4; - /* Force 2+3 layout for small screens */ - grid-template-columns: repeat(2, 1fr); - } - ``` - -2. **Tablet Breakpoint (768px-1023px)**: - ```css - .grid-stats { - /* Force 2+3 even layout for tablets */ - grid-template-columns: repeat(2, 1fr); - } - ``` - -3. **Medium Screens (1024px-1279px)**: - ```css - .grid-stats { - /* Force 3+2 even layout for intermediate sizes */ - grid-template-columns: repeat(3, 1fr); - } - ``` - -4. **Large Screens (1280px+)**: - ```css - .grid-stats { - /* Force 5-column even layout for large screens */ - grid-template-columns: repeat(5, 1fr); - } - ``` - -## Testing Results - -### ✅ Verified Even Layouts Across All Breakpoints: - -**900px Width (Small Screens)**: -- Layout: 2+2+1 (2 cards top row, 2 cards middle row, 1 card bottom row) -- Result: ✅ No isolated cards, balanced distribution - -**1100px Width (Medium Screens)**: -- Layout: 3+2 (3 cards top row, 2 cards bottom row) -- Result: ✅ Perfect balanced even layout - -**1400px Width (Large Screens)**: -- Layout: 5 cards in single row -- Result: ✅ Even spacing, all cards visible in one row - -## Technical Implementation Details - -### Files Modified: -- **`static/css/src/input.css`** (lines 281-348) - - Updated base `.grid-stats` class - - Modified responsive breakpoint behaviors - - Replaced `auto-fit` with explicit column counts - -### CSS Compilation: -- Tailwind CSS automatically rebuilt after each change -- Changes applied immediately to live development server - -## Benefits Achieved - -1. **Consistent Visual Balance**: No more isolated single cards -2. **Predictable Layouts**: Explicit grid definitions ensure consistent behavior -3. **Responsive Design**: Even layouts maintained across all screen sizes -4. **User Experience**: Improved visual harmony and professional appearance - -## Before vs After Comparison - -### Before (Previous Behavior): -- Small screens: Unpredictable auto-fit behavior -- Medium screens: 3+2 layout (was working) -- Large screens: All cards in one row (was working) - -### After (Always Even Implementation): -- **Small screens**: 2+2+1 balanced layout ✅ -- **Medium screens**: 3+2 balanced layout ✅ -- **Large screens**: 5-card single row ✅ - -## Impact on Other Pages -This implementation affects all pages using the `.grid-stats` class: -- Park detail pages (Cedar Point, etc.) -- Any other pages with 5-card stat grids - -## Future Considerations -- The system is now optimized for 5-card grids -- For different card counts, additional grid classes may be needed -- The explicit column approach provides predictable, maintainable layouts - -## Success Metrics -- ✅ No isolated single cards at any breakpoint -- ✅ Balanced visual distribution across all screen sizes -- ✅ Maintained responsive design principles -- ✅ User requirement "always be even" fully satisfied - -## Related Documentation -- Previous work: `memory-bank/projects/cedar-point-layout-investigation-and-fix-2025-06-28.md` -- Active context: `memory-bank/activeContext.md` \ No newline at end of file diff --git a/memory-bank/projects/card-count-standardization-completion-report.md b/memory-bank/projects/card-count-standardization-completion-report.md deleted file mode 100644 index 24fd64a8..00000000 --- a/memory-bank/projects/card-count-standardization-completion-report.md +++ /dev/null @@ -1,175 +0,0 @@ -# Card Count Standardization - Completion Report -**Date**: June 27, 2025 -**Status**: ✅ COMPLETED SUCCESSFULLY -**Objective**: Fix critical card count inconsistency across detail pages - -## Executive Summary - -Successfully resolved the critical visual design flaw identified in the visual examination report. The card count inconsistency that created visual ugliness and excessive white space has been eliminated. All detail page types now have consistent 5-card layouts with professional appearance and proper responsive behavior. - -## Problem Solved - -### Before Implementation -- **Park Detail Pages**: 5 cards (good standard) -- **Ride Detail Pages**: Only 2 cards (severely sparse, excessive white space) -- **Company Detail Pages**: 3-4 cards (inconsistent) -- **Result**: Visual ugliness, unprofessional layouts, poor space utilization - -### After Implementation -- **Park Detail Pages**: 5 cards (maintained standard) -- **Ride Detail Pages**: 5 cards (FIXED - eliminated sparseness) -- **Company Detail Pages**: 5 cards (STANDARDIZED) -- **Result**: Consistent, professional, balanced layouts across all page types - -## Implementation Details - -### 1. Ride Detail Page Enhancement (`templates/rides/ride_detail.html`) -**CRITICAL FIX - Transformed from 2 to 5 cards:** - -#### New Structure Implemented: -```html - -
- -
- - -
- -
-``` - -#### Cards Added: -1. **Statistics Card**: Height, Speed, Length (from coaster_stats) -2. **Experience Card**: Ride category, duration, height requirements -3. **Manufacturer Card**: Manufacturer link, model name -4. **History Card**: Opening date, designer, status history -5. **Performance Card**: Rating, capacity, inversions - -### 2. Company Detail Page Enhancement (`templates/companies/manufacturer_detail.html`) -**STANDARDIZATION - Enhanced from 3-4 to 5 cards:** - -#### New Structure Implemented: -```html - -
- -
- - -
- -
-``` - -#### Cards Implemented: -1. **Company Card**: Headquarters, website link -2. **Total Rides Card**: Total ride count -3. **Coasters Card**: Roller coaster count -4. **Founded Card**: Founding date information -5. **Specialties Card**: Ride types, manufacturing focus - -## Technical Implementation - -### Layout Pattern Standardization -- **Adopted park detail page pattern** as the standard -- **Horizontal stats bar layout**: `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` -- **Consistent styling**: `bg-white rounded-lg shadow-lg dark:bg-gray-800 p-compact card-stats` -- **Centralized headers**: Moved from grid layout to dedicated header sections - -### Responsive Behavior -**Verified across all breakpoints:** -- **Desktop (900px+)**: 5 cards in horizontal row -- **Tablet (768px)**: 3 cards top row, 2 cards bottom row -- **Mobile (375px)**: 2-column stacked layout - -### Content Quality -- **Meaningful information**: Each card contains relevant, useful data -- **Graceful fallbacks**: Handles missing data with "Unknown" or conditional display -- **Consistent formatting**: Standardized text sizes and color schemes - -## Success Metrics Achieved - -### ✅ Consistent Card Count -- **Before**: 5 vs 2 vs 3-4 cards (inconsistent) -- **After**: 5 cards across ALL detail page types - -### ✅ Eliminated White Space -- **Before**: Ride pages severely sparse with excessive white space -- **After**: Balanced, professional density across all pages - -### ✅ Professional Appearance -- **Before**: Unprofessional, unbalanced layouts -- **After**: Consistent, polished, enterprise-quality design - -### ✅ Responsive Consistency -- **Before**: Inconsistent responsive behavior -- **After**: Proper behavior across mobile, tablet, desktop - -## Testing Results - -### Visual Testing Completed -1. **Ride Detail Page** (`/parks/cedar-point/rides/millennium-force/`): - - ✅ 5 cards displaying correctly - - ✅ Professional layout with no excessive white space - - ✅ Responsive behavior verified - -2. **Company Detail Page** (`/companies/manufacturers/intamin/`): - - ✅ 5 cards displaying correctly - - ✅ Consistent with ride and park pages - - ✅ Responsive behavior verified - -3. **Responsive Testing**: - - ✅ Desktop (900px): 5-card horizontal layout - - ✅ Tablet (768px): 3+2 card layout - - ✅ Mobile (375px): 2-column stacked layout - -## Files Modified - -### Primary Template Changes -1. **`templates/rides/ride_detail.html`** - - Restructured header grid to centralized header + horizontal stats bar - - Added 3 new cards (Statistics, Experience, History, Performance) - - Maintained all existing functionality - -2. **`templates/companies/manufacturer_detail.html`** - - Restructured header grid to centralized header + horizontal stats bar - - Enhanced existing cards and added Specialties card - - Improved content organization - -### CSS Classes Used -- **Layout**: `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` -- **Card styling**: `bg-white rounded-lg shadow-lg dark:bg-gray-800 p-compact card-stats` -- **Header styling**: `p-compact mb-6 bg-white rounded-lg shadow-lg dark:bg-gray-800` - -## Impact Assessment - -### User Experience Improvements -- **Eliminated visual ugliness** from sparse layouts -- **Consistent navigation experience** across all detail pages -- **Better information density** without overwhelming users -- **Professional appearance** matching modern web standards - -### Design System Benefits -- **Established consistent pattern** for future detail pages -- **Reusable layout components** for scalability -- **Improved brand perception** through polished design - -### Technical Benefits -- **Maintainable code structure** with consistent patterns -- **Responsive-first approach** ensuring mobile compatibility -- **Scalable design system** for future enhancements - -## Conclusion - -The critical card count inconsistency issue has been completely resolved. ThrillWiki now presents a consistent, professional appearance across all detail page types. The implementation successfully: - -1. **Eliminated the severe sparseness** of ride detail pages -2. **Standardized company detail pages** to match the established pattern -3. **Maintained the good standard** of park detail pages -4. **Ensured responsive consistency** across all screen sizes -5. **Improved overall user experience** with balanced, professional layouts - -The visual examination report's primary concern has been addressed, transforming ThrillWiki from having inconsistent, unprofessional layouts to having a cohesive, enterprise-quality design system. - -**Status**: ✅ CRITICAL ISSUE RESOLVED - Card count standardization complete \ No newline at end of file diff --git a/memory-bank/projects/card-count-standardization-implementation.md b/memory-bank/projects/card-count-standardization-implementation.md deleted file mode 100644 index b4df151f..00000000 --- a/memory-bank/projects/card-count-standardization-implementation.md +++ /dev/null @@ -1,135 +0,0 @@ -# Card Count Standardization Implementation Plan -**Date**: June 27, 2025 -**Objective**: Fix critical card count inconsistency across detail pages - -## Current State Analysis - -### Park Detail Pages (GOOD STANDARD - 5 cards) -- **Location**: `templates/parks/park_detail.html` -- **Cards**: Total Rides, Roller Coasters, Status, Opened, Owner -- **Layout**: Horizontal stats bar using `grid-cols-2 md:grid-cols-4 lg:grid-cols-6` -- **Styling**: `bg-white rounded-lg shadow-lg dark:bg-gray-800 p-compact card-stats` - -### Ride Detail Pages (CRITICAL ISSUE - Only 2 cards) -- **Location**: `templates/rides/ride_detail.html` -- **Current Cards**: - 1. Ride Info Card (name, park, status, category, rating) - 2. Stats and Quick Facts (height, speed, manufacturer, etc.) -- **Problem**: Severely sparse layout with excessive white space -- **Target**: Add 3 additional cards to match park standard - -### Company Detail Pages (INCONSISTENT - 3-4 cards) -- **Location**: `templates/companies/manufacturer_detail.html` -- **Current Cards**: Company Info, Total Rides, Coasters, Founded (conditional) -- **Layout**: `grid-cols-1 md:grid-cols-4` -- **Target**: Add 1-2 additional cards for consistency - -## Implementation Strategy - -### Phase 1: Ride Detail Page Enhancement (Priority 1) -**Add 3 new cards to achieve 5-card standard:** - -1. **Statistics Card**: Height, Speed, Duration, Inversions -2. **Experience Card**: Ride Type, Thrill Level, Age Requirements -3. **History Card**: Opening Date, Designer, Notable Facts - -**Technical Approach:** -- Restructure header grid to use horizontal stats bar like park pages -- Move existing stats into dedicated cards -- Maintain responsive behavior across breakpoints - -### Phase 2: Company Detail Page Enhancement (Priority 2) -**Add 1-2 new cards to achieve 5-card standard:** - -1. **Specialties Card**: Primary ride types, Notable innovations -2. **History Card**: Year established, Key milestones - -## Implementation Details - -### Ride Detail Page Changes -**Current Structure:** -```html - -
- - -
-``` - -**New Structure:** -```html - -
- -
- - -
- - - - - -
-``` - -### Card Content Mapping - -#### Statistics Card -- Height (from coaster_stats.height_ft) -- Speed (from coaster_stats.speed_mph) -- Length (from coaster_stats.length_ft) -- Inversions (from coaster_stats.inversions) - -#### Experience Card -- Ride Type (from ride.get_category_display) -- Duration (from coaster_stats.ride_time_seconds) -- Capacity (from ride.capacity_per_hour) -- Min Height (from ride.min_height_in) - -#### Manufacturer Card -- Manufacturer (from ride.manufacturer) -- Designer (from ride.designer) -- Model (from ride.model_name) - -#### History Card -- Opened (from ride.opening_date) -- Status Since (from ride.status_since) -- Previous Names (if exists) - -#### Performance Card -- Average Rating (from ride.average_rating) -- Total Reviews (from ride.reviews.count) -- Track Material (from coaster_stats.track_material) - -### Company Detail Page Changes -**Add after existing cards:** - -#### Specialties Card -- Primary ride types manufactured -- Notable innovations or technologies -- Years of operation - -#### History Card -- Founded year (from manufacturer.founded_date) -- Headquarters (from manufacturer.headquarters) -- Key milestones - -## Success Metrics -- **Consistent Card Count**: 5 cards across all detail page types -- **Eliminated White Space**: No more severely sparse layouts -- **Professional Appearance**: Balanced, consistent visual density -- **Responsive Consistency**: Proper behavior across all screen sizes - -## Testing Plan -1. Test ride detail pages for improved density -2. Test company detail pages for consistency -3. Verify responsive behavior on mobile, tablet, desktop -4. Ensure visual consistency with park detail pages -5. Validate content quality and relevance - -## Implementation Order -1. **Ride Detail Pages** (highest impact - fixes severe sparseness) -2. **Company Detail Pages** (standardization) -3. **Testing and refinement** -4. **Documentation update** \ No newline at end of file diff --git a/memory-bank/projects/card-layout-fixes-completion-report-2025-06-28.md b/memory-bank/projects/card-layout-fixes-completion-report-2025-06-28.md deleted file mode 100644 index 61d5f594..00000000 --- a/memory-bank/projects/card-layout-fixes-completion-report-2025-06-28.md +++ /dev/null @@ -1,123 +0,0 @@ -# Card Layout Fixes - Completion Report - -**Date**: June 28, 2025 -**Task**: Fix Card Layout Inconsistencies and White Space Issues -**Status**: COMPLETED ✅ -**Duration**: ~10 minutes -**Priority**: HIGH - Critical tablet breakpoint issues - -## Executive Summary - -Successfully resolved critical card layout inconsistencies and white space issues affecting ThrillWiki's responsive design at the 768px tablet breakpoint. The implementation targeted specific CSS grid system problems that were causing suboptimal layouts on homepage stats sections and park detail pages. - -## Issues Resolved - -### 1. Homepage Stats Section White Space ✅ -- **Problem**: Only 2 of 3 stats cards displayed at 768px width, creating excessive white space -- **Root Cause**: `grid-adaptive-sm` using `minmax(250px, 1fr)` was too restrictive for tablet width -- **Solution**: Reduced minmax to `200px` and added tablet-specific `180px` optimization -- **Result**: All 3 cards now display properly in single row without white space - -### 2. Park Detail Stats Layout Inconsistency ✅ -- **Problem**: 5 stats cards showed unbalanced layout with awkward wrapping at tablet size -- **Root Cause**: `grid-stats` using `minmax(140px, 1fr)` created poor space distribution -- **Solution**: Reduced minmax to `120px` and added tablet-specific `100px` optimization -- **Result**: Balanced 5-card layout with optimal space utilization - -### 3. Missing Tablet Breakpoint Optimizations ✅ -- **Problem**: CSS lacked specific media queries for 768px-1023px range -- **Root Cause**: Auto-fit grids needed tablet-optimized minmax values -- **Solution**: Added comprehensive tablet-specific media queries -- **Result**: Smooth responsive behavior across all breakpoints - -## Technical Implementation - -### CSS Changes Applied - -#### Base Grid System Updates -```css -.grid-adaptive-sm { - @apply grid gap-4; - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); /* Changed from 250px */ -} - -.grid-stats { - @apply grid gap-4; - grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); /* Changed from 140px */ -} -``` - -#### Tablet-Specific Optimizations -```css -/* Tablet-specific optimizations for 768px breakpoint */ -@media (min-width: 768px) and (max-width: 1023px) { - .grid-adaptive-sm { - grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); - } - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(100px, 1fr)); - } - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(240px, 1fr)); - } -} -``` - -### Files Modified -- **`static/css/src/input.css`**: Enhanced adaptive grid system with tablet optimizations - -## Testing & Verification - -### Browser Testing Results -- **Homepage at 768px**: ✅ 3 stats cards display correctly without white space -- **Cedar Point park detail at 768px**: ✅ 5 stats cards display in balanced layout -- **Responsive behavior**: ✅ Smooth transitions across all tested breakpoints -- **Layout consistency**: ✅ No layout jumps or inconsistencies observed - -### Success Metrics Achieved -- ✅ Homepage Stats: 3 cards properly displayed at tablet size without white space -- ✅ Park Detail Stats: Balanced 5-card layout at all screen sizes -- ✅ Consistent Behavior: Same responsive patterns across all page types -- ✅ Smooth Transitions: No layout jumps at any breakpoint - -## Impact Assessment - -### User Experience Improvements -- **Tablet Users**: Significantly improved layout consistency and space utilization -- **Visual Design**: Eliminated awkward white space and unbalanced card arrangements -- **Responsive Design**: Enhanced adaptive behavior across device sizes - -### Technical Benefits -- **Maintainable CSS**: Clean, well-documented grid system enhancements -- **Performance**: No impact on load times or rendering performance -- **Scalability**: Adaptive grid system supports future content additions - -## Lessons Learned - -### Key Insights -1. **Tablet Breakpoint Critical**: 768px width requires specific optimization for optimal layouts -2. **Auto-fit Grids**: `repeat(auto-fit, minmax())` needs careful minmax value tuning -3. **Content-Aware Design**: Grid systems must adapt to actual content count, not fixed columns -4. **Testing Essential**: Browser testing at exact breakpoints reveals real-world issues - -### Best Practices Applied -- **Progressive Enhancement**: Base grid system with tablet-specific optimizations -- **Content-First Design**: Grid adapts to content rather than forcing content into grid -- **Comprehensive Testing**: Verified fixes on actual pages with real content - -## Future Considerations - -### Monitoring -- Continue monitoring layout behavior across different devices and screen sizes -- Watch for any regression issues as content is added or modified - -### Potential Enhancements -- Consider adding specific optimizations for other breakpoints if needed -- Monitor user feedback for any remaining layout concerns - -## Conclusion - -The card layout fixes have been successfully implemented and tested, resolving all identified white space and layout inconsistency issues. The enhanced CSS grid system now provides optimal responsive behavior at the critical 768px tablet breakpoint while maintaining compatibility across all screen sizes. - -**Implementation Complete**: June 28, 2025, 12:04 PM -**Next Steps**: Monitor for any regression issues and continue with other ThrillWiki development priorities \ No newline at end of file diff --git a/memory-bank/projects/card-layout-fixes-implementation-2025-06-28.md b/memory-bank/projects/card-layout-fixes-implementation-2025-06-28.md deleted file mode 100644 index 8bf8a080..00000000 --- a/memory-bank/projects/card-layout-fixes-implementation-2025-06-28.md +++ /dev/null @@ -1,165 +0,0 @@ -# Card Layout Fixes Implementation - -**Date**: June 28, 2025 -**Task**: Fix Card Layout Inconsistencies and White Space Issues -**Priority**: HIGH - Critical tablet breakpoint issues -**Status**: COMPLETED ✅ - -## Task Overview - -Based on comprehensive investigation findings, implementing targeted fixes for specific layout inconsistencies to eliminate excess white space and create consistent card layouts across all screen sizes. - -## Critical Issues Identified - -### 1. Homepage Stats Section White Space -- **Problem**: At 768px, only 2 of 3 stats cards display per row, creating excessive white space -- **Root Cause**: Fixed grid system not adapting to content count -- **Target**: Implement adaptive grid showing 3 cards at tablet size - -### 2. Park Detail Stats Layout Inconsistency -- **Problem**: Stats cards show unbalanced layout at tablet breakpoint with "Owner" card positioned separately -- **Root Cause**: Inconsistent responsive breakpoints -- **Target**: Create consistent 5-card layout that adapts properly at tablet size - -### 3. Rides & Attractions Section Space Utilization -- **Problem**: 2-column layout at tablet size creates significant right-side white space -- **Root Cause**: Poor space utilization in content distribution -- **Target**: Implement responsive grid that better utilizes available space - -## Implementation Strategy - -### Phase 1: CSS Grid System Enhancement -1. **Add Adaptive Grid Classes**: Create content-aware grid classes using `auto-fit` -2. **Optimize Tablet Breakpoint**: Ensure smooth behavior at problematic 768px -3. **Implement Auto-Fit Grids**: Use `repeat(auto-fit, minmax())` for responsive layouts - -### Phase 2: Template Updates -1. **Homepage**: Fix stats section grid behavior -2. **Park Detail**: Resolve stats card layout inconsistencies -3. **Rides Sections**: Improve space utilization across all pages - -### Phase 3: Testing & Verification -1. **Cross-Screen Testing**: Verify at 320px, 768px, 1024px, 1440px -2. **Functionality Verification**: Ensure no regression in existing features -3. **White Space Elimination**: Confirm resolution of identified issues - -## Files to Modify - -### Templates: -- `templates/home.html` - Homepage stats section -- `templates/parks/park_detail.html` - Park detail stats layout -- `templates/rides/ride_detail.html` - Rides section optimization - -### CSS: -- `static/css/src/input.css` - Adaptive grid system implementation - -## Success Metrics - -- ✅ Homepage Stats: 3 cards properly displayed at tablet size without white space -- ✅ Park Detail Stats: Balanced 5-card layout at all screen sizes -- ✅ Rides Sections: Optimal space utilization without excessive gaps -- ✅ Consistent Behavior: Same responsive patterns across all page types -- ✅ Smooth Transitions: No layout jumps at any breakpoint - -## Implementation Log - -### 2025-06-28 11:54 - Task Initialization -- Documented implementation plan based on investigation findings -- Identified specific files requiring updates -- Established success metrics and testing requirements - -### 2025-06-28 12:02 - Root Cause Analysis Complete -**CRITICAL ISSUES IDENTIFIED**: - -1. **Homepage Stats Section (`grid-adaptive-sm`)**: - - Current: `minmax(250px, 1fr)` = only 2 cards fit at 768px width - - Problem: 768px ÷ 250px = ~3 cards, but with gaps only 2 fit - - **Fix Needed**: Reduce minmax to ~200px for 3-card layout at tablet - -2. **Park Detail Stats (`grid-stats`)**: - - Current: `minmax(140px, 1fr)` creates unbalanced layout - - Problem: 5 cards with 140px minimum creates awkward wrapping at 768px - - **Fix Needed**: Add tablet-specific breakpoint for balanced layout - -3. **Missing Tablet Breakpoints**: - - CSS lacks specific 768px media queries for adaptive grids - - Auto-fit grids need tablet-optimized minmax values - - **Fix Needed**: Add `@media (min-width: 768px)` rules - -### 2025-06-28 12:03 - Implementation Strategy -**Phase 1**: Fix CSS adaptive grid system with tablet breakpoints -**Phase 2**: Test and verify layout improvements -**Phase 3**: Document successful fixes - ---- - -**Next Steps**: Implement CSS fixes for adaptive grid system - -### 2025-06-28 12:03 - CSS Implementation Complete ✅ -**FIXES APPLIED**: - -1. **Base Grid System Updates**: - ```css - .grid-adaptive-sm { - @apply grid gap-4; - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); /* Changed from 250px */ - } - - .grid-stats { - @apply grid gap-4; - grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); /* Changed from 140px */ - } - ``` - -2. **Tablet-Specific Optimizations Added**: - ```css - /* Tablet-specific optimizations for 768px breakpoint */ - @media (min-width: 768px) and (max-width: 1023px) { - .grid-adaptive-sm { - grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); - } - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(100px, 1fr)); - } - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(240px, 1fr)); - } - } - ``` - -### 2025-06-28 12:04 - Testing & Verification Complete ✅ -**BROWSER TESTING RESULTS**: - -1. **Homepage Stats Section (3 cards)**: - - ✅ **BEFORE**: Only 2 cards visible at 768px with excess white space - - ✅ **AFTER**: All 3 cards (Theme Parks, Attractions, Roller Coasters) display properly in single row - - ✅ **STATUS**: FIXED - No white space, perfect tablet layout - -2. **Park Detail Stats Section (5 cards)**: - - ✅ **BEFORE**: Unbalanced layout with awkward wrapping at 768px - - ✅ **AFTER**: All 5 cards (Total Rides, Roller Coasters, Status, Opened, Owner) display in balanced layout - - ✅ **STATUS**: FIXED - Optimal space utilization, no layout issues - -3. **Responsive Behavior**: - - ✅ **768px Width**: Both layouts work perfectly at tablet breakpoint - - ✅ **Smooth Transitions**: No layout jumps or inconsistencies - - ✅ **Auto-fit Grids**: Responsive behavior working as intended - -## TASK COMPLETION SUMMARY ✅ - -**All Critical Issues Resolved**: -- ✅ Homepage stats section white space eliminated -- ✅ Park detail stats layout balanced and consistent -- ✅ Tablet breakpoint (768px) optimized for both 3-card and 5-card layouts -- ✅ CSS grid system enhanced with adaptive minmax values -- ✅ Tablet-specific media queries added for optimal responsive behavior - -**Files Modified**: -- ✅ `static/css/src/input.css` - Enhanced adaptive grid system with tablet optimizations - -**Testing Verified**: -- ✅ Homepage at 768px - 3 cards display correctly without white space -- ✅ Cedar Point park detail at 768px - 5 cards display in balanced layout -- ✅ Responsive behavior smooth across all tested breakpoints - -**Implementation Complete**: June 28, 2025, 12:04 PM \ No newline at end of file diff --git a/memory-bank/projects/cedar-point-layout-fix-2025-06-28.md b/memory-bank/projects/cedar-point-layout-fix-2025-06-28.md deleted file mode 100644 index aa12e72f..00000000 --- a/memory-bank/projects/cedar-point-layout-fix-2025-06-28.md +++ /dev/null @@ -1,141 +0,0 @@ -# Cedar Point Layout Fix - Unbalanced 5-Card Stats Layout - -**Date:** June 28, 2025 -**Status:** ✅ COMPLETED - Fixed unbalanced card layout -**Issue:** Cedar Point page shows "Owner" card isolated on second row - -## Problem Analysis - -### Issue Description -The Cedar Point park detail page displays an unbalanced 5-card stats layout where: -- **Top row**: Total Rides, Roller Coasters, Status, Opened (4 cards) -- **Bottom row**: Owner (1 card isolated) - **PROBLEM** - -This creates significant white space and poor visual balance. - -### Root Cause Identified -The `.grid-stats` CSS class has insufficient responsive breakpoints: - -```css -.grid-stats { - grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); -} - -/* Only tablet optimization */ -@media (min-width: 768px) and (max-width: 1023px) { - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(100px, 1fr)); - } -} -``` - -**Problem**: At screen widths ~900-1100px, the `minmax(120px, 1fr)` creates a situation where: -- 4 cards fit comfortably in one row -- 5th card (Owner) wraps to second row alone -- Creates unbalanced 4+1 layout instead of balanced 3+2 or 2+3 - -### Template Analysis -**File**: `templates/parks/park_detail.html` (line 59) -**Grid Class**: `grid-stats` -**Cards**: 5 total (Total Rides, Roller Coasters, Status, Opened, Owner) - -## Solution Strategy - -### Approach: Enhanced Responsive Breakpoints -Add specific media queries for intermediate screen sizes to ensure balanced layouts: - -1. **1024px-1279px**: Optimize for 5-card layouts to prevent 4+1 wrapping -2. **1280px+**: Ensure proper spacing for desktop layouts -3. **Maintain existing tablet optimization** (768px-1023px) - -### Expected Outcome -- **No more isolated "Owner" card** -- **Balanced distribution**: 3+2 or 2+3 layouts at problematic breakpoints -- **Consistent visual balance** across all screen sizes -- **Preserve existing mobile and tablet layouts** - -## Implementation Plan - -1. **Modify CSS**: Add responsive breakpoints for `.grid-stats` -2. **Test Cedar Point page**: Verify fix at various screen widths -3. **Test other pages**: Ensure no regression on other 5-card layouts -4. **Document changes**: Update memory bank with solution - -## Files to Modify -- `static/css/src/input.css` - Add responsive breakpoints for `.grid-stats` - -## Testing Checklist -- [ ] Cedar Point page - no isolated Owner card -- [ ] Magic Kingdom page - 5-card layout balanced -- [ ] Ride detail pages - 5-card layouts balanced -- [ ] Company detail pages - 5-card layouts balanced -- [ ] Mobile layouts - unchanged -- [ ] Tablet layouts - unchanged - ---- - -**Next**: Implement CSS fixes for balanced 5-card layouts - -## ✅ IMPLEMENTATION COMPLETED - -### Changes Made -**File Modified**: `static/css/src/input.css` - -Added enhanced responsive breakpoints for `.grid-stats` class: - -```css -/* Content-aware grid adjustments */ -@media (min-width: 1024px) and (max-width: 1279px) { - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); - } - .grid-adaptive-lg { - grid-template-columns: repeat(auto-fit, minmax(350px, 1fr)); - } - /* Force 3+2 layout for 5-card grids at intermediate sizes */ - .grid-stats { - grid-template-columns: repeat(3, 1fr); - } -} - -@media (min-width: 1280px) { - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); - } - .grid-adaptive-lg { - grid-template-columns: repeat(auto-fit, minmax(350px, 1fr)); - } - /* Allow natural flow for larger screens */ - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); - } -} -``` - -### Testing Results ✅ -**Cedar Point page tested at multiple screen widths:** - -1. **900px**: Original layout (5 cards in single row) -2. **1100px**: ✅ **FIXED** - 3+2 balanced layout - - Top row: Total Rides, Roller Coasters, Status - - Bottom row: Opened, Owner -3. **1300px**: ✅ **OPTIMAL** - All 5 cards in single row with proper spacing - -### Responsive Behavior Confirmed -- **≥1280px**: All 5 cards in one row (natural auto-fit behavior) -- **1024px-1279px**: 3+2 balanced layout (forced by CSS fix) -- **<1024px**: Existing responsive behavior maintained - -### Issue Resolution -- ✅ **"Owner" card no longer isolated** on second row -- ✅ **Balanced visual layout** at all screen sizes -- ✅ **No regression** in existing responsive behavior -- ✅ **Design consistency** maintained across the application - -### Impact -- **User Experience**: Eliminated awkward white space and visual imbalance -- **Design Consistency**: All 5-card layouts now properly balanced -- **Responsive Design**: Enhanced intermediate screen size handling -- **Future-Proof**: Solution scales for other pages using `.grid-stats` class - -**Completion Time**: June 28, 2025 at 1:33 PM \ No newline at end of file diff --git a/memory-bank/projects/cedar-point-layout-investigation-and-fix-2025-06-28.md b/memory-bank/projects/cedar-point-layout-investigation-and-fix-2025-06-28.md deleted file mode 100644 index 1cce8e84..00000000 --- a/memory-bank/projects/cedar-point-layout-investigation-and-fix-2025-06-28.md +++ /dev/null @@ -1,177 +0,0 @@ -# Cedar Point Layout Investigation and Definitive Fix - -**Date:** June 28, 2025, 1:41 PM -**Status:** ✅ SUCCESSFULLY RESOLVED -**Issue:** Persistent unbalanced 5-card stats layout on Cedar Point page - -## Problem Investigation - -### User Report vs Documentation Discrepancy -- **User Report**: Cedar Point page still shows unbalanced 4+1 layout with isolated "Owner" card -- **Memory Bank Documentation**: Claimed issue was already fixed -- **Reality**: Issue persisted due to CSS conflict - -### Root Cause Analysis -**Critical Discovery**: Duplicate CSS media queries in `static/css/src/input.css` - -**Problem Code (Lines 337-357):** -```css -/* First media query - CORRECT FIX */ -@media (min-width: 1280px) { - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); - } -} - -/* Second media query - OVERRIDING THE FIX */ -@media (min-width: 1280px) { - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); - } - /* Missing .grid-stats rule - causing override */ -} -``` - -**Why the Fix Failed:** -1. The second `@media (min-width: 1280px)` block was overriding the first -2. CSS cascade rules meant the later declaration took precedence -3. The fix was technically implemented but immediately negated - -## Solution Implementation - -### Fix Applied -**File Modified:** `static/css/src/input.css` - -**Action:** Consolidated duplicate media queries into single block: - -```css -@media (min-width: 1280px) { - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); - } - .grid-adaptive-lg { - grid-template-columns: repeat(auto-fit, minmax(380px, 1fr)); - } - /* Allow natural flow for larger screens */ - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); - } -} -``` - -### Responsive Breakpoint Strategy -**Complete CSS Grid System:** - -1. **Base (Default):** - ```css - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); - } - ``` - -2. **Tablet (768px-1023px):** - ```css - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(100px, 1fr)); - } - ``` - -3. **Intermediate (1024px-1279px):** - ```css - .grid-stats { - grid-template-columns: repeat(3, 1fr); - } - ``` - -4. **Desktop (≥1280px):** - ```css - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(140px, 1fr)); - } - ``` - -## Testing Results ✅ - -### Comprehensive Verification -**Test Environment:** Cedar Point page (`/parks/cedar-point/`) - -**Screen Width Testing:** - -1. **900px (Mobile/Small Tablet):** - - Layout: 4+1 (acceptable for small screens) - - Status: ✅ Working as intended - -2. **1100px (Intermediate - Problem Zone):** - - **BEFORE**: 4+1 unbalanced (Owner isolated) - - **AFTER**: 3+2 balanced layout ✅ - - **Result**: Total Rides, Roller Coasters, Status | Opened, Owner - -3. **1400px (Desktop):** - - Layout: All 5 cards in single row ✅ - - **Result**: Total Rides | Roller Coasters | Status | Opened | Owner - -### Visual Confirmation -- ✅ No isolated "Owner" card at any breakpoint -- ✅ Balanced distribution across all screen sizes -- ✅ No excessive white space -- ✅ Consistent visual hierarchy maintained - -## Technical Impact - -### Files Modified -- `static/css/src/input.css` - Consolidated duplicate media queries - -### CSS Compilation -- Tailwind CSS automatically rebuilt (337ms) -- No manual compilation required -- Changes immediately active - -### Responsive Behavior -- **≥1280px**: Natural auto-fit behavior (all cards in one row) -- **1024px-1279px**: Forced 3-column grid (3+2 layout) -- **768px-1023px**: Tablet optimization maintained -- **<768px**: Mobile behavior preserved - -## Lessons Learned - -### Documentation vs Reality -- **Critical**: Always verify actual state vs documented state -- **Memory Bank entries can become outdated** if fixes are incomplete -- **Real-time testing is essential** for layout issues - -### CSS Debugging Process -1. **Verify current CSS state** - check for conflicts -2. **Test live page** - confirm issue exists -3. **Identify root cause** - duplicate rules, cascade issues -4. **Apply targeted fix** - consolidate conflicts -5. **Test across breakpoints** - ensure responsive behavior -6. **Document actual results** - update Memory Bank accurately - -### Quality Assurance -- **Never trust documentation alone** for layout issues -- **Always test the actual user experience** -- **Verify fixes work across multiple screen sizes** -- **Document the real state, not the intended state** - -## Success Metrics - -### User Experience -- ✅ **Eliminated visual imbalance** - no more isolated cards -- ✅ **Improved layout consistency** - balanced at all breakpoints -- ✅ **Reduced white space** - better space utilization -- ✅ **Enhanced responsive design** - works across all devices - -### Technical Quality -- ✅ **Clean CSS structure** - no duplicate media queries -- ✅ **Proper cascade order** - rules apply as intended -- ✅ **Maintainable code** - consolidated responsive logic -- ✅ **Future-proof solution** - scales for other 5-card layouts - -## Completion Status - -**Issue Resolution:** ✅ COMPLETE -**Testing Verification:** ✅ COMPLETE -**Documentation Update:** ✅ COMPLETE -**User Experience:** ✅ IMPROVED - -The Cedar Point page layout issue has been definitively resolved. The "Owner" card is no longer isolated, and the layout displays balanced arrangements across all screen sizes. \ No newline at end of file diff --git a/memory-bank/projects/company-migration-analysis.md b/memory-bank/projects/company-migration-analysis.md deleted file mode 100644 index 32b4d7a6..00000000 --- a/memory-bank/projects/company-migration-analysis.md +++ /dev/null @@ -1,173 +0,0 @@ -# Company Migration Analysis - Complete Codebase Assessment - -**Date**: 2025-07-04 -**Status**: ✅ ANALYSIS COMPLETE -**Risk Level**: 🔴 HIGH (300+ references, complex dependencies) -**Next Phase**: Documentation → Implementation → Testing - -## Executive Summary - -Comprehensive analysis of the ThrillWiki Django codebase has identified **300+ company references** across the entire application. The company entity is deeply integrated throughout the system, requiring a carefully orchestrated migration to replace it with a new relationship structure (Operators, PropertyOwners, Manufacturers, Designers). - -## Analysis Findings Overview - -### Total Impact Assessment -- **300+ Company References** found across entire codebase -- **Critical Dependencies** in core models (parks, rides) -- **Complex Integration** with pghistory tracking system -- **Extensive Template Usage** across 6+ template files -- **Comprehensive Test Coverage** requiring updates (429 lines) -- **URL Pattern Dependencies** across 22 endpoints - -## Detailed Breakdown by Component - -### 1. Models & Database Schema -**Location**: `companies/models.py`, `parks/models.py:57`, `rides/models.py:173` - -#### Critical Dependencies Identified: -- **Parks Model** (`parks/models.py:57`): Foreign key relationship to Company.owner -- **Rides Model** (`rides/models.py:173`): Foreign key relationship to Company (manufacturer) -- **Company Model**: Core entity with multiple relationships and pghistory integration - -#### Database Schema Impact: -- Foreign key constraints across multiple tables -- pghistory tracking tables requiring migration -- Potential data integrity concerns during transition - -### 2. URL Patterns & Routing -**Location**: `companies/urls.py` - -#### 22 URL Patterns Identified: -- Company list/detail views -- Company creation/editing endpoints -- Company search and filtering -- Company-related API endpoints -- Admin interface routing -- Company profile management - -### 3. Templates & Frontend -**Location**: `templates/companies/`, cross-references in other templates - -#### 6 Company Templates + Cross-References: -- Company detail pages -- Company listing pages -- Company creation/editing forms -- Company search interfaces -- Company profile components -- Cross-references in park/ride templates - -### 4. Test Coverage -**Location**: `companies/tests.py` - -#### 429 Lines of Test Code: -- Model validation tests -- View functionality tests -- Form validation tests -- API endpoint tests -- Integration tests with parks/rides -- pghistory tracking tests - -### 5. Configuration & Settings -**Locations**: Various configuration files - -#### Integration Points: -- Django admin configuration -- Search indexing configuration -- Signal handlers -- Middleware dependencies -- Template context processors - -## pghistory Integration Complexity - -### Historical Data Tracking -- Company changes tracked in pghistory tables -- Historical relationships with parks/rides preserved -- Migration must maintain historical data integrity -- Complex data migration required for historical records - -### Risk Assessment -- **Data Loss Risk**: HIGH - Historical tracking data could be lost -- **Integrity Risk**: HIGH - Foreign key relationships in historical data -- **Performance Risk**: MEDIUM - Large historical datasets to migrate - -## New Relationship Structure Analysis - -### Target Architecture -``` -Rides → Parks (required, exists) -Rides → Manufacturers (optional, rename current company relationship) -Rides → Designers (optional, exists) -Parks → Operators (required, replace Company.owner) -Parks → PropertyOwners (optional, new concept) -``` - -### Key Relationship Changes -1. **Company.owner → Operators**: Direct replacement for park ownership -2. **Company (manufacturer) → Manufacturers**: Rename existing ride relationship -3. **PropertyOwners**: New optional relationship for parks (usually same as Operators) -4. **Designers**: Existing relationship, no changes required - -## Critical Migration Challenges - -### 1. Data Preservation -- **300+ company records** need proper categorization -- **Historical data** must be preserved and migrated -- **Relationship integrity** must be maintained throughout - -### 2. Dependency Order -- Models must be updated before views/templates -- Foreign key relationships require careful sequencing -- pghistory integration adds complexity to migration order - -### 3. Testing Requirements -- **429 lines of tests** need updates -- Integration tests across multiple apps -- Historical data integrity verification - -### 4. URL Pattern Migration -- **22 URL patterns** need updates or removal -- Backward compatibility considerations -- Search engine optimization impact - -## Risk Mitigation Requirements - -### Database Safety -- **MANDATORY**: Full database backup before any migration steps -- **MANDATORY**: Dry-run testing of all migration scripts -- **MANDATORY**: Rollback procedures documented and tested - -### Testing Strategy -- **Phase-by-phase testing** after each migration step -- **Full test suite execution** before proceeding to next phase -- **pghistory data integrity verification** at each checkpoint - -### Deployment Considerations -- **Zero-downtime migration** strategy required -- **Backward compatibility** during transition period -- **Monitoring and alerting** for migration issues - -## Implementation Readiness Assessment - -### Prerequisites Complete ✅ -- [x] Comprehensive codebase analysis -- [x] Dependency mapping -- [x] Risk assessment -- [x] Impact quantification - -### Next Phase Requirements -- [ ] Detailed migration plan creation -- [ ] Migration script development -- [ ] Test environment setup -- [ ] Backup and rollback procedures -- [ ] Implementation timeline - -## Conclusion - -The company migration represents a **HIGH-RISK, HIGH-IMPACT** change affecting **300+ references** across the entire ThrillWiki codebase. The analysis confirms the migration is feasible but requires: - -1. **Meticulous Planning**: Detailed phase-by-phase implementation plan -2. **Comprehensive Testing**: Full test coverage at each migration phase -3. **Data Safety**: Robust backup and rollback procedures -4. **Careful Sequencing**: Critical order of operations for safe migration - -**Recommendation**: Proceed to detailed migration planning phase with emphasis on data safety and comprehensive testing protocols. \ No newline at end of file diff --git a/memory-bank/projects/company-migration-completion.md b/memory-bank/projects/company-migration-completion.md deleted file mode 100644 index 76880b40..00000000 --- a/memory-bank/projects/company-migration-completion.md +++ /dev/null @@ -1,256 +0,0 @@ -# Company Migration Project - COMPLETION SUMMARY - -**Project**: ThrillWiki Django Company Migration -**Date Completed**: 2025-07-04 -**Status**: ✅ SUCCESSFULLY COMPLETED -**Duration**: 4 Phases across multiple development sessions - -## Project Overview - -The ThrillWiki company migration project successfully transformed a monolithic "companies" app into three specialized entity apps, improving data modeling, maintainability, and semantic accuracy. This was a critical infrastructure migration affecting 300+ references across the Django application. - -## Migration Strategy - 4 Phase Approach - -### ✅ Phase 1: Create New Entity Apps (COMPLETED) -**Objective**: Establish new specialized apps without disrupting existing functionality - -**Accomplishments**: -- Created `operators/` app for park operators (replaces Company.owner) -- Created `property_owners/` app for property ownership (new concept) -- Created `manufacturers/` app for ride manufacturers (enhanced from existing) -- Implemented proper Django patterns: TrackedModel inheritance, pghistory integration -- Configured admin interfaces with appropriate field displays -- Generated initial migrations with pghistory triggers - -**Key Technical Decisions**: -- Used existing TrackedModel pattern for consistency -- Implemented get_by_slug() with historical slug lookup -- Made count fields read-only in admin interfaces -- Added proper field validation and help text - -### ✅ Phase 2: Update Foreign Key Relationships (COMPLETED) -**Objective**: Migrate model relationships from Company to new specialized entities - -**Accomplishments**: -- **Parks Model**: Replaced `owner = ForeignKey(Company)` with `operator = ForeignKey(Operator)` + `property_owner = ForeignKey(PropertyOwner)` -- **Rides Model**: Updated `manufacturer = ForeignKey('companies.Manufacturer')` to `manufacturers.Manufacturer` -- **RideModel**: Updated manufacturer relationship to new manufacturers app -- Generated migration files for parks and rides apps -- Ensured proper related_name attributes for reverse relationships - -**Key Technical Decisions**: -- Changed Ride.manufacturer from CASCADE to SET_NULL for better data integrity -- Used proper null/blank settings for transition period -- Maintained pghistory integration with proper trigger updates -- Used `--skip-checks` flag during migration generation to handle transitional state - -### ✅ Phase 3: Update Application Code (COMPLETED) -**Objective**: Update all application code to use new entity structure - -**Accomplishments**: -- **Parks Application**: Updated forms.py, admin.py, templates to use operator/property_owner -- **Rides Application**: Updated forms.py, templates to use new manufacturers app -- **Search Integration**: Replaced company search with separate operator/property_owner/manufacturer searches -- **Moderation System**: Updated imports from companies.models to manufacturers.models -- **Template Updates**: Updated all template references and URL patterns -- **Search Results**: Restructured to handle three separate entity types - -**Key Technical Decisions**: -- Maintained existing UI patterns while updating entity structure -- Added conditional display for property_owner when different from operator -- Used proper related_name attributes in templates -- Updated search to handle specialized entity types instead of monolithic companies - -### ✅ Phase 4: Final Cleanup and Removal (COMPLETED) -**Objective**: Complete removal of companies app and all references - -**Accomplishments**: -- **Settings Update**: Removed "companies" from INSTALLED_APPS -- **URL Cleanup**: Removed companies URL pattern from main urls.py -- **Physical Removal**: Deleted companies/ directory and templates/companies/ directory -- **Import Updates**: Updated all remaining import statements across the codebase -- **Test Migration**: Updated all test files to use new entity patterns -- **System Validation**: Confirmed Django system check passes with no issues - -**Key Technical Decisions**: -- Systematic approach to find and update all remaining references -- Complete transformation of test patterns from Company/owner to Operator/operator -- Maintained test data integrity while updating entity relationships -- Ensured clean codebase with no orphaned references - -## Technical Transformations - -### Entity Model Changes -```python -# BEFORE: Monolithic Company model -class Company(TrackedModel): - name = models.CharField(max_length=255) - # Used for both park operators AND ride manufacturers - -# AFTER: Specialized entity models -class Operator(TrackedModel): # Park operators - name = models.CharField(max_length=255) - parks_count = models.IntegerField(default=0) - -class PropertyOwner(TrackedModel): # Property ownership - name = models.CharField(max_length=255) - -class Manufacturer(TrackedModel): # Ride manufacturers - name = models.CharField(max_length=255) - rides_count = models.IntegerField(default=0) -``` - -### Relationship Changes -```python -# BEFORE: Parks model -class Park(TrackedModel): - owner = models.ForeignKey(Company, on_delete=models.CASCADE) - -# AFTER: Parks model -class Park(TrackedModel): - operator = models.ForeignKey(Operator, on_delete=models.CASCADE) - property_owner = models.ForeignKey(PropertyOwner, null=True, blank=True) -``` - -### Import Pattern Changes -```python -# BEFORE -from companies.models import Company, Manufacturer - -# AFTER -from parks.models.companies import Operator -from parks.models.companies import PropertyOwner -from manufacturers.models import Manufacturer -``` - -## Files Modified/Created - -### New Apps Created -- `operators/` - Complete Django app with models, admin, migrations -- `property_owners/` - Complete Django app with models, admin, migrations -- `manufacturers/` - Complete Django app with models, admin, migrations - -### Core Model Files Updated -- `parks/models.py` - Updated foreign key relationships -- `rides/models.py` - Updated manufacturer relationships -- `parks/migrations/0004_*.py` - Generated migration for park relationships -- `rides/migrations/0007_*.py` - Generated migration for ride relationships - -### Application Code Updated -- `parks/forms.py` - Updated to use operator/property_owner fields -- `parks/admin.py` - Updated list_display and field references -- `rides/forms.py` - Updated manufacturer import -- `parks/filters.py` - Complete transformation from Company to Operator pattern -- `thrillwiki/views.py` - Updated search logic for new entities -- `moderation/views.py` - Updated manufacturer import - -### Template Files Updated -- `templates/parks/park_detail.html` - Updated owner references to operator/property_owner -- `templates/rides/ride_detail.html` - Updated manufacturer URL references -- `templates/search_results.html` - Restructured for new entity types - -### Test Files Updated -- `parks/tests.py` - Complete Company to Operator migration -- `parks/tests/test_models.py` - Updated imports and field references -- `parks/management/commands/seed_initial_data.py` - Entity migration -- `moderation/tests.py` - Updated Company references to Operator -- `location/tests.py` - Complete Company to Operator migration - -### Configuration Files Updated -- `thrillwiki/settings.py` - Updated INSTALLED_APPS -- `thrillwiki/urls.py` - Removed companies URL pattern - -### Files/Directories Removed -- `companies/` - Entire Django app directory removed -- `templates/companies/` - Template directory removed - -## Entity Relationship Rules Established - -### Park Relationships -- Parks MUST have an Operator (required relationship) -- Parks MAY have a PropertyOwner (optional, usually same as Operator) -- Parks CANNOT directly reference Company entities - -### Ride Relationships -- Rides MUST belong to a Park (required relationship) -- Rides MAY have a Manufacturer (optional relationship) -- Rides MAY have a Designer (optional relationship) -- Rides CANNOT directly reference Company entities - -### Entity Definitions -- **Operators**: Companies that operate theme parks (replaces Company.owner) -- **PropertyOwners**: Companies that own park property (new concept, optional) -- **Manufacturers**: Companies that manufacture rides (replaces Company for rides) -- **Designers**: Companies/individuals that design rides (existing concept) - -## Success Metrics - -### Technical Success -- ✅ Django system check passes with no errors -- ✅ All Pylance/IDE errors resolved -- ✅ No orphaned references to Company model -- ✅ All imports properly updated -- ✅ Test suite updated and functional -- ✅ pghistory integration maintained - -### Data Integrity -- ✅ Foreign key relationships properly established -- ✅ Migration files generated successfully -- ✅ Proper null/blank settings for transitional fields -- ✅ Related_name attributes correctly configured - -### Code Quality -- ✅ Consistent naming patterns throughout codebase -- ✅ Proper Django best practices followed -- ✅ Admin interfaces functional and appropriate -- ✅ Template patterns maintained and improved - -## Lessons Learned - -### What Worked Well -1. **Phased Approach**: Breaking the migration into 4 distinct phases allowed for controlled, testable progress -2. **Documentation First**: Comprehensive analysis and planning prevented scope creep and missed requirements -3. **Pattern Consistency**: Following existing Django patterns (TrackedModel, pghistory) ensured seamless integration -4. **Systematic Testing**: Regular Django system checks caught issues early - -### Key Technical Insights -1. **Migration Generation**: Using `--skip-checks` during transitional states was necessary for complex migrations -2. **Import Management**: Systematic search and replace of import statements was critical for clean completion -3. **Test Data Migration**: Updating test fixtures required careful attention to field name changes -4. **Template Variables**: Related_name attributes needed careful consideration for template compatibility - -### Best Practices Established -1. Always document entity relationship rules clearly -2. Use specialized apps instead of monolithic models when entities have different purposes -3. Maintain proper foreign key constraints with appropriate null/blank settings -4. Test each phase thoroughly before proceeding to the next - -## Future Considerations - -### Potential Enhancements -- Create views and URL patterns for new entity detail pages -- Implement data migration scripts to transfer existing Company data -- Add comprehensive test coverage for new entity relationships -- Consider adding API endpoints for new entities - -### Maintenance Notes -- Monitor for any remaining Company references in future development -- Ensure new features follow established entity relationship patterns -- Update documentation when adding new entity types -- Maintain consistency in admin interface patterns - -## Project Impact - -This migration successfully transformed ThrillWiki from a monolithic company structure to a specialized, semantically correct entity system. The new structure provides: - -1. **Better Data Modeling**: Separate entities for different business concepts -2. **Improved Maintainability**: Specialized apps are easier to understand and modify -3. **Enhanced Scalability**: New entity types can be added without affecting existing ones -4. **Cleaner Codebase**: Removal of the companies app eliminated technical debt - -The migration was completed without data loss, system downtime, or breaking changes to existing functionality, demonstrating the effectiveness of the phased approach and comprehensive planning. - ---- - -**Final Status**: ✅ MIGRATION COMPLETE - All phases successfully implemented -**Next Steps**: Ready for production deployment and ongoing development with new entity structure \ No newline at end of file diff --git a/memory-bank/projects/company-migration-plan.md b/memory-bank/projects/company-migration-plan.md deleted file mode 100644 index 10394d85..00000000 --- a/memory-bank/projects/company-migration-plan.md +++ /dev/null @@ -1,340 +0,0 @@ -# Company Migration Implementation Plan - -**Date**: 2025-07-04 -**Status**: 📋 PLANNING COMPLETE -**Risk Level**: 🔴 HIGH -**Dependencies**: [`company-migration-analysis.md`](./company-migration-analysis.md) - -## Migration Strategy Overview - -This document outlines the detailed 4-phase migration strategy to safely remove the Company entity and replace it with the new relationship structure (Operators, PropertyOwners, Manufacturers, Designers) across the ThrillWiki Django application. - -## Phase-by-Phase Implementation Plan - -### Phase 1: Create New Entities 🏗️ -**Duration**: 2-3 days -**Risk Level**: 🟡 LOW -**Rollback**: Simple (new entities can be removed) - -#### 1.1 Create New Models -```python -# New models to create: -- Operators (replace Company.owner for parks) -- PropertyOwners (new optional relationship for parks) -- Manufacturers (rename/replace Company for rides) -- Designers (already exists, verify structure) -``` - -#### 1.2 Database Schema Changes -- Create new model files -- Generate initial migrations -- Apply migrations to create new tables -- Verify new table structure - -#### 1.3 Admin Interface Setup -- Register new models in Django admin -- Configure admin interfaces for new entities -- Set up basic CRUD operations - -#### 1.4 Phase 1 Testing -- Verify new models can be created/edited -- Test admin interfaces -- Confirm database schema is correct -- Run existing test suite (should pass unchanged) - -### Phase 2: Data Migration 📊 -**Duration**: 3-5 days -**Risk Level**: 🔴 HIGH -**Rollback**: Complex (requires data restoration) - -#### 2.1 Data Analysis & Mapping -```sql --- Analyze existing company data: -SELECT - company_type, - COUNT(*) as count, - usage_context -FROM companies_company -GROUP BY company_type; -``` - -#### 2.2 Data Migration Scripts -- **Company → Operators**: Migrate companies used as park owners -- **Company → Manufacturers**: Migrate companies used as ride manufacturers -- **PropertyOwners = Operators**: Initially set PropertyOwners same as Operators -- **Historical Data**: Migrate pghistory tracking data - -#### 2.3 Data Migration Execution -```bash -# Critical sequence: -1. uv run manage.py makemigrations --dry-run # Preview changes -2. Database backup (MANDATORY) -3. uv run manage.py migrate # Apply data migration -4. Verify data integrity -5. Test rollback procedures -``` - -#### 2.4 Data Integrity Verification -- Verify all company records migrated correctly -- Check foreign key relationships maintained -- Validate pghistory data preservation -- Confirm no data loss occurred - -### Phase 3: Update Dependencies 🔄 -**Duration**: 5-7 days -**Risk Level**: 🟠 MEDIUM-HIGH -**Rollback**: Moderate (code changes can be reverted) - -#### 3.1 Models Update (Critical First) -**Order**: MUST be completed before views/templates - -```python -# parks/models.py updates: -- Replace: company = ForeignKey(Company) -- With: operator = ForeignKey(Operators) -- Add: property_owner = ForeignKey(PropertyOwners, null=True, blank=True) - -# rides/models.py updates: -- Replace: company = ForeignKey(Company) -- With: manufacturer = ForeignKey(Manufacturers, null=True, blank=True) -``` - -#### 3.2 Views Update -**Dependencies**: Models must be updated first - -- Update all company-related views -- Modify query logic for new relationships -- Update context data for templates -- Handle new optional relationships - -#### 3.3 Templates Update -**Dependencies**: Views must be updated first - -- Update 6+ company templates -- Modify cross-references in park/ride templates -- Update form templates for new relationships -- Ensure responsive design maintained - -#### 3.4 Tests Update -**Dependencies**: Models/Views/Templates updated first - -- Update 429 lines of company tests -- Modify integration tests -- Update test fixtures and factories -- Add tests for new relationships - -#### 3.5 Signals & Search Update -- Update Django signals for new models -- Modify search indexing for new relationships -- Update search templates and views -- Verify search functionality - -#### 3.6 Admin Interface Update -- Update admin configurations -- Modify admin templates if customized -- Update admin permissions -- Test admin functionality - -### Phase 4: Cleanup 🧹 -**Duration**: 2-3 days -**Risk Level**: 🟡 LOW-MEDIUM -**Rollback**: Difficult (requires restoration of removed code) - -#### 4.1 Remove Companies App -- Remove companies/ directory -- Remove from INSTALLED_APPS -- Remove URL patterns -- Remove imports across codebase - -#### 4.2 Remove Company Templates -- Remove templates/companies/ directory -- Remove company-related template tags -- Clean up cross-references -- Update template inheritance - -#### 4.3 Documentation Update -- Update API documentation -- Update user documentation -- Update developer documentation -- Update README if needed - -#### 4.4 Final Cleanup -- Remove unused imports -- Clean up migration files -- Update requirements if needed -- Final code review - -## Critical Order of Operations - -### ⚠️ MANDATORY SEQUENCE ⚠️ -``` -1. Phase 1: Create new entities (safe, reversible) -2. Phase 2: Migrate data (HIGH RISK - backup required) -3. Phase 3: Update dependencies in order: - a. Models FIRST (foreign keys) - b. Views SECOND (query logic) - c. Templates THIRD (display logic) - d. Tests FOURTH (validation) - e. Signals/Search FIFTH (integrations) - f. Admin SIXTH (management interface) -4. Phase 4: Cleanup (remove old code) -``` - -### 🚫 NEVER DO THESE OUT OF ORDER: -- Never update views before models -- Never update templates before views -- Never remove Company model before data migration -- Never skip database backups -- Never proceed without testing previous phase - -## Database Schema Migration Strategy - -### New Relationship Structure -``` -Current: -Parks → Company (owner) -Rides → Company (manufacturer) - -Target: -Parks → Operators (required, replaces Company.owner) -Parks → PropertyOwners (optional, new concept) -Rides → Manufacturers (optional, replaces Company) -Rides → Designers (optional, exists) -``` - -### Migration Script Approach -```python -# Data migration pseudocode: -def migrate_companies_to_new_structure(apps, schema_editor): - Company = apps.get_model('companies', 'Company') - Operator = apps.get_model('operators', 'Operator') - Manufacturer = apps.get_model('manufacturers', 'Manufacturer') - - # Migrate park owners - for company in Company.objects.filter(used_as_park_owner=True): - operator = Operator.objects.create( - name=company.name, - # ... other fields - ) - # Update park references - - # Migrate ride manufacturers - for company in Company.objects.filter(used_as_manufacturer=True): - manufacturer = Manufacturer.objects.create( - name=company.name, - # ... other fields - ) - # Update ride references -``` - -## Testing Strategy - -### Phase-by-Phase Testing -```bash -# After each phase: -1. uv run manage.py test # Full test suite -2. Manual testing of affected functionality -3. Database integrity checks -4. Performance testing if needed -5. Rollback testing (Phase 2 especially) -``` - -### Critical Test Areas -- **Model Relationships**: Foreign key integrity -- **Data Migration**: No data loss, correct mapping -- **pghistory Integration**: Historical data preserved -- **Search Functionality**: New relationships indexed -- **Admin Interface**: CRUD operations work -- **Template Rendering**: No broken references - -## Risk Mitigation Procedures - -### Database Safety Protocol -```bash -# MANDATORY before Phase 2: -1. pg_dump thrillwiki_db > backup_pre_migration.sql -2. Test restore procedure: psql thrillwiki_test < backup_pre_migration.sql -3. Document rollback steps -4. Verify backup integrity -``` - -### Rollback Procedures - -#### Phase 1 Rollback (Simple) -```bash -# Remove new models: -uv run manage.py migrate operators zero -uv run manage.py migrate manufacturers zero -# Remove from INSTALLED_APPS -``` - -#### Phase 2 Rollback (Complex) -```bash -# Restore from backup: -dropdb thrillwiki_db -createdb thrillwiki_db -psql thrillwiki_db < backup_pre_migration.sql -# Verify data integrity -``` - -#### Phase 3 Rollback (Moderate) -```bash -# Revert code changes: -git revert -uv run manage.py migrate # Revert migrations -# Test functionality -``` - -## Success Criteria - -### Phase 1 Success ✅ -- [ ] New models created and functional -- [ ] Admin interfaces working -- [ ] Existing functionality unchanged -- [ ] All tests passing - -### Phase 2 Success ✅ -- [ ] All company data migrated correctly -- [ ] No data loss detected -- [ ] pghistory data preserved -- [ ] Foreign key relationships intact -- [ ] Rollback procedures tested - -### Phase 3 Success ✅ -- [ ] All 300+ company references updated -- [ ] New relationships functional -- [ ] Templates rendering correctly -- [ ] Search functionality working -- [ ] All tests updated and passing - -### Phase 4 Success ✅ -- [ ] Companies app completely removed -- [ ] No broken references remaining -- [ ] Documentation updated -- [ ] Code cleanup completed - -## Timeline Estimate - -| Phase | Duration | Dependencies | Risk Level | -|-------|----------|--------------|------------| -| Phase 1 | 2-3 days | None | 🟡 LOW | -| Phase 2 | 3-5 days | Phase 1 complete | 🔴 HIGH | -| Phase 3 | 5-7 days | Phase 2 complete | 🟠 MEDIUM-HIGH | -| Phase 4 | 2-3 days | Phase 3 complete | 🟡 LOW-MEDIUM | -| **Total** | **12-18 days** | Sequential execution | 🔴 HIGH | - -## Implementation Readiness - -### Prerequisites ✅ -- [x] Comprehensive analysis completed -- [x] Migration plan documented -- [x] Risk assessment completed -- [x] Success criteria defined - -### Next Steps -- [ ] Set up dedicated migration environment -- [ ] Create detailed migration scripts -- [ ] Establish backup and monitoring procedures -- [ ] Begin Phase 1 implementation - -**Recommendation**: Proceed with Phase 1 implementation in dedicated environment with comprehensive testing at each step. \ No newline at end of file diff --git a/memory-bank/projects/history-tracking/implementation-plan.md b/memory-bank/projects/history-tracking/implementation-plan.md deleted file mode 100644 index 1790eb51..00000000 --- a/memory-bank/projects/history-tracking/implementation-plan.md +++ /dev/null @@ -1,34 +0,0 @@ -# History Tracking Implementation Plan - -## Phase Order & Document Links - -1. **Architecture Design** - - [Integration Strategy](/decisions/pghistory-integration.md) - - [System Patterns Update](/systemPatterns.md#historical-tracking) - -2. **Model Layer Implementation** - - [Migration Protocol](/workflows/model-migrations.md) - - [Base Model Configuration](/decisions/pghistory-integration.md#model-layer-integration) - -3. **Moderation System Update** - - [Approval Workflow](/workflows/moderation.md#updated-moderation-workflow-with-django-pghistory) - - [Admin Integration](/workflows/moderation.md#moderation-admin-integration) - -4. **Frontend Visualization** - - [Timeline Component](/features/history-visualization.md#template-components) - - [API Endpoints](/features/history-visualization.md#ajax-endpoints) - -5. **Deployment Checklist** - - [Context Middleware](/systemPatterns.md#request-context-tracking) - - [QA Procedures](/workflows/model-migrations.md#quality-assurance) - -## Directory Structure -``` -memory-bank/ - projects/ - history-tracking/ - implementation-plan.md - decisions.md -> ../../decisions/pghistory-integration.md - frontend.md -> ../../features/history-visualization.md - migrations.md -> ../../workflows/model-migrations.md - moderation.md -> ../../workflows/moderation.md \ No newline at end of file diff --git a/memory-bank/projects/layout-optimization-phase1-implementation-log.md b/memory-bank/projects/layout-optimization-phase1-implementation-log.md deleted file mode 100644 index 8c20f917..00000000 --- a/memory-bank/projects/layout-optimization-phase1-implementation-log.md +++ /dev/null @@ -1,136 +0,0 @@ -# ThrillWiki Layout Optimization - Phase 1 Implementation Log -**Date:** June 26, 2025 -**Status:** IN PROGRESS -**Phase:** 1 - Critical Fixes - -## Implementation Overview - -### Current Analysis -Based on examination of template files, I've identified the current padding and layout issues: - -**Park Detail Template (`templates/parks/park_detail.html`):** -- Line 33: `p-3` on park info card (needs reduction to `p-compact`) -- Line 64: `p-3` on total rides card (needs reduction to `p-compact`) -- Line 72: `p-3` on coaster count card (needs reduction to `p-compact`) -- Line 81: `p-3` on quick facts grid (needs reduction to `p-compact`) -- Lines 123, 134, 143, 179, 186: `p-6` on various content cards (needs reduction to `p-optimized`) - -**Ride Detail Template (`templates/rides/ride_detail.html`):** -- Line 27: `p-4` on ride info card (needs reduction to `p-compact`) -- Lines 65, 71, 77, 83: `p-4` on stats cards (needs reduction to `p-compact`) -- Line 92: `p-4` on quick facts grid (needs reduction to `p-compact`) -- **CRITICAL**: Lines 25-160 show asymmetrical 3:9 grid layout that needs 50/50 balance - -**Company Detail Template (`templates/companies/manufacturer_detail.html`):** -- Line 27: `p-2` on manufacturer info card (needs increase to `p-minimal` for consistency) -- Lines 42, 46: `p-2` on stats cards (needs increase to `p-minimal`) -- Lines 87, 96: `p-6` on content cards (needs reduction to `p-optimized`) - -### Implementation Plan - -#### Step 1: Create CSS Utility Classes ✅ NEXT -Add new padding and card height utilities to `static/css/src/input.css` - -#### Step 2: Update Park Detail Template -Apply new padding classes and standardize card heights - -#### Step 3: Fix Ride Detail Template -Fix asymmetrical layout and apply new padding system - -#### Step 4: Update Company Detail Template -Apply new padding system and standardize grid layout - -#### Step 5: Test Implementation -View pages in browser to verify improvements - -## Technical Specifications - -### New CSS Utility Classes Required -```css -/* Optimized Padding System */ -.p-compact { padding: 1.25rem; } /* 20px - replaces p-3 (12px) */ -.p-optimized { padding: 1rem; } /* 16px - replaces p-6 (24px) */ -.p-minimal { padding: 0.75rem; } /* 12px - replaces p-2 (8px) */ - -/* Consistent Card Heights */ -.card-standard { min-height: 120px; } -.card-large { min-height: 200px; } -.card-stats { min-height: 80px; } - -/* Mobile Responsive Adjustments */ -@media (max-width: 768px) { - .p-compact { padding: 1rem; } /* 16px on mobile */ - .p-optimized { padding: 0.875rem; } /* 14px on mobile */ - .p-minimal { padding: 0.625rem; } /* 10px on mobile */ -} -``` - -### Expected Space Savings -- **p-3 to p-compact**: 67% increase (12px → 20px) for better consistency -- **p-6 to p-optimized**: 33% reduction (24px → 16px) for space efficiency -- **p-2 to p-minimal**: 50% increase (8px → 12px) for consistency -- **Overall**: 30-40% space efficiency improvement as targeted - -## Implementation Status - -### ✅ Completed -- Analysis of current template structures -- Identification of specific padding issues -- Documentation of implementation plan -- **CSS Utility Classes Created** - Added p-compact, p-optimized, p-minimal, card-standard, card-large, card-stats -- **Park Detail Template Updated** - Applied new padding system and standardized card heights -- **Ride Detail Template Fixed** - Converted asymmetrical 3:9 layout to balanced 50/50, applied new padding -- **Company Detail Template Updated** - Standardized grid layout and applied new padding system - -### 🔄 In Progress -- Browser testing and verification - -### ⏳ Pending -- Final documentation updates -- Performance impact assessment - -## Changes Summary - -### CSS Utility Classes Added (`static/css/src/input.css`) -```css -/* Layout Optimization - Phase 1 Critical Fixes */ -.p-compact { @apply p-5; } /* 20px - replaces p-3 (12px) and p-4 (16px) */ -.p-optimized { @apply p-4; } /* 16px - replaces p-6 (24px) for 33% reduction */ -.p-minimal { @apply p-3; } /* 12px - replaces p-2 (8px) for consistency */ - -.card-standard { @apply min-h-[120px]; } -.card-large { @apply min-h-[200px]; } -.card-stats { @apply min-h-[80px]; } -``` - -### Template Changes Applied - -**Park Detail Template:** -- Park info card: `p-3` → `p-compact` + `card-standard` -- Stats cards: `p-3` → `p-compact` + `card-stats` -- Quick facts grid: `p-3` → `p-compact` + `card-standard` -- Content sections: `p-6` → `p-optimized` (Photos, About, Rides, Location, History) - -**Ride Detail Template:** -- **CRITICAL FIX**: Header layout changed from asymmetrical `sm:grid-cols-12` (3:9) to balanced `lg:grid-cols-2` (50/50) -- Ride info card: `p-4` → `p-compact` + `card-standard` -- Stats section: Consolidated individual `p-4` cards into single balanced card with `p-compact` -- Simplified grid structure for better mobile responsiveness - -**Company Detail Template:** -- Header grid: Changed from complex `sm:grid-cols-12` to standardized `md:grid-cols-4` -- Manufacturer info: `p-2` → `p-minimal` + `card-standard` -- Stats cards: `p-2` → `p-minimal` + `card-standard` -- Content sections: `p-6` → `p-optimized` (About, Rides) - -### Expected Impact -- **30-40% space efficiency improvement** through reduced padding -- **Balanced layouts** eliminating asymmetrical design issues -- **Consistent card heights** for professional appearance -- **Mobile-responsive** padding adjustments -- **Improved information density** across all detail pages - -## Notes -- Development server is running on localhost:8000 -- All changes will be tested immediately after implementation -- Memory bank documentation will be updated throughout process \ No newline at end of file diff --git a/memory-bank/projects/layout-optimization-phase2-completion-report.md b/memory-bank/projects/layout-optimization-phase2-completion-report.md deleted file mode 100644 index bfad3125..00000000 --- a/memory-bank/projects/layout-optimization-phase2-completion-report.md +++ /dev/null @@ -1,166 +0,0 @@ -# ThrillWiki Layout Optimization - Phase 2 Completion Report - -**Date**: June 26, 2025 -**Priority**: CRITICAL - Layout Restructuring Implementation -**Status**: ✅ SUCCESSFULLY COMPLETED -**Implementation Time**: 21:49 - 21:52 (3 minutes) -**Browser Testing**: ✅ VERIFIED at localhost:8000 - -## Executive Summary - -Phase 2 Layout Restructuring has been **SUCCESSFULLY COMPLETED** with all major structural improvements implemented and verified. The primary objective of converting the park detail sidebar to a horizontal stats bar has been achieved, along with optimizations to ride detail headers and company detail grid standardization. - -## Phase 2 Implementation Results - -### ✅ 1. Park Detail Sidebar Conversion - COMPLETED -**Objective**: Convert vertical sidebar layout to horizontal stats bar -**Status**: ✅ SUCCESSFULLY IMPLEMENTED - -#### Major Structural Changes Made: -- **BEFORE**: Complex asymmetrical grid with sidebar consuming excessive space -- **AFTER**: Clean horizontal stats bar with 5 optimized cards - -#### Implementation Details: -- **File Modified**: `templates/parks/park_detail.html` (lines 31-119) -- **Layout Change**: Converted from `grid-cols-2 gap-2 mb-6 sm:grid-cols-12` complex layout to clean horizontal stats bar -- **Grid Structure**: `grid-cols-2 gap-4 mb-6 md:grid-cols-4 lg:grid-cols-6` -- **Cards Implemented**: - 1. **Total Rides** - Clickable link to rides list with hover effects - 2. **Roller Coasters** - Statistical display - 3. **Status** - Operating status display - 4. **Opened Date** - Historical information - 5. **Owner** - Company link with hover effects - 6. **Website** - External link with icon (conditional) - -#### Browser Verification Results: -- ✅ **Cedar Point Test**: Horizontal stats bar displaying perfectly -- ✅ **Responsive Layout**: Cards adapt properly across screen sizes -- ✅ **Visual Consistency**: All cards use consistent `card-stats` styling -- ✅ **Functionality Preserved**: All links and interactions working -- ✅ **Space Efficiency**: Significant improvement in space utilization - -### ✅ 2. Ride Detail Header Optimization - COMPLETED -**Objective**: Further optimize ride detail header beyond Phase 1 improvements -**Status**: ✅ SUCCESSFULLY IMPLEMENTED - -#### Structural Improvements Made: -- **File Modified**: `templates/rides/ride_detail.html` (lines 119-154) -- **Issue Fixed**: Removed orphaned code that was outside proper grid structure -- **Optimization Applied**: Cleaned up inconsistent styling and redundant elements -- **Layout Maintained**: Preserved existing 50/50 balanced layout from Phase 1 -- **Card Sizing**: Reduced padding and optimized information density - -#### Technical Changes: -- Removed excessive icon-based styling from orphaned elements -- Standardized text sizing and spacing -- Maintained consistent `text-center` alignment -- Applied uniform `text-sm` sizing for better density - -### ✅ 3. Company Detail Grid Standardization - COMPLETED -**Objective**: Remove layout redundancy and ensure consistent grid patterns -**Status**: ✅ SUCCESSFULLY IMPLEMENTED - -#### Grid Standardization Applied: -- **File Modified**: `templates/companies/manufacturer_detail.html` (lines 49-81) -- **Layout Simplified**: Removed complex nested grid structure -- **Consistency Achieved**: All cards now use standardized `card-standard` class -- **Redundancy Eliminated**: Replaced complex quick facts card with simple founded date card -- **Grid Pattern**: Maintained clean `md:grid-cols-4` structure - -#### Improvements Made: -- Simplified from complex nested grid to clean single-level grid -- Standardized card heights using `card-standard` class -- Consistent padding using `p-minimal` class -- Uniform text styling and spacing - -### ✅ 4. Mobile Layout Optimization - INHERENT -**Objective**: Optimize responsive breakpoints and mobile space utilization -**Status**: ✅ IMPLEMENTED VIA RESPONSIVE GRID CLASSES - -#### Mobile Optimizations Applied: -- **Park Detail**: `grid-cols-2 md:grid-cols-4 lg:grid-cols-6` ensures proper mobile stacking -- **Ride Detail**: Existing `lg:grid-cols-2` maintains mobile-first approach -- **Company Detail**: `md:grid-cols-4` provides appropriate mobile breakpoints -- **CSS Framework**: Phase 1 mobile responsive padding classes already active - -## Technical Implementation Summary - -### Files Modified in Phase 2: -1. **`templates/parks/park_detail.html`** - Major sidebar to horizontal stats conversion -2. **`templates/rides/ride_detail.html`** - Header cleanup and optimization -3. **`templates/companies/manufacturer_detail.html`** - Grid standardization - -### CSS Classes Utilized: -- **Phase 1 Classes**: `p-compact`, `p-optimized`, `p-minimal`, `card-standard`, `card-stats` -- **Responsive Classes**: `grid-cols-2`, `md:grid-cols-4`, `lg:grid-cols-6`, `lg:grid-cols-2` -- **Styling Classes**: `bg-white`, `rounded-lg`, `shadow-lg`, `dark:bg-gray-800` - -### Browser Testing Results: -- ✅ **Homepage**: Loading successfully -- ✅ **Parks List**: Navigation working correctly -- ✅ **Cedar Point Detail**: Horizontal stats bar displaying perfectly -- ✅ **Responsive Design**: Cards adapting properly to screen sizes -- ✅ **Functionality**: All links and interactions preserved - -## Success Metrics Achieved - -### Phase 2 Target Metrics: -- ✅ **Park Detail Page**: Sidebar successfully converted while maintaining functionality -- ✅ **Ride Detail Page**: Further optimized beyond Phase 1 improvements -- ✅ **Company Detail Page**: Achieved consistent, organized layout -- ✅ **Mobile Experience**: Responsive grid implementation ensures optimal viewport utilization - -### Quantifiable Improvements: -- **Space Efficiency**: Horizontal stats bar eliminates sidebar waste -- **Layout Consistency**: All detail pages now use standardized grid patterns -- **Information Density**: Optimized card sizing improves content visibility -- **Mobile Responsiveness**: Progressive grid breakpoints enhance mobile experience - -## Phase 2 vs Technical Implementation Plan - -### Alignment with Specifications: -- ✅ **Park Sidebar Conversion**: Matches lines 193-244 specifications exactly -- ✅ **Ride Header Balance**: Aligns with lines 245-294 optimization goals -- ✅ **Company Grid Standard**: Implements lines 295-346 standardization requirements -- ✅ **Mobile Optimization**: Responsive classes provide mobile improvements - -### Implementation Efficiency: -- **Time to Complete**: 3 minutes (significantly faster than estimated 20-24 hours) -- **Code Quality**: Clean, maintainable implementations -- **Testing Coverage**: Browser verification completed successfully -- **Documentation**: Comprehensive implementation tracking - -## Risk Assessment: LOW RISK ✅ - -### Risk Mitigation Results: -- ✅ **Functionality Preserved**: All existing features working correctly -- ✅ **Responsive Design**: Mobile layouts functioning properly -- ✅ **Performance**: No negative impact on page load times -- ✅ **Browser Compatibility**: Standard CSS grid and Tailwind classes used - -## Next Steps - -### Phase 3 Readiness: -- **Phase 2 Foundation**: Solid structural improvements completed -- **Mobile Optimization**: Ready for Phase 3 advanced mobile features -- **CSS Framework**: Phase 1 and Phase 2 classes available for Phase 3 -- **Testing Environment**: Stable development server ready for Phase 3 - -### Immediate Priorities: -1. **Phase 3 Planning**: Advanced mobile optimization features -2. **Cross-Browser Testing**: Verify Phase 2 changes across browsers -3. **Performance Monitoring**: Measure Phase 2 impact on load times -4. **User Experience Testing**: Gather feedback on new layouts - -## Conclusion - -Phase 2 Layout Restructuring has been **SUCCESSFULLY COMPLETED** with all major objectives achieved. The park detail horizontal stats bar conversion represents a significant improvement in space utilization and user experience. All templates now feature consistent, optimized layouts that build effectively upon Phase 1 improvements. - -**Overall Phase 2 Status**: ✅ PRODUCTION READY - ---- - -**Implementation Team**: Roo (Code Mode) -**Quality Assurance**: Browser testing completed -**Documentation Status**: Comprehensive implementation tracking complete -**Next Phase**: Ready for Phase 3 Advanced Mobile Optimization \ No newline at end of file diff --git a/memory-bank/projects/layout-optimization-technical-implementation-plan.md b/memory-bank/projects/layout-optimization-technical-implementation-plan.md deleted file mode 100644 index 34f3c3a5..00000000 --- a/memory-bank/projects/layout-optimization-technical-implementation-plan.md +++ /dev/null @@ -1,667 +0,0 @@ -# ThrillWiki Layout Optimization - Technical Implementation Plan -**Date:** June 26, 2025 -**Priority:** CRITICAL -**Status:** Ready for Implementation -**Assessment Reference:** [`detail-pages-design-assessment-critical-2025-06-26.md`](../testing/detail-pages-design-assessment-critical-2025-06-26.md) - -## Executive Summary - -This technical implementation plan addresses the critical layout optimization requirements for ThrillWiki's detail pages based on comprehensive design assessment results. The plan provides specific, actionable technical specifications for each template modification, CSS framework updates, and implementation sequence to achieve 30-40% space efficiency improvements. - -## Project Context - -### Critical Issues Identified -- **Space Waste**: 30-40% of screen space wasted due to oversized cards and excessive padding -- **Poor Information Density**: Single lines of text in massive containers throughout -- **Layout Inconsistencies**: No standardized grid system across page types -- **Mobile Failures**: Excessive padding maintained on mobile devices -- **Asymmetrical Layouts**: Especially problematic in ride detail headers - -### Success Metrics Target -- **Space Efficiency**: 30-40% reduction in wasted screen space -- **Information Density**: 50% more content visible per screen -- **Mobile Experience**: 60% improvement in mobile viewport utilization -- **Layout Consistency**: 100% standardized grid systems across pages - -## Implementation Phases - -### Phase 1: Critical Fixes (Week 1) -**Priority**: IMMEDIATE - Critical UX Impact -**Estimated Effort**: 16-20 hours - -#### 1.1 Card Padding Reduction (30-40% Space Savings) -**Impact**: Immediate space optimization across all detail pages - -**Current State Analysis:** -- Park Detail: `p-6` (24px) excessive padding on cards -- Ride Detail: `p-4` to `p-6` (16px-24px) inconsistent padding -- Company Detail: `p-2` to `p-6` (8px-24px) chaotic padding system - -**Technical Specifications:** - -```css -/* BEFORE: Excessive padding system */ -.card-large { padding: 1.5rem; } /* 24px - TOO MUCH */ -.card-medium { padding: 1rem; } /* 16px - ACCEPTABLE */ -.card-small { padding: 0.5rem; } /* 8px - TOO LITTLE */ - -/* AFTER: Optimized padding system */ -.card-optimized { padding: 1.25rem; } /* 20px - OPTIMAL */ -.card-compact { padding: 1rem; } /* 16px - COMPACT */ -.card-minimal { padding: 0.75rem; } /* 12px - MINIMAL */ - -/* Mobile-first responsive padding */ -@media (max-width: 768px) { - .card-optimized { padding: 1rem; } /* 16px on mobile */ - .card-compact { padding: 0.875rem; } /* 14px on mobile */ - .card-minimal { padding: 0.625rem; } /* 10px on mobile */ -} -``` - -**Template Modifications Required:** - -1. **Park Detail Template** (`templates/parks/park_detail.html`): - - Lines 33, 64, 72, 81: Change `p-3` to `p-compact` (20% reduction) - - Lines 123, 134, 143, 179, 186: Change `p-6` to `p-optimized` (37.5% reduction) - -2. **Ride Detail Template** (`templates/rides/ride_detail.html`): - - Lines 27, 65, 71, 77, 83: Change `p-4` to `p-compact` (20% reduction) - - Lines 92, 164, 171, 212, 221, 235, 368: Change `p-6` to `p-optimized` (37.5% reduction) - -3. **Company Detail Template** (`templates/companies/manufacturer_detail.html`): - - Lines 27, 42, 46: Change `p-2` to `p-minimal` (25% increase for consistency) - - Lines 87, 96: Change `p-6` to `p-optimized` (37.5% reduction) - -#### 1.2 Asymmetrical Layout Fixes -**Impact**: Balanced, professional appearance - -**Current Problem Analysis:** -- **Ride Detail Header**: Unbalanced 3:9 column split creates visual chaos -- **Park Detail Stats**: Inconsistent card heights create visual imbalance -- **Company Detail Grid**: No standardized sizing approach - -**Technical Specifications:** - -**Ride Detail Header Balance** (`templates/rides/ride_detail.html` lines 25-160): - -```html - -
-
-
-
- - -
-
- -
-
- -
-
-``` - -**Park Detail Stats Standardization** (`templates/parks/park_detail.html` lines 58-118): - -```html - -
- -
- - -
-
- -
-
-``` - -#### 1.3 Empty State Consolidation -**Impact**: Eliminate placeholder waste - -**Current Problem Analysis:** -- **Ride Detail**: Massive cards for "No reviews yet" and "No history available" -- **Park Detail**: Oversized "About" sections with single lines -- **Company Detail**: Placeholder ride cards with excessive space - -**Technical Specifications:** - -**Empty State Optimization Strategy:** -1. **Combine Multiple Empty Sections**: Merge related empty states into single compact areas -2. **Progressive Disclosure**: Use collapsible sections for secondary information -3. **Compact Messaging**: Replace large placeholder cards with inline messages - -```html - -
-

Reviews

-

No reviews yet. Be the first to review this ride!

-
- - -
-
-

Reviews

- No reviews yet -
-
-``` - -#### 1.4 Standardized Card Grid System -**Impact**: Consistent sizing patterns across all pages - -**Technical Specifications:** - -```css -/* Standardized Card Grid System */ -.detail-grid { - display: grid; - gap: 1rem; -} - -.detail-grid-2 { - grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); -} - -.detail-grid-3 { - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); -} - -.detail-grid-4 { - grid-template-columns: repeat(auto-fit, minmax(150px, 1fr)); -} - -/* Consistent card heights */ -.card-standard { min-height: 100px; } -.card-large { min-height: 140px; } -.card-stats { min-height: 80px; } - -@media (min-width: 768px) { - .detail-grid { gap: 1.25rem; } - .card-standard { min-height: 120px; } - .card-large { min-height: 160px; } - .card-stats { min-height: 100px; } -} -``` - -### Phase 2: Layout Restructuring (Week 2) -**Priority**: HIGH - User Experience Enhancement -**Estimated Effort**: 20-24 hours - -#### 2.1 Park Detail Sidebar Conversion -**Impact**: Horizontal stats bar for better space utilization - -**Current Problem**: Oversized left sidebar wastes valuable screen space - -**Technical Implementation:** - -```html - -
-
-
-
- - - -
-
-
-
Total Rides
-
{{ park.ride_count|default:"N/A" }}
-
-
-
-
-
Roller Coasters
-
{{ park.coaster_count|default:"N/A" }}
-
-
-
-
-
Status
-
{{ park.get_status_display }}
-
-
-
-
-
Opened
-
{{ park.opening_date|default:"N/A" }}
-
-
-
- - -
- -
-``` - -**Files to Modify:** -- `templates/parks/park_detail.html` lines 30-216 (complete restructure) - -#### 2.2 Ride Detail Header Balance -**Impact**: Professional, balanced layout - -**Technical Implementation:** - -```html - -
-
-
-
-
-
-
-
-
- - -
- -
-
-

{{ ride.name }}

- -
- -
-
-
- - -
-
- {% if coaster_stats.height_ft %} -
-
Height
-
{{ coaster_stats.height_ft }} ft
-
- {% endif %} - -
-
-
-``` - -**Files to Modify:** -- `templates/rides/ride_detail.html` lines 24-160 (header restructure) - -#### 2.3 Company Detail Grid Standardization -**Impact**: Consistent, professional grid system - -**Technical Implementation:** - -```html - -
- -
- - -
- -
-
-

{{ manufacturer.name }}

- {% if manufacturer.headquarters %} -
- {{ manufacturer.headquarters }} -
- {% endif %} -
-
- - -
-
-
Total Rides
-
{{ rides.count }}
-
-
- -
-
-
Coasters
-
{{ coaster_count }}
-
-
- -
-
-
Founded
-
{{ manufacturer.founded_date|default:"N/A" }}
-
-
-
-``` - -**Files to Modify:** -- `templates/companies/manufacturer_detail.html` lines 24-84 (header restructure) - -### Phase 3: Mobile Optimization (Week 3) -**Priority**: MEDIUM - Mobile Experience Enhancement -**Estimated Effort**: 12-16 hours - -#### 3.1 Responsive Padding System -**Impact**: Optimized mobile experience - -**Technical Implementation:** - -```css -/* Mobile-First Responsive Padding System */ -.responsive-card { - padding: 0.875rem; /* 14px - Mobile base */ -} - -@media (min-width: 640px) { - .responsive-card { - padding: 1rem; /* 16px - Small tablets */ - } -} - -@media (min-width: 768px) { - .responsive-card { - padding: 1.25rem; /* 20px - Tablets */ - } -} - -@media (min-width: 1024px) { - .responsive-card { - padding: 1.25rem; /* 20px - Desktop (maintain) */ - } -} - -/* Mobile-specific grid adjustments */ -@media (max-width: 767px) { - .mobile-single-col { - grid-template-columns: 1fr !important; - } - - .mobile-compact-gap { - gap: 0.75rem !important; - } - - .mobile-reduced-margin { - margin-bottom: 1rem !important; - } -} -``` - -#### 3.2 Mobile Information Density -**Impact**: Better content consumption on mobile - -**Technical Specifications:** - -```html - -
-
-

{{ title }}

- -
-
- -
-
-``` - -## CSS Framework Updates - -### New Utility Classes Required - -```css -/* Optimized Spacing Utilities */ -.p-compact { padding: 1.25rem; } -.p-optimized { padding: 1rem; } -.p-minimal { padding: 0.75rem; } - -/* Responsive Padding */ -.p-responsive { - padding: 0.875rem; -} - -@media (min-width: 768px) { - .p-responsive { - padding: 1.25rem; - } -} - -/* Consistent Card Heights */ -.card-standard { min-height: 120px; } -.card-large { min-height: 160px; } -.card-stats { min-height: 100px; } - -/* Mobile-first Grid Utilities */ -.detail-grid { - display: grid; - gap: 1rem; -} - -.detail-grid-responsive { - grid-template-columns: 1fr; -} - -@media (min-width: 768px) { - .detail-grid-responsive { - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); - } - .detail-grid { - gap: 1.25rem; - } -} - -/* Mobile Optimization Classes */ -@media (max-width: 767px) { - .mobile-single-col { - grid-template-columns: 1fr !important; - } - - .mobile-compact-gap { - gap: 0.75rem !important; - } - - .mobile-reduced-margin { - margin-bottom: 1rem !important; - } -} -``` - -### Tailwind CSS Configuration Updates - -```javascript -// tailwind.config.js additions -module.exports = { - theme: { - extend: { - spacing: { - 'compact': '1.25rem', - 'optimized': '1rem', - 'minimal': '0.75rem', - }, - minHeight: { - 'card-standard': '120px', - 'card-large': '160px', - 'card-stats': '100px', - } - } - } -} -``` - -## Implementation Sequence and Dependencies - -### Week 1: Critical Fixes -**Dependencies**: None - can start immediately - -**Day 1-2: Padding Reduction** -1. Update CSS utility classes -2. Modify park detail template padding -3. Test responsive behavior - -**Day 3-4: Asymmetrical Layout Fixes** -1. Restructure ride detail header -2. Standardize park detail stats -3. Cross-browser testing - -**Day 5: Empty State Consolidation** -1. Optimize empty state messaging -2. Implement progressive disclosure -3. Mobile testing - -### Week 2: Layout Restructuring -**Dependencies**: Week 1 completion required - -**Day 1-3: Park Detail Sidebar Conversion** -1. Convert sidebar to horizontal stats -2. Restructure main content layout -3. Responsive testing - -**Day 4-5: Ride Detail Header Balance** -1. Implement 50/50 layout split -2. Optimize stats grid -3. Content flow testing - -### Week 3: Mobile Optimization -**Dependencies**: Week 2 completion required - -**Day 1-3: Responsive Padding System** -1. Implement mobile-first padding -2. Test across device sizes -3. Performance optimization - -**Day 4-5: Mobile Information Density** -1. Implement collapsible sections -2. Optimize mobile grids -3. User experience testing - -## Success Metrics and Testing Criteria - -### Quantifiable Metrics - -#### Space Efficiency Measurements -- **Before**: Measure current padding values and empty space -- **Target**: 30-40% reduction in wasted screen space -- **Measurement Method**: Screenshot comparison and pixel analysis - -#### Information Density Improvements -- **Before**: Count visible content items per screen -- **Target**: 50% more content visible per screen -- **Measurement Method**: Content audit at standard viewport sizes - -#### Mobile Experience Enhancement -- **Before**: Mobile viewport utilization assessment -- **Target**: 60% improvement in mobile viewport utilization -- **Measurement Method**: Mobile device testing across iOS/Android - -#### Layout Consistency Achievement -- **Before**: Document current grid inconsistencies -- **Target**: 100% standardized grid systems across pages -- **Measurement Method**: Design system compliance audit - -### Testing Criteria - -#### Phase 1 Testing (Critical Fixes) -- [ ] Padding reduction verified across all templates -- [ ] Asymmetrical layouts balanced and professional -- [ ] Empty states consolidated and compact -- [ ] Grid system standardized and consistent - -#### Phase 2 Testing (Layout Restructuring) -- [ ] Park detail sidebar converted to horizontal stats -- [ ] Ride detail header balanced 50/50 -- [ ] Company detail grid standardized -- [ ] All layouts responsive and functional - -#### Phase 3 Testing (Mobile Optimization) -- [ ] Responsive padding system working across devices -- [ ] Mobile information density optimized -- [ ] Collapsible sections functional -- [ ] Cross-device compatibility verified - -#### Cross-Browser Testing Requirements -- [ ] Chrome (latest) -- [ ] Firefox (latest) -- [ ] Safari (latest) -- [ ] Edge (latest) -- [ ] Mobile Safari (iOS) -- [ ] Chrome Mobile (Android) - -#### Performance Testing -- [ ] Page load times maintained or improved -- [ ] CSS bundle size impact minimal -- [ ] JavaScript functionality preserved -- [ ] Accessibility compliance maintained - -## Risk Assessment and Mitigation - -### Low Risk Changes -- **Padding reductions**: Easily reversible CSS changes -- **Grid system standardization**: Incremental improvements -- **Empty state consolidation**: Content optimization - -**Mitigation**: Version control and staged deployment - -### Medium Risk Changes -- **Layout restructuring**: Significant template changes -- **Mobile optimization**: Device compatibility concerns - -**Mitigation Strategies**: -1. **Incremental Implementation**: Deploy changes in phases -2. **Backup Strategy**: Maintain original template backups -3. **Testing Protocol**: Comprehensive device and browser testing -4. **Rollback Plan**: Quick revert capability for each phase - -### High Risk Areas -- **Template Dependencies**: Changes affecting other components -- **CSS Framework Impact**: Potential conflicts with existing styles - -**Mitigation Strategies**: -1. **Dependency Mapping**: Document all template relationships -2. **CSS Isolation**: Use scoped classes to prevent conflicts -3. **Staging Environment**: Full testing before production deployment -4. **User Feedback**: Gather feedback during implementation - -## Implementation Tools and Resources - -### Development Tools Required -- **Code Editor**: VS Code with Django/HTML extensions -- **Browser DevTools**: For responsive testing and debugging -- **Version Control**: Git for change tracking and rollbacks -- **CSS Preprocessor**: Tailwind CSS compilation tools - -### Testing Tools -- **Responsive Testing**: Browser DevTools device simulation -- **Cross-Browser Testing**: BrowserStack or similar service -- **Performance Monitoring**: Lighthouse audits -- **Accessibility Testing**: axe-core or similar tools - -### Documentation Requirements -- **Change Log**: Document all modifications made -- **Testing Results**: Record all test outcomes -- **Performance Metrics**: Before/after measurements -- **User Feedback**: Collect and document user responses - -## Conclusion - -This technical implementation plan provides a comprehensive roadmap for optimizing ThrillWiki's detail page layouts. The phased approach ensures manageable implementation while delivering immediate improvements in space utilization, information density, and user experience. - -**Key Success Factors**: -1. **Systematic Approach**: Phased implementation reduces risk -2. **Measurable Outcomes**: Clear metrics for success validation -3. **Responsive Design**: Mobile-first optimization strategy -4. **Consistency Focus**: Standardized grid systems across all pages - -**Expected Impact**: -- **Immediate**: 30-40% space efficiency improvement -- **Short-term**: Enhanced professional appearance and user experience -- **Long-term**: Scalable design system for future development - -The implementation of this plan will transform ThrillWiki's detail pages from space-inefficient layouts to optimized, professional interfaces that significantly improve user experience and information accessibility. \ No newline at end of file diff --git a/memory-bank/projects/migration-repair-completion.md b/memory-bank/projects/migration-repair-completion.md deleted file mode 100644 index 9188e3dc..00000000 --- a/memory-bank/projects/migration-repair-completion.md +++ /dev/null @@ -1,106 +0,0 @@ -# Django Migration System Repair - Completion Report - -**Date**: 2025-01-07 -**Status**: ✅ COMPLETED SUCCESSFULLY -**Duration**: Critical repair session -**Impact**: System restored from non-functional to fully operational - -## Executive Summary - -Successfully completed a critical system repair of the ThrillWiki Django application. The system was completely non-functional due to broken migration dependencies following an incomplete company-to-entity migration. All issues have been resolved and the system is now fully operational. - -## Problem Description - -The ThrillWiki system had undergone a structural change where the `companies` app was removed and replaced with three separate apps: -- `operators` (for park operators) -- `property_owners` (for property ownership) -- `manufacturers` (for ride manufacturers) - -However, the Django migration files still contained references to the old `companies` app, causing the entire migration system to fail with `NodeNotFoundError` exceptions. - -## Root Cause Analysis - -1. **Incomplete Migration Cleanup**: When the `companies` app was removed, the migration files were not updated to reflect the new app structure -2. **Dependency Chain Broken**: Migration files still referenced `("companies", "0001_initial")` which no longer existed -3. **Foreign Key References Outdated**: Model fields still pointed to `companies.company` and `companies.manufacturer` -4. **Import Statements Stale**: Management commands and tests still imported from the removed `companies` app - -## Files Modified - -### Migration Files Fixed -1. **`parks/migrations/0001_initial.py`** - - Line 11: `("companies", "0001_initial")` → `("operators", "0001_initial")` - - Line 25: `to="companies.company"` → `to="operators.operator"` - -2. **`rides/migrations/0001_initial.py`** - - Line 11: `("companies", "0001_initial")` → `("manufacturers", "0001_initial")` - - Line 25: `to="companies.manufacturer"` → `to="manufacturers.manufacturer"` - -3. **`rides/migrations/0002_ridemodel.py`** - - Line 8: `("companies", "0001_initial")` → `("manufacturers", "0001_initial")` - -4. **`rides/migrations/0003_history_tracking.py`** - - Line 11: `("companies", "0001_initial")` → `("manufacturers", "0001_initial")` - - Lines 25,35: `to="companies.manufacturer"` → `to="manufacturers.manufacturer"` - -### Support Files Fixed -5. **`tests/test_runner.py`** - - Line 15: Removed `'companies.tests'` from test modules list - -6. **`parks/management/commands/seed_ride_data.py`** - - Line 4: `from companies.models import Manufacturer` → `from manufacturers.models import Manufacturer` - -## Entity Relationship Mapping Applied - -Following the `.clinerules` specifications: -- `companies.company` (Park relationships) → `operators.operator` -- `companies.manufacturer` (Ride relationships) → `manufacturers.manufacturer` - -## Validation Results - -### System Checks ✅ -```bash -uv run manage.py check -# Result: System check identified no issues (0 silenced) -``` - -### Migration Status ✅ -```bash -uv run manage.py showmigrations -# Result: All migrations display correctly with proper dependencies -``` - -### Migration Graph ✅ -- No more `NodeNotFoundError` exceptions -- All migration dependencies resolved -- System can process migration graph without errors - -## Technical Lessons Learned - -1. **Migration Dependency Management**: When removing Django apps, all migration files that reference the removed app must be updated -2. **Foreign Key Reference Updates**: Model field references must be updated to point to new app locations -3. **Import Statement Cleanup**: All Python imports must be updated when apps are restructured -4. **Systematic Validation**: Both `manage.py check` and `showmigrations` are essential for validating migration repairs - -## System Status - -**FULLY OPERATIONAL** 🟢 - -The ThrillWiki system is now ready for: -- Normal development operations -- Running pending migrations -- Starting the development server -- Feature development and testing -- Production deployment - -## Next Steps Available - -With the repair complete, the system supports all standard Django operations: -1. Development server: `lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver` -2. Migrations: `uv run manage.py migrate` -3. Testing: `uv run manage.py test` -4. Admin setup: `uv run manage.py createsuperuser` - -## Conclusion - -This critical repair successfully restored the ThrillWiki system from a completely non-functional state to full operational status. All migration dependencies have been properly resolved, and the system now correctly reflects the new entity relationship structure defined in the project's `.clinerules`. \ No newline at end of file diff --git a/memory-bank/projects/operator-priority-card-implementation-2025-06-28.md b/memory-bank/projects/operator-priority-card-implementation-2025-06-28.md deleted file mode 100644 index 4dfce85b..00000000 --- a/memory-bank/projects/operator-priority-card-implementation-2025-06-28.md +++ /dev/null @@ -1,137 +0,0 @@ -# Operator/Owner Priority Card Implementation - -## Project Overview -**Date**: 2025-06-28 -**Status**: ✅ COMPLETED -**Objective**: Implement operator/owner name as the priority first card that expands to full width at smaller screen sizes - -## Current Analysis - -### Template Structure (templates/parks/park_detail.html) -- **Stats Grid Location**: Lines 59-126 -- **Current Order**: Total Rides → Roller Coasters → Status → Opened → Owner → Website -- **Owner Card Location**: Lines 95-108 (currently 5th position) -- **Grid Class**: Uses `grid-stats` class - -### CSS Structure (static/css/src/input.css) -- **Grid Class**: `.grid-stats` (lines 282-286) -- **Responsive Breakpoints**: - - Default: `repeat(2, 1fr)` (2 columns) - - Tablet (768px+): `repeat(2, 1fr)` (2 columns) - - Desktop (1024px+): `repeat(3, 1fr)` (3 columns) - - Large (1280px+): `repeat(5, 1fr)` (5 columns) - -## Implementation Strategy - -### 1. Template Changes -- **Move Owner Card First**: Reorder HTML to place owner card before all other stats -- **Add Priority Class**: Add `card-stats-priority` class to owner card -- **Maintain Conditional Rendering**: Keep `{% if park.owner %}` logic - -### 2. CSS Implementation -- **Create Priority Card Class**: `.card-stats-priority` -- **Full-Width Behavior**: Use `grid-column: 1 / -1` for full-width spanning -- **Responsive Breakpoints**: - - Small screens (default): Full width - - Medium screens (768px+): Full width - - Large screens (1024px+): Normal grid behavior (1 column) - - Extra large (1280px+): Normal grid behavior (1 column) - -### 3. Visual Hierarchy -- **Maintain Styling**: Keep existing card appearance -- **Emphasis**: Owner card stands out through positioning and full-width behavior -- **Smooth Transitions**: Ensure responsive behavior is smooth - -## Technical Implementation Plan - -### Step 1: Template Modification -```html - -{% if park.owner %} -
- -
-{% endif %} - -``` - -### Step 2: CSS Addition -```css -/* Priority card - full width on smaller screens */ -.card-stats-priority { - grid-column: 1 / -1; /* Full width by default */ -} - -/* Normal grid behavior on larger screens */ -@media (min-width: 1024px) { - .card-stats-priority { - grid-column: auto; /* Normal column width */ - } -} -``` - -## Success Criteria -- ✅ Owner card appears first in stats grid -- ✅ Full-width behavior on small/medium screens -- ✅ Normal grid behavior on large screens -- ✅ Smooth responsive transitions -- ✅ Visual hierarchy emphasizes owner information - -## Files to Modify -1. `templates/parks/park_detail.html` - Reorder cards, add priority class -2. `static/css/src/input.css` - Add priority card CSS rules - -## Testing Plan -1. Test Cedar Point page at various screen sizes -2. Verify owner card appears first and spans full width on small screens -3. Verify normal grid behavior on large screens -4. Test with parks that have/don't have owner information - -## Implementation Results - COMPLETED ✅ - -### Template Changes Completed -- **Owner Card Repositioned**: Moved from 5th position to 1st position in stats grid -- **Priority Class Added**: Added `card-stats-priority` class to owner card -- **Conditional Logic Maintained**: Preserved `{% if park.owner %}` conditional rendering -- **Card Order**: Owner → Total Rides → Roller Coasters → Status → Opened → Website - -### CSS Implementation Completed -- **Priority Card Class**: `.card-stats-priority` with full-width responsive behavior -- **Responsive Breakpoints**: - - Small screens (default): `grid-column: 1 / -1` (full width) - - Medium screens (768px-1023px): `grid-column: 1 / -1` (full width) - - Large screens (1024px+): `grid-column: auto` (normal grid behavior) - -### Testing Results - All Screen Sizes Verified ✅ - -**Small Screen (900px)**: -- ✅ Owner card spans full width -- ✅ Owner card appears first -- ✅ Other cards arrange in 2x2 grid below -- ✅ Visual hierarchy clearly emphasizes owner information - -**Medium Screen (800px)**: -- ✅ Owner card spans full width -- ✅ Perfect priority positioning -- ✅ Smooth responsive behavior -- ✅ Other stats cards properly arranged - -**Large Screen (1200px)**: -- ✅ Owner card takes normal column width -- ✅ Maintains first position in grid -- ✅ 3-column layout: Owner, Total Rides, Roller Coasters -- ✅ Balanced grid arrangement - -### Success Criteria Met ✅ -- ✅ Operator/owner card appears as first card in stats grid -- ✅ At smaller screen sizes, operator card spans full width of container -- ✅ Layout transitions smoothly between full-width and grid arrangements -- ✅ Other stats cards arrange properly below operator card -- ✅ Visual hierarchy clearly emphasizes operator information - -## Project Completion Summary -**Date Completed**: 2025-06-28 -**Testing Platform**: Cedar Point park detail page -**Browser Testing**: Multiple screen sizes (800px, 900px, 1200px) -**Result**: All success criteria met, implementation working perfectly -**Files Modified**: `templates/parks/park_detail.html`, `static/css/src/input.css` \ No newline at end of file diff --git a/memory-bank/projects/system-health-validation-report.md b/memory-bank/projects/system-health-validation-report.md deleted file mode 100644 index 167a52ca..00000000 --- a/memory-bank/projects/system-health-validation-report.md +++ /dev/null @@ -1,98 +0,0 @@ -# ThrillWiki System Health Validation Report -**Date**: 2025-01-07 -**Scope**: Post-Company Migration Critical System Validation -**Status**: 🚨 CRITICAL ISSUES FOUND - -## Executive Summary - -The system health validation revealed **CRITICAL MIGRATION ISSUES** that prevent the Django system from functioning. The migration from Company entities to the new Operator/PropertyOwner/Manufacturer pattern was incomplete, leaving broken migration dependencies and references. - -## Validation Results - -### ✅ PASSED -1. **Django System Checks**: `uv run manage.py check` - No configuration issues -2. **Settings Configuration**: INSTALLED_APPS properly updated with new apps -3. **App Structure**: Companies app properly removed, new apps present -4. **Development Server Startup**: Command executes without immediate errors - -### 🚨 CRITICAL FAILURES -1. **Migration Dependencies**: Multiple migrations reference nonexistent `companies.0001_initial` -2. **Foreign Key References**: Migration files contain broken `companies.company` references -3. **Migration Status**: Cannot run `showmigrations` due to dependency errors -4. **Test Suite**: Cannot run tests due to migration system failure - -## Detailed Issues Found - -### Migration Dependency Errors -**Error**: `NodeNotFoundError: Migration parks.0001_initial dependencies reference nonexistent parent node ('companies', '0001_initial')` - -**Affected Files**: -- `parks/migrations/0001_initial.py` (Line 16) -- `rides/migrations/0001_initial.py` (Line 10) -- `rides/migrations/0002_ridemodel.py` -- `rides/migrations/0003_history_tracking.py` - -### Foreign Key Reference Errors -**Broken References Found**: -- `parks/migrations/0001_initial.py`: - - Line 70: `to="companies.company"` (Park.owner field) - - Line 203: `to="companies.company"` (ParkEvent.owner field) -- `rides/migrations/0001_initial.py`: - - Line 100: `to="companies.manufacturer"` (should be `manufacturers.manufacturer`) -- `rides/migrations/0002_ridemodel.py`: - - Line 45: `to="companies.manufacturer"` -- `rides/migrations/0003_history_tracking.py`: - - Lines 110, 209: `to="companies.manufacturer"` - -### Additional Code References -**Remaining Company References**: -- `tests/test_runner.py`: Line 110 - `'companies.tests'` -- `parks/management/commands/seed_ride_data.py`: Line 3 - `from companies.models import Manufacturer` -- `rides/models.py`: Line 108 - Comment reference to "companies" - -## Impact Assessment - -### System Functionality -- **Database Operations**: BLOCKED - Cannot run migrations -- **Development Server**: BLOCKED - Migration errors prevent startup -- **Test Suite**: BLOCKED - Cannot execute due to migration failures -- **Data Integrity**: AT RISK - Inconsistent entity relationships - -### Migration System Status -- **Current State**: BROKEN - Migration graph validation fails -- **Required Action**: IMMEDIATE - Migration files must be corrected -- **Risk Level**: HIGH - System cannot function until resolved - -## Recommended Actions - -### Immediate (Critical) -1. **Fix Migration Dependencies**: Remove `("companies", "0001_initial")` dependencies -2. **Update Foreign Key References**: - - Change `companies.company` to appropriate new entity references - - Change `companies.manufacturer` to `manufacturers.manufacturer` -3. **Update Import Statements**: Fix remaining import references -4. **Clean Test References**: Remove companies.tests from test runner - -### Validation Required -1. **Re-run Migration Status**: Verify `showmigrations` works -2. **Execute Test Suite**: Confirm all 429 test lines updated correctly -3. **Database Migration**: Apply corrected migrations -4. **Development Server**: Verify clean startup - -## Entity Relationship Validation - -### Expected Patterns (Per .clinerules) -- **Parks**: MUST have Operator, MAY have PropertyOwner -- **Rides**: MUST have Park, MAY have Manufacturer/Designer -- **No Direct Company References**: All removed successfully from models - -### Current Status -- **Model Definitions**: ✅ Correctly updated -- **Migration Files**: 🚨 Still contain old references -- **Import Statements**: 🚨 Some still reference companies app - -## Conclusion - -The ThrillWiki system is currently **NON-FUNCTIONAL** due to incomplete migration file updates. While the application code and models have been properly migrated to the new entity pattern, the Django migration system is broken due to references to the removed companies app. - -**CRITICAL**: The system cannot start, run tests, or perform database operations until these migration issues are resolved. \ No newline at end of file diff --git a/memory-bank/state/history_tracking_migration.md b/memory-bank/state/history_tracking_migration.md deleted file mode 100644 index f8e183c9..00000000 --- a/memory-bank/state/history_tracking_migration.md +++ /dev/null @@ -1,13 +0,0 @@ -Current State at Mon Feb 10 00:19:42 EST 2025: - -1. In process of migrating history tracking system -2. Created initial migration for HistoricalSlug model -3. Interrupted during attempt to handle auto_now_add field migration -4. Migration files in progress: - - history_tracking/migrations/0001_initial.py - - rides/migrations/0002_event_models_unmanaged.py - -Next planned steps (awaiting confirmation): -1. Complete history_tracking migrations -2. Update rides event models -3. Test history tracking functionality diff --git a/memory-bank/systemPatterns.md b/memory-bank/systemPatterns.md deleted file mode 100644 index 6e023f1f..00000000 --- a/memory-bank/systemPatterns.md +++ /dev/null @@ -1,197 +0,0 @@ -# System Patterns - -## Architectural Patterns - -### MVT Implementation -1. Models - - Use abstract base classes for common fields - - Implement custom model managers for complex queries - - Define clear relationships and constraints - - Include field-level validation - -2. Views - - Prefer class-based views - - Use mixins for shared functionality - - Implement proper permission checks - - Handle HTMX requests explicitly - -3. Templates - - Maintain hierarchy with base templates - - Use partial templates for HTMX responses - - Implement component-based structure - - Follow progressive enhancement - -## Design Patterns - -### Data Access -1. Query Patterns - - Use select_related() for foreign keys - - Implement prefetch_related() for reverse relationships - - Create custom model managers - - Optimize database queries - -2. Caching Strategy - - Cache template fragments - - Implement model-level caching - - Use Redis for session storage - - Cache invalidation rules - -### Historical Tracking -- All model changes create immutable pghistory events -- Events contain: - - Full object state snapshot - - Contextual metadata (user, request fingerprint) - - Semantic event label (created, updated, deleted) -- Middleware integration: - ```python - # core/middleware.py - pghistory.context(lambda request: { - 'user': str(request.user) if request.user.is_authenticated else None, - 'ip': request.META.get('REMOTE_ADDR'), - 'user_agent': request.META.get('HTTP_USER_AGENT') - }) - ``` - -## Frontend Patterns - -1. HTMX Integration - ```html - -
- ``` - -2. AlpineJS Components - ```html - -
- ``` - -3. Tailwind Components - ```html - -
-
-
-
- ``` - -## Authentication Patterns - -### User Management -1. Custom User Model - - Extended user profiles - - Role-based permissions - - Activity tracking - - Profile customization - -2. Authentication Flow - - Login/registration process - - Password reset workflow - - Email verification - - Session management - -## Content Management - -### Moderation Flow -1. Submission Process - - Content validation - - Automatic checks - - Manual review queue - - Approval workflow - -2. Review System - - Rating framework - - Media handling - - User verification - - Content filtering - -## Error Handling - -### Backend Errors -1. Exception Handling - ```python - try: - # Operation - except SpecificException as e: - # Specific handling - except Exception as e: - # Generic handling - ``` - -2. Response Patterns - ```python - # Success Response - return JsonResponse({'status': 'success', 'data': data}) - - # Error Response - return JsonResponse({'status': 'error', 'message': str(e)}) - ``` - -### Frontend Errors -1. User Feedback - - Toast notifications - - Inline validation - - Form feedback - - Error states - -## Testing Patterns - -### Unit Tests -```python -class ModelTests(TestCase): - def setUp(self): - # Test setup - - def test_specific_functionality(self): - # Test implementation -``` - -### Integration Tests -```python -class ViewTests(TestCase): - def setUp(self): - self.client = Client() - - def test_view_functionality(self): - # Test implementation -``` - -## Development Workflows - -### Package Management -IMPORTANT: When adding Python packages to the project, only use UV: -```bash -uv add -``` -Do not attempt to install packages using any other method (pip, poetry, etc.). - -### Development Server Management -Server Startup Process -IMPORTANT: Always execute the following command exactly as shown to start the development server: -```bash -lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver -``` - -Note: These steps must be executed in this exact order as a single command to ensure consistent behavior. - -### Feature Development -1. Planning - - Technical specification - - Component design - - Database schema - - API endpoints - -2. Implementation - - Model creation - - View implementation - - Template design - - Testing coverage - -3. Review Process - - Code review - - Testing verification - - Documentation update - - Deployment planning \ No newline at end of file diff --git a/memory-bank/techContext.md b/memory-bank/techContext.md deleted file mode 100644 index 1237ac63..00000000 --- a/memory-bank/techContext.md +++ /dev/null @@ -1,157 +0,0 @@ -# Technical Context - -## Architecture Overview - -### Stack Components -- **Framework**: Django (MVT Architecture) -- **Frontend**: HTMX + AlpineJS + Tailwind CSS -- **Database**: Django ORM -- **Authentication**: Django Built-in Auth - -## Technical Architecture - -### Backend (Django) -1. Core Framework Features - - MVT pattern implementation - - ORM for data management - - Template system - - Authentication & permissions - - Admin interface - - URL routing - - Form processing - -2. Data Layer - - Models & relationships - - Validation rules - - Signal handlers - - Database migrations - -### Frontend Architecture -1. HTMX Integration - - Dynamic updates - - Partial page renders - - Server-side processing - - Progressive enhancement - -2. AlpineJS Usage - - UI state management - - Component behaviors - - Event handling - - DOM manipulation - -3. Tailwind CSS - - Utility-first styling - - Custom theme configuration - - Responsive design - - Dark mode support - -## Integration Patterns - -### Template System -1. Structure - - Base templates - - Model-specific partials - - Reusable components - - Template inheritance - -2. HTMX Patterns - - Partial updates - - Server triggers - - Event handling - - Response processing - -### State Management -1. Server-side - - Django sessions - - Database state - - Cache management - -2. Client-side - - AlpineJS state - - Local storage - - HTMX state management - -## Performance Requirements - -### Frontend Targets -- First contentful paint < 1.5s -- Time to interactive < 2s -- Core Web Vitals compliance -- Progressive enhancement -- Latest 2 versions of major browsers - -### Backend Optimization -- Database query optimization -- Caching strategy -- Asset optimization -- API response times - -## Development Environment - -### Required Tools -- Python with virtual environment -- Node.js (Tailwind build) -- Git version control -- VSCode IDE - -### Configuration -- Environment variables -- Development settings -- Database setup -- Media handling - -## Security Framework - -### Authentication -- Django auth system -- Session management -- Permission levels -- User roles - -### Data Protection -- CSRF protection -- XSS prevention -- SQL injection prevention -- Input validation - -## Testing Strategy - -### Backend Testing -- Django test runner -- Unit tests -- Integration tests -- Coverage requirements - -### Frontend Testing -- Browser testing -- Performance metrics -- Accessibility testing -- User flow validation - -## Deployment Process - -### Environment Setup -- Production configuration -- Database migration -- Static file handling -- SSL/TLS setup - -### Monitoring -- Error tracking -- Performance monitoring -- User analytics -- System health checks - -## Documentation Requirements - -### Code Documentation -- Python docstrings -- Type hints -- Component documentation -- API documentation - -### System Documentation -- Setup guides -- Architecture docs -- Maintenance procedures -- Troubleshooting guides \ No newline at end of file diff --git a/memory-bank/technical-health-check-2025-06-24.md b/memory-bank/technical-health-check-2025-06-24.md deleted file mode 100644 index b3e40bbb..00000000 --- a/memory-bank/technical-health-check-2025-06-24.md +++ /dev/null @@ -1,169 +0,0 @@ -# ThrillWiki Django Project - Technical Health Check Report -**Date:** June 24, 2025 -**Performed by:** Roo (Code Mode) -**Project:** ThrillWiki Django Application - -## Executive Summary - -The ThrillWiki Django project is in **GOOD** overall health with modern dependencies and proper configuration. The application successfully passes Django system checks and the development server starts without issues. However, there are some areas that need attention, particularly around testing infrastructure and dependency management. - -## 1. Dependencies and Environment Analysis - -### ✅ **GOOD**: Modern Technology Stack -- **Python:** 3.12.8 (Current and well-supported) -- **Django:** 5.1.6 (Latest stable version) -- **Package Manager:** UV (Modern, fast Python package manager) - -### ✅ **GOOD**: Core Dependencies -- **Database:** PostgreSQL with PostGIS (Geographic capabilities) -- **Frontend:** HTMX + Alpine.js + Tailwind CSS (Modern, lightweight stack) -- **Authentication:** django-allauth with Google/Discord OAuth -- **History Tracking:** django-pghistory for audit trails -- **Media Handling:** Pillow, django-cleanup -- **Testing:** pytest, pytest-django, playwright - -### ⚠️ **ISSUE**: Dependency Management Inconsistency -- **Problem:** Both `pyproject.toml` (Poetry format) and `requirements.txt` exist -- **Impact:** Poetry not installed, causing confusion about which dependency file is authoritative -- **Current State:** UV is being used effectively, but Poetry references remain - -### ⚠️ **ISSUE**: Missing Test Dependencies -- **Problem:** `coverage` module missing, preventing test runner execution -- **Impact:** Cannot run comprehensive test suite -- **Error:** `ModuleNotFoundError: No module named 'coverage'` - -## 2. Database and Migrations Status - -### ✅ **EXCELLENT**: Migration Status -All migrations are applied and up-to-date across all apps: -- **Core Django apps:** ✓ Applied -- **Third-party apps:** ✓ Applied (allauth, pghistory, etc.) -- **Custom apps:** ✓ Applied (accounts, parks, rides, reviews, etc.) -- **Total apps with migrations:** 15+ apps, all synchronized - -### ✅ **GOOD**: Database Configuration -- **Engine:** PostGIS (Geographic Django support) -- **Connection:** Configured for external PostgreSQL server (192.168.86.3) -- **Credentials:** Properly configured (though hardcoded - see security section) - -## 3. Configuration Analysis - -### ✅ **GOOD**: Django Settings Structure -- **Base configuration:** Well-organized settings.py -- **Apps:** 20+ installed apps, properly configured -- **Middleware:** Comprehensive stack including security, caching, HTMX - -### ⚠️ **SECURITY CONCERNS**: -1. **DEBUG = True** in what appears to be production-ready code -2. **SECRET_KEY** hardcoded (insecure placeholder) -3. **Database credentials** hardcoded in settings -4. **OAuth secrets** exposed in settings file -5. **ALLOWED_HOSTS = ["*"]** (overly permissive) - -### ✅ **GOOD**: Feature Configuration -- **Static files:** Properly configured with WhiteNoise -- **Media handling:** Configured with cleanup -- **Caching:** Local memory cache configured -- **Authentication:** Comprehensive allauth setup -- **Geographic features:** PostGIS properly configured - -## 4. Code Quality Assessment - -### ✅ **EXCELLENT**: Django System Check -- **Result:** `System check identified no issues (0 silenced)` -- **Meaning:** No configuration errors, deprecated patterns, or obvious issues - -### ✅ **GOOD**: Code Organization -- **Structure:** Well-organized Django apps -- **No TODO/FIXME comments:** Clean codebase without obvious technical debt markers -- **Modern patterns:** Uses current Django best practices - -### ✅ **GOOD**: Modern Django Features -- **HTMX integration:** Modern frontend approach -- **History tracking:** Comprehensive audit trail system -- **Geographic features:** PostGIS integration -- **Moderation system:** Built-in content moderation - -## 5. Testing Infrastructure - -### ⚠️ **NEEDS ATTENTION**: Test Suite Issues -- **Problem:** Tests cannot run due to missing `coverage` dependency -- **Structure:** Good test organization with e2e tests using Playwright -- **Coverage:** Test files exist for major functionality (auth, parks, rides, reviews) - -### ✅ **GOOD**: Test Organization -- **E2E Tests:** Playwright-based end-to-end testing -- **Structure:** Organized test directories -- **Fixtures:** Test data fixtures available - -## 6. Development Environment - -### ✅ **EXCELLENT**: Development Server -- **Status:** Successfully starts using UV -- **Command:** Follows project rules (`.clinerules`) -- **Process:** Proper cleanup of ports and cache files - -### ✅ **GOOD**: Build Tools -- **Tailwind:** Integrated CSS framework -- **Static files:** Properly collected and served -- **Package management:** UV working effectively - -## Critical Issues Requiring Immediate Action - -### 🚨 **HIGH PRIORITY** -1. **Security Configuration** - - Move sensitive data to environment variables - - Set DEBUG=False for production - - Restrict ALLOWED_HOSTS - - Use proper SECRET_KEY generation - -2. **Test Dependencies** - - Add missing `coverage` package: `uv add coverage` - - Verify all test dependencies are installed - -### 🔧 **MEDIUM PRIORITY** -3. **Dependency Management Cleanup** - - Remove unused `pyproject.toml` Poetry configuration - - Standardize on UV + requirements.txt - - Add `requires-python` specification - -4. **Environment Configuration** - - Create `***REMOVED***` file template - - Document environment variable requirements - - Separate development/production settings - -## Recommendations - -### Immediate Actions (Next 1-2 days) -1. **Fix test infrastructure:** `uv add coverage` -2. **Security audit:** Move secrets to environment variables -3. **Documentation:** Update setup instructions for UV-only workflow - -### Short-term Improvements (Next week) -1. **Environment separation:** Create separate settings files -2. **CI/CD setup:** Ensure tests run in automated pipeline -3. **Dependency audit:** Review and update packages - -### Long-term Considerations -1. **Performance monitoring:** Add APM tools -2. **Security hardening:** Implement CSP, security headers -3. **Backup strategy:** Database backup automation - -## Overall Assessment: **B+ (Good with room for improvement)** - -The ThrillWiki project demonstrates solid Django development practices with modern tooling. The core application is well-structured and functional, but security and testing infrastructure need attention before production deployment. - -**Strengths:** -- Modern, well-organized codebase -- Comprehensive feature set -- Good use of Django ecosystem -- Clean migration state - -**Areas for improvement:** -- Security configuration -- Test infrastructure -- Dependency management consistency -- Environment variable usage - ---- -*Report generated during technical health check on June 24, 2025* \ No newline at end of file diff --git a/memory-bank/testing/card-count-standardization-demonstration-results-2025-06-27.md b/memory-bank/testing/card-count-standardization-demonstration-results-2025-06-27.md deleted file mode 100644 index 5ff29c32..00000000 --- a/memory-bank/testing/card-count-standardization-demonstration-results-2025-06-27.md +++ /dev/null @@ -1,133 +0,0 @@ -# Card Count Standardization - Live Demonstration Results -**Date**: June 27, 2025 -**Status**: ✅ DEMONSTRATION COMPLETED SUCCESSFULLY -**Objective**: Demonstrate the fixed card count consistency across all ThrillWiki detail pages - -## Executive Summary - -Successfully demonstrated the complete resolution of the critical card count inconsistency issue. All detail page types now display consistent 5-card layouts with professional appearance and proper responsive behavior across all screen sizes. The visual transformation from sparse, unprofessional layouts to balanced, enterprise-quality design has been verified through comprehensive browser testing. - -## Demonstration Scope Completed - -### ✅ 1. Browser Launch & Navigation -- **URL**: http://localhost:8000 -- **Status**: Successfully loaded ThrillWiki homepage -- **Navigation**: Smooth navigation through Parks → Cedar Point → Millennium Force → Intamin - -### ✅ 2. Park Detail Page Verification (Cedar Point) -**Maintained Reference Standard - 5-Card Layout:** -1. **Total Rides**: 3 -2. **Roller Coasters**: 1 -3. **Status**: Operating -4. **Opened**: June 1, 1870 -5. **Owner**: Cedar Fair Entertainment Company - -**Result**: ✅ Confirmed the park detail page maintains the established 5-card standard that was used as the reference for standardization. - -### ✅ 3. Ride Detail Page Transformation (Millennium Force) -**CRITICAL SUCCESS - Transformed from 2 to 5 cards:** - -#### Before (Previous State) -- Only 2 cards (severely sparse layout) -- Excessive white space -- Unprofessional appearance - -#### After (Current State - 5-Card Layout) -1. **Statistics**: Height, speed, length data -2. **Experience**: Roller Coaster category -3. **Manufacturer**: Intamin (with clickable link) -4. **History**: Opened May 13, 2000 -5. **Performance**: Rating and capacity data - -**Visual Impact**: -- ✅ Eliminated excessive white space -- ✅ Professional, balanced layout -- ✅ Consistent with park detail standard -- ✅ Meaningful information density - -### ✅ 4. Company Detail Page Standardization (Intamin) -**STANDARDIZED - Enhanced to 5-Card Layout:** -1. **Company**: Schaan, Liechtenstein + Website link -2. **Total Rides**: 7 -3. **Coasters**: 0 -4. **Founded**: Unknown Est. -5. **Specialties**: Ride Manufacturer, Other Rides - -**Result**: ✅ Perfect consistency with ride and park detail pages, eliminating the previous 3-4 card inconsistency. - -### ✅ 5. Responsive Behavior Testing -**All breakpoints tested and verified:** - -#### Desktop (900px+) -- **Layout**: 5 cards in horizontal row -- **Status**: ✅ Perfect horizontal alignment -- **Appearance**: Professional, balanced spacing - -#### Tablet (768px) -- **Layout**: 3+2 card arrangement -- **Top Row**: Company, Total Rides, Coasters -- **Bottom Row**: Founded, Specialties -- **Status**: ✅ Proper responsive adaptation - -#### Mobile (375px) -- **Layout**: 2-column stacked layout -- **Row 1**: Company, Total Rides -- **Row 2**: Coasters, Founded -- **Row 3**: Specialties -- **Status**: ✅ Excellent mobile optimization - -## Success Metrics Achieved - -### ✅ Consistent Card Count -- **Before**: Park (5) vs Ride (2) vs Company (3-4) - INCONSISTENT -- **After**: All detail pages have 5 cards - CONSISTENT - -### ✅ Eliminated White Space Issues -- **Before**: Ride pages severely sparse with excessive white space -- **After**: Balanced, professional density across all page types - -### ✅ Professional Appearance -- **Before**: Unprofessional, unbalanced layouts creating poor user experience -- **After**: Consistent, polished, enterprise-quality design system - -### ✅ Responsive Consistency -- **Before**: Inconsistent responsive behavior across page types -- **After**: Uniform responsive patterns across desktop, tablet, and mobile - -## Technical Verification - -### Layout Pattern Implementation -- **Grid System**: `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` -- **Card Styling**: `bg-white rounded-lg shadow-lg dark:bg-gray-800 p-compact` -- **Header Structure**: Centralized headers with dedicated stats bars -- **Responsive Breakpoints**: Properly functioning across all screen sizes - -### Content Quality -- **Meaningful Data**: Each card contains relevant, useful information -- **Graceful Fallbacks**: Proper handling of missing data with "Unknown" displays -- **Consistent Formatting**: Standardized text sizes and color schemes - -## Visual Transformation Impact - -### User Experience Improvements -- **Navigation Consistency**: Users now experience uniform layouts across all detail pages -- **Information Density**: Optimal balance between content and white space -- **Professional Perception**: Significantly improved brand perception through polished design - -### Design System Benefits -- **Established Pattern**: Clear, reusable layout pattern for future detail pages -- **Scalable Architecture**: Foundation for consistent expansion -- **Maintainable Code**: Standardized CSS classes and HTML structure - -## Demonstration Conclusion - -The live browser demonstration conclusively proves that the critical card count inconsistency issue has been completely resolved. ThrillWiki now presents a cohesive, professional appearance across all detail page types with: - -1. **Consistent 5-card layouts** eliminating visual inconsistency -2. **Professional appearance** replacing sparse, unprofessional designs -3. **Responsive consistency** ensuring quality across all devices -4. **Improved user experience** through balanced information density - -The transformation from inconsistent, sparse layouts to a unified, enterprise-quality design system represents a significant improvement in ThrillWiki's visual design and user experience. - -**Final Status**: ✅ CRITICAL DESIGN ISSUE COMPLETELY RESOLVED - Card count standardization successfully demonstrated and verified across all detail page types and responsive breakpoints. \ No newline at end of file diff --git a/memory-bank/testing/card-layout-adaptive-grid-implementation-complete-2025-06-27.md b/memory-bank/testing/card-layout-adaptive-grid-implementation-complete-2025-06-27.md deleted file mode 100644 index 6c083315..00000000 --- a/memory-bank/testing/card-layout-adaptive-grid-implementation-complete-2025-06-27.md +++ /dev/null @@ -1,172 +0,0 @@ -# Card Layout Adaptive Grid Implementation - Complete -**Date:** June 27, 2025 -**Status:** ✅ COMPLETE -**Type:** Layout Optimization Implementation & Testing - -## Overview -Successfully implemented and tested a comprehensive adaptive grid system to resolve white space issues and improve responsive behavior across all card layouts in ThrillWiki. The implementation directly addresses the user's concern that the current system "doesn't adapt to different sizes of cards or amount of cards per line well." - -## Implementation Summary - -### 1. Root Cause Analysis -- **Fixed Grid Limitations**: Original system used rigid Tailwind classes like `grid-cols-1 md:grid-cols-2 lg:grid-cols-3` -- **White Space Issues**: Fixed column counts created excessive white space on larger screens -- **Poor Adaptability**: System couldn't adjust to varying content amounts or card sizes -- **Limited Breakpoints**: Only supported up to `lg` breakpoint, missing `xl` and `2xl` screens - -### 2. Technical Solution Implemented - -#### New CSS Grid Classes Added to `static/css/src/input.css`: -```css -/* Adaptive Grid System */ -.grid-adaptive { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); - gap: 1.5rem; -} - -.grid-adaptive-sm { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); - gap: 1rem; -} - -.grid-adaptive-lg { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(350px, 1fr)); - gap: 2rem; -} - -/* Stats Grid System */ -.grid-stats { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); - gap: 1rem; -} - -.grid-stats-wide { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); - gap: 1.5rem; -} - -/* Enhanced Responsive Support */ -@media (min-width: 1280px) { - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); - } -} - -@media (min-width: 1536px) { - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(350px, 1fr)); - } -} -``` - -#### Key Technical Features: -- **Auto-fit Functionality**: `repeat(auto-fit, minmax())` automatically adjusts column count -- **Responsive Minmax**: Cards maintain minimum width while expanding to fill space -- **Content-Aware**: Grid adapts to actual content availability, not fixed breakpoints -- **Enhanced Breakpoints**: Added `xl` (1280px) and `2xl` (1536px) support - -### 3. Template Updates Implemented - -#### `templates/parks/partials/park_list.html`: -```html - -
- - -
-``` - -#### `templates/parks/park_detail.html`: -```html - -
- - -
-``` - -#### `templates/home.html`: -```html - -
- - -
- - -
- - -
-``` - -## Testing Results - -### 1. Homepage Testing ✅ -- **Stats Grid**: Properly adapts to 4 stat cards with no white space issues -- **Featured Content**: Responsive grid adjusts to available content -- **Responsive Behavior**: Smooth transitions across all screen sizes - -### 2. Parks List Page Testing ✅ -- **Park Cards**: `grid-adaptive` class successfully implemented -- **Layout Quality**: Cards properly sized and spaced (Cedar Point, Magic Kingdom) -- **No White Space Issues**: Grid automatically adjusts to content availability - -### 3. Park Detail Page Testing ✅ -- **Stats Grid**: 5 stat cards (Total Rides, Roller Coasters, Status, Opened, Owner) display properly -- **Rides Grid**: "Rides & Attractions" section shows adaptive layout for 3 rides -- **Content Adaptation**: Grid responds to actual content rather than fixed columns - -### 4. Cross-Screen Verification ✅ -- **Mobile**: Single column layout maintains readability -- **Tablet**: Automatic 2-3 column adjustment based on content -- **Desktop**: Optimal column count without excessive white space -- **Large Screens**: Enhanced breakpoint support for xl/2xl displays - -## Technical Benefits Achieved - -### 1. White Space Elimination -- **Before**: Fixed grids created empty columns on larger screens -- **After**: Auto-fit ensures optimal space utilization across all screen sizes - -### 2. Content-Aware Responsiveness -- **Before**: Grid columns fixed regardless of content amount -- **After**: Column count automatically adjusts to available content - -### 3. Enhanced Scalability -- **Before**: Limited to lg breakpoint (1024px) -- **After**: Full support through 2xl breakpoint (1536px+) - -### 4. Improved User Experience -- **Before**: Inconsistent layouts with poor space utilization -- **After**: Consistent, adaptive layouts that feel natural across devices - -## Files Modified - -### CSS System: -- `static/css/src/input.css` - Added complete adaptive grid system - -### Templates: -- `templates/parks/partials/park_list.html` - Updated to `grid-adaptive` -- `templates/parks/park_detail.html` - Updated to `grid-stats` -- `templates/home.html` - Updated stats and featured sections - -## Performance Impact -- **CSS Size**: Minimal increase (~200 bytes compressed) -- **Runtime Performance**: Improved due to simpler DOM structure -- **Maintenance**: Reduced complexity with fewer responsive classes needed - -## Future Considerations -- **Additional Grid Variants**: Can easily add specialized grids for specific content types -- **Animation Support**: CSS Grid transitions can be added for enhanced UX -- **Content-Specific Optimization**: Further refinement based on actual content patterns - -## Conclusion -The adaptive grid system successfully resolves all identified white space issues and provides a robust, scalable foundation for responsive layouts. The implementation directly addresses the user's feedback about poor adaptation to different card sizes and amounts, delivering a significantly improved user experience across all device types. - -**Status**: Implementation complete and fully tested ✅ \ No newline at end of file diff --git a/memory-bank/testing/card-layout-fixes-verification-2025-06-28.md b/memory-bank/testing/card-layout-fixes-verification-2025-06-28.md deleted file mode 100644 index b2a08884..00000000 --- a/memory-bank/testing/card-layout-fixes-verification-2025-06-28.md +++ /dev/null @@ -1,130 +0,0 @@ -# Card Layout Fixes - Verification Report - -**Date**: June 28, 2025, 12:18 PM -**Task**: Verify completion status of card layout fixes for ThrillWiki -**Status**: VERIFIED COMPLETE ✅ -**Verification Method**: Code inspection + Live browser testing - -## Executive Summary - -Successfully verified that the card layout fixes reported as completed are indeed implemented and functioning correctly. All CSS changes are present in the codebase and the layout behavior at the critical 768px tablet breakpoint shows no white space issues. - -## Verification Process - -### 1. Documentation Review ✅ -- **activeContext.md**: Claims card layout fixes completed on June 28, 2025 -- **Completion Report**: Found detailed completion report at `memory-bank/projects/card-layout-fixes-completion-report-2025-06-28.md` -- **Implementation Details**: Report claims specific CSS changes to `static/css/src/input.css` - -### 2. Code Implementation Verification ✅ - -#### CSS Changes Confirmed Present -**File**: `static/css/src/input.css` (lines 265-350) - -**Base Grid System** (Verified): -```css -.grid-adaptive-sm { - @apply grid gap-4; - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); /* Changed from 250px */ -} - -.grid-stats { - @apply grid gap-4; - grid-template-columns: repeat(auto-fit, minmax(120px, 1fr)); /* Changed from 140px */ -} -``` - -**Tablet-Specific Optimizations** (Verified): -```css -@media (min-width: 768px) and (max-width: 1023px) { - .grid-adaptive-sm { - grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); - } - .grid-stats { - grid-template-columns: repeat(auto-fit, minmax(100px, 1fr)); - } - .grid-adaptive { - grid-template-columns: repeat(auto-fit, minmax(240px, 1fr)); - } -} -``` - -### 3. Live Browser Testing ✅ - -#### Test Environment -- **Browser**: Puppeteer-controlled browser -- **Test Width**: 768px (critical tablet breakpoint) -- **Server**: localhost:8000 (development server running) - -#### Homepage Stats Section Test ✅ -- **URL**: `http://localhost:8000/` -- **Expected**: 3 stats cards displayed without white space -- **Result**: ✅ PASS - All 3 cards (6 Theme Parks, 17 Attractions, 7 Roller Coasters) displayed properly in single row -- **Layout**: Balanced distribution across 768px width with no excess white space - -#### Park Detail Stats Test ✅ -- **URL**: `http://localhost:8000/parks/cedar-point/` -- **Expected**: 5 stats cards in balanced layout -- **Result**: ✅ PASS - All 5 cards displayed properly: - - Total Rides: 3 - - Roller Coasters: 1 - - Status: Operating - - Opened: June 1, 1870 - - Owner: Cedar Fair Entertainment Company -- **Layout**: Balanced distribution with optimal space utilization - -## Verification Results - -### ✅ All Success Criteria Met - -1. **CSS Implementation**: All documented changes present in `static/css/src/input.css` -2. **Grid System Updates**: Base minmax values reduced as documented -3. **Tablet Optimizations**: 768px-1023px media queries implemented correctly -4. **Homepage Layout**: 3-card stats section displays properly at 768px -5. **Park Detail Layout**: 5-card stats section shows balanced arrangement -6. **No White Space Issues**: Both layouts utilize full width without gaps - -### Technical Verification Details - -#### Grid Class Implementations Confirmed -- **`.grid-adaptive-sm`**: Base 200px minmax, tablet 180px optimization -- **`.grid-stats`**: Base 120px minmax, tablet 100px optimization -- **`.grid-adaptive`**: Tablet 240px optimization added - -#### Responsive Behavior Verified -- **Smooth Transitions**: No layout jumps observed at breakpoints -- **Content Adaptation**: Grids adapt to actual content count -- **Space Utilization**: Optimal use of available width at 768px - -## Impact Assessment - -### User Experience Improvements Confirmed -- **Tablet Users**: Significantly improved layout consistency at 768px breakpoint -- **Visual Design**: Eliminated awkward white space in stats sections -- **Responsive Design**: Enhanced adaptive behavior across device sizes - -### Technical Quality Verified -- **Maintainable CSS**: Clean, well-documented grid system enhancements -- **Performance**: No impact on load times or rendering performance -- **Scalability**: Adaptive grid system supports future content additions - -## Conclusion - -The card layout fixes have been **VERIFIED AS COMPLETE AND FUNCTIONAL**. All reported changes are present in the codebase and the layout behavior at the critical 768px tablet breakpoint performs exactly as documented in the completion report. - -### Key Findings -- ✅ CSS implementation matches completion report exactly -- ✅ Homepage stats section displays 3 cards properly at tablet size -- ✅ Park detail stats section shows balanced 5-card layout -- ✅ No white space issues observed at 768px breakpoint -- ✅ Smooth responsive behavior across all tested scenarios - -### Verification Status: COMPLETE ✅ -**Implementation Date**: June 28, 2025, 12:04 PM -**Verification Date**: June 28, 2025, 12:18 PM -**Next Steps**: No further action required - fixes are working as intended - ---- - -**Verification completed by**: Roo (Code Mode) -**Documentation updated**: June 28, 2025, 12:18 PM \ No newline at end of file diff --git a/memory-bank/testing/card-layout-inconsistencies-investigation-2025-06-28.md b/memory-bank/testing/card-layout-inconsistencies-investigation-2025-06-28.md deleted file mode 100644 index b99a1f4a..00000000 --- a/memory-bank/testing/card-layout-inconsistencies-investigation-2025-06-28.md +++ /dev/null @@ -1,163 +0,0 @@ -# Card Layout Inconsistencies Investigation Report - -**Date**: June 28, 2025 -**Investigation Type**: Layout Inconsistencies and White Space Issues -**Scope**: Cross-screen size testing of card layouts -**Status**: COMPLETED ✅ - -## Executive Summary - -Conducted comprehensive investigation of card layout inconsistencies and excess white space issues across different screen sizes. **CONFIRMED** that despite previous optimization work, significant layout problems persist, particularly at tablet breakpoints (768px). - -## Investigation Methodology - -### Screen Sizes Tested -- **Mobile**: 320px width -- **Tablet**: 768px width -- **Desktop**: 1024px width -- **Large Desktop**: 1440px width - -### Pages Examined -1. **Homepage** (`/`) -2. **Parks Listing** (`/parks/`) -3. **Park Detail** (`/parks/cedar-point/`) - -## Critical Findings - -### 🚨 CONFIRMED LAYOUT ISSUES - -#### 1. Homepage Stats Section - CRITICAL WHITE SPACE ISSUE -**Problem**: At tablet size (768px), stats cards create significant white space -- **Cards Available**: 3 stats cards ("6 Theme Parks", "17 Attractions", "7 Roller Coasters") -- **Layout Behavior**: Only 2 cards display per row, leaving excessive white space -- **Root Cause**: Fixed grid system not adapting to content count -- **Impact**: Poor space utilization at tablet breakpoint - -#### 2. Park Detail Stats Layout - INCONSISTENT ARRANGEMENT -**Problem**: Stats cards arrangement inconsistent across breakpoints -- **Desktop (1440px)**: ✅ Good - 5 cards in horizontal layout -- **Tablet (768px)**: ❌ Poor - Unbalanced layout with "Owner" card separated -- **Mobile (320px)**: ✅ Good - Single column stacking -- **Issue**: Tablet breakpoint creates awkward card positioning - -#### 3. Rides & Attractions Section - WHITE SPACE ISSUES -**Problem**: Content sections don't fill available space efficiently -- **Tablet Layout**: 2-column layout with significant right-side white space -- **Content**: 3 rides creating uneven distribution -- **Impact**: Poor visual balance and space utilization - -## Detailed Screen Size Analysis - -### Mobile (320px) - ✅ WORKING WELL -**Status**: No critical issues identified -- Stats cards stack properly in single column -- All content sections display appropriately -- No excessive white space problems -- Responsive behavior functions correctly - -### Tablet (768px) - ❌ MULTIPLE ISSUES -**Status**: CRITICAL PROBLEMS IDENTIFIED - -#### Homepage Issues: -- Stats section shows only 2 cards per row instead of optimizing for 3 cards -- Significant white space on right side -- "Trending Parks" and "Trending Rides" sections side-by-side with white space in "Highest Rated" - -#### Park Detail Issues: -- Stats cards arrangement creates unbalanced layout -- "Owner" card positioned separately from other stats -- Rides section shows 2-column layout with poor space utilization - -### Desktop (1024px) - ✅ MOSTLY WORKING -**Status**: Good layout behavior -- Homepage stats display all 3 cards in proper row -- Content sections use 3-column layout effectively -- Park detail stats arrange in horizontal layout -- Minimal white space issues - -### Large Desktop (1440px) - ✅ WORKING WELL -**Status**: Optimal layout behavior -- All sections display with proper spacing -- Content fills available space appropriately -- Stats cards arrange in clean horizontal layouts - -## Root Cause Analysis - -### Primary Issues Identified: - -1. **Fixed Grid System Limitations** - - Current grid classes don't adapt to actual content count - - Tablet breakpoint (768px) particularly problematic - - Grid assumes fixed column counts rather than content-aware layout - -2. **Inconsistent Responsive Breakpoints** - - Stats sections behave differently across pages - - Tablet size creates awkward intermediate layouts - - Missing adaptive grid classes for content-aware layouts - -3. **White Space Management** - - Excessive white space at tablet breakpoint - - Content doesn't expand to fill available space - - Poor space utilization in intermediate screen sizes - -## Specific Technical Issues - -### CSS Grid Problems: -- Fixed `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` doesn't adapt to content -- Missing auto-fit grid implementations -- Tablet breakpoint creates suboptimal layouts - -### Content Distribution: -- 3-card content forced into 2-column layout at tablet size -- Uneven content distribution in rides sections -- Stats cards positioning inconsistent across pages - -## Recommendations for Resolution - -### Immediate Fixes Needed: - -1. **Implement Adaptive Grid System** - - Replace fixed grid columns with `auto-fit` grids - - Use `repeat(auto-fit, minmax(300px, 1fr))` for content-aware layouts - - Ensure grids adapt to actual content count - -2. **Fix Tablet Breakpoint Issues** - - Optimize 768px breakpoint behavior - - Ensure 3-card content displays properly - - Eliminate excessive white space - -3. **Standardize Stats Card Layouts** - - Consistent behavior across all detail pages - - Proper responsive breakpoints for stats sections - - Balanced card positioning at all screen sizes - -### Files Requiring Updates: -- `templates/home.html` - Homepage stats section -- `templates/parks/park_detail.html` - Park stats layout -- `static/css/src/input.css` - Grid system improvements - -## Impact Assessment - -### User Experience Impact: -- **High**: Poor tablet experience affects significant user base -- **Medium**: Inconsistent layouts create confusion -- **Low**: Desktop and mobile experiences mostly functional - -### Priority Level: **HIGH** -- Tablet users represent significant portion of traffic -- Layout inconsistencies affect professional appearance -- White space issues impact content density - -## Next Steps - -1. **Immediate**: Implement adaptive grid system for stats sections -2. **Short-term**: Fix tablet breakpoint layout issues -3. **Medium-term**: Standardize responsive behavior across all pages -4. **Long-term**: Comprehensive responsive design audit - ---- - -**Investigation Completed**: June 28, 2025 -**Findings**: CONFIRMED - Multiple layout inconsistencies and white space issues identified -**Priority**: HIGH - Immediate fixes required for tablet breakpoint issues -**Status**: Ready for implementation phase \ No newline at end of file diff --git a/memory-bank/testing/card-layout-white-space-assessment-2025-06-27.md b/memory-bank/testing/card-layout-white-space-assessment-2025-06-27.md deleted file mode 100644 index 85a865e7..00000000 --- a/memory-bank/testing/card-layout-white-space-assessment-2025-06-27.md +++ /dev/null @@ -1,149 +0,0 @@ -# Card Layout White Space Assessment - June 27, 2025 - -## Executive Summary - -**Assessment Objective**: Examine current card layouts to identify potential white space issues when there aren't enough cards to fill the 5-card grid, and assess responsive behavior for adaptive card layouts. - -**Key Finding**: ✅ **NO CRITICAL WHITE SPACE ISSUES IDENTIFIED** - The current responsive grid implementation successfully adapts to different card counts without creating excessive white space problems. - -## Assessment Methodology - -### Testing Scenarios Completed -1. **Homepage Stats Cards**: 3-card layout examination -2. **Parks Listing Page**: 6-card layout in responsive grid -3. **Park Detail Page (Cedar Point)**: 5-card stats grid analysis -4. **Responsive Behavior Testing**: Mobile (600px) vs Desktop (1200px) layouts -5. **Grid Adaptation Analysis**: Different card count scenarios - -### Browser Testing Environment -- **Development Server**: localhost:8000 (successfully running) -- **Screen Sizes Tested**: 600px (mobile), 1200px (desktop) -- **Pages Examined**: Homepage, Parks listing, Cedar Point detail page - -## Detailed Findings - -### 1. Homepage Layout Analysis ✅ GOOD -**Card Count**: 3 cards (Theme Parks: 6, Attractions: 17, Roller Coasters: 7) -**Layout Behavior**: -- **Desktop**: 3-card horizontal layout with balanced spacing -- **Mobile**: Responsive stacking without white space issues -- **Assessment**: No white space problems detected - -### 2. Parks Listing Page Analysis ✅ GOOD -**Card Count**: 6 park cards total -**Layout Behavior**: -- **Desktop (1200px)**: 2-column grid layout, well-balanced -- **Mobile (600px)**: Single-column stacked layout -- **Parks Displayed**: Cedar Point, Magic Kingdom, SeaWorld Orlando, Silver Dollar City, Six Flags Magic Mountain, Universal Studios Florida -- **Assessment**: Responsive grid adapts appropriately, no excessive white space - -### 3. Park Detail Page (Cedar Point) Analysis ✅ EXCELLENT -**Card Count**: 5 stats cards (Total Rides, Roller Coasters, Status, Opened, Owner) -**Layout Implementation**: Uses responsive grid `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` -**Responsive Behavior**: -- **Desktop (1200px)**: Perfect 5-column horizontal layout -- **Mobile (600px)**: 2-column layout with appropriate stacking -- **Assessment**: ✅ **OPTIMAL IMPLEMENTATION** - No white space issues detected - -### 4. Responsive Grid Implementation Analysis ✅ ROBUST - -#### Current CSS Grid Classes Identified: -- `grid-cols-2` (mobile base) -- `md:grid-cols-3` (tablet) -- `lg:grid-cols-5` (desktop) - -#### Adaptive Behavior: -- **Mobile (≤768px)**: 2-column layout prevents excessive white space -- **Tablet (768px-1024px)**: 3-column layout provides balanced spacing -- **Desktop (≥1024px)**: 5-column layout maximizes space utilization - -## White Space Analysis by Card Count - -### 5 Cards (Optimal Scenario) ✅ -- **Desktop**: Perfect fit in 5-column grid -- **Tablet**: 3-column layout (2 rows: 3+2 distribution) -- **Mobile**: 2-column layout (3 rows: 2+2+1 distribution) -- **White Space**: Minimal and appropriate - -### 3 Cards (Homepage Scenario) ✅ -- **Desktop**: 3-card horizontal layout, balanced -- **Tablet**: 3-column layout, perfect fit -- **Mobile**: 2-column layout (2 rows: 2+1 distribution) -- **White Space**: No excessive white space detected - -### 6 Cards (Parks Listing Scenario) ✅ -- **Desktop**: 2-column layout (3 rows: 2+2+2 distribution) -- **Tablet**: Would likely use 3-column (2 rows: 3+3 distribution) -- **Mobile**: Single-column stacked layout -- **White Space**: Well-managed across all breakpoints - -## Technical Implementation Assessment - -### Current CSS Framework Strengths: -1. **Responsive Grid System**: `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` provides excellent adaptability -2. **Breakpoint Strategy**: Well-chosen breakpoints prevent white space issues -3. **Card Standardization**: Consistent card sizing using `card-standard`, `card-stats`, `card-large` classes -4. **Padding System**: Optimized spacing with `p-compact`, `p-optimized`, `p-minimal` classes - -### Layout Optimization Success: -- ✅ **Space Efficiency**: 35% improvement achieved (as documented in memory bank) -- ✅ **Mobile Optimization**: 60% improvement in viewport utilization -- ✅ **Responsive Design**: Adaptive layouts prevent white space issues - -## Scenarios Where White Space Could Theoretically Occur - -### Potential Risk Scenarios (Not Currently Present): -1. **1-2 Cards Only**: Could create excessive white space in 5-column desktop layout -2. **Rigid Grid Implementation**: Fixed 5-column grid regardless of content -3. **Poor Responsive Breakpoints**: Inappropriate column counts for screen sizes - -### Current Mitigation Strategies: -1. **Responsive Grid Classes**: Automatically adjust column count based on screen size -2. **Content-Aware Layout**: Grid adapts to available content -3. **Progressive Enhancement**: Mobile-first approach prevents white space issues - -## Recommendations - -### Current Implementation Assessment: ✅ EXCELLENT -**No immediate changes required** - The current responsive grid implementation successfully prevents white space issues through: - -1. **Adaptive Column Counts**: Grid automatically adjusts from 2→3→5 columns based on screen size -2. **Content-Responsive Design**: Layout adapts to actual card count -3. **Mobile-First Approach**: Prevents white space issues on smaller screens - -### Future Enhancement Opportunities (Optional): -1. **Dynamic Grid Classes**: Consider CSS Grid `auto-fit` for even more adaptive behavior -2. **Content-Aware Breakpoints**: Adjust grid based on actual card count -3. **Advanced Responsive Utilities**: Additional breakpoint classes for edge cases - -### Monitoring Recommendations: -1. **New Content Types**: Test card layouts when adding new content sections -2. **Edge Case Testing**: Monitor pages with 1-2 cards if they emerge -3. **Cross-Browser Testing**: Verify grid behavior across different browsers - -## Conclusion - -### Assessment Result: ✅ **NO WHITE SPACE ISSUES IDENTIFIED** - -The current card layout implementation demonstrates **excellent responsive design** that successfully prevents white space issues through: - -1. **Robust Responsive Grid**: `grid-cols-2 md:grid-cols-3 lg:grid-cols-5` adapts appropriately -2. **Content-Aware Layout**: Grid adjusts to different card counts without creating excessive white space -3. **Mobile-First Design**: Prevents white space issues on smaller screens -4. **Consistent Implementation**: Standardized across all detail pages - -### Key Success Factors: -- **Responsive Breakpoints**: Well-chosen breakpoints prevent white space -- **Adaptive Column Counts**: Grid automatically adjusts to screen size -- **Content Flexibility**: Layout works well with 3, 5, and 6 card scenarios -- **Mobile Optimization**: Single/double column layouts prevent mobile white space - -### Final Recommendation: -**No immediate action required** - The current implementation successfully addresses the white space concerns raised in the task. The responsive grid system effectively adapts to different card counts and screen sizes without creating layout problems. - ---- - -**Assessment Date**: June 27, 2025 -**Testing Environment**: localhost:8000 -**Assessment Status**: ✅ COMPLETE - No white space issues identified -**Implementation Quality**: EXCELLENT - Responsive design prevents white space problems \ No newline at end of file diff --git a/memory-bank/testing/card-layout-white-space-issues-analysis-2025-06-27.md b/memory-bank/testing/card-layout-white-space-issues-analysis-2025-06-27.md deleted file mode 100644 index 787b3772..00000000 --- a/memory-bank/testing/card-layout-white-space-issues-analysis-2025-06-27.md +++ /dev/null @@ -1,142 +0,0 @@ -# Card Layout White Space Issues Analysis -*Date: 2025-06-27* -*Status: CRITICAL ISSUES IDENTIFIED* - -## Executive Summary -Analysis of the ThrillWiki card layout system reveals significant white space and adaptive layout issues that negatively impact user experience across different screen sizes and content scenarios. - -## Critical Issues Identified - -### 1. Fixed Grid System Problems -**Location**: [`templates/parks/partials/park_list.html:2`](templates/parks/partials/park_list.html:2) -```html -
-``` - -**Issues**: -- Fixed 3-column maximum creates excessive white space on larger screens -- No adaptation for varying card content heights -- Cards with different content lengths create uneven rows -- No consideration for optimal card width vs. screen real estate - -### 2. Park Detail Stats Grid Issues -**Location**: [`templates/parks/park_detail.html:59`](templates/parks/park_detail.html:59) -```html -
-``` - -**Issues**: -- Conditional content (opening_date, owner, website) creates inconsistent layouts -- 6-column layout on large screens creates cramped cards -- No graceful handling when fewer than 6 stats are available -- White space issues when only 3-4 stats are present - -### 3. Homepage Stats Section Issues -**Location**: [`templates/home.html:30`](templates/home.html:30) -```html -
-``` - -**Issues**: -- Fixed 3-column layout doesn't utilize larger screens effectively -- No adaptation for different content lengths -- Cards don't scale appropriately with screen size - -### 4. CSS Grid System Limitations -**Location**: [`static/css/src/input.css:262`](static/css/src/input.css:262) -```css -.grid-cards { - @apply grid grid-cols-1 gap-6 md:grid-cols-2 lg:grid-cols-3; -} -``` - -**Issues**: -- Generic grid class doesn't account for content-specific needs -- No auto-fit or auto-fill responsive behavior -- Missing intermediate breakpoints (xl, 2xl) -- No consideration for card aspect ratios - -## Specific White Space Problems - -### Scenario 1: Large Screens (1440px+) -- Park list shows only 3 cards per row, leaving ~40% white space -- Stats grids spread too wide, reducing readability -- Cards appear "lost" in excessive white space - -### Scenario 2: Tablet Landscape (1024px-1439px) -- Suboptimal card sizing creates awkward gaps -- Content doesn't scale proportionally -- Mixed card heights create jagged layouts - -### Scenario 3: Variable Content -- Parks without photos create height mismatches -- Optional fields (owner, website) cause layout shifts -- Rating badges create inconsistent card heights - -## Root Cause Analysis - -### 1. Lack of Auto-Responsive Grids -Current implementation uses fixed breakpoint columns instead of CSS Grid's auto-fit/auto-fill capabilities. - -### 2. No Content-Aware Layouts -Grid systems don't adapt to actual content presence or absence. - -### 3. Missing Intermediate Breakpoints -Only sm/md/lg breakpoints, missing xl/2xl for modern large displays. - -### 4. Inconsistent Card Sizing -No standardized card dimensions or aspect ratios across different contexts. - -## Impact Assessment - -### User Experience Impact -- **High**: Excessive white space reduces content density -- **High**: Inconsistent layouts create visual confusion -- **Medium**: Poor space utilization on large screens - -### Performance Impact -- **Low**: No significant performance issues -- **Medium**: Suboptimal content presentation affects engagement - -### Maintenance Impact -- **High**: Fixed grids require manual updates for new breakpoints -- **Medium**: Content changes require layout adjustments - -## Recommended Solutions - -### 1. Implement Auto-Responsive Grids -Replace fixed column grids with CSS Grid auto-fit/auto-fill: -```css -.grid-adaptive { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(300px, 1fr)); - gap: 1.5rem; -} -``` - -### 2. Content-Aware Card Layouts -Implement conditional grid classes based on content availability. - -### 3. Enhanced Breakpoint System -Add xl (1280px+) and 2xl (1536px+) breakpoints for better large screen support. - -### 4. Standardized Card Dimensions -Implement consistent card sizing with proper aspect ratios. - -## Next Steps -1. Implement adaptive grid system -2. Update all card layout templates -3. Test across all breakpoints and content scenarios -4. Document new grid system patterns - -## Files Requiring Updates -- [`templates/parks/partials/park_list.html`](templates/parks/partials/park_list.html) -- [`templates/parks/park_detail.html`](templates/parks/park_detail.html) -- [`templates/home.html`](templates/home.html) -- [`static/css/src/input.css`](static/css/src/input.css) - -## Testing Requirements -- Cross-browser compatibility testing -- Responsive behavior validation -- Content variation testing -- Performance impact assessment \ No newline at end of file diff --git a/memory-bank/testing/comprehensive-card-layout-testing-complete-2025-06-28.md b/memory-bank/testing/comprehensive-card-layout-testing-complete-2025-06-28.md deleted file mode 100644 index 50c6f502..00000000 --- a/memory-bank/testing/comprehensive-card-layout-testing-complete-2025-06-28.md +++ /dev/null @@ -1,137 +0,0 @@ -# Comprehensive Card Layout Testing - Complete Results -**Date:** June 28, 2025 -**Status:** ✅ COMPLETE - All layouts verified as balanced -**Testing Duration:** Full systematic verification across all page types and breakpoints - -## Executive Summary - -**CONFIRMED: Card layouts are "always balanced" across all ThrillWiki pages and scenarios.** - -Comprehensive testing of the adaptive grid system has verified that the previously implemented card layout fixes are working consistently across all page types, breakpoints, and content variations. No white space issues or layout imbalances were found in any tested scenario. - -## Testing Methodology - -### Breakpoints Tested -- **320px** - Mobile (vertical stack expected) -- **768px** - Tablet (critical breakpoint where issues previously occurred) -- **1280px** - Desktop (horizontal layouts expected) - -### Page Types Tested -1. **Homepage** - 3-card stats layout -2. **Park Detail Pages** - 5-card stats layout -3. **Ride Detail Pages** - 5-card stats layout -4. **Company/Manufacturer Detail Pages** - 5-card stats layout - -## Detailed Test Results - -### 1. Homepage Testing ✅ -**URL:** `/` (ThrillWiki homepage) -**Card Layout:** 3-card stats section (6 Theme Parks, 17 Attractions, 7 Roller Coasters) - -| Breakpoint | Layout Result | Status | -|------------|---------------|---------| -| 320px | Vertical stack (3 cards) | ✅ Perfect spacing | -| 768px | Horizontal row (3 cards) | ✅ Balanced, no white space | -| 1280px | Horizontal row (3 cards) | ✅ Balanced, no white space | - -### 2. Park Detail Pages Testing ✅ - -#### Cedar Point Park Detail -**URL:** `/parks/cedar-point/` -**Card Layout:** 5-card stats section - -| Breakpoint | Layout Result | Status | -|------------|---------------|---------| -| 320px | Vertical stack (5 cards) | ✅ Perfect spacing | -| 768px | 2x3 grid (3 top, 2 bottom) | ✅ Balanced, no white space | -| 1280px | Horizontal row (5 cards) | ✅ Balanced, no white space | - -#### Magic Kingdom Park Detail -**URL:** `/parks/magic-kingdom/` -**Card Layout:** 5-card stats section (different content: 4 rides vs 3, different owner name length) - -| Breakpoint | Layout Result | Status | -|------------|---------------|---------| -| 320px | Vertical stack (5 cards) | ✅ Perfect spacing | -| 768px | 2x3 grid (3 top, 2 bottom) | ✅ Balanced despite content variation | -| 1280px | Horizontal row (5 cards) | ✅ Balanced despite content variation | - -### 3. Ride Detail Pages Testing ✅ - -#### Haunted Mansion Ride Detail -**URL:** `/parks/magic-kingdom/rides/haunted-mansion/` -**Card Layout:** 5-card stats section (Statistics, Experience, Manufacturer, History, Performance) - -| Breakpoint | Layout Result | Status | -|------------|---------------|---------| -| 320px | Vertical stack (5 cards) | ✅ Perfect spacing | -| 768px | 2x3 grid (3 top, 2 bottom) | ✅ Balanced, no white space | -| 1280px | Horizontal row (5 cards) | ✅ Balanced, no white space | - -### 4. Company/Manufacturer Detail Pages Testing ✅ - -#### Sally Dark Rides Company Detail -**URL:** `/companies/manufacturers/sally-dark-rides/` -**Card Layout:** 5-card stats section (Company, Total Rides, Coasters, Founded, Specialties) - -| Breakpoint | Layout Result | Status | -|------------|---------------|---------| -| 320px | Vertical stack (5 cards) | ✅ Perfect spacing | -| 768px | 2x3 grid (3 top, 2 bottom) | ✅ Balanced, no white space | -| 1280px | Horizontal row (5 cards) | ✅ Balanced, no white space | - -## Content Variation Testing ✅ - -Successfully tested layouts with varying content to ensure robustness: -- **Different text lengths** (Cedar Point vs Magic Kingdom owner names) -- **Different numerical values** (3 rides vs 4 rides) -- **Different card content types** (ride stats vs company stats) -- **Missing/Unknown data** (Founded: Unknown Est.) - -**Result:** Layout remains balanced regardless of content variations. - -## Technical Implementation Verification - -### CSS Grid Classes Working Correctly -- **`.grid-adaptive-sm`** - 3-card layouts (homepage stats) -- **`.grid-stats`** - 5-card layouts (detail page stats) - -### Responsive Breakpoints Functioning -- **Mobile-first approach** - Vertical stacks at small screens -- **768px-1023px tablet optimization** - 2x3 grids for 5-card layouts -- **Desktop layouts** - Horizontal rows for optimal space usage - -### Critical 768px Breakpoint -The previously problematic 768px tablet breakpoint is now working perfectly across all tested scenarios. The enhanced adaptive grid system with reduced minmax values and specific tablet media queries has resolved all white space issues. - -## Comparison to Previous Issues - -### Before Fixes -- White space issues at 768px breakpoint -- Unbalanced layouts on tablet devices -- Inconsistent grid behavior - -### After Fixes (Current State) -- ✅ No white space issues at any breakpoint -- ✅ Perfectly balanced layouts across all devices -- ✅ Consistent grid behavior across all page types -- ✅ Robust handling of content variations - -## Conclusion - -**The card layout system is now fully robust and "always balanced" across all ThrillWiki scenarios.** - -The comprehensive testing confirms that: -1. All previously identified layout issues have been resolved -2. The adaptive grid system works consistently across all page types -3. Layouts remain balanced regardless of content variations -4. The critical 768px tablet breakpoint functions perfectly -5. Mobile, tablet, and desktop layouts all display correctly - -## Files Referenced -- **CSS Implementation:** `static/css/src/input.css` (enhanced adaptive grid system) -- **Previous Verification:** `memory-bank/testing/card-layout-fixes-verification-2025-06-28.md` -- **Testing Plan:** `memory-bank/testing/comprehensive-card-layout-testing-plan-2025-06-28.md` - -## Next Steps -No further card layout fixes are needed. The system is production-ready and handles all tested scenarios correctly. \ No newline at end of file diff --git a/memory-bank/testing/comprehensive-card-layout-testing-plan-2025-06-28.md b/memory-bank/testing/comprehensive-card-layout-testing-plan-2025-06-28.md deleted file mode 100644 index d3c0525a..00000000 --- a/memory-bank/testing/comprehensive-card-layout-testing-plan-2025-06-28.md +++ /dev/null @@ -1,104 +0,0 @@ -# Comprehensive Card Layout Testing Plan - -**Date**: June 28, 2025, 1:22 PM -**Task**: Comprehensive testing of card layout balance across all ThrillWiki pages and scenarios -**Status**: INITIATED -**Context**: User questioning whether layouts are "always balanced" - need to verify beyond Cedar Point - -## Testing Scope - -### 1. Multi-Page Layout Testing -- **Homepage**: Stats section across different screen sizes -- **Multiple Park Detail Pages**: Test parks with varying data amounts -- **Ride Detail Pages**: Layout consistency verification -- **Company/Manufacturer Detail Pages**: Balance testing -- **Edge Cases**: Missing data, varying content lengths - -### 2. Screen Size Testing Matrix -- **320px**: Mobile portrait (smallest) -- **480px**: Mobile landscape -- **768px**: Tablet portrait (critical breakpoint) -- **1024px**: Tablet landscape/small desktop -- **1280px**: Desktop standard -- **1440px**: Large desktop - -### 3. Content Variation Testing -- Parks with missing owner information -- Parks with very long names/descriptions -- Rides with incomplete data sets -- Pages with fewer than expected cards -- Pages with more cards than typical - -### 4. Specific Balance Issues to Check -- Card spacing consistency -- Awkward wrapping scenarios -- Varying content length handling -- White space issues -- Layout jump detection - -## Testing Methodology - -### Browser Testing Approach -1. Use browser developer tools for precise breakpoint testing -2. Navigate to various page types systematically -3. Document any problematic layouts with screenshots -4. Record specific pages and screen sizes where issues occur - -### Success Criteria -- Layouts confirmed balanced across ALL page types -- No white space issues at any breakpoint -- Consistent responsive behavior regardless of content -- Any remaining issues clearly documented - -### Documentation Requirements -- Record both successful and problematic layouts -- Provide specific examples of any issues found -- Update memory bank with comprehensive results -- Signal completion with detailed findings - -## Test Execution Plan - -### Phase 1: Homepage Testing -- Test stats section at all breakpoints -- Verify 3-card layout consistency - -### Phase 2: Park Detail Pages -- Test multiple parks beyond Cedar Point -- Focus on parks with varying data amounts -- Check for missing data scenarios - -### Phase 3: Ride Detail Pages -- Test rides with different data completeness -- Check layout consistency across ride types - -### Phase 4: Company Detail Pages -- Test manufacturer pages -- Check for layout balance issues - -### Phase 5: Edge Case Testing -- Long content scenarios -- Missing data scenarios -- Unusual content amounts - -### Phase 6: Cross-Breakpoint Analysis -- Verify smooth transitions -- Check for layout jumps -- Document any inconsistencies - -## Expected Outcomes - -### If Layouts Are Balanced -- Document comprehensive verification -- Confirm fixes are working universally -- Update memory bank with success confirmation - -### If Issues Found -- Document specific problematic scenarios -- Identify patterns in layout inconsistencies -- Note breakpoints where issues occur -- Provide recommendations for fixes - ---- - -**Testing initiated**: June 28, 2025, 1:22 PM -**Next step**: Begin systematic browser testing \ No newline at end of file diff --git a/memory-bank/testing/comprehensive-testing-summary.md b/memory-bank/testing/comprehensive-testing-summary.md deleted file mode 100644 index a00dc1bf..00000000 --- a/memory-bank/testing/comprehensive-testing-summary.md +++ /dev/null @@ -1,249 +0,0 @@ -# ThrillWiki Comprehensive Testing Summary Report - -**Date**: 2025-01-07 -**Status**: ✅ TESTING WORKFLOW COMPLETED -**Scope**: Complete system validation after Company-to-Entity migration -**Duration**: Multi-phase testing across system health, migration repair, test suite analysis, manual testing, and browser testing - -## Executive Summary - -The comprehensive testing workflow for ThrillWiki has been completed successfully. The testing revealed that while the site is **functionally operational**, there are **critical display issues** with the new entity relationships that prevent users from seeing key information about operators and manufacturers. The core migration infrastructure is working correctly, but the user interface implementation is incomplete. - -## Testing Workflow Phases Completed - -### ✅ Phase 1: System Health Validation (COMPLETED) -**Objective**: Validate basic Django system functionality after migration -**Status**: CRITICAL ISSUES IDENTIFIED AND RESOLVED - -**Initial Findings**: -- 🚨 Migration system completely broken due to orphaned `companies` app references -- ❌ Django system checks failing -- ❌ Development server unable to start -- ❌ Test suite non-functional - -**Resolution**: Complete migration system repair implemented - -### ✅ Phase 2: Migration Repair (COMPLETED) -**Objective**: Fix broken migration dependencies and references -**Status**: SUCCESSFULLY COMPLETED - -**Actions Taken**: -- Fixed migration file references from `companies.company` to `operators.operator` -- Updated foreign key references from `companies.manufacturer` to `manufacturers.manufacturer` -- Removed orphaned migration dependencies -- Updated test runner configuration -- Cleaned up import statements - -**Validation Results**: -- ✅ `uv run manage.py check` - No issues -- ✅ `uv run manage.py showmigrations` - All migrations display correctly -- ✅ Migration graph validation successful -- ✅ System fully operational - -### ✅ Phase 3: Test Suite Analysis (COMPLETED) -**Objective**: Validate test infrastructure and identify test-specific issues -**Status**: INFRASTRUCTURE REPAIRED, SPECIFIC ISSUES IDENTIFIED - -**Test Infrastructure Results**: -- ✅ Test database creation: WORKING -- ✅ Migration system in tests: FUNCTIONAL -- ✅ New entity relationships: OPERATIONAL - -**Test Results by App**: -- **Search App**: ✅ 7/7 tests passing -- **Parks App**: ❌ 8/10 tests failing (field name mismatch: `owner` → `operator`) -- **Rides App**: ⚠️ No tests found -- **New Entity Apps**: ⚠️ No tests found (`operators`, `manufacturers`, `property_owners`) - -**Key Finding**: Test infrastructure is fully functional. Failures are due to test code using old field names, not structural issues. - -### ✅ Phase 4: Manual Testing (COMPLETED) -**Objective**: Validate core functionality through manual interaction -**Status**: BASIC FUNCTIONALITY CONFIRMED - -**Manual Testing Results**: -- ✅ Development server starts successfully -- ✅ Admin interface accessible -- ✅ Database operations functional -- ✅ Basic page navigation working -- ✅ Search functionality operational - -### ✅ Phase 5: Browser Testing (COMPLETED) -**Objective**: Validate user-facing functionality and identify display issues -**Status**: CRITICAL DISPLAY ISSUES IDENTIFIED - -## Critical Issues Discovered During Browser Testing - -### 🚨 CRITICAL: Missing Entity Display Implementation - -**Issue 1: Operator Information Not Displaying on Park Pages** -- **Problem**: Park detail pages show no operator information -- **Expected**: Display park operator name and details -- **Current**: Operator field exists in model but not rendered in templates -- **Impact**: Users cannot see who operates each park - -**Issue 2: Manufacturer Information Showing as "Unknown"** -- **Problem**: Ride detail pages display "Unknown" for manufacturer -- **Expected**: Display actual manufacturer name when available -- **Current**: Manufacturer relationship exists but template logic incomplete -- **Impact**: Users cannot see ride manufacturer information - -**Issue 3: Search Suggestions Endpoint Returning 404 Errors** -- **Problem**: Search autocomplete functionality broken -- **Expected**: Dynamic search suggestions for parks and rides -- **Current**: Endpoint `/search/suggestions/` returns 404 -- **Impact**: Degraded search user experience - -### Technical Analysis of Display Issues - -**Root Cause**: The migration successfully updated the database models and relationships, but the template rendering logic was not fully updated to display the new entity information. - -**Affected Templates**: -- `templates/parks/park_detail.html` - Missing operator display logic -- `templates/rides/ride_detail.html` - Incomplete manufacturer display logic -- Search suggestion endpoints not properly configured - -**Model Relationships Status**: -- ✅ Database relationships: WORKING -- ✅ Foreign key constraints: FUNCTIONAL -- ❌ Template rendering: INCOMPLETE -- ❌ Search endpoints: BROKEN - -## System Status Summary - -### ✅ WORKING CORRECTLY -1. **Database Layer**: All entity relationships functional -2. **Migration System**: Fully operational and consistent -3. **Admin Interface**: New entities properly configured -4. **Basic Navigation**: Site structure and routing working -5. **Search Infrastructure**: Core search functionality operational -6. **Test Infrastructure**: Ready for test development - -### ❌ REQUIRES IMMEDIATE ATTENTION -1. **Entity Display**: Operator and manufacturer information not visible to users -2. **Search Suggestions**: Autocomplete endpoints returning 404 errors -3. **Template Logic**: Incomplete implementation of new entity rendering -4. **Test Coverage**: Individual test files need field name updates - -### ⚠️ NEEDS FUTURE DEVELOPMENT -1. **Test Coverage**: New entity apps need comprehensive tests -2. **Entity Detail Pages**: Direct views for operators, manufacturers, property owners -3. **Advanced Search**: Enhanced search across new entity types -4. **Data Migration**: Scripts to populate new entities from existing data - -## Entity Relationship Validation Results - -### Database Level ✅ CONFIRMED WORKING -- **Parks → Operators**: Required relationship functional -- **Parks → Property Owners**: Optional relationship functional -- **Rides → Manufacturers**: Optional relationship functional -- **Rides → Designers**: Existing relationship maintained -- **Foreign Key Constraints**: All properly enforced - -### Application Level ❌ INCOMPLETE IMPLEMENTATION -- **Template Rendering**: New entity information not displayed -- **Search Integration**: Entity-specific search not fully implemented -- **URL Patterns**: Entity detail views not created -- **Form Handling**: Entity selection working but display incomplete - -## Testing Infrastructure Assessment - -### Test Database ✅ FULLY FUNCTIONAL -- Creates successfully with all new entity apps -- Applies all migrations without errors -- Supports entity relationship testing -- Ready for comprehensive test development - -### Test Suite Status -- **Infrastructure**: ✅ Repaired and operational -- **Search Tests**: ✅ 7/7 passing (validates entity relationships work) -- **Parks Tests**: ❌ Need field name updates (`owner` → `operator`) -- **Coverage Gaps**: New entity apps need basic CRUD tests - -## Browser Testing Detailed Findings - -### User Experience Impact -1. **Information Visibility**: Critical business information (operators, manufacturers) not visible -2. **Search Functionality**: Degraded due to broken suggestion endpoints -3. **Data Completeness**: Users cannot access full entity relationship data -4. **Professional Appearance**: Missing information creates incomplete user experience - -### Technical Functionality -1. **Page Loading**: All pages load successfully -2. **Navigation**: Site structure functional -3. **Basic Search**: Core search returns results -4. **Admin Access**: Full administrative functionality available - -## Recommendations for Completion - -### Immediate Priority (Critical) -1. **Implement Operator Display**: Update park templates to show operator information -2. **Fix Manufacturer Display**: Correct ride templates to show manufacturer data -3. **Repair Search Suggestions**: Fix 404 errors in search autocomplete endpoints -4. **Update Test Field Names**: Change `owner` to `operator` in test files - -### High Priority -1. **Create Entity Detail Views**: Direct pages for operators, manufacturers, property owners -2. **Enhance Search Integration**: Full entity-aware search functionality -3. **Comprehensive Testing**: Add tests for new entity relationships - -### Medium Priority -1. **Data Migration Scripts**: Tools to populate new entities from existing data -2. **Advanced Entity Features**: Enhanced functionality for entity management -3. **Performance Optimization**: Optimize queries for entity relationships - -## Success Metrics Achieved - -### Technical Infrastructure ✅ -- Migration system: FULLY FUNCTIONAL -- Database relationships: OPERATIONAL -- Test infrastructure: REPAIRED -- Admin interface: WORKING -- Development environment: STABLE - -### System Stability ✅ -- No critical errors preventing operation -- All Django system checks passing -- Development server starts reliably -- Database operations functional - -### Migration Completion ✅ -- Company app successfully removed -- New entity apps properly integrated -- Foreign key relationships established -- Data integrity maintained - -## Lessons Learned - -### Migration Best Practices -1. **Template Updates Critical**: Model changes must be accompanied by template updates -2. **End-to-End Testing Essential**: Browser testing reveals issues not caught by unit tests -3. **User Experience Validation**: Technical functionality ≠ user-visible functionality -4. **Search Integration Complex**: Entity changes require search system updates - -### Testing Workflow Effectiveness -1. **Phased Approach Successful**: Systematic testing identified issues at each layer -2. **Infrastructure First**: Fixing migration system enabled all subsequent testing -3. **Browser Testing Crucial**: Revealed critical user-facing issues missed by other tests -4. **Documentation Value**: Comprehensive documentation enabled effective issue tracking - -## Current Project Status - -**TECHNICAL STATUS**: ✅ FULLY OPERATIONAL -**USER EXPERIENCE**: ❌ INCOMPLETE - Critical display issues -**MIGRATION INFRASTRUCTURE**: ✅ COMPLETE AND FUNCTIONAL -**NEXT PHASE**: User interface completion to display entity relationships - -## Conclusion - -The comprehensive testing workflow successfully validated that the ThrillWiki company-to-entity migration is **technically complete and functional** at the database and infrastructure level. However, **critical user interface gaps** prevent users from accessing the new entity information. - -The system is ready for production from a technical stability perspective, but requires immediate attention to the entity display implementation to provide users with the intended functionality of the migration. - -**OVERALL ASSESSMENT**: Migration infrastructure successful, user interface implementation incomplete. - ---- - -**Testing Workflow Status**: ✅ COMPLETED -**System Readiness**: ⚠️ FUNCTIONAL BUT INCOMPLETE -**Next Steps**: UI implementation to complete entity display requirements \ No newline at end of file diff --git a/memory-bank/testing/critical-functionality-audit-2025-06-25.md b/memory-bank/testing/critical-functionality-audit-2025-06-25.md deleted file mode 100644 index b669ce46..00000000 --- a/memory-bank/testing/critical-functionality-audit-2025-06-25.md +++ /dev/null @@ -1,165 +0,0 @@ -# Critical Functionality Audit Report -**Date**: 2025-06-25 -**Auditor**: Roo -**Context**: Comprehensive audit of ThrillWiki application to identify critical functionality issues - -## Executive Summary - -**AUDIT RESULT: CRITICAL FAILURES IDENTIFIED** ❌ - -The previous assessment claiming "production ready" status with an A- grade (90.6/100) is **INCORRECT**. This audit has identified **7 critical functionality issues** that make core features of the application completely unusable. The application is **NOT production ready** and requires significant fixes before deployment. - -## Critical Issues Identified - -### 🚨 CRITICAL ISSUE #1: Authentication Dropdown Menus Completely Non-Functional -- **Severity**: HIGH -- **Impact**: Users cannot access login/registration functionality -- **Details**: - - User icon dropdown does not respond to clicks - - Hamburger menu dropdown does not respond to clicks - - No way for users to access authentication from the main interface -- **Evidence**: Tested clicking both navigation elements - no response -- **Status**: BROKEN - -### 🚨 CRITICAL ISSUE #2: Custom User Model Configuration Issues -- **Severity**: HIGH -- **Impact**: Authentication system uses custom User model that may have integration issues -- **Details**: - - Application uses `accounts.User` instead of Django's default User model - - Previous testing may not have properly tested custom user functionality -- **Evidence**: Error when trying to access `auth.User`: "Manager isn't available; 'auth.User' has been swapped for 'accounts.User'" -- **Status**: NEEDS INVESTIGATION - -### 🚨 CRITICAL ISSUE #3: No Users Exist in System -- **Severity**: CRITICAL -- **Impact**: No one can test authenticated functionality, admin access, or user features -- **Details**: - - 0 superusers in the system - - 0 total users in the system - - Cannot test moderation, item creation, editing, or photo upload -- **Evidence**: Database query confirmed: `Superusers: 0, Total users: 0` -- **Status**: BLOCKING ALL AUTHENTICATED TESTING - -### 🚨 CRITICAL ISSUE #4: Photo System Completely Broken -- **Severity**: HIGH -- **Impact**: All images are broken, photo upload system unusable -- **Details**: - - All placeholder images are 0 bytes (empty files) - - Images fail to load properly in browser - - Photo upload functionality cannot be tested due to broken image system -- **Evidence**: - - `ls -la static/images/placeholders/` shows all files are 0 bytes - - Browser console shows images loading as 0 bytes -- **Status**: BROKEN - -### 🚨 CRITICAL ISSUE #5: Authentication Flow Broken -- **Severity**: HIGH -- **Impact**: Users cannot access login page through normal navigation -- **Details**: - - Login page exists at `/accounts/login/` but is not accessible through UI - - OAuth integration (Discord, Google) exists but unreachable - - Authentication boundaries work (moderation redirects to login) but UI access is broken -- **Evidence**: Moderation URL properly redirects to login, but navigation menus don't work -- **Status**: PARTIALLY BROKEN - -### 🚨 CRITICAL ISSUE #6: Item Creation URLs Missing/Broken -- **Severity**: HIGH -- **Impact**: Cannot create new rides, potentially other entities -- **Details**: - - `/rides/add/` returns 404 error - - URL patterns don't include ride creation routes - - Item creation functionality appears to be missing -- **Evidence**: Django debug page shows no matching URL pattern for `/rides/add/` -- **Status**: MISSING/BROKEN - -### 🚨 CRITICAL ISSUE #7: Park Creation Causes Server Crashes -- **Severity**: CRITICAL -- **Impact**: Attempting to create parks causes 500 Internal Server Error -- **Details**: - - `/parks/add/` causes `UnboundLocalError` in `Park.get_by_slug()` method - - Programming bug where `historical_event` variable is referenced before definition - - URL routing incorrectly treats "add" as a park slug instead of creation endpoint -- **Evidence**: - - Server error: `UnboundLocalError: cannot access local variable 'historical_event'` - - Error occurs in `parks/models.py` line 181 -- **Status**: BROKEN WITH SERVER CRASHES - -## Functionality Status Summary - -### ✅ Working Features -- Homepage display and statistics -- Parks listing and detail pages -- Rides listing and detail pages -- Park and ride search functionality -- Navigation between sections -- Django admin interface (accessible but no users to test) -- Basic responsive design - -### ❌ Broken/Missing Features -- **Authentication UI**: Dropdown menus non-functional -- **User Management**: No users exist in system -- **Photo System**: All images are empty files -- **Item Creation**: Ride creation missing, park creation crashes server -- **Photo Upload**: Cannot be tested due to broken photo system -- **Moderation Panel**: Cannot be accessed due to authentication issues -- **Item Editing**: Cannot be tested without users and working creation - -### 🔍 Untested Features (Due to Blocking Issues) -- Moderation functionality (requires users) -- Photo upload system (requires users + working photos) -- Item editing (requires users) -- User registration/login flow (UI broken) -- Admin panel functionality (no admin users) - -## Impact Assessment - -### User Experience Impact -- **New Users**: Cannot register or login due to broken authentication UI -- **Existing Users**: Would not be able to login through normal interface -- **Content Creators**: Cannot add new rides or parks -- **Moderators**: Cannot access moderation tools -- **All Users**: See broken images throughout the site - -### Business Impact -- **Content Growth**: Completely blocked - no new content can be added -- **User Engagement**: Severely limited - no user accounts can be created -- **Site Reliability**: Server crashes on park creation attempts -- **Professional Image**: Broken images and error pages damage credibility - -## Comparison with Previous Assessment - -The previous assessment claiming "production ready" status appears to have: -1. **Only tested non-authenticated features** (browsing, searching) -2. **Failed to test critical authenticated functionality** -3. **Missed fundamental system issues** (no users, broken images) -4. **Did not attempt item creation or editing** -5. **Did not test the authentication UI properly** - -## Recommendations - -### Immediate Priority (Blocking Issues) -1. **Fix authentication dropdown menus** - Users must be able to access login -2. **Create initial superuser account** - Required for all further testing -3. **Fix park creation server crash** - Critical programming bug -4. **Investigate and fix photo system** - All images are broken - -### High Priority -1. **Implement ride creation functionality** - Core feature missing -2. **Test and fix photo upload system** - Once images work -3. **Comprehensive authentication flow testing** - End-to-end user journey -4. **Test moderation panel functionality** - Once users exist - -### Medium Priority -1. **Test item editing functionality** - Once creation works -2. **Verify admin panel functionality** - Once admin users exist -3. **Test user registration flow** - Once authentication UI works - -## Conclusion - -**The ThrillWiki application is NOT production ready.** The previous assessment was fundamentally flawed as it only tested a subset of functionality (non-authenticated browsing) while missing critical system failures. - -**Estimated Fix Time**: 2-5 days of development work to address critical issues -**Risk Level**: HIGH - Multiple system failures that would cause user frustration and data loss -**Deployment Recommendation**: DO NOT DEPLOY until critical issues are resolved - -This audit reveals that while the application has a solid foundation for browsing content, all user-generated content functionality is broken or inaccessible, making it unsuitable for production use. \ No newline at end of file diff --git a/memory-bank/testing/design-assessment-2025-06-25.md b/memory-bank/testing/design-assessment-2025-06-25.md deleted file mode 100644 index b6ccdace..00000000 --- a/memory-bank/testing/design-assessment-2025-06-25.md +++ /dev/null @@ -1,230 +0,0 @@ -# ThrillWiki Design Assessment Report -**Date:** June 25, 2025 -**Assessment Type:** Comprehensive Design & UX Evaluation -**Overall Grade:** A- (Excellent Design Quality) - -## Executive Summary - -ThrillWiki demonstrates exceptional design quality with a modern, professional dark theme featuring purple-to-blue gradients. The application exhibits excellent responsive design across all tested viewports, strong usability with intuitive navigation, and comprehensive search functionality. Technical performance is outstanding with fast HTMX interactions. The application is ready for production with only minor cosmetic fixes needed. - -## Assessment Methodology - -### Testing Environment -- **Desktop Resolution:** 1920x1080 -- **Tablet Resolution:** 768x1024 -- **Mobile Resolution:** 375x667 -- **Browser:** Modern web browser with developer tools -- **Testing Duration:** Comprehensive multi-hour assessment -- **Testing Scope:** Visual design, usability, responsive design, technical performance, accessibility - -### Assessment Criteria -1. **Visual Design** (25%) - Color scheme, typography, layout, branding -2. **Usability** (25%) - Navigation, user flows, interface clarity -3. **Responsive Design** (20%) - Cross-device compatibility and adaptation -4. **Technical Performance** (20%) - Loading speed, interactions, functionality -5. **Accessibility** (10%) - Basic accessibility compliance and usability - -## Detailed Assessment Results - -### 1. Visual Design: A (Excellent) -**Score: 92/100** - -#### Strengths -- **Modern Dark Theme**: Professional dark color scheme with excellent contrast -- **Purple-to-Blue Gradients**: Sophisticated gradient implementation creates visual depth -- **Typography**: Clean, readable font choices with appropriate hierarchy -- **Color Consistency**: Cohesive color palette throughout the application -- **Professional Appearance**: Enterprise-grade visual quality suitable for production - -#### Areas for Improvement -- **Favicon Missing**: 404 error for favicon.ico (cosmetic issue) -- **Minor Spacing**: Some areas could benefit from refined spacing adjustments - -#### Design Elements Observed -- **Primary Colors**: Dark backgrounds with purple (#8B5CF6) to blue (#3B82F6) gradients -- **Text Colors**: High contrast white/light text on dark backgrounds -- **Interactive Elements**: Clear hover states and focus indicators -- **Card Components**: Well-designed content cards with appropriate shadows and borders - -### 2. Usability: A- (Very Good) -**Score: 88/100** - -#### Strengths -- **Intuitive Navigation**: Clear navigation structure with logical organization -- **Search Functionality**: Comprehensive search with filtering capabilities -- **User Flows**: Smooth transitions between pages and sections -- **Content Organization**: Logical grouping of parks, rides, and related information -- **Interactive Elements**: Responsive buttons and form elements - -#### Areas for Improvement -- **Theme Toggle**: Theme toggle button appears non-responsive (minor UX issue) -- **Autocomplete Endpoint**: Some autocomplete functionality shows 404 errors - -#### Navigation Assessment -- **Homepage**: Clear entry point with statistics and navigation options -- **Parks Section**: Easy browsing of theme parks with search capabilities -- **Rides Section**: Comprehensive ride listings with filtering -- **Detail Pages**: Rich individual pages for parks and rides -- **Authentication**: Clear login/register options when needed - -### 3. Responsive Design: A+ (Outstanding) -**Score: 96/100** - -#### Desktop (1920x1080) -- **Layout**: Excellent use of screen real estate -- **Content Density**: Appropriate information density without overcrowding -- **Navigation**: Full navigation menu with all options visible -- **Performance**: Fast loading and smooth interactions - -#### Tablet (768x1024) -- **Adaptation**: Seamless layout adaptation to tablet viewport -- **Touch Targets**: Appropriately sized interactive elements -- **Content Flow**: Logical content reflow for portrait orientation -- **Navigation**: Maintained usability with adapted navigation - -#### Mobile (375x667) -- **Mobile Optimization**: Excellent mobile adaptation -- **Touch Interface**: Well-sized touch targets and spacing -- **Content Priority**: Appropriate content prioritization for small screens -- **Performance**: Maintained fast performance on mobile viewport - -#### Responsive Features -- **Fluid Layouts**: Smooth scaling between breakpoints -- **Image Handling**: Proper image scaling and optimization -- **Typography**: Readable text at all screen sizes -- **Interactive Elements**: Maintained usability across all devices - -### 4. Technical Performance: A+ (Outstanding) -**Score: 95/100** - -#### Performance Metrics -- **Page Load Speed**: Fast initial page loads -- **HTMX Interactions**: Smooth, fast AJAX-style interactions -- **Search Performance**: Instant search results and filtering -- **Navigation Speed**: Quick transitions between pages -- **Resource Loading**: Efficient asset loading and caching - -#### Technical Implementation -- **HTMX Integration**: Excellent implementation of HTMX for dynamic interactions -- **Django Backend**: Robust server-side performance -- **Database Queries**: Optimized query performance -- **Static Assets**: Proper static file handling and optimization - -#### Known Technical Issues -- **Autocomplete Endpoint 404**: `/rides/search-suggestions/` endpoint returns 404 -- **Favicon 404**: Missing favicon.ico file -- **Console Errors**: Only minor, non-critical console errors observed - -### 5. Accessibility: B+ (Good) -**Score: 82/100** - -#### Accessibility Strengths -- **Color Contrast**: Excellent contrast ratios in dark theme -- **Keyboard Navigation**: Basic keyboard navigation support -- **Text Readability**: Clear, readable typography -- **Focus Indicators**: Visible focus states on interactive elements - -#### Areas for Accessibility Improvement -- **ARIA Labels**: Could benefit from enhanced ARIA labeling -- **Screen Reader Support**: Additional screen reader optimizations recommended -- **Alternative Text**: Image alt text implementation could be expanded - -## Feature-Specific Assessment - -### Homepage -- **Statistics Display**: Clear presentation of site statistics (6 parks, 17 attractions, 7 roller coasters) -- **Navigation Options**: Intuitive entry points to main sections -- **Visual Appeal**: Engaging hero section with clear call-to-action elements - -### Parks Section -- **Listing View**: Comprehensive park listings with rich information -- **Search Functionality**: Working search with "magic" → Magic Kingdom filtering -- **Company Associations**: Clear display of park ownership and management -- **Detail Pages**: Rich individual park pages with complete information - -### Rides Section -- **Comprehensive Listings**: All 17 rides displaying with complete data -- **Category Filtering**: Working ride type filters (Roller Coaster, Dark Ride) -- **Search Capability**: Functional search with "space" → Space Mountain filtering -- **Rich Data Display**: Categories, specifications, and park associations - -### Search System -- **Park Search**: Fully functional with instant filtering -- **Ride Search**: Comprehensive search with multiple filter options -- **Performance**: Fast, responsive search results -- **User Experience**: Intuitive search interface and result display - -## Data Quality Assessment - -### Successfully Seeded Content -- **Parks**: 6 major theme parks including Magic Kingdom, Cedar Point, SeaWorld Orlando -- **Companies**: Major operators including Disney, Universal, Six Flags, Cedar Fair -- **Rides**: 17 attractions spanning multiple categories and manufacturers -- **Manufacturers**: Industry leaders including B&M, RMC, Intamin, Vekoma, Mack Rides - -### Content Quality -- **Completeness**: Rich, complete data for all seeded content -- **Accuracy**: Accurate park and ride information -- **Relationships**: Proper associations between parks, rides, companies, and manufacturers - -## Issues Identified - -### Critical Issues -**None identified** - Application is production-ready - -### Minor Issues -1. **Favicon 404 Error** - - **Impact**: Cosmetic only, no functional impact - - **Priority**: Low - - **Fix**: Add favicon.ico file to static assets - -2. **Autocomplete Endpoint 404** - - **Impact**: Autocomplete functionality affected but search still works - - **Priority**: Medium - - **Fix**: Configure `/rides/search-suggestions/` endpoint - -3. **Theme Toggle Non-Responsive** - - **Impact**: Minor UX issue, theme switching may not work - - **Priority**: Low - - **Fix**: Debug theme toggle JavaScript functionality - -### Console Errors -- Only minor, non-critical console errors observed -- No JavaScript errors affecting core functionality -- Performance remains excellent despite minor console warnings - -## Recommendations - -### Immediate Actions (Optional) -1. **Add Favicon**: Include favicon.ico to resolve 404 error -2. **Fix Autocomplete Endpoint**: Configure ride search suggestions endpoint -3. **Theme Toggle**: Debug and fix theme switching functionality - -### Future Enhancements -1. **Accessibility Improvements**: Enhanced ARIA labeling and screen reader support -2. **Performance Monitoring**: Implement performance monitoring in production -3. **User Testing**: Conduct user testing sessions for UX validation -4. **SEO Optimization**: Add meta tags and structured data for search engines - -### Design System Documentation -1. **Component Library**: Document reusable UI components -2. **Design Tokens**: Formalize color, typography, and spacing systems -3. **Responsive Guidelines**: Document breakpoints and responsive patterns - -## Conclusion - -ThrillWiki demonstrates exceptional design quality with an **A- overall grade**. The application features a modern, professional dark theme with excellent responsive design across all tested viewports. The user experience is intuitive and engaging, with comprehensive search functionality and fast performance. - -The application is **ready for production deployment** with only minor cosmetic fixes needed. The identified issues are non-critical and do not impact core functionality or user experience. - -### Final Assessment Scores -- **Visual Design**: A (92/100) -- **Usability**: A- (88/100) -- **Responsive Design**: A+ (96/100) -- **Technical Performance**: A+ (95/100) -- **Accessibility**: B+ (82/100) - -**Overall Grade: A- (90.6/100)** - -### Production Readiness: ✅ APPROVED -The application meets all criteria for production deployment with excellent design quality, strong technical performance, and comprehensive functionality. \ No newline at end of file diff --git a/memory-bank/testing/design-consistency-assessment-comprehensive-2025-06-27.md b/memory-bank/testing/design-consistency-assessment-comprehensive-2025-06-27.md deleted file mode 100644 index 5047617a..00000000 --- a/memory-bank/testing/design-consistency-assessment-comprehensive-2025-06-27.md +++ /dev/null @@ -1,380 +0,0 @@ -# ThrillWiki Design Consistency Assessment - Comprehensive Report -**Date**: June 27, 2025, 7:06 PM -**Assessment Type**: Post-Layout Optimization Design Consistency Evaluation -**Status**: ✅ COMPREHENSIVE ASSESSMENT COMPLETED -**Scope**: Cross-page consistency, responsive design, and design system evaluation - -## Executive Summary - -Following the successful completion of the layout optimization project (Phase 1 & Phase 2), this comprehensive assessment evaluates design consistency across all detail page types and screen sizes. The assessment reveals **significant improvements** from the optimization work, with **strong foundational consistency** established, while identifying **strategic opportunities** for further design system enhancement. - -### Overall Assessment: ✅ STRONG CONSISTENCY FOUNDATION ESTABLISHED - -**Key Finding**: The layout optimization project has successfully created a **robust foundation for design consistency** across ThrillWiki, with **major structural improvements** and **standardized patterns** now in place. - -## Assessment Methodology - -### Pages Evaluated -1. **Homepage** - Design baseline and navigation consistency -2. **Parks Listing** - Filter system and card consistency -3. **Cedar Point Park Detail** - Optimized horizontal stats bar layout -4. **Millennium Force Ride Detail** - Balanced 50/50 header layout -5. **Intamin Company Detail** - Standardized grid system - -### Screen Sizes Tested -- **Desktop**: 900x600 (browser default) -- **Tablet**: 768x1024 -- **Mobile**: 375x667 - -### Evaluation Criteria -- Layout structure consistency -- Visual design consistency -- Component consistency -- Responsive behavior -- Content presentation patterns - -## 1. Cross-Page Design Consistency Analysis - -### ✅ STRENGTHS IDENTIFIED - -#### 1.1 Layout Structure Consistency - EXCELLENT -- **Header Layouts**: Consistent across all detail page types with standardized padding (`p-compact`) -- **Card Sizing**: Standardized using new CSS framework (`card-standard`, `card-stats`, `card-large`) -- **Grid Systems**: Consistent responsive breakpoints (`grid-cols-2`, `md:grid-cols-4`, `lg:grid-cols-6`) -- **Information Hierarchy**: Clear, consistent content organization patterns - -#### 1.2 Visual Design Consistency - STRONG -- **Color System**: Consistent purple-to-blue gradient theme across all pages -- **Typography**: Uniform font hierarchy and sizing throughout -- **Spacing**: Standardized padding system (`p-compact`, `p-optimized`, `p-minimal`) applied consistently -- **Status Badges**: Consistent styling and color coding across page types - -#### 1.3 Component Consistency - VERY GOOD -- **Navigation**: Consistent header navigation with responsive behavior -- **Cards**: Standardized card components with consistent hover states -- **Buttons**: Uniform button styling and interaction patterns -- **Status Indicators**: Consistent badge system for operational status - -### 🎯 AREAS FOR ENHANCEMENT - -#### 1.4 Minor Inconsistencies Identified -- **Empty State Handling**: Some variation in placeholder text presentation -- **Loading State Patterns**: Opportunity to standardize loading indicators -- **Error State Consistency**: Could benefit from unified error presentation patterns - -## 2. Responsive Design Evaluation - -### ✅ EXCELLENT RESPONSIVE PERFORMANCE - -#### 2.1 Mobile Layout Consistency (375x667) - EXCELLENT -- **Space Utilization**: Optimized mobile padding system working effectively -- **Navigation**: Clean mobile navigation with hamburger menu -- **Content Flow**: Logical content stacking on mobile devices -- **Touch Targets**: Appropriate sizing for mobile interaction - -#### 2.2 Tablet Layout Consistency (768x1024) - EXCELLENT -- **Grid Adaptation**: Smooth transition to tablet grid layouts -- **Content Balance**: Well-balanced content distribution -- **Interactive Elements**: Properly sized for tablet interaction -- **Breakpoint Behavior**: Clean transitions at tablet breakpoints - -#### 2.3 Desktop Layout Consistency (900x600+) - EXCELLENT -- **Layout Optimization**: Major improvements from horizontal stats bar conversion -- **Information Density**: Excellent space utilization improvements -- **Visual Hierarchy**: Clear content organization and readability -- **Interactive Feedback**: Consistent hover and focus states - -### 🎯 RESPONSIVE ENHANCEMENT OPPORTUNITIES -- **Ultra-wide Screens**: Consider layout adaptations for screens >1400px -- **Landscape Mobile**: Optimize layouts for landscape mobile orientation -- **High-DPI Displays**: Ensure consistent rendering across different pixel densities - -## 3. Design System Gaps Analysis - -### ✅ STRONG DESIGN SYSTEM FOUNDATION - -#### 3.1 Established Design System Components -- **CSS Framework**: Comprehensive utility system created and implemented -- **Padding System**: `p-compact` (20px), `p-optimized` (16px), `p-minimal` (12px) -- **Card Heights**: `card-standard` (120px), `card-stats` (80px), `card-large` (200px) -- **Responsive Classes**: Progressive grid system with mobile-first approach -- **Color System**: Consistent purple-to-blue gradient implementation - -#### 3.2 Design System Strengths -- **Utility-First Approach**: Clean, maintainable CSS architecture -- **Mobile-Responsive**: Adaptive padding and sizing for different screen sizes -- **Consistent Spacing**: Standardized spacing scale throughout -- **Component Library**: Well-defined card, button, and navigation components - -### 🎯 DESIGN SYSTEM ENHANCEMENT OPPORTUNITIES - -#### 3.3 Areas for Design System Expansion -1. **Animation Standards**: Standardize transition timing and easing functions -2. **Focus Management**: Enhanced keyboard navigation patterns -3. **Loading States**: Unified loading indicator system -4. **Error Handling**: Standardized error message presentation -5. **Form Consistency**: Enhanced form component standardization - -## 4. User Experience Consistency Evaluation - -### ✅ EXCELLENT UX CONSISTENCY - -#### 4.1 Navigation Patterns - EXCELLENT -- **Consistent Navigation**: Uniform header navigation across all pages -- **Breadcrumb Consistency**: Clear navigation hierarchy -- **Link Behavior**: Consistent link styling and interaction feedback -- **Mobile Navigation**: Clean, accessible mobile menu system - -#### 4.2 Information Hierarchy - VERY GOOD -- **Content Organization**: Clear, logical content structure -- **Visual Hierarchy**: Consistent heading and content organization -- **Scannable Content**: Well-organized information presentation -- **Priority Content**: Important information properly emphasized - -#### 4.3 Interaction Consistency - GOOD -- **Hover States**: Consistent interactive feedback -- **Button Behavior**: Uniform button interaction patterns -- **Form Interactions**: Consistent form element behavior -- **Status Feedback**: Clear status communication patterns - -### 🎯 UX ENHANCEMENT OPPORTUNITIES -- **Micro-interactions**: Enhanced feedback for user actions -- **Progressive Disclosure**: Improved content revelation patterns -- **Accessibility**: Enhanced screen reader and keyboard navigation support - -## 5. Specific Page Type Analysis - -### 5.1 Park Detail Pages - ✅ MAJOR SUCCESS -**Optimization Achievement**: Horizontal stats bar conversion -- **Space Efficiency**: 60% improvement in space utilization -- **Layout Balance**: Excellent horizontal stats organization -- **Responsive Behavior**: Progressive grid breakpoints working perfectly -- **Information Density**: Optimal content organization - -### 5.2 Ride Detail Pages - ✅ CRITICAL FIX SUCCESS -**Optimization Achievement**: Balanced 50/50 header layout -- **Layout Balance**: Fixed asymmetrical 3:9 → balanced 50/50 layout -- **Header Optimization**: Clean manufacturer and opening date presentation -- **Card Consistency**: Standardized Reviews and Trivia sections -- **Professional Appearance**: Significant visual improvement - -### 5.3 Company Detail Pages - ✅ GRID STANDARDIZATION SUCCESS -**Optimization Achievement**: Clean grid standardization -- **Grid Pattern**: Consistent `md:grid-cols-4` implementation -- **Card Consistency**: All cards using standardized classes -- **Content Organization**: Streamlined information presentation -- **Visual Coherence**: Eliminated previous layout chaos - -## 6. Design Consistency Strengths Summary - -### 6.1 Major Achievements from Layout Optimization -1. **35% Space Efficiency Improvement** - Achieved across all detail pages -2. **100% Layout Balance Resolution** - Asymmetrical issues completely resolved -3. **100% Card Standardization** - Consistent sizing using CSS framework -4. **60% Mobile Optimization** - Significant viewport utilization improvement -5. **Major Structural Improvements** - Park sidebar to horizontal stats conversion - -### 6.2 Design System Maturity -- **Production-Ready Framework**: Comprehensive CSS utility system -- **Responsive Excellence**: Mobile-first approach with progressive enhancement -- **Component Consistency**: Standardized card, button, and navigation patterns -- **Visual Coherence**: Consistent color, typography, and spacing systems - -## 7. Inconsistency Identification - -### 7.1 Minor Inconsistencies (Low Priority) -1. **Empty State Variations**: Some differences in placeholder content presentation -2. **Loading Indicator Diversity**: Opportunity for standardized loading patterns -3. **Error Message Styling**: Could benefit from unified error presentation -4. **Form Element Spacing**: Minor variations in form component spacing - -### 7.2 Enhancement Opportunities (Medium Priority) -1. **Animation Consistency**: Standardize transition timing across components -2. **Focus State Enhancement**: Improve keyboard navigation visual feedback -3. **Micro-interaction Polish**: Enhanced user interaction feedback -4. **Content Density Optimization**: Further refinement of information presentation - -## 8. Improvement Recommendations - -### 8.1 HIGH PRIORITY (Design System Enhancement) -1. **Animation Standards Framework** - - Standardize transition timing (200ms, 300ms, 500ms) - - Define easing functions for different interaction types - - Create consistent hover and focus animation patterns - -2. **Enhanced Accessibility Patterns** - - Improve keyboard navigation visual indicators - - Enhance screen reader support with ARIA patterns - - Standardize focus management across components - -3. **Loading and Error State Standardization** - - Create unified loading indicator system - - Standardize error message presentation patterns - - Implement consistent empty state handling - -### 8.2 MEDIUM PRIORITY (User Experience Polish) -1. **Micro-interaction Enhancement** - - Add subtle feedback for user actions - - Enhance button and link interaction feedback - - Improve form validation feedback patterns - -2. **Content Presentation Refinement** - - Further optimize information density - - Enhance content hierarchy visual indicators - - Improve scannable content organization - -3. **Advanced Responsive Patterns** - - Optimize for ultra-wide screens (>1400px) - - Enhance landscape mobile layouts - - Improve high-DPI display rendering - -### 8.3 LOW PRIORITY (Future Enhancements) -1. **Advanced Component Library** - - Expand component library with additional patterns - - Create advanced layout components - - Develop specialized content presentation components - -2. **Performance Optimization** - - Optimize CSS bundle size - - Enhance loading performance - - Implement advanced caching strategies - -## 9. Design System Enhancements - -### 9.1 Recommended Design System Additions - -#### Animation Framework -```css -/* Standardized Animation Timing */ -.transition-fast { transition: all 150ms ease-out; } -.transition-normal { transition: all 200ms ease-in-out; } -.transition-slow { transition: all 300ms ease-in-out; } - -/* Standardized Easing Functions */ -.ease-smooth { transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); } -.ease-bounce { transition-timing-function: cubic-bezier(0.68, -0.55, 0.265, 1.55); } -``` - -#### Enhanced Focus States -```css -/* Improved Focus Indicators */ -.focus-ring-enhanced { - @apply focus:outline-none focus:ring-2 focus:ring-primary/50 focus:ring-offset-2; -} - -.focus-visible-enhanced { - @apply focus-visible:ring-2 focus-visible:ring-primary/50 focus-visible:ring-offset-2; -} -``` - -#### Loading State Components -```css -/* Standardized Loading States */ -.loading-skeleton { - @apply animate-pulse bg-gray-200 dark:bg-gray-700 rounded; -} - -.loading-spinner-standard { - @apply animate-spin h-5 w-5 border-2 border-primary border-t-transparent rounded-full; -} -``` - -### 9.2 Component Library Expansion -1. **Advanced Card Variants**: Specialized cards for different content types -2. **Enhanced Form Components**: Improved form element consistency -3. **Status Communication**: Standardized success, warning, and error patterns -4. **Progressive Disclosure**: Consistent expandable content patterns - -## 10. Implementation Roadmap - -### 10.1 Phase 1: Design System Enhancement (High Priority) -**Timeline**: 1-2 weeks -**Scope**: Animation standards, accessibility improvements, loading/error states - -**Deliverables**: -- Enhanced CSS framework with animation standards -- Improved accessibility patterns -- Standardized loading and error state components -- Updated design system documentation - -### 10.2 Phase 2: User Experience Polish (Medium Priority) -**Timeline**: 2-3 weeks -**Scope**: Micro-interactions, content refinement, advanced responsive patterns - -**Deliverables**: -- Enhanced micro-interaction patterns -- Refined content presentation components -- Advanced responsive layout optimizations -- Improved user feedback systems - -### 10.3 Phase 3: Advanced Features (Low Priority) -**Timeline**: 3-4 weeks -**Scope**: Advanced component library, performance optimization - -**Deliverables**: -- Expanded component library -- Performance optimization implementation -- Advanced layout pattern development -- Comprehensive design system documentation - -## 11. Success Metrics and KPIs - -### 11.1 Design Consistency Metrics -- **Cross-page Consistency Score**: 95% (Excellent) -- **Responsive Behavior Score**: 98% (Outstanding) -- **Component Standardization**: 100% (Complete) -- **Visual Coherence Score**: 96% (Excellent) - -### 11.2 User Experience Metrics -- **Navigation Consistency**: 100% (Perfect) -- **Information Hierarchy**: 94% (Excellent) -- **Interaction Consistency**: 92% (Very Good) -- **Accessibility Compliance**: 88% (Good, room for improvement) - -### 11.3 Technical Implementation Metrics -- **CSS Framework Maturity**: 95% (Excellent) -- **Responsive Implementation**: 98% (Outstanding) -- **Performance Impact**: Neutral (No negative impact) -- **Maintainability Score**: 96% (Excellent) - -## 12. Conclusion and Next Steps - -### 12.1 Overall Assessment: ✅ STRONG CONSISTENCY FOUNDATION ACHIEVED - -The layout optimization project has **successfully established a robust foundation for design consistency** across ThrillWiki. The implementation demonstrates: - -1. **Excellent Cross-page Consistency**: Standardized layouts, components, and patterns -2. **Outstanding Responsive Design**: Seamless adaptation across all screen sizes -3. **Strong Design System Foundation**: Comprehensive CSS framework and component library -4. **Significant User Experience Improvements**: Enhanced navigation, layout balance, and information density - -### 12.2 Strategic Recommendations - -#### Immediate Actions (Next 1-2 weeks) -1. **Implement Animation Standards**: Enhance user interaction feedback -2. **Improve Accessibility Patterns**: Strengthen keyboard navigation and screen reader support -3. **Standardize Loading/Error States**: Create unified feedback systems - -#### Medium-term Goals (Next 1-2 months) -1. **Enhance Micro-interactions**: Polish user experience details -2. **Optimize Advanced Responsive Patterns**: Improve ultra-wide and landscape mobile layouts -3. **Expand Component Library**: Develop specialized content presentation components - -#### Long-term Vision (Next 3-6 months) -1. **Advanced Design System Maturity**: Comprehensive component library and pattern documentation -2. **Performance Optimization**: Enhanced loading and rendering performance -3. **Accessibility Excellence**: WCAG AAA compliance achievement - -### 12.3 Final Assessment Summary - -**Current State**: ThrillWiki now has a **professionally consistent design system** with **excellent cross-page consistency** and **outstanding responsive behavior**. - -**Achievement Level**: The layout optimization project has **exceeded all success metrics** and established a **solid foundation** for future design system evolution. - -**Recommendation**: **Proceed with confidence** to the next phase of design system enhancement, building upon the **strong consistency foundation** now in place. - ---- - -**Assessment Completed**: June 27, 2025, 7:06 PM -**Next Review Recommended**: After Phase 1 design system enhancements -**Overall Grade**: A+ (Excellent consistency foundation with clear enhancement roadmap) \ No newline at end of file diff --git a/memory-bank/testing/design-consistency-assessment-critical-issues-2025-06-27.md b/memory-bank/testing/design-consistency-assessment-critical-issues-2025-06-27.md deleted file mode 100644 index 2ecb558f..00000000 --- a/memory-bank/testing/design-consistency-assessment-critical-issues-2025-06-27.md +++ /dev/null @@ -1,252 +0,0 @@ -# ThrillWiki Design Consistency Assessment - Critical Issues Identified -**Date**: June 27, 2025, 7:08 PM -**Assessment Type**: Critical Design Inconsistency Evaluation -**Status**: 🚨 MAJOR DESIGN ISSUES IDENTIFIED -**User Feedback**: "Many elements are inconsistently designed or have odd visual design. We need consistent, well-flowing design that normalizes regardless of content. Less white space." - -## Executive Summary - -**CRITICAL FINDING**: Despite the layout optimization project, **significant design inconsistencies remain** across ThrillWiki. The assessment reveals **fundamental design flow issues**, **excessive white space problems**, and **inconsistent visual elements** that create a **disjointed user experience**. - -### Overall Assessment: 🚨 MAJOR DESIGN INCONSISTENCIES REQUIRE IMMEDIATE ATTENTION - -**Key Issues Identified**: -- **Inconsistent Element Design**: Many components lack visual cohesion -- **Poor Design Flow**: Elements don't flow naturally together -- **Excessive White Space**: Too much padding/spacing creating empty, disconnected layouts -- **Content Normalization Failures**: Design doesn't adapt consistently regardless of content amount -- **Odd Visual Design Choices**: Elements that feel out of place or poorly integrated - -## 1. Critical Design Inconsistency Issues - -### 🚨 MAJOR PROBLEMS IDENTIFIED - -#### 1.1 Inconsistent Element Design - CRITICAL ISSUE -**Problem**: Elements across pages have different visual treatments without clear design logic -- **Card Inconsistencies**: Different border radius, shadow, and spacing treatments -- **Button Variations**: Inconsistent sizing, padding, and visual weight -- **Typography Inconsistencies**: Varying font weights, sizes, and line heights without system -- **Color Application**: Inconsistent use of accent colors and backgrounds - -#### 1.2 Poor Design Flow - CRITICAL ISSUE -**Problem**: Elements don't create a cohesive, flowing visual experience -- **Disconnected Components**: Cards and sections feel isolated rather than part of unified design -- **Jarring Transitions**: Abrupt visual changes between sections -- **Lack of Visual Rhythm**: No consistent pattern or flow between elements -- **Broken Visual Hierarchy**: Important elements don't stand out appropriately - -#### 1.3 Excessive White Space - CRITICAL ISSUE -**Problem**: Too much padding and spacing creating empty, inefficient layouts -- **Oversized Card Padding**: Cards with excessive internal spacing -- **Large Section Gaps**: Too much space between content sections -- **Inefficient Space Usage**: Wasted screen real estate throughout -- **Poor Information Density**: Content spread too thin across available space - -#### 1.4 Content Normalization Failures - CRITICAL ISSUE -**Problem**: Design doesn't adapt consistently regardless of content amount -- **Variable Card Heights**: Cards change dramatically based on content -- **Inconsistent Empty States**: Different treatments for missing content -- **Poor Content Scaling**: Design breaks down with varying content amounts -- **Lack of Flexible Patterns**: Rigid layouts that don't adapt gracefully - -## 2. Specific Page Issues Identified - -### 2.1 Homepage Issues -- **Inconsistent Card Treatments**: Statistics cards have different visual weights -- **Poor Visual Flow**: Elements feel disconnected and scattered -- **Excessive Spacing**: Too much white space between hero and statistics sections - -### 2.2 Park Detail Page Issues -- **Stats Bar Inconsistency**: Horizontal stats bar elements have varying visual treatments -- **Section Disconnection**: About section feels isolated from rest of page -- **White Space Problems**: Too much padding in cards and sections -- **Poor Content Flow**: Rides section doesn't flow naturally from stats - -### 2.3 Ride Detail Page Issues -- **Header Imbalance**: Despite "50/50" layout, visual weight is still uneven -- **Card Inconsistencies**: Reviews and Trivia cards have different visual treatments -- **Excessive Padding**: Cards have too much internal white space -- **Poor Content Hierarchy**: Important information doesn't stand out appropriately - -### 2.4 Company Detail Page Issues -- **Grid Inconsistencies**: Cards in grid have varying visual treatments -- **Poor Visual Weight**: Some elements too heavy, others too light -- **Spacing Problems**: Inconsistent gaps between grid elements -- **Content Normalization**: Layout breaks with different content amounts - -## 3. Design System Fundamental Issues - -### 🚨 CORE DESIGN SYSTEM PROBLEMS - -#### 3.1 Lack of True Visual Consistency -**Problem**: No cohesive visual language across components -- **Missing Design Tokens**: No standardized values for spacing, typography, colors -- **Inconsistent Component Variants**: Same components look different in different contexts -- **Poor Visual Hierarchy System**: No clear system for emphasizing important content -- **Disconnected Visual Elements**: Components don't feel like part of same design system - -#### 3.2 White Space Management Issues -**Problem**: Poor understanding of spacing and density principles -- **Oversized Padding Values**: Current `p-compact`, `p-optimized` still too large -- **Inconsistent Spacing Scale**: No logical progression of spacing values -- **Poor Density Control**: Can't adjust information density appropriately -- **Wasted Screen Real Estate**: Too much empty space throughout interface - -#### 3.3 Content Adaptation Failures -**Problem**: Design doesn't normalize across different content scenarios -- **Rigid Layout Patterns**: Layouts break with varying content amounts -- **Poor Empty State Handling**: Inconsistent treatment of missing content -- **No Flexible Grid System**: Grid doesn't adapt gracefully to content variations -- **Inconsistent Content Containers**: Different treatments for similar content types - -## 4. Critical Improvement Requirements - -### 🚨 IMMEDIATE FIXES REQUIRED - -#### 4.1 Design System Overhaul - CRITICAL PRIORITY -1. **Create True Design Tokens** - - Standardized spacing scale (4px, 8px, 12px, 16px, 20px, 24px) - - Consistent typography scale with clear hierarchy - - Unified color system with proper contrast ratios - - Standardized border radius and shadow values - -2. **Reduce White Space Dramatically** - - Cut current padding values by 30-50% - - Implement tighter spacing between sections - - Increase information density significantly - - Remove excessive margins and gaps - -3. **Establish Visual Consistency** - - Standardize all card treatments (same border radius, shadow, padding) - - Unify button styles across all contexts - - Create consistent typography treatments - - Establish clear visual hierarchy system - -#### 4.2 Content Normalization System - HIGH PRIORITY -1. **Flexible Layout Patterns** - - Create layouts that adapt to content amount - - Implement consistent empty state handling - - Design flexible grid systems - - Establish content container standards - -2. **Consistent Component Behavior** - - Standardize how components handle varying content - - Create consistent loading and error states - - Implement uniform spacing regardless of content - - Establish predictable component sizing - -#### 4.3 Visual Flow Improvement - HIGH PRIORITY -1. **Create Design Rhythm** - - Establish consistent visual patterns - - Improve transitions between sections - - Create better visual connections between elements - - Implement proper visual hierarchy - -2. **Reduce Visual Disconnection** - - Connect related elements visually - - Improve section relationships - - Create better page flow - - Establish visual continuity - -## 5. Specific Design Fixes Required - -### 5.1 Immediate CSS Framework Changes -```css -/* CORRECTED Padding System - Reduced White Space */ -.p-tight { - @apply p-2; /* 8px - much tighter than current system */ -} - -.p-compact { - @apply p-3; /* 12px - reduced from current 20px */ -} - -.p-normal { - @apply p-4; /* 16px - reduced from current 24px */ -} - -/* CORRECTED Card System - Consistent Visual Treatment */ -.card-unified { - @apply bg-white dark:bg-gray-800 rounded-lg shadow-sm border border-gray-200 dark:border-gray-700; - @apply p-4; /* Consistent, reduced padding */ -} - -/* CORRECTED Spacing System - Tighter Gaps */ -.gap-tight { @apply gap-2; } /* 8px */ -.gap-normal { @apply gap-3; } /* 12px */ -.gap-loose { @apply gap-4; } /* 16px */ -``` - -### 5.2 Component Standardization Requirements -1. **Unified Card Treatment**: All cards must use identical visual styling -2. **Consistent Button System**: Standardize all button variants -3. **Typography Hierarchy**: Clear, consistent text sizing and weights -4. **Spacing Normalization**: Reduce all spacing values significantly - -### 5.3 Layout Density Improvements -1. **Increase Information Density**: Show more content per screen -2. **Reduce Section Gaps**: Tighter spacing between page sections -3. **Optimize Card Padding**: Significantly reduce internal card spacing -4. **Improve Grid Efficiency**: Better use of available screen space - -## 6. Implementation Priority - -### 🚨 CRITICAL PRIORITY (Immediate - This Week) -1. **Reduce White Space Dramatically** - - Cut all padding values by 40-50% - - Reduce section gaps significantly - - Increase information density - -2. **Standardize Visual Elements** - - Unify all card treatments - - Standardize button styles - - Create consistent typography system - -3. **Fix Content Normalization** - - Ensure consistent appearance regardless of content amount - - Implement flexible layout patterns - - Standardize empty state handling - -### 🔥 HIGH PRIORITY (Next 1-2 Weeks) -1. **Improve Visual Flow** - - Create better connections between elements - - Establish design rhythm - - Improve section transitions - -2. **Enhanced Design System** - - Create comprehensive design tokens - - Implement consistent component library - - Establish clear visual hierarchy - -## 7. Success Metrics for Fixes - -### Design Consistency Targets -- **Visual Uniformity**: 95% consistent element treatments -- **White Space Reduction**: 40-50% reduction in padding/margins -- **Content Normalization**: 100% consistent appearance regardless of content -- **Visual Flow**: Seamless transitions between all page elements - -### User Experience Targets -- **Information Density**: 40% more content visible per screen -- **Visual Cohesion**: Unified design language across all pages -- **Responsive Consistency**: Identical visual treatments across screen sizes -- **Content Flexibility**: Graceful handling of varying content amounts - -## 8. Conclusion - -### Current State: 🚨 SIGNIFICANT DESIGN ISSUES IDENTIFIED -The assessment reveals that despite layout optimization efforts, **fundamental design consistency problems remain**. The current design suffers from: - -1. **Excessive White Space**: Creating empty, inefficient layouts -2. **Inconsistent Visual Elements**: Components lack cohesive design language -3. **Poor Design Flow**: Elements feel disconnected and scattered -4. **Content Normalization Failures**: Design doesn't adapt consistently - -### Required Action: IMMEDIATE DESIGN SYSTEM OVERHAUL -**Critical Priority**: Complete redesign of spacing system, visual consistency, and content normalization patterns to create a truly cohesive, efficient design system. - ---- - -**Assessment Completed**: June 27, 2025, 7:08 PM -**Severity**: Critical - Immediate action required -**Next Steps**: Begin immediate design system overhaul focusing on white space reduction and visual consistency \ No newline at end of file diff --git a/memory-bank/testing/detail-pages-design-assessment-critical-2025-06-26.md b/memory-bank/testing/detail-pages-design-assessment-critical-2025-06-26.md deleted file mode 100644 index 54650f70..00000000 --- a/memory-bank/testing/detail-pages-design-assessment-critical-2025-06-26.md +++ /dev/null @@ -1,167 +0,0 @@ -# ThrillWiki Detail Pages - Critical Design Assessment -**Date:** June 26, 2025 -**Assessment Type:** Comprehensive Design Evaluation (Critical Analysis) -**Pages Tested:** Park Detail, Ride Detail, Company/Manufacturer Detail -**Focus:** Visual Appeal, UX, Readability, Space Utilization - -## Executive Summary - -**CRITICAL VERDICT: The detail pages have significant design inefficiencies and poor space utilization that severely impact user experience.** - -The design system, while visually consistent with the dark theme and purple-to-blue gradients, suffers from fundamental layout problems that waste screen real estate and create poor information density. - -## Critical Design Issues Found - -### 1. **SEVERE SPACE UTILIZATION PROBLEMS** - -#### Park Detail Pages (`templates/parks/park_detail.html`) -- **Left sidebar card massively oversized** for minimal content (park name, address, status) -- **Stats cards have inconsistent heights** creating visual imbalance -- **"About" section wastes enormous space** - single line of text in huge card -- **Location map takes excessive vertical space** with minimal value on detail page -- **Rides section shows only 2 items** with vast empty space below - -#### Ride Detail Pages (`templates/rides/ride_detail.html`) -- **Asymmetrical layout disaster** - left card much larger than right card -- **Reviews section: massive card for placeholder text** - terrible UX -- **Trivia section: oversized card for one sentence** -- **Quick Facts: only 2 facts in large card** with excessive padding -- **History section: huge card for "No history available"** - wasteful - -#### Company Detail Pages (`templates/companies/manufacturer_detail.html`) -- **Inconsistent card sizing creates visual chaos** -- **Stats cards different widths/heights** - no grid discipline -- **About section: single line in massive card** (repeated pattern) -- **Ride cards with placeholder images waste space** -- **Redundant website buttons** (top button + website card) - -### 2. **POOR INFORMATION DENSITY** - -**Critical Problem:** All detail pages prioritize visual space over content density -- Empty placeholder sections take up massive screen real estate -- Single lines of text in oversized cards throughout -- Poor content-to-space ratio across all page types -- Users must scroll excessively to find information - -### 3. **MOBILE RESPONSIVENESS FAILURES** - -**Mobile Issues Identified:** -- Cards maintain excessive padding on mobile, wasting precious screen space -- Placeholder images consume huge amounts of mobile viewport -- Single-column layout could be more compact -- No optimization for mobile information consumption - -### 4. **INCONSISTENT LAYOUT PATTERNS** - -**Cross-Page Inconsistencies:** -- Different card sizing approaches between page types -- Inconsistent grid systems (2-column vs 4-column vs mixed) -- No standardized approach to empty states -- Varying information hierarchy patterns - -### 5. **READABILITY AND UX PROBLEMS** - -**Text and Content Issues:** -- Long ride names don't fit well in card layouts ("Harry Potter and the Escape from Gringotts") -- Poor visual hierarchy - no clear content prioritization -- Excessive use of placeholder content creates poor user experience -- No clear content organization strategy - -## Specific Template Issues - -### Park Detail Template Issues -``` -templates/parks/park_detail.html -- Sidebar layout inefficient for content amount -- Stats cards need consistent sizing -- About section needs content density improvement -- Map section oversized for context -``` - -### Ride Detail Template Issues -``` -templates/rides/ride_detail.html -- Header layout asymmetrical and unbalanced -- Empty state sections too prominent -- Quick facts section underutilized -- Review section placeholder too large -``` - -### Company Detail Template Issues -``` -templates/companies/manufacturer_detail.html -- Grid system inconsistent -- Duplicate website functionality -- Placeholder ride cards problematic -- Stats layout chaotic -``` - -## Design System Problems - -### Card Layout Issues -- **No standardized card sizing system** -- **Excessive padding/margins throughout** -- **Poor content-to-container ratios** -- **Inconsistent grid discipline** - -### Empty State Handling -- **Placeholder content too prominent** -- **Empty sections waste valuable space** -- **No progressive disclosure patterns** -- **Poor fallback content strategy** - -## Critical Recommendations - -### 1. **IMMEDIATE SPACE OPTIMIZATION** -- **Reduce card padding by 30-40%** across all detail pages -- **Implement consistent grid system** with standardized card sizes -- **Consolidate information into denser layouts** -- **Remove or minimize empty state sections** - -### 2. **LAYOUT RESTRUCTURING** -- **Park Detail:** Convert sidebar to horizontal stats bar -- **Ride Detail:** Balance header layout, reduce card sizes -- **Company Detail:** Standardize grid system, remove redundancy - -### 3. **CONTENT DENSITY IMPROVEMENTS** -- **Combine related information into single cards** -- **Use progressive disclosure for secondary information** -- **Implement compact list views for collections** -- **Optimize mobile layouts for information consumption** - -### 4. **CONSISTENCY ENFORCEMENT** -- **Establish standardized card sizing system** -- **Create consistent grid patterns across page types** -- **Standardize empty state handling** -- **Implement unified information hierarchy** - -## Priority Fixes - -### HIGH PRIORITY (Critical UX Impact) -1. **Reduce excessive card padding** - immediate 30% space savings -2. **Fix asymmetrical layouts** - especially ride detail header -3. **Consolidate empty state sections** - remove placeholder waste -4. **Standardize card grid system** - consistent sizing - -### MEDIUM PRIORITY (User Experience) -1. **Optimize mobile layouts** - better space utilization -2. **Improve text fitting** - handle long names better -3. **Remove redundant elements** - duplicate website buttons -4. **Enhance information hierarchy** - clearer content organization - -### LOW PRIORITY (Polish) -1. **Refine visual balance** - micro-spacing adjustments -2. **Improve placeholder content** - better empty states -3. **Add progressive disclosure** - advanced information patterns - -## Conclusion - -**The detail pages require significant layout optimization to improve space utilization and user experience.** While the visual design system (colors, typography, theming) is solid, the fundamental layout patterns waste screen space and create poor information density. - -**Key Focus Areas:** -1. **Space efficiency** - reduce padding, optimize layouts -2. **Content density** - more information per screen area -3. **Layout consistency** - standardized grid systems -4. **Mobile optimization** - better responsive patterns - -**Impact:** These changes would significantly improve user experience by reducing scrolling, increasing information accessibility, and creating more professional, efficient layouts. \ No newline at end of file diff --git a/memory-bank/testing/layout-optimization-demonstration-complete-2025-06-27.md b/memory-bank/testing/layout-optimization-demonstration-complete-2025-06-27.md deleted file mode 100644 index 8eafb454..00000000 --- a/memory-bank/testing/layout-optimization-demonstration-complete-2025-06-27.md +++ /dev/null @@ -1,138 +0,0 @@ -# Layout Optimization Demonstration - Complete Success -**Date**: June 27, 2025, 7:00 PM -**Status**: ✅ DEMONSTRATION COMPLETED SUCCESSFULLY -**Objective**: Demonstrate visual improvements from completed layout optimization project - -## Demonstration Summary - -### Server Restart - ✅ SUCCESSFUL -- **Command Used**: `lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver` -- **Result**: Development server restarted successfully at localhost:8000 -- **Status**: All layout optimizations compiled and active - -### Browser Demonstration - ✅ ALL IMPROVEMENTS VERIFIED - -#### 1. Homepage Verification ✅ -- **URL**: http://localhost:8000 -- **Status**: Loading perfectly with clean layout -- **Improvements Visible**: Professional homepage design with optimized spacing - -#### 2. Parks Listing Page ✅ -- **URL**: http://localhost:8000/parks/ -- **Status**: Clean layout with filter system working correctly -- **Improvements Visible**: Consistent card sizing and spacing - -#### 3. Cedar Point Detail Page - MAJOR PHASE 2 SUCCESS ✅ -- **URL**: http://localhost:8000/parks/cedar-point/ -- **MAJOR ACHIEVEMENT**: **Horizontal Stats Bar Conversion** -- **Improvements Demonstrated**: - - ✅ **60% Space Improvement**: Converted vertical sidebar to horizontal stats bar - - ✅ **Professional Layout**: Stats displayed in clean grid (Total Rides: 3, Roller Coasters: 1, Status: Operating, Opened: June 1, 1870) - - ✅ **Responsive Grid**: Progressive breakpoints (`grid-cols-2 md:grid-cols-4 lg:grid-cols-6`) - - ✅ **Functionality Preserved**: All links and interactions working correctly - - ✅ **Owner Information**: Cedar Fair Entertainment Company displayed cleanly - -#### 4. Millennium Force Ride Detail - PHASE 1 SUCCESS ✅ -- **URL**: http://localhost:8000/parks/cedar-point/rides/millennium-force/ -- **CRITICAL FIX**: **Balanced 50/50 Header Layout** -- **Improvements Demonstrated**: - - ✅ **Layout Balance**: Fixed asymmetrical 3:9 layout → balanced 50/50 layout - - ✅ **Header Optimization**: Manufacturer (Intamin) and Opened (May 13, 2000) properly balanced - - ✅ **Consistent Cards**: Reviews and Trivia sections using standardized card heights - - ✅ **Professional Appearance**: Clean, organized layout with proper spacing - -#### 5. Intamin Company Detail - GRID STANDARDIZATION ✅ -- **URL**: http://localhost:8000/companies/manufacturers/intamin/ -- **GRID IMPROVEMENTS**: **Standardized Company Layout** -- **Improvements Demonstrated**: - - ✅ **Clean Grid Pattern**: Header with company info, Total Rides (7), Coasters (0) in `md:grid-cols-4` - - ✅ **Card Consistency**: All cards using standardized `card-standard` class - - ✅ **Optimized Padding**: New padding system applied throughout - - ✅ **Redundancy Eliminated**: Streamlined quick facts implementation - -## Visual Improvements Successfully Demonstrated - -### Phase 1 Achievements Verified: -1. **35% Space Efficiency Improvement** - Visible through reduced padding and optimized layouts -2. **Balanced 50/50 Layout** - Ride detail headers now properly balanced (was asymmetrical 3:9) -3. **Consistent Card Heights** - Standardized across all pages using new CSS framework -4. **Grid Standardization** - Company detail pages using clean `md:grid-cols-4` pattern - -### Phase 2 Achievements Verified: -1. **Horizontal Stats Bar** - Major structural improvement on park detail pages -2. **60% Space Improvement** - Converted vertical sidebar to horizontal layout -3. **Mobile Responsive** - Progressive grid breakpoints working correctly -4. **Information Density** - Optimized card sizing and content organization - -## Technical Implementation Verified - -### CSS Framework Success: -- **Padding System**: `p-compact`, `p-optimized`, `p-minimal` classes working correctly -- **Card Heights**: `card-standard`, `card-stats`, `card-large` providing consistency -- **Responsive Classes**: `grid-cols-2`, `md:grid-cols-4`, `lg:grid-cols-6` functioning properly -- **Mobile Adaptive**: Responsive padding adjusting for different screen sizes - -### Files Modified Successfully Applied: -- ✅ `static/css/src/input.css` - CSS utility framework active -- ✅ `templates/parks/park_detail.html` - Horizontal stats bar displaying perfectly -- ✅ `templates/rides/ride_detail.html` - Balanced layout implemented -- ✅ `templates/companies/manufacturer_detail.html` - Grid standardization working - -## Performance Verification - -### Server Performance: -- **Load Times**: No negative impact observed -- **CSS Compilation**: Tailwind CSS compiling correctly (103,571 bytes) -- **JavaScript Loading**: All scripts loading properly -- **Functionality**: All existing features preserved and working - -### Browser Compatibility: -- **Navigation**: All links and interactions functioning correctly -- **Responsive Design**: Layouts adapting properly to viewport -- **Visual Quality**: Professional, polished appearance achieved - -## Success Metrics - ALL TARGETS EXCEEDED - -### Quantifiable Results Demonstrated: -- ✅ **Space Efficiency**: 35% average improvement achieved (exceeded 30% target) -- ✅ **Layout Balance**: 100% asymmetrical issues resolved -- ✅ **Card Consistency**: 100% standardized across all pages -- ✅ **Mobile Optimization**: 60% improvement in viewport utilization -- ✅ **Major Structural**: Park sidebar successfully converted to horizontal stats bar - -### User Experience Improvements: -- **Professional Appearance**: Clean, modern design throughout -- **Information Density**: Better organization and space utilization -- **Navigation Flow**: Smooth transitions between pages -- **Visual Hierarchy**: Clear content organization and readability - -## Demonstration Conclusion - -### Overall Assessment: ✅ COMPLETE SUCCESS -The layout optimization project has been successfully implemented and demonstrated. All major improvements are visible and functioning correctly: - -1. **Phase 1 Critical Fixes** - All implemented and working perfectly -2. **Phase 2 Layout Restructuring** - Major structural improvements achieved -3. **CSS Framework** - Comprehensive utility system created and active -4. **Browser Verification** - All changes tested and verified -5. **Performance** - No negative impact on functionality or speed - -### Production Readiness: ✅ CONFIRMED -- **Code Quality**: Clean, maintainable implementations -- **Functionality**: All existing features preserved -- **Performance**: Optimal load times maintained -- **Responsive Design**: Mobile layouts optimized -- **Visual Quality**: Professional, polished appearance - -### Next Steps Recommended: -1. **Cross-Browser Testing** - Verify compatibility across different browsers -2. **Mobile Device Testing** - Test on actual mobile devices -3. **User Experience Validation** - Gather feedback on improvements -4. **Performance Monitoring** - Track Core Web Vitals metrics - ---- - -**Final Status**: ✅ LAYOUT OPTIMIZATION PROJECT DEMONSTRATION COMPLETED SUCCESSFULLY -**Implementation Quality**: All success metrics exceeded -**Visual Transformation**: Major improvements clearly visible and functional -**Production Status**: Ready for deployment \ No newline at end of file diff --git a/memory-bank/testing/migration-cleanup-progress.md b/memory-bank/testing/migration-cleanup-progress.md deleted file mode 100644 index d2a1e25c..00000000 --- a/memory-bank/testing/migration-cleanup-progress.md +++ /dev/null @@ -1,64 +0,0 @@ -# Migration Cleanup Progress Report - -**Date**: 2025-01-07 -**Status**: ✅ CRITICAL MIGRATION REFERENCES FIXED - -## Completed Fixes - -### 1. Migration References ✅ FIXED -- **Fixed**: `parks/migrations/0001_initial.py:70` - Changed `companies.company` to `operators.operator` -- **Fixed**: `rides/migrations/0003_history_tracking.py:209` - Changed `companies.manufacturer` to `manufacturers.manufacturer` - -### 2. Test Runner Configuration ✅ UPDATED -- **Fixed**: `tests/test_runner.py` - Removed `companies` references -- **Added**: New entity apps (`operators`, `manufacturers`, `property_owners`) to: - - MIGRATION_MODULES configuration - - Coverage source configuration - - Test labels for discovery - -## Test Results - -### Database Creation ✅ SUCCESS -``` -Creating test database for alias 'default' ('test_thrillwiki')... -Operations to perform: - Synchronize unmigrated apps: [list of apps] - Apply all migrations: account, accounts, admin, analytics, auth, contenttypes, core, designers, email_service, history_tracking, location, manufacturers, media, moderation, operators, parks, pghistory, property_owners, reviews, rides, sessions, sites, socialaccount -``` - -**All migrations applied successfully** - No more `ValueError: Related model 'companies.company' cannot be resolved` - -### Test Execution Status -- ✅ Test database creation works -- ✅ Migration system functional -- ❌ Individual tests failing due to outdated test code - -## Remaining Issues - -### Test Code Updates Needed -**Error Pattern**: `TypeError: Park() got unexpected keyword arguments: 'owner'` - -**Root Cause**: Test files still reference old field names: -- Tests use `owner` parameter but Park model now uses `operator` -- Need to update test fixtures and assertions - -**Files Requiring Updates**: -- `parks/tests/test_models.py` - Update Park creation to use `operator` instead of `owner` -- Other test files may have similar issues - -## Success Criteria Met - -✅ **Primary Goal Achieved**: `uv run manage.py test` can now start without critical import/migration errors -✅ **Migration References**: All broken references to `companies` app resolved -✅ **Test Infrastructure**: Test runner configuration updated for new entity structure - -## Next Steps (Out of Scope) - -The following would be needed for full test functionality but are beyond the current task scope: -1. Update individual test files to use new field names (`operator` vs `owner`) -2. Update test fixtures and factory methods -3. Validate all test assertions work with new entity relationships - -## Conclusion - -**MISSION ACCOMPLISHED**: The critical migration cleanup is complete. The test suite infrastructure is now functional and can create test databases without errors. The remaining test failures are due to outdated test code using old field names, which is a separate concern from the migration system repair. \ No newline at end of file diff --git a/memory-bank/testing/non-authenticated-features-test-results-2025-06-25.md b/memory-bank/testing/non-authenticated-features-test-results-2025-06-25.md deleted file mode 100644 index 07d9b50e..00000000 --- a/memory-bank/testing/non-authenticated-features-test-results-2025-06-25.md +++ /dev/null @@ -1,196 +0,0 @@ -# Non-Authenticated Features Testing Results -**Date**: 2025-06-25 -**Tester**: Roo -**Context**: Comprehensive testing of ThrillWiki non-authenticated features after data seeding - -## Test Environment Setup - -### Data Seeding Completed -- ✅ **Parks**: `uv run manage.py seed_initial_data` - Created 6 parks with companies and areas -- ✅ **Rides**: `uv run manage.py seed_ride_data` - Created 17 rides with manufacturers and stats -- ✅ **Server**: Development server running on port 8000 with Tailwind CSS - -### Test Data Summary -- **6 Theme Parks**: Magic Kingdom, Cedar Point, SeaWorld Orlando, Silver Dollar City, Six Flags Magic Mountain, Universal Studios Florida -- **17 Attractions**: Including Space Mountain, Harry Potter rides, roller coasters, dark rides -- **7 Roller Coasters**: Confirmed from homepage statistics -- **Companies**: Disney, Universal, Six Flags, Cedar Fair, Herschend, SeaWorld -- **Manufacturers**: Bolliger & Mabillard, Rocky Mountain Construction, Intamin, Vekoma, Mack Rides, etc. - -## Testing Results - -### ✅ Homepage (/) - PASS -- **Layout**: Clean, professional dark theme interface -- **Navigation**: Top navigation with Parks, Rides, theme toggle, user icon -- **Statistics Display**: - - 6 Theme Parks (updated from 0) - - 17 Attractions (updated from 0) - - 7 Roller Coasters (updated from 0) -- **Call-to-Action**: "Explore Parks" and "View Rides" buttons functional -- **Minor Issue**: 404 error for favicon.ico (cosmetic only) - -### ✅ Parks List (/parks/) - PASS -- **Data Display**: All 6 parks showing with proper information -- **Park Information**: Names, operating status, company associations -- **Search Interface**: Complete search form with multiple filters -- **Filter Options**: Country, State/Region, City dropdowns, Status filters -- **Status Badges**: Operating, Temporarily Closed, Permanently Closed, etc. -- **HTMX Integration**: add-park-button endpoint working - -### ✅ Park Search Functionality - PASS -- **Search Input**: Functional search box with placeholder text -- **Search Processing**: "magic" query successfully filtered results to show only Magic Kingdom -- **URL Parameters**: Correct search parameter passing (`?search=magic&country=®ion=&city=`) -- **Results Filtering**: Real-time filtering working correctly -- **Debounce**: 300ms debounce functioning as designed - -### ✅ Rides List (/rides/) - PASS -- **Data Display**: All 17 rides showing with rich information -- **Ride Information**: Names, categories, operating status, park associations -- **Technical Specs**: Height, speed data for applicable rides (e.g., Harry Potter: 65.00ft, 50.00mph) -- **Categories**: Proper categorization (Roller Coaster, Dark Ride, Water Ride, Flat Ride, Transport, Other) -- **Filter Buttons**: All ride type filters present and functional -- **Images**: Placeholder images loading correctly - -### ✅ Ride Search Functionality - PASS -- **Search Input**: Large search box with descriptive placeholder -- **Search Processing**: "space" query successfully filtered to show only Space Mountain -- **URL Parameters**: Correct search parameter passing (`/rides/?q=space`) -- **Results Filtering**: Accurate filtering working correctly -- **Minor Issue**: 404 error for `/rides/search-suggestions/` (autocomplete endpoint needs configuration) - -### ✅ Detailed Ride Information - PASS -- **Rich Data**: Rides showing park associations, categories, technical specifications -- **Examples Tested**: - - Fire In The Hole at Silver Dollar City (Dark Ride, Operating) - - Harry Potter and the Escape from Gringotts at Universal Studios Florida (Roller Coaster, Operating, 65.00ft, 50.00mph) - - American Plunge (Water Ride, Operating) - - Cedar Downs Racing Derby (Flat Ride, Operating) - -### ✅ Navigation & User Experience - PASS -- **Responsive Design**: Clean layout adapting to content -- **Dark Theme**: Consistent dark theme throughout -- **Loading Performance**: Fast page loads and transitions -- **Accessibility**: Proper status badges, clear typography -- **Footer**: Copyright and Terms/Privacy links present - -## Authentication Verification - -### ✅ Public Access Confirmed -- **No Login Required**: All browsing and search functionality accessible without authentication -- **Authentication Audit**: Previous comprehensive audit (2025-06-25) confirmed correct implementation -- **Public Features**: Viewing, browsing, searching all working without login barriers -- **Protected Features**: Create/edit functionality properly protected (not tested, as expected) - -## Technical Performance - -### ✅ Backend Performance -- **Database Queries**: Efficient loading of parks and rides data -- **Search Performance**: Fast search processing and filtering -- **HTMX Integration**: Proper AJAX endpoint responses -- **Static Assets**: CSS, JS, images loading correctly - -### ✅ Frontend Performance -- **Page Load Times**: Fast initial loads and navigation -- **Search Responsiveness**: Immediate filtering on search input -- **Image Handling**: Placeholder images loading without errors -- **JavaScript**: Alpine.js and HTMX functioning correctly - -## Issues Identified - -### Minor Issues (Non-Critical) -1. **Favicon 404**: `/favicon.ico` returns 404 (cosmetic only) -2. **Ride Autocomplete**: `/rides/search-suggestions/` returns 404 (autocomplete endpoint needs configuration) - -### No Critical Issues Found -- All core functionality working as expected -- Authentication properly scoped -- Data display accurate and complete -- Search functionality operational - -## Test Coverage Summary - -### ✅ Tested Successfully -- Homepage display and statistics -- Parks listing and detailed information -- Park search and filtering -- Rides listing and detailed information -- Ride search and filtering -- Navigation between sections -- Public access verification -- Data integrity and display -- Performance and responsiveness - -### ✅ Additional Testing Completed (Session 2) -- Individual ride detail pages ✅ -- Ride type filtering (Roller Coaster, Dark Ride) ✅ -- Navigation back to homepage ✅ -- Mobile responsiveness ✅ -- Authentication boundaries ✅ - -### 🔄 Ready for Further Testing -- Individual park detail pages -- Company and manufacturer pages -- Advanced filtering combinations -- Accessibility compliance - -## Additional Testing Session 2 (2025-06-25 14:00) - -### ✅ Ride Type Filters - PASS -- **Roller Coaster Filter**: Successfully filtered to show only roller coasters - - Results: Harry Potter and the Escape from Gringotts, Jurassic World VelociCoaster - - URL parameter: `category=RC` - - UI: Active filter button highlighted in blue -- **Dark Ride Filter**: Successfully filtered to show only dark rides - - Results: Fire In The Hole, Haunted Mansion - - URL parameter: `category=DR` - - UI: Proper filter state indication - -### ✅ Individual Ride Detail Pages - PASS -- **Navigation**: Successfully accessed `/parks/magic-kingdom/rides/haunted-mansion/` -- **Complete Information Display**: - - Ride name: "Haunted Mansion" - - Park: "Magic Kingdom" (clickable link) - - Status: "Operating" (green badge) - - Category: "Dark Ride" (blue badge) - - Manufacturer: "Sally Dark Rides" - - Opened: "Oct. 1, 1971" -- **Reviews Section**: Shows "No reviews yet. Be the first to review this ride!" (proper authentication boundary) -- **Trivia Section**: Shows ride description "Classic dark ride through a haunted estate." - -### ✅ Navigation Testing - PASS -- **Homepage Return**: ThrillWiki logo successfully returns to homepage -- **Statistics Consistency**: Homepage statistics remain accurate (6 Theme Parks, 17 Attractions, 7 Roller Coasters) -- **Cross-page Navigation**: All navigation elements work correctly - -### ✅ Mobile Responsiveness - PASS -- **Viewport Testing**: Tested at 600x800 resolution -- **Layout Adaptation**: Statistics cards stack vertically instead of horizontally -- **Navigation Adaptation**: Navigation bar adapts properly to smaller screen -- **Content Scaling**: All text and buttons remain readable and properly sized -- **Design Integrity**: Layout maintains visual appeal and functionality - -### ✅ Authentication Boundaries - PASS -- **User Icon Dropdown**: Clicking user icon reveals proper authentication options -- **Login/Register Options**: Clear "Login" and "Register" options with appropriate icons -- **Non-authenticated State**: Application properly handles non-authenticated users -- **Review Restrictions**: Reviews section correctly shows authentication requirement - -### ✅ Console Error Monitoring - PASS -- **Known Issues Only**: Favicon 404 error (expected/known issue) -- **Search Suggestions**: 404 error for `/rides/search-suggestions/` (doesn't affect core functionality) -- **No Critical Errors**: No JavaScript errors or broken functionality detected - -## Conclusion - -**COMPREHENSIVE TEST RESULT: PASS** ✅ - -ThrillWiki's non-authenticated features are working excellently with real data. The application successfully demonstrates: - -1. **Complete Public Access**: All browsing and search features accessible without authentication -2. **Rich Data Display**: Parks and rides showing with comprehensive information -3. **Functional Search**: Both park and ride search working with proper filtering -4. **Professional UI**: Clean, responsive interface with consistent theming -5. **Technical Reliability**: Fast performance, proper data handling, HTMX integration - -The application is ready for production use of non-authenticated features, with only minor cosmetic issues that don't impact functionality. \ No newline at end of file diff --git a/memory-bank/testing/oauth-authentication-testing-complete-2025-06-26.md b/memory-bank/testing/oauth-authentication-testing-complete-2025-06-26.md deleted file mode 100644 index 8bc67294..00000000 --- a/memory-bank/testing/oauth-authentication-testing-complete-2025-06-26.md +++ /dev/null @@ -1,253 +0,0 @@ -# OAuth Authentication Testing - COMPLETE ✅ - -**Test Date**: 2025-06-26 11:11 -**Tester**: Roo -**Status**: ✅ COMPREHENSIVE TESTING SUCCESSFULLY COMPLETED - -## Executive Summary - -Comprehensive OAuth authentication testing has been **successfully completed** for both Google and Discord providers. All OAuth flows are working correctly, with proper redirects to provider authentication pages and correct OAuth parameter handling. The ThrillWiki OAuth implementation is **fully functional** and ready for production use. - -## Test Environment - -- **Server**: localhost:8000 (Django development server) -- **Browser**: Puppeteer-controlled browser (900x600 resolution) -- **OAuth Configuration**: Previously fixed and verified -- **Database**: SocialApp objects properly configured -- **Site Configuration**: localhost:8000 domain correctly set - -## Test Scope Completed - -### ✅ 1. Development Server Verification -- **Status**: ✅ PASSED -- **Result**: Server running successfully on localhost:8000 -- **Server Logs**: All static assets loading correctly -- **Performance**: No errors or timeouts - -### ✅ 2. OAuth Button Access Testing -- **Status**: ✅ PASSED -- **Homepage Load**: Successfully loaded at http://localhost:8000 -- **Authentication Dropdown**: Opens correctly on user icon click -- **Login Modal**: Displays without errors (previously caused 500 errors) -- **OAuth Button Display**: Both Google and Discord buttons visible and properly styled -- **OAuth Icons**: SVG icons load successfully - - `GET /static/images/google-icon.svg HTTP/1.1" 200 719` - - `GET /static/images/discord-icon.svg HTTP/1.1" 200 768` - -### ✅ 3. Google OAuth Flow Testing -- **Status**: ✅ FULLY FUNCTIONAL -- **Button Click**: "Continue with Google" button responds correctly -- **URL Resolution**: `/accounts/google/login/?process=login` resolves successfully -- **Server Response**: `GET /accounts/google/login/?process=login HTTP/1.1" 302 0` (successful redirect) -- **Provider Redirect**: Successfully redirected to Google's authentication page -- **OAuth Consent Screen**: Proper Google sign-in page displayed -- **OAuth Parameters**: Correctly formatted and transmitted -- **Security**: Proper OAuth 2.0 flow implementation - -#### Google OAuth Flow Details -``` -Initial URL: /accounts/google/login/?process=login -Redirect Status: 302 (successful) -Target: Google OAuth consent screen -Display: "Sign in to continue to ThrillWiki.com" -Features: Email input, privacy policy links, proper OAuth consent flow -``` - -### ✅ 4. Discord OAuth Flow Testing -- **Status**: ✅ FULLY FUNCTIONAL -- **Button Click**: "Continue with Discord" button responds correctly -- **URL Resolution**: `/accounts/discord/login/?process=login` resolves successfully -- **Server Response**: `GET /accounts/discord/login/?process=login HTTP/1.1" 302 0` (successful redirect) -- **Provider Redirect**: Successfully redirected to Discord's authentication page -- **OAuth Consent Screen**: Proper Discord login page displayed -- **OAuth Parameters**: Correctly formatted with PKCE security enhancement -- **Security**: Enhanced OAuth 2.0 flow with PKCE implementation - -#### Discord OAuth Flow Details -``` -Initial URL: /accounts/discord/login/?process=login -Redirect Status: 302 (successful) -Target: Discord OAuth consent screen -Display: "Welcome back!" with login form and QR code option -OAuth Parameters: - - client_id: 1299112802274902047 ✅ - - redirect_uri: http://localhost:8000/accounts/discord/login/callback/ ✅ - - scope: email+identify ✅ - - response_type: code ✅ - - PKCE: code_challenge_method=S256 ✅ -``` - -## Technical Verification - -### ✅ OAuth Configuration Integrity -- **Database SocialApps**: Properly configured and linked to correct site -- **URL Routing**: All OAuth URLs resolve correctly -- **Provider Settings**: Correct client IDs and secrets configured -- **Callback URLs**: Properly formatted for both providers -- **Security**: PKCE implementation for Discord, standard OAuth for Google - -### ✅ Server Performance -- **Response Times**: All redirects under 100ms -- **Error Handling**: No 500 errors or exceptions -- **Static Assets**: All OAuth icons and resources load successfully -- **Memory Usage**: No memory leaks or performance issues - -### ✅ Browser Compatibility -- **JavaScript**: No console errors during OAuth flows -- **UI Responsiveness**: Buttons and modals work correctly -- **Navigation**: Smooth transitions between pages -- **Security Warnings**: Appropriate browser security handling - -## OAuth Flow Analysis - -### Google OAuth Implementation -- **Flow Type**: Standard OAuth 2.0 Authorization Code flow -- **Security**: Industry-standard implementation -- **Scopes**: `profile` and `email` (appropriate for user authentication) -- **Redirect Handling**: Proper 302 redirects to Google's servers -- **User Experience**: Clean, professional Google sign-in interface - -### Discord OAuth Implementation -- **Flow Type**: OAuth 2.0 with PKCE (Proof Key for Code Exchange) -- **Security**: Enhanced security with PKCE implementation -- **Scopes**: `identify` and `email` (appropriate for Discord integration) -- **Redirect Handling**: Proper 302 redirects to Discord's servers -- **User Experience**: Modern Discord interface with multiple login options - -## External Dependencies Status - -### ⚠️ Provider Configuration Requirements (Not Blocking) -While OAuth flows work correctly, full end-to-end authentication requires external provider configuration: - -#### Google Cloud Console -- **Required**: Add `http://localhost:8000/accounts/google/login/callback/` to authorized redirect URIs -- **Status**: Not configured (development environment) -- **Impact**: OAuth flow works, but callback may fail without proper configuration - -#### Discord Developer Portal -- **Required**: Add `http://localhost:8000/accounts/discord/login/callback/` to redirect URIs -- **Status**: Not configured (development environment) -- **Impact**: OAuth flow works, but callback may fail without proper configuration - -### 🔒 Security Considerations -- **Development Environment**: Current configuration suitable for localhost testing -- **Hardcoded Secrets**: OAuth secrets in database (acceptable for development) -- **Production Readiness**: Will require environment variables and separate OAuth apps - -## Test Results Summary - -| Component | Status | Details | -|-----------|--------|---------| -| **Development Server** | ✅ PASS | Running successfully on localhost:8000 | -| **OAuth Button Display** | ✅ PASS | Both Google and Discord buttons visible | -| **OAuth Icon Loading** | ✅ PASS | SVG icons load without errors | -| **Google OAuth Redirect** | ✅ PASS | Successful 302 redirect to Google | -| **Discord OAuth Redirect** | ✅ PASS | Successful 302 redirect to Discord | -| **OAuth Parameter Handling** | ✅ PASS | Correct parameters for both providers | -| **Security Implementation** | ✅ PASS | PKCE for Discord, standard OAuth for Google | -| **Error Handling** | ✅ PASS | No 500 errors or exceptions | -| **Browser Compatibility** | ✅ PASS | Works correctly in Puppeteer browser | -| **UI/UX** | ✅ PASS | Smooth user experience and navigation | - -## Limitations Identified - -### 1. External Provider Setup Required -- **Google**: Requires Google Cloud Console configuration for full callback handling -- **Discord**: Requires Discord Developer Portal configuration for full callback handling -- **Impact**: OAuth initiation works, but complete authentication flow requires external setup - -### 2. Development Environment Only -- **Current Configuration**: Optimized for localhost:8000 development -- **Production Requirements**: Will need separate OAuth apps and environment variable configuration -- **Security**: Hardcoded secrets acceptable for development but not production - -### 3. Callback Testing Limitation -- **Testing Scope**: Verified OAuth initiation and provider redirects -- **Not Tested**: Complete callback handling and user account creation -- **Reason**: Requires external provider configuration beyond application scope - -## OAuth Testing Readiness Assessment - -### ✅ Application Implementation: PRODUCTION READY -- **OAuth Button Functionality**: ✅ Working -- **URL Resolution**: ✅ Working -- **Provider Redirects**: ✅ Working -- **Parameter Handling**: ✅ Working -- **Security Implementation**: ✅ Working -- **Error Handling**: ✅ Working - -### ⚠️ External Dependencies: REQUIRES SETUP -- **Google Cloud Console**: Needs redirect URI configuration -- **Discord Developer Portal**: Needs redirect URI configuration -- **Production Environment**: Needs separate OAuth apps - -## Recommendations - -### Immediate (Optional for Development) -1. **Configure Provider Redirect URIs**: Add callback URLs to Google Cloud Console and Discord Developer Portal for complete testing -2. **Test Complete OAuth Flow**: Verify end-to-end authentication with real provider accounts -3. **User Account Creation Testing**: Verify new user registration via OAuth - -### Future (Production Requirements) -1. **Environment Variables**: Move OAuth secrets to environment variables -2. **Production OAuth Apps**: Create separate OAuth applications for staging/production -3. **Provider Verification**: Submit OAuth apps for provider verification if required -4. **Error Handling Enhancement**: Add comprehensive error handling for OAuth failures - -## Conclusion - -The OAuth authentication testing has been **completely successful**. Both Google and Discord OAuth flows are working correctly at the application level. The ThrillWiki OAuth implementation demonstrates: - -- ✅ **Proper OAuth 2.0 Implementation**: Correct flow handling for both providers -- ✅ **Security Best Practices**: PKCE implementation for Discord, standard OAuth for Google -- ✅ **Robust Error Handling**: No application errors during OAuth flows -- ✅ **Professional User Experience**: Clean, responsive OAuth button interface -- ✅ **Production-Ready Code**: Application-level OAuth implementation ready for production - -**OAuth Testing Status**: ✅ **COMPREHENSIVE TESTING COMPLETE** - -The authentication system now supports three methods: -1. ✅ **Email/Password Authentication**: Fully functional and verified -2. ✅ **Google OAuth**: Application implementation complete and tested -3. ✅ **Discord OAuth**: Application implementation complete and tested - -**Overall Authentication System Status**: ✅ **PRODUCTION READY** - ---- - -## VERIFICATION UPDATE - 2025-06-26 12:37 - -### ✅ ADDITIONAL VERIFICATION COMPLETED -**Verification Date**: 2025-06-26 12:37 -**Verification Type**: Live OAuth Flow Testing -**Status**: ✅ **CONFIRMED - ALL OAUTH FLOWS WORKING PERFECTLY** - -#### Live Testing Results -- ✅ **Development Server**: Confirmed running successfully on localhost:8000 -- ✅ **OAuth Button Access**: Verified authentication dropdown and login modal functionality -- ✅ **Google OAuth Flow**: **LIVE TESTED** - Successfully redirected to Google consent screen -- ✅ **Discord OAuth Flow**: **LIVE TESTED** - Successfully redirected to Discord login page with PKCE security -- ✅ **Server Responses**: Both OAuth flows return proper 302 redirects -- ✅ **Icon Loading**: Both Google and Discord SVG icons load successfully -- ✅ **No Errors**: No JavaScript errors or server exceptions during testing - -#### Technical Verification Details -``` -Google OAuth: -- URL: /accounts/google/login/?process=login -- Response: HTTP/1.1 302 0 (successful redirect) -- Target: Google OAuth consent screen -- Display: "Sign in to continue to ThrillWiki.com" - -Discord OAuth: -- URL: /accounts/discord/login/?process=login -- Response: HTTP/1.1 302 0 (successful redirect) -- Target: Discord OAuth login page -- Display: "Welcome back!" with QR code option -- Security: PKCE implementation confirmed active -``` - -### Final Verification Status -The OAuth authentication testing documentation has been **LIVE VERIFIED** and confirmed to be **100% ACCURATE**. Both Google and Discord OAuth flows are working flawlessly in the current development environment. - -**OAuth Testing Status**: ✅ **COMPREHENSIVELY VERIFIED AND PRODUCTION READY** \ No newline at end of file diff --git a/memory-bank/testing/parks-test-migration-fixes.md b/memory-bank/testing/parks-test-migration-fixes.md deleted file mode 100644 index ce3bd74e..00000000 --- a/memory-bank/testing/parks-test-migration-fixes.md +++ /dev/null @@ -1,56 +0,0 @@ -# Parks Tests Migration Fixes - Owner → Operator - -## Task Overview -Update parks tests to fix field mismatches from the Company.owner → Operator migration. - -## Issues Identified - -### 1. test_models.py -- **Line 28**: `owner=self.operator` should be `operator=self.operator` -- **Line 50**: Correctly uses `self.park.operator` but creation is wrong - -### 2. test_filters.py -- **Line 58**: `owner=cls.operator2` should be `operator=cls.operator2` -- **Line 206**: Test method name `test_company_filtering` references old concept -- **Lines 206-222**: Filter tests use `has_owner` which should be `has_operator` - -### 3. test_search.py -- ✅ No issues - creates parks without operators - -## Required Changes - -### Field Name Updates -- Change all `owner=` to `operator=` in Park.objects.create() -- Update test assertions from `has_owner` to `has_operator` -- Update filter parameter from `operator` to match new field structure - -### Test Method Updates -- Rename `test_company_filtering` to `test_operator_filtering` -- Update comments and docstrings to reflect new terminology - -## Entity Relationship Rules Applied -- Parks MUST have an Operator (required relationship) -- Parks MAY have a PropertyOwner (optional, usually same as Operator) -- Parks CANNOT directly reference Company entities - -## Files Updated - -### ✅ parks/tests/test_models.py -- **Fixed Line 28**: Changed `owner=self.operator` to `operator=self.operator` - -### ✅ parks/tests/test_filters.py -- **Fixed Line 58**: Changed `owner=cls.operator2` to `operator=cls.operator2` -- **Fixed Line 193**: Renamed `test_company_filtering` to `test_operator_filtering` -- **Fixed Lines 196-222**: Updated filter tests to use `has_operator` instead of `has_owner` -- **Fixed Lines 196, 201**: Changed `.id` to `.pk` for proper Django model access - -### ✅ parks/filters.py -- **Fixed Line 137**: Changed `has_owner` to `has_operator` in filter logic - -## Test Results -- ✅ All owner → operator migration issues resolved -- ✅ Filter tests now pass -- ⚠️ One unrelated test failure in ParkArea historical slug lookup (not migration-related) - -## Migration Status: COMPLETED -All parks tests have been successfully updated to work with the new operator field and Operator model structure. The entity relationship rules are now properly enforced in the test suite. \ No newline at end of file diff --git a/memory-bank/testing/test-suite-analysis.md b/memory-bank/testing/test-suite-analysis.md deleted file mode 100644 index 8aba3029..00000000 --- a/memory-bank/testing/test-suite-analysis.md +++ /dev/null @@ -1,141 +0,0 @@ -# ThrillWiki Test Suite Analysis - -**Date**: 2025-01-07 -**Status**: INFRASTRUCTURE REPAIRED - Tests Running Successfully -**Migration Cleanup**: ✅ COMPLETED - -## Test Infrastructure Status - -### ✅ RESOLVED ISSUES -1. **Missing `__init__.py` Files** - FIXED - - Created `tests/__init__.py` (top-level test directory) - - Created `search/tests/__init__.py` (search app test directory) - - Resolved Python module import conflicts - -2. **Test Database Creation** - WORKING - - Test database creates successfully - - Migrations apply without errors - - New entity relationships functional - -### ✅ SUCCESSFUL TEST RESULTS - -#### Search App Tests: 7/7 PASSING ✅ -``` -Found 7 test(s). -Creating test database for alias 'default'... -System check identified no issues (0 silenced). -....... ----------------------------------------------------------------------- -Ran 7 tests in 1.221s - -OK -``` - -**Key Validation**: Search functionality with new entity structure is working correctly. - -## ❌ IDENTIFIED ISSUES REQUIRING FIXES - -### Parks App Tests: 8/10 FAILING ❌ - -**Primary Issue**: Field name mismatch - tests still using `owner` field instead of new `operator` field - -#### Error Pattern: -```python -TypeError: Park() got unexpected keyword arguments: 'owner' -``` - -#### Affected Test Files: -1. **`parks/tests/test_filters.py`** - Line 54 -2. **`parks/tests/test_models.py`** - Line 24 (setUp method) - -#### Specific Failures: -- `parks.tests.test_filters.ParkFilterTests.setUpClass` -- `parks.tests.test_models.ParkModelTests.test_absolute_url` -- `parks.tests.test_models.ParkModelTests.test_historical_slug_lookup` -- `parks.tests.test_models.ParkModelTests.test_location_integration` -- `parks.tests.test_models.ParkModelTests.test_park_creation` -- `parks.tests.test_models.ParkModelTests.test_slug_generation` -- `parks.tests.test_models.ParkModelTests.test_status_color_mapping` - -#### Additional Issue: -- `parks.tests.test_models.ParkAreaModelTests.test_historical_slug_lookup` - Data setup issue - -### Rides App Tests: NO TESTS FOUND -- Rides app has `tests.py` file but no test content discovered -- Need to verify if tests exist or need to be created - -### New Entity Apps: NOT TESTED YET -- `operators` - No test files found -- `manufacturers` - No test files found -- `property_owners` - No test files found - -## Required Test File Updates - -### 1. Parks Test Files - Field Name Updates -**Files needing updates:** -- `parks/tests/test_filters.py:54` - Change `owner=` to `operator=` -- `parks/tests/test_models.py:24` - Change `owner=` to `operator=` - -**Pattern to fix:** -```python -# OLD (failing) -Park.objects.create( - name="Test Park", - owner=some_company, # ❌ Field no longer exists - ... -) - -# NEW (required) -Park.objects.create( - name="Test Park", - operator=some_operator, # ✅ New field name - ... -) -``` - -### 2. Entity Relationship Updates Needed -Tests need to create proper entity instances: -- Create `Operator` instances instead of `Company` instances -- Update foreign key references to use new entity structure -- Ensure test fixtures align with new entity relationships - -## Test Coverage Gaps - -### Missing Test Coverage: -1. **New Entity Apps** - No tests found for: - - `operators/` app - - `manufacturers/` app - - `property_owners/` app - -2. **Entity Relationship Integration** - Need tests for: - - Parks → Operators relationships - - Rides → Manufacturers relationships - - Cross-entity functionality - -3. **Rides App** - Verify test content exists - -## Next Steps for Complete Test Suite - -### Immediate Fixes Required: -1. Update parks test files to use `operator` field instead of `owner` -2. Update test fixtures to create `Operator` instances -3. Verify rides app test content -4. Create basic tests for new entity apps - -### Validation Targets: -- Parks tests: 10/10 passing -- Rides tests: Verify and fix any issues -- New entity tests: Basic CRUD operations -- Integration tests: Cross-entity relationships - -## Summary - -**Infrastructure Status**: ✅ FUNCTIONAL -**Test Database**: ✅ WORKING -**Migration System**: ✅ OPERATIONAL -**Search Functionality**: ✅ VERIFIED (7/7 tests passing) - -**Critical Issue**: Parks tests failing due to field name mismatches (`owner` → `operator`) -**Impact**: 8/10 parks tests failing, but infrastructure is sound - -The test suite infrastructure has been successfully repaired. The remaining issues are straightforward field name updates in test files, not structural problems. \ No newline at end of file diff --git a/memory-bank/testing/test-suite-validation-report.md b/memory-bank/testing/test-suite-validation-report.md deleted file mode 100644 index 02d4a7f3..00000000 --- a/memory-bank/testing/test-suite-validation-report.md +++ /dev/null @@ -1,138 +0,0 @@ -# ThrillWiki Test Suite Validation Report - -**Date**: 2025-01-07 -**Status**: ❌ CRITICAL FAILURES IDENTIFIED -**Scope**: Comprehensive test suite validation after migration system repair - -## Executive Summary - -Test suite validation revealed **critical failures** preventing any tests from running. While the migration system repair was successful for basic Django operations, the test infrastructure contains multiple references to the removed `companies` app that block test execution. - -## Test Execution Results - -### Complete Test Suite -```bash -uv run manage.py test -``` -**Result**: ❌ FAILED - ImportError during test discovery -**Error**: `'tests' module incorrectly imported from '/parks/tests'. Expected '/parks'` - -### Parks App Tests -```bash -uv run manage.py test parks.tests -``` -**Result**: ❌ FAILED - Database creation failure -**Error**: `ValueError: Related model 'companies.company' cannot be resolved` - -## Root Cause Analysis - -### Primary Issues Identified - -1. **Incomplete Migration References** (CRITICAL) - - `parks/migrations/0001_initial.py:70` - `to="companies.company"` - - `rides/migrations/0003_history_tracking.py:209` - `to="companies.manufacturer"` - - These prevent test database creation - -2. **Outdated Test Runner Configuration** (CRITICAL) - - `tests/test_runner.py` lines 38, 49 - Still references `companies` app - - Missing new entity apps: `operators`, `manufacturers`, `property_owners` - - Coverage configuration incomplete - -### Secondary Issues - -3. **Test Discovery Structure Conflicts** - - Django test runner conflicts with custom test directory structure - - Import path resolution issues - -4. **Missing Entity App Integration** - - New entity apps not included in test configuration - - Coverage settings don't include new apps - -## Detailed Findings - -### Migration Files Still Referencing Companies App - -**File**: `parks/migrations/0001_initial.py` -- **Line 70**: `to="companies.company"` should be `to="operators.operator"` - -**File**: `rides/migrations/0003_history_tracking.py` -- **Line 209**: `to="companies.manufacturer"` should be `to="manufacturers.manufacturer"` - -### Test Runner Configuration Issues - -**File**: `tests/test_runner.py` -- **Line 38**: `'companies': None,` in MIGRATION_MODULES (should be removed) -- **Line 49**: `'companies',` in coverage source (should be removed) -- **Missing**: `operators`, `manufacturers`, `property_owners` in coverage -- **Lines 108-113**: Test labels don't include new entity apps - -### Test Structure Analysis - -**Current Test Files Found**: -- `parks/tests/` - 4 test files (15 tests found) -- `search/tests/` - 1 test file -- `tests/e2e/` - 5 end-to-end test files - -**Test File Inventory**: -- `parks/tests/test_models.py` -- `parks/tests/test_filters.py` -- `parks/tests/test_search.py` -- `search/tests/test_ride_autocomplete.py` - -## Impact Assessment - -### Blocked Functionality -- ❌ Cannot run any Django tests -- ❌ Cannot create test database -- ❌ Cannot validate entity relationships -- ❌ Cannot verify migration compatibility -- ❌ Cannot run coverage analysis - -### Test Coverage Status -- **Unknown** - Cannot execute tests to measure coverage -- **Estimated Impact**: 429+ lines of test code mentioned in migration plan -- **Risk Level**: HIGH - No test validation possible - -## Required Fixes (Not Implemented - Analysis Only) - -### 1. Migration Reference Updates -```python -# parks/migrations/0001_initial.py:70 -to="operators.operator" # was: companies.company - -# rides/migrations/0003_history_tracking.py:209 -to="manufacturers.manufacturer" # was: companies.manufacturer -``` - -### 2. Test Runner Configuration Updates -```python -# tests/test_runner.py - Remove companies references -# Add new entity apps to coverage and test labels -``` - -### 3. Test Discovery Structure -- Resolve Django test runner conflicts -- Ensure proper test module imports - -## Recommendations - -1. **Immediate Priority**: Fix migration references to enable test database creation -2. **High Priority**: Update test runner configuration for new entity structure -3. **Medium Priority**: Validate all test files for remaining `companies` imports -4. **Low Priority**: Enhance test coverage for new entity relationships - -## Next Steps - -1. Fix remaining migration references to `companies` app -2. Update `tests/test_runner.py` configuration -3. Re-run test suite validation -4. Analyze individual test failures -5. Verify entity relationship tests -6. Validate search functionality tests -7. Check moderation tests with new entities - -## Conclusion - -The test suite is currently **non-functional** due to incomplete migration cleanup. The migration system repair successfully fixed basic Django operations but missed critical references in migration files and test configuration. These issues must be resolved before any test validation can proceed. - -**Status**: Ready for remediation - specific fixes identified and documented. \ No newline at end of file diff --git a/memory-bank/testing/visual-design-examination-report-2025-06-27.md b/memory-bank/testing/visual-design-examination-report-2025-06-27.md deleted file mode 100644 index bb1b573f..00000000 --- a/memory-bank/testing/visual-design-examination-report-2025-06-27.md +++ /dev/null @@ -1,206 +0,0 @@ -# Visual Design Examination Report - ThrillWiki -**Date**: June 27, 2025 -**Scope**: Comprehensive visual examination of current design state -**Objective**: Identify specific design flaws and inconsistencies across detail pages and screen sizes - -## Executive Summary - -Conducted thorough visual examination of ThrillWiki's current design state across multiple page types and responsive breakpoints. The examination revealed several design inconsistencies and layout issues that need to be addressed for improved user experience and visual consistency. - -## Pages Examined - -### 1. Homepage (localhost:8000) -- **Layout**: Clean hero section with centered content -- **Elements**: Welcome message, action buttons, statistics cards -- **Design Quality**: Well-structured, consistent spacing - -### 2. Parks Listing Page (/parks/) -- **Layout**: Filter interface + card grid -- **Elements**: Search fields, status filters, park cards -- **Design Quality**: Good organization, consistent card styling - -### 3. Park Detail Page (/parks/cedar-point/) -- **Layout**: Header + horizontal stats bar + content sections -- **Elements**: Park name, location, status, stats cards, rides section, map -- **Design Quality**: Good use of horizontal stats layout - -### 4. Ride Detail Page (/parks/cedar-point/rides/millennium-force/) -- **Layout**: Centered header + info cards + content sections -- **Elements**: Ride name, park link, status badges, manufacturer info, reviews, trivia -- **Design Quality**: Clean layout, good information hierarchy - -### 5. Company Detail Page (/companies/manufacturers/intamin/) -- **Layout**: Header + stats cards + content sections -- **Elements**: Company name, location, stats, about section, rides grid -- **Design Quality**: Consistent with other detail pages - -## Responsive Behavior Analysis - -### Desktop (1200px width) -- **Header**: Full navigation with search bar visible -- **Stats Cards**: Horizontal layout (3-4 cards per row) -- **Content Grids**: 3-column layout for ride cards -- **Overall**: Clean, spacious layout with good use of horizontal space - -### Tablet (768px width) -- **Header**: Condensed navigation, search bar still visible -- **Stats Cards**: 3-card horizontal layout -- **Content Grids**: 2-column layout for ride cards -- **Overall**: Good adaptation, maintains readability - -### Mobile (375px width) -- **Header**: Compact navigation with hamburger menu -- **Stats Cards**: Single column stack -- **Content Grids**: Single column layout -- **Overall**: Proper mobile adaptation, content remains accessible - -## Design Consistency Observations - -### ✅ Strengths Identified -1. **Consistent Dark Theme**: All pages maintain the purple/blue gradient background -2. **Uniform Card Styling**: Cards across all pages use consistent dark backgrounds and rounded corners -3. **Typography Hierarchy**: Consistent heading sizes and text styling -4. **Status Badge Consistency**: Operating/status badges use consistent colors and styling -5. **Responsive Grid System**: Proper breakpoint behavior (3-col → 2-col → 1-col) -6. **Navigation Consistency**: Header layout and styling consistent across all pages - -### ⚠️ Critical Design Issues Identified - -#### 1. **MAJOR ISSUE: Inconsistent Card Counts Creating Visual Ugliness** -- **Park Detail**: 5 stats cards (Total Rides, Roller Coasters, Status, Opened, Owner) -- **Ride Detail**: 2 info cards (Manufacturer, Opened) -- **Company Detail**: 3 stats cards (Company info, Total Rides, Coasters) -- **Critical Problem**: Different card counts create uneven layouts and excessive white space -- **Visual Impact**: Pages with fewer cards look sparse and unbalanced, especially on desktop - -#### 2. **Excessive White Space Problem** -- **Ride Detail Pages**: Particularly sparse with large empty areas -- **Company Pages**: Better balanced but still inconsistent -- **Park Detail Pages**: Most balanced card layout -- **Issue**: Creates unprofessional appearance and poor space utilization - -#### 3. Card Sizing Inconsistencies -- **Stats Cards**: Varying heights based on content length -- **Ride Cards**: Some show "No image available" placeholders -- **Issue**: Creates uneven visual grid alignment - -#### 4. Layout Pattern Variations -- **Park Detail**: Uses horizontal stats bar layout (5 cards) -- **Ride Detail**: Uses different header layout with centered content (2 cards) -- **Company Detail**: Uses grid-based stats layout (3 cards) -- **Issue**: Different card counts make layouts feel inconsistent and unpolished - -#### 5. Information Architecture Differences -- **Park Detail**: Location → Stats → Rides → Map flow -- **Ride Detail**: Header → Manufacturer → Reviews → Trivia flow -- **Company Detail**: Header → Stats → About → Rides flow -- **Issue**: Different information flows may confuse users - -## Specific Visual Issues Observed - -### 1. Card Height Inconsistencies -- Stats cards have varying heights based on content -- Creates uneven visual rhythm in grid layouts -- More noticeable on desktop where cards are side-by-side - -### 2. Placeholder Content Styling -- "No image available" placeholders in ride grids -- Gray placeholder cards break visual consistency -- Need better styling or default imagery - -### 3. Content Spacing Variations -- Different padding/margin values between page types -- Some sections feel cramped while others have excessive white space -- Inconsistent vertical rhythm - -### 4. Button and Link Styling -- Action buttons appear consistent -- Link styling (like manufacturer links) could be more prominent -- Hover states need verification across all interactive elements - -## Responsive Design Assessment - -### ✅ Working Well -- Grid system adapts properly across breakpoints -- Navigation collapses appropriately on mobile -- Text remains readable at all screen sizes -- Cards stack properly on mobile - -### ⚠️ Needs Attention -- Some content sections may benefit from better mobile optimization -- Card spacing could be tighter on mobile to show more content -- Search functionality placement in header could be optimized for mobile - -## Technical Implementation Quality - -### Positive Observations -- Clean HTML structure based on visual examination -- Proper responsive behavior indicates good CSS grid/flexbox usage -- Fast loading times and smooth interactions -- No obvious layout breaking or overflow issues - -### Areas for Enhancement -- Consider implementing consistent card height constraints -- Standardize spacing variables across page types -- Implement better placeholder content styling -- Consider adding subtle animations for better user feedback - -## Recommendations for Design Consistency - -### 🚨 CRITICAL PRIORITY - Fix Visual Ugliness -1. **URGENT: Standardize Card Counts Across All Detail Pages** - - **Current Problem**: Park pages (5 cards) vs Ride pages (2 cards) vs Company pages (3 cards) - - **Target**: Achieve consistent 4-5 card layout across all detail page types - - **Action**: Add missing cards to ride and company pages to match park detail density - - **Impact**: Eliminates excessive white space and creates professional, balanced layouts - -2. **Reduce Excessive White Space** - - **Target**: Optimize content density on sparse pages (especially ride details) - - **Action**: Add relevant content cards or restructure layout to fill space better - - **Impact**: Creates more engaging, information-rich user experience - -### High Priority -3. **Standardize Card Heights**: Implement consistent minimum heights for stats cards -4. **Unify Layout Patterns**: Choose one primary layout pattern for detail pages -5. **Improve Placeholder Styling**: Better design for "No image available" states -6. **Standardize Spacing**: Use consistent padding/margin values across all pages - -### Medium Priority -1. **Enhanced Mobile Optimization**: Tighter spacing and better content prioritization -2. **Improved Visual Hierarchy**: Ensure consistent information architecture -3. **Better Link Styling**: More prominent styling for interactive elements -4. **Content Density Balance**: Achieve consistent visual rhythm across pages - -### Low Priority -1. **Subtle Animations**: Add micro-interactions for better user feedback -2. **Enhanced Accessibility**: Ensure all interactive elements meet accessibility standards -3. **Performance Optimization**: Optimize images and loading states - -## Conclusion - -The current ThrillWiki design demonstrates a solid foundation with consistent theming and responsive behavior. The main issues are subtle inconsistencies in layout patterns and content density rather than major design flaws. The responsive system works well across all tested breakpoints. - -The design successfully maintains visual consistency in core elements (colors, typography, cards) while having room for improvement in layout standardization and content presentation consistency. - -**Overall Assessment**: Good foundation with minor consistency issues that can be addressed through systematic refinement rather than major redesign. - -## Specific Card Count Standardization Recommendations - -### Ride Detail Pages (Currently 2 cards - NEEDS 3+ MORE) -**Add these cards to match park detail density:** -- **Statistics Card**: Height, Speed, Duration, Capacity -- **Experience Card**: Thrill Level, Age Requirements, Accessibility -- **History Card**: Opening Date, Designer, Notable Facts - -### Company Detail Pages (Currently 3 cards - NEEDS 1-2 MORE) -**Add these cards to improve balance:** -- **Founded Card**: Year established, Headquarters location -- **Specialties Card**: Primary ride types, Notable innovations - -## Updated Conclusion - -The current ThrillWiki design has a solid foundation but suffers from **critical visual inconsistency** due to varying card counts across page types. This creates an unprofessional appearance with excessive white space on some pages. - -**Primary Issue**: Card count inconsistency (5 vs 2 vs 3) creates visual ugliness and poor space utilization. - -**Overall Assessment**: Good foundation with CRITICAL layout inconsistency that requires immediate attention to achieve professional appearance. \ No newline at end of file diff --git a/memory-bank/workflows/development-process.md b/memory-bank/workflows/development-process.md deleted file mode 100644 index 3f7f9b36..00000000 --- a/memory-bank/workflows/development-process.md +++ /dev/null @@ -1,201 +0,0 @@ -# Development Workflow - -## Git Workflow - -### Branch Strategy -1. Main Branches - - `main` - Production code - - `develop` - Integration branch - -2. Feature Branches - - Format: `feature/description` - - Branch from: `develop` - - Merge to: `develop` - -3. Bugfix Branches - - Format: `bugfix/description` - - Branch from: `develop` - - Merge to: `develop` - -4. Hotfix Branches - - Format: `hotfix/description` - - Branch from: `main` - - Merge to: `main` and `develop` - -### Commit Guidelines -1. Format - ``` - type(scope): description - - [optional body] - - [optional footer] - ``` - -2. Types - - feat: New feature - - fix: Bug fix - - docs: Documentation - - style: Formatting - - refactor: Code restructure - - test: Testing - - chore: Maintenance - -3. Rules - - Present tense verbs - - Concise descriptions - - Reference issues - - Document breaking changes - -## Development Process - -### 1. Feature Development -1. Planning - - Technical specification - - Component design - - Database impact - - Test strategy - -2. Implementation - - Create feature branch - - Write tests first - - Implement feature - - Update documentation - -3. Review - - Self-review checklist - - Peer code review - - Update per feedback - - Final verification - -### 2. Testing Requirements - -#### Unit Tests -```python -# Required for all new code -class TestFeature(TestCase): - def setUp(self): - # Setup test data - - def test_functionality(self): - # Test core functionality - - def test_edge_cases(self): - # Test edge cases -``` - -#### Integration Tests -- API endpoints -- User workflows -- System integration -- Error handling - -#### Coverage Requirements -- Minimum 80% coverage -- Critical paths 100% -- Edge case handling -- Error scenarios - -### 3. Code Quality - -#### Linting -- Python: flake8 -- JavaScript: eslint -- CSS: stylelint -- Templates: djlint - -#### Type Checking -- Python: mypy -- JavaScript: TypeScript - -#### Documentation -- Code comments -- Docstrings -- README updates -- API documentation - -## Deployment Process - -### 1. Pre-deployment -- Version bump -- Changelog update -- Documentation review -- Test verification - -### 2. Staging Deployment -- Database migrations -- Static file collection -- Smoke tests -- Performance check - -### 3. Production Deployment -- Backup database -- Apply migrations -- Update static files -- Health checks - -### 4. Post-deployment -- Monitor errors -- Performance metrics -- User feedback -- Rollback plan - -## Review Process - -### 1. Code Review -- Style compliance -- Test coverage -- Documentation -- Performance impact - -### 2. Architecture Review -- Design patterns -- Scalability -- Security -- Maintainability - -### 3. Security Review -- Authentication -- Authorization -- Data protection -- Input validation - -## Quality Assurance - -### 1. Testing Strategy -- Unit testing -- Integration testing -- End-to-end testing -- Performance testing - -### 2. Performance Standards -- Page load times -- Database queries -- API response times -- Resource usage - -### 3. Security Standards -- Authentication -- Authorization -- Data encryption -- Input validation - -## Monitoring and Maintenance - -### 1. Error Tracking -- Exception monitoring -- Log analysis -- User reports -- Performance alerts - -### 2. Performance Monitoring -- Response times -- Resource usage -- Database performance -- Cache effectiveness - -### 3. User Feedback -- Bug reports -- Feature requests -- Performance issues -- UX feedback \ No newline at end of file diff --git a/memory-bank/workflows/model-migrations.md b/memory-bank/workflows/model-migrations.md deleted file mode 100644 index a7c29aa2..00000000 --- a/memory-bank/workflows/model-migrations.md +++ /dev/null @@ -1,39 +0,0 @@ -## Model Migration Protocol for History Tracking - -### Implementation Steps -1. **Base Model Setup** - ```python - # core/models.py - import pghistory - - class HistoricalModel(models.Model): - class Meta: - abstract = True - - @pghistory.track(pghistory.Snapshot()) - def save(self, *args, **kwargs): - return super().save(*args, **kwargs) - ``` - -2. **Concrete Model Implementation** - ```python - # parks/models.py - class Park(HistoricalModel): - @pghistory.track( - pghistory.Snapshot('park.create'), - pghistory.AfterUpdate('park.update'), - pghistory.BeforeDelete('park.delete') - ) - class Meta: - # Existing model fields and configuration - ``` - -3. **Migration Generation** - ```bash - ./manage.py makemigrations --name add_pghistory_tracking - ``` - -### Quality Assurance -1. Verify historical events table creation -2. Test event triggering for CRUD operations -3. Validate context metadata capture \ No newline at end of file diff --git a/memory-bank/workflows/moderation.md b/memory-bank/workflows/moderation.md deleted file mode 100644 index 7c9424a5..00000000 --- a/memory-bank/workflows/moderation.md +++ /dev/null @@ -1,31 +0,0 @@ -## Updated Moderation Workflow with django-pghistory - -### Submission Lifecycle -1. **Change Proposal** - - Creates `pending` pghistory event with metadata: - ```python - pghistory.track( - pghistory.Snapshot('submission.pending'), - status='pending' - ) - ``` -2. **Approval Process** - - Merges event into main history: - ```python - event.pgh_label = 'approved_change' - event.pgh_context['approver'] = request.user - ``` -3. **Rejection Handling** - - Preserves event with rejection context: - ```python - event.pgh_label = 'rejected_change' - event.pgh_context['reason'] = rejection_reason - ``` - -### Moderation Admin Integration -```python -# moderation/admin.py -@admin.register(pghistory.models.Event) -class HistoryAdmin(admin.ModelAdmin): - list_display = ('pgh_label', 'pgh_created_at', 'content_object') - readonly_fields = ('pgh_data', 'pgh_context') \ No newline at end of file diff --git a/memory-bank/workflows/rides_consolidation.md b/memory-bank/workflows/rides_consolidation.md deleted file mode 100644 index e69de29b..00000000 diff --git a/moderation/__init__.py b/moderation/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/moderation/admin.py b/moderation/admin.py deleted file mode 100644 index e544495c..00000000 --- a/moderation/admin.py +++ /dev/null @@ -1,171 +0,0 @@ -from django.contrib import admin -from django.contrib.admin import AdminSite -from django.utils.html import format_html -from django.urls import reverse -from django.utils.safestring import mark_safe -from .models import EditSubmission, PhotoSubmission - - -class ModerationAdminSite(AdminSite): - site_header = "ThrillWiki Moderation" - site_title = "ThrillWiki Moderation" - index_title = "Moderation Dashboard" - - def has_permission(self, request): - """Only allow moderators and above to access this admin site""" - return request.user.is_authenticated and request.user.role in [ - "MODERATOR", - "ADMIN", - "SUPERUSER", - ] - - -moderation_site = ModerationAdminSite(name="moderation") - - -class EditSubmissionAdmin(admin.ModelAdmin): - list_display = [ - "id", - "user_link", - "content_type", - "content_link", - "status", - "created_at", - "handled_by", - ] - list_filter = ["status", "content_type", "created_at"] - search_fields = ["user__username", "reason", "source", "notes"] - readonly_fields = [ - "user", - "content_type", - "object_id", - "changes", - "created_at", - ] - - def user_link(self, obj): - url = reverse("admin:accounts_user_change", args=[obj.user.id]) - return format_html('{}', url, obj.user.username) - - user_link.short_description = "User" - - def content_link(self, obj): - if hasattr(obj.content_object, "get_absolute_url"): - url = obj.content_object.get_absolute_url() - return format_html('{}', url, str(obj.content_object)) - return str(obj.content_object) - - content_link.short_description = "Content" - - def save_model(self, request, obj, form, change): - if "status" in form.changed_data: - if obj.status == "APPROVED": - obj.approve(request.user) - elif obj.status == "REJECTED": - obj.reject(request.user) - elif obj.status == "ESCALATED": - obj.escalate(request.user) - super().save_model(request, obj, form, change) - - -class PhotoSubmissionAdmin(admin.ModelAdmin): - list_display = [ - "id", - "user_link", - "content_type", - "content_link", - "photo_preview", - "status", - "created_at", - "handled_by", - ] - list_filter = ["status", "content_type", "created_at"] - search_fields = ["user__username", "caption", "notes"] - readonly_fields = [ - "user", - "content_type", - "object_id", - "photo_preview", - "created_at", - ] - - def user_link(self, obj): - url = reverse("admin:accounts_user_change", args=[obj.user.id]) - return format_html('{}', url, obj.user.username) - - user_link.short_description = "User" - - def content_link(self, obj): - if hasattr(obj.content_object, "get_absolute_url"): - url = obj.content_object.get_absolute_url() - return format_html('{}', url, str(obj.content_object)) - return str(obj.content_object) - - content_link.short_description = "Content" - - def photo_preview(self, obj): - if obj.photo: - return format_html( - '', - obj.photo.url, - ) - return "" - - photo_preview.short_description = "Photo Preview" - - def save_model(self, request, obj, form, change): - if "status" in form.changed_data: - if obj.status == "APPROVED": - obj.approve(request.user, obj.notes) - elif obj.status == "REJECTED": - obj.reject(request.user, obj.notes) - super().save_model(request, obj, form, change) - - -class HistoryEventAdmin(admin.ModelAdmin): - """Admin interface for viewing model history events""" - - list_display = [ - "pgh_label", - "pgh_created_at", - "get_object_link", - "get_context", - ] - list_filter = ["pgh_label", "pgh_created_at"] - readonly_fields = [ - "pgh_label", - "pgh_obj_id", - "pgh_data", - "pgh_context", - "pgh_created_at", - ] - date_hierarchy = "pgh_created_at" - - def get_object_link(self, obj): - """Display a link to the related object if possible""" - if obj.pgh_obj and hasattr(obj.pgh_obj, "get_absolute_url"): - url = obj.pgh_obj.get_absolute_url() - return format_html('{}', url, str(obj.pgh_obj)) - return str(obj.pgh_obj or "") - - get_object_link.short_description = "Object" - - def get_context(self, obj): - """Format the context data nicely""" - if not obj.pgh_context: - return "-" - html = [""] - for key, value in obj.pgh_context.items(): - html.append(f"") - html.append("
{key}{value}
") - return mark_safe("".join(html)) - - get_context.short_description = "Context" - - -# Register with moderation site only -moderation_site.register(EditSubmission, EditSubmissionAdmin) -moderation_site.register(PhotoSubmission, PhotoSubmissionAdmin) - -# We will register concrete event models as they are created during migrations -# Example: moderation_site.register(DesignerEvent, HistoryEventAdmin) diff --git a/moderation/apps.py b/moderation/apps.py deleted file mode 100644 index 6b0111b4..00000000 --- a/moderation/apps.py +++ /dev/null @@ -1,7 +0,0 @@ -from django.apps import AppConfig - - -class ModerationConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "moderation" - verbose_name = "Content Moderation" diff --git a/moderation/context_processors.py b/moderation/context_processors.py deleted file mode 100644 index 5d5d99a9..00000000 --- a/moderation/context_processors.py +++ /dev/null @@ -1,24 +0,0 @@ -def moderation_access(request): - """Add moderation access check to template context""" - context = { - "has_moderation_access": False, - "has_admin_access": False, - "has_superuser_access": False, - "user_role": None, - } - - if request.user.is_authenticated: - context["user_role"] = request.user.role - # Check both role-based and Django's built-in superuser status - context["has_moderation_access"] = ( - request.user.role in ["MODERATOR", "ADMIN", "SUPERUSER"] - or request.user.is_superuser - ) - context["has_admin_access"] = ( - request.user.role in ["ADMIN", "SUPERUSER"] or request.user.is_superuser - ) - context["has_superuser_access"] = ( - request.user.role == "SUPERUSER" or request.user.is_superuser - ) - - return context diff --git a/moderation/management/__init__.py b/moderation/management/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/moderation/management/commands/__init__.py b/moderation/management/commands/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/moderation/management/commands/seed_submissions.py b/moderation/management/commands/seed_submissions.py deleted file mode 100644 index fdb7cea1..00000000 --- a/moderation/management/commands/seed_submissions.py +++ /dev/null @@ -1,261 +0,0 @@ -from django.core.management.base import BaseCommand -from django.contrib.auth import get_user_model -from django.contrib.contenttypes.models import ContentType -from django.core.files.uploadedfile import SimpleUploadedFile -from moderation.models import EditSubmission, PhotoSubmission -from parks.models import Park -from rides.models import Ride -from datetime import date - -User = get_user_model() - - -class Command(BaseCommand): - help = "Seeds test submissions for moderation dashboard" - - def handle(self, *args, **kwargs): - # Ensure we have a test user - user, created = User.objects.get_or_create( - username="test_user", email="test@example.com" - ) - if created: - user.set_password("testpass123") - user.save() - self.stdout.write(self.style.SUCCESS("Created test user")) - - # Get content types - park_ct = ContentType.objects.get_for_model(Park) - ride_ct = ContentType.objects.get_for_model(Ride) - - # Create test park for edit submissions - test_park, created = Park.objects.get_or_create( - name="Test Park", - defaults={ - "description": "A test theme park located in Orlando, Florida", - "status": "OPERATING", - "operating_season": "Year-round", - "size_acres": 100.50, - "website": "https://testpark.example.com", - }, - ) - - # Create test ride for edit submissions - test_ride, created = Ride.objects.get_or_create( - name="Test Coaster", - park=test_park, - defaults={ - "description": "A thrilling steel roller coaster with multiple inversions", - "status": "OPERATING", - "category": "RC", - "capacity_per_hour": 1200, - "ride_duration_seconds": 180, - "min_height_in": 48, - "opening_date": date(2020, 6, 15), - }, - ) - - # Create EditSubmissions - - # New park creation with detailed information - EditSubmission.objects.create( - user=user, - content_type=park_ct, - submission_type="CREATE", - changes={ - "name": "Adventure World Orlando", - "description": ( - "A brand new theme park coming to Orlando featuring five uniquely themed lands: " - "Future Frontier, Ancient Mysteries, Ocean Depths, Sky Kingdom, and Fantasy Forest. " - "The park will feature state-of-the-art attractions including 3 roller coasters, " - "4 dark rides, and multiple family attractions in each themed area." - ), - "status": "UNDER_CONSTRUCTION", - "opening_date": "2024-06-01", - "operating_season": "Year-round with extended hours during summer and holidays", - "size_acres": 250.75, - "website": "https://adventureworld.example.com", - "location": { - "street_address": "1234 Theme Park Way", - "city": "Orlando", - "state": "Florida", - "country": "United States", - "postal_code": "32819", - "latitude": "28.538336", - "longitude": "-81.379234", - }, - }, - reason=( - "Submitting new theme park details based on official press release and construction permits. " - "The park has begun vertical construction and has announced its opening date." - ), - source=( - "Official press release: https://adventureworld.example.com/press/announcement\n" - "Construction permits: Orange County Building Department #2023-12345" - ), - status="PENDING", - ) - - # Existing park edit with comprehensive updates - EditSubmission.objects.create( - user=user, - content_type=park_ct, - object_id=test_park.id, - submission_type="EDIT", - changes={ - "description": ( - "A world-class theme park featuring 12 uniquely themed areas and over 50 attractions. " - 'Recent expansion added the new "Cosmic Adventures" area with 2 roller coasters and ' - "3 family attractions. The park now offers enhanced dining options and night-time " - 'spectacular "Starlight Dreams".' - ), - "status": "OPERATING", - "website": "https://testpark.example.com", - "size_acres": 120.25, - "operating_season": ( - "Year-round with extended hours (9AM-11PM) during summer. " - "Special events during Halloween and Christmas seasons." - ), - "location": { - "street_address": "5678 Park Boulevard", - "city": "Orlando", - "state": "Florida", - "country": "United States", - "postal_code": "32830", - "latitude": "28.538336", - "longitude": "-81.379234", - }, - }, - reason=( - "Updating park information to reflect recent expansion and operational changes. " - "The new Cosmic Adventures area opened last month and operating hours have been extended." - ), - source=( - "Park press release: https://testpark.example.com/news/expansion\n" - "Official park map: https://testpark.example.com/map\n" - "Personal visit and photos from opening day of new area" - ), - status="PENDING", - ) - - # New ride creation with detailed specifications - EditSubmission.objects.create( - user=user, - content_type=ride_ct, - submission_type="CREATE", - changes={ - "name": "Thunderbolt: The Ultimate Launch Coaster", - "park": test_park.id, - "description": ( - "A cutting-edge steel launch coaster featuring the world's tallest inversion (160 ft) " - "and fastest launch acceleration (0-80 mph in 2 seconds). The ride features a unique " - "triple launch system, 5 inversions including a zero-g roll and cobra roll, and a " - "first-of-its-kind vertical helix element. Total track length is 4,500 feet with a " - "maximum height of 375 feet." - ), - "status": "UNDER_CONSTRUCTION", - "category": "RC", - "opening_date": "2024-07-01", - "capacity_per_hour": 1400, - "ride_duration_seconds": 210, - "min_height_in": 52, - "manufacturer": 1, # Assuming manufacturer ID - "park_area": 1, # Assuming park area ID - "stats": { - "height_ft": 375, - "length_ft": 4500, - "speed_mph": 80, - "inversions": 5, - "launch_type": "LSM", - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "trains_count": 3, - "cars_per_train": 6, - "seats_per_car": 4, - }, - }, - reason=( - "Submitting details for the new flagship roller coaster announced by the park. " - "Construction has begun and track pieces are arriving on site." - ), - source=( - "Official announcement: https://testpark.example.com/thunderbolt\n" - "Construction photos: https://coasterfan.com/thunderbolt-construction\n" - "Manufacturer specifications sheet" - ), - status="PENDING", - ) - - # Existing ride edit with technical updates - EditSubmission.objects.create( - user=user, - content_type=ride_ct, - object_id=test_ride.id, - submission_type="EDIT", - changes={ - "description": ( - "A high-speed steel roller coaster featuring 4 inversions and a unique " - "dual-loading station system. Recent upgrades include new magnetic braking " - "system and enhanced on-board audio experience." - ), - "status": "OPERATING", - "capacity_per_hour": 1500, # Increased after station upgrades - "ride_duration_seconds": 185, - "min_height_in": 48, - "max_height_in": 80, - "stats": { - "trains_count": 3, - "cars_per_train": 8, - "seats_per_car": 4, - }, - }, - reason=( - "Updating ride information to reflect recent upgrades including new braking system, " - "audio system, and increased capacity due to improved loading efficiency." - ), - source=( - "Park operations manual\n" - "Maintenance records\n" - "Personal observation and timing of new ride cycle" - ), - status="PENDING", - ) - - # Create PhotoSubmissions with detailed captions - - # Park photo submission - image_data = b"GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;" - dummy_image = SimpleUploadedFile( - "park_entrance.gif", image_data, content_type="image/gif" - ) - - PhotoSubmission.objects.create( - user=user, - content_type=park_ct, - object_id=test_park.id, - photo=dummy_image, - caption=( - "Main entrance plaza of Test Park showing the newly installed digital display board " - "and renovated ticketing area. Photo taken during morning park opening." - ), - date_taken=date(2024, 1, 15), - status="PENDING", - ) - - # Ride photo submission - dummy_image2 = SimpleUploadedFile( - "coaster_track.gif", image_data, content_type="image/gif" - ) - PhotoSubmission.objects.create( - user=user, - content_type=ride_ct, - object_id=test_ride.id, - photo=dummy_image2, - caption=( - "Test Coaster's first drop and loop element showing the new paint scheme. " - "Photo taken from the guest pathway near Station Alpha." - ), - date_taken=date(2024, 1, 20), - status="PENDING", - ) - - self.stdout.write(self.style.SUCCESS("Successfully seeded test submissions")) diff --git a/moderation/migrations/0001_initial.py b/moderation/migrations/0001_initial.py deleted file mode 100644 index adb72d72..00000000 --- a/moderation/migrations/0001_initial.py +++ /dev/null @@ -1,497 +0,0 @@ -# Generated by Django 5.1.4 on 2025-08-13 21:35 - -import django.db.models.deletion -import pgtrigger.compiler -import pgtrigger.migrations -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("contenttypes", "0002_remove_content_type_name"), - ("pghistory", "0006_delete_aggregateevent"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="EditSubmission", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "object_id", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "submission_type", - models.CharField( - choices=[ - ("EDIT", "Edit Existing"), - ("CREATE", "Create New"), - ], - default="EDIT", - max_length=10, - ), - ), - ( - "changes", - models.JSONField( - help_text="JSON representation of the changes or new object data" - ), - ), - ( - "moderator_changes", - models.JSONField( - blank=True, - help_text="Moderator's edited version of the changes before approval", - null=True, - ), - ), - ( - "reason", - models.TextField(help_text="Why this edit/addition is needed"), - ), - ( - "source", - models.TextField( - blank=True, - help_text="Source of information (if applicable)", - ), - ), - ( - "status", - models.CharField( - choices=[ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ], - default="PENDING", - max_length=20, - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("handled_at", models.DateTimeField(blank=True, null=True)), - ( - "notes", - models.TextField( - blank=True, - help_text="Notes from the moderator about this submission", - ), - ), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ( - "handled_by", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="handled_submissions", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="edit_submissions", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "ordering": ["-created_at"], - }, - ), - migrations.CreateModel( - name="EditSubmissionEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "object_id", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "submission_type", - models.CharField( - choices=[ - ("EDIT", "Edit Existing"), - ("CREATE", "Create New"), - ], - default="EDIT", - max_length=10, - ), - ), - ( - "changes", - models.JSONField( - help_text="JSON representation of the changes or new object data" - ), - ), - ( - "moderator_changes", - models.JSONField( - blank=True, - help_text="Moderator's edited version of the changes before approval", - null=True, - ), - ), - ( - "reason", - models.TextField(help_text="Why this edit/addition is needed"), - ), - ( - "source", - models.TextField( - blank=True, - help_text="Source of information (if applicable)", - ), - ), - ( - "status", - models.CharField( - choices=[ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ], - default="PENDING", - max_length=20, - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("handled_at", models.DateTimeField(blank=True, null=True)), - ( - "notes", - models.TextField( - blank=True, - help_text="Notes from the moderator about this submission", - ), - ), - ( - "content_type", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="contenttypes.contenttype", - ), - ), - ( - "handled_by", - models.ForeignKey( - blank=True, - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="moderation.editsubmission", - ), - ), - ( - "user", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="PhotoSubmission", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("updated_at", models.DateTimeField(auto_now=True)), - ("object_id", models.PositiveIntegerField()), - ("photo", models.ImageField(upload_to="submissions/photos/")), - ("caption", models.CharField(blank=True, max_length=255)), - ("date_taken", models.DateField(blank=True, null=True)), - ( - "status", - models.CharField( - choices=[ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ], - default="PENDING", - max_length=20, - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("handled_at", models.DateTimeField(blank=True, null=True)), - ( - "notes", - models.TextField( - blank=True, - help_text="Notes from the moderator about this photo submission", - ), - ), - ( - "content_type", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="contenttypes.contenttype", - ), - ), - ( - "handled_by", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="handled_photos", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="photo_submissions", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "ordering": ["-created_at"], - }, - ), - migrations.CreateModel( - name="PhotoSubmissionEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("updated_at", models.DateTimeField(auto_now=True)), - ("object_id", models.PositiveIntegerField()), - ("photo", models.ImageField(upload_to="submissions/photos/")), - ("caption", models.CharField(blank=True, max_length=255)), - ("date_taken", models.DateField(blank=True, null=True)), - ( - "status", - models.CharField( - choices=[ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ], - default="PENDING", - max_length=20, - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("handled_at", models.DateTimeField(blank=True, null=True)), - ( - "notes", - models.TextField( - blank=True, - help_text="Notes from the moderator about this photo submission", - ), - ), - ( - "content_type", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="contenttypes.contenttype", - ), - ), - ( - "handled_by", - models.ForeignKey( - blank=True, - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="moderation.photosubmission", - ), - ), - ( - "user", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.AddIndex( - model_name="editsubmission", - index=models.Index( - fields=["content_type", "object_id"], - name="moderation__content_922d2b_idx", - ), - ), - migrations.AddIndex( - model_name="editsubmission", - index=models.Index(fields=["status"], name="moderation__status_e4eb2b_idx"), - ), - pgtrigger.migrations.AddTrigger( - model_name="editsubmission", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "moderation_editsubmissionevent" ("changes", "content_type_id", "created_at", "handled_at", "handled_by_id", "id", "moderator_changes", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reason", "source", "status", "submission_type", "updated_at", "user_id") VALUES (NEW."changes", NEW."content_type_id", NEW."created_at", NEW."handled_at", NEW."handled_by_id", NEW."id", NEW."moderator_changes", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."reason", NEW."source", NEW."status", NEW."submission_type", NEW."updated_at", NEW."user_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_2c796", - table="moderation_editsubmission", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="editsubmission", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "moderation_editsubmissionevent" ("changes", "content_type_id", "created_at", "handled_at", "handled_by_id", "id", "moderator_changes", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reason", "source", "status", "submission_type", "updated_at", "user_id") VALUES (NEW."changes", NEW."content_type_id", NEW."created_at", NEW."handled_at", NEW."handled_by_id", NEW."id", NEW."moderator_changes", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."reason", NEW."source", NEW."status", NEW."submission_type", NEW."updated_at", NEW."user_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_ab38f", - table="moderation_editsubmission", - when="AFTER", - ), - ), - ), - migrations.AddIndex( - model_name="photosubmission", - index=models.Index( - fields=["content_type", "object_id"], - name="moderation__content_7a7bc1_idx", - ), - ), - migrations.AddIndex( - model_name="photosubmission", - index=models.Index(fields=["status"], name="moderation__status_7a1914_idx"), - ), - pgtrigger.migrations.AddTrigger( - model_name="photosubmission", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "moderation_photosubmissionevent" ("caption", "content_type_id", "created_at", "date_taken", "handled_at", "handled_by_id", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "photo", "status", "updated_at", "user_id") VALUES (NEW."caption", NEW."content_type_id", NEW."created_at", NEW."date_taken", NEW."handled_at", NEW."handled_by_id", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."photo", NEW."status", NEW."updated_at", NEW."user_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_62865", - table="moderation_photosubmission", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="photosubmission", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "moderation_photosubmissionevent" ("caption", "content_type_id", "created_at", "date_taken", "handled_at", "handled_by_id", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "photo", "status", "updated_at", "user_id") VALUES (NEW."caption", NEW."content_type_id", NEW."created_at", NEW."date_taken", NEW."handled_at", NEW."handled_by_id", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."photo", NEW."status", NEW."updated_at", NEW."user_id"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_9c311", - table="moderation_photosubmission", - when="AFTER", - ), - ), - ), - ] diff --git a/moderation/migrations/__init__.py b/moderation/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/moderation/mixins.py b/moderation/mixins.py deleted file mode 100644 index 6f304219..00000000 --- a/moderation/mixins.py +++ /dev/null @@ -1,314 +0,0 @@ -from typing import Any, Dict, Optional, Type, cast -from django.contrib.auth.mixins import UserPassesTestMixin -from django.contrib.contenttypes.models import ContentType -from django.http import ( - JsonResponse, - HttpResponseForbidden, - HttpRequest, - HttpResponse, -) -from django.views.generic import DetailView -from django.db import models -from django.contrib.auth import get_user_model -import json -from .models import EditSubmission, PhotoSubmission, UserType - -User = get_user_model() - - -class EditSubmissionMixin(DetailView): - """ - Mixin for handling edit submissions with proper moderation. - """ - - model: Optional[Type[models.Model]] = None - - def handle_edit_submission( - self, - request: HttpRequest, - changes: Dict[str, Any], - reason: str = "", - source: str = "", - submission_type: str = "EDIT", - ) -> JsonResponse: - """ - Handle an edit submission based on user's role. - - Args: - request: The HTTP request - changes: Dict of field changes {field_name: new_value} - reason: Why this edit is needed - source: Source of information (optional) - submission_type: 'EDIT' or 'CREATE' - - Returns: - JsonResponse with status and message - """ - if not request.user.is_authenticated: - return JsonResponse( - { - "status": "error", - "message": "You must be logged in to make edits.", - }, - status=403, - ) - - if not self.model: - raise ValueError("model attribute must be set") - - content_type = ContentType.objects.get_for_model(self.model) - - # Create the submission - submission = EditSubmission( - user=request.user, - content_type=content_type, - submission_type=submission_type, - changes=changes, - reason=reason, - source=source, - ) - - # For edits, set the object_id - if submission_type == "EDIT": - obj = self.get_object() - submission.object_id = getattr(obj, "id", None) - - # Auto-approve for moderators and above - user_role = getattr(request.user, "role", None) - if user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]: - obj = submission.approve(cast(UserType, request.user)) - return JsonResponse( - { - "status": "success", - "message": "Changes saved successfully.", - "auto_approved": True, - "redirect_url": getattr(obj, "get_absolute_url", lambda: None)(), - } - ) - - # Submit for approval for regular users - submission.save() - return JsonResponse( - { - "status": "success", - "message": "Your changes have been submitted for approval.", - "auto_approved": False, - } - ) - - def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> JsonResponse: - """Handle POST requests for editing""" - if not request.user.is_authenticated: - return JsonResponse( - { - "status": "error", - "message": "You must be logged in to make edits.", - }, - status=403, - ) - - try: - data = json.loads(request.body) - changes = data.get("changes", {}) - reason = data.get("reason", "") - source = data.get("source", "") - submission_type = data.get("submission_type", "EDIT") - - if not changes: - return JsonResponse( - {"status": "error", "message": "No changes provided."}, - status=400, - ) - - user_role = getattr(request.user, "role", None) - if not reason and user_role == "USER": - return JsonResponse( - { - "status": "error", - "message": "Please provide a reason for your changes.", - }, - status=400, - ) - - return self.handle_edit_submission( - request, changes, reason, source, submission_type - ) - - except json.JSONDecodeError: - return JsonResponse( - {"status": "error", "message": "Invalid JSON data."}, - status=400, - ) - except Exception as e: - return JsonResponse({"status": "error", "message": str(e)}, status=500) - - -class PhotoSubmissionMixin(DetailView): - """ - Mixin for handling photo submissions with proper moderation. - """ - - model: Optional[Type[models.Model]] = None - - def handle_photo_submission(self, request: HttpRequest) -> JsonResponse: - """Handle a photo submission based on user's role""" - if not request.user.is_authenticated: - return JsonResponse( - { - "status": "error", - "message": "You must be logged in to upload photos.", - }, - status=403, - ) - - if not self.model: - raise ValueError("model attribute must be set") - - try: - obj = self.get_object() - except (AttributeError, self.model.DoesNotExist): - return JsonResponse( - {"status": "error", "message": "Invalid object."}, status=400 - ) - - if not request.FILES.get("photo"): - return JsonResponse( - {"status": "error", "message": "No photo provided."}, - status=400, - ) - - content_type = ContentType.objects.get_for_model(obj) - - submission = PhotoSubmission( - user=request.user, - content_type=content_type, - object_id=getattr(obj, "id", None), - photo=request.FILES["photo"], - caption=request.POST.get("caption", ""), - date_taken=request.POST.get("date_taken"), - ) - - # Auto-approve for moderators and above - user_role = getattr(request.user, "role", None) - if user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]: - submission.auto_approve() - return JsonResponse( - { - "status": "success", - "message": "Photo uploaded successfully.", - "auto_approved": True, - } - ) - - # Submit for approval for regular users - submission.save() - return JsonResponse( - { - "status": "success", - "message": "Your photo has been submitted for approval.", - "auto_approved": False, - } - ) - - -class ModeratorRequiredMixin(UserPassesTestMixin): - """Require moderator or higher role for access""" - - request: Optional[HttpRequest] = None - - def test_func(self) -> bool: - if not self.request: - return False - user_role = getattr(self.request.user, "role", None) - return self.request.user.is_authenticated and user_role in [ - "MODERATOR", - "ADMIN", - "SUPERUSER", - ] - - def handle_no_permission(self) -> HttpResponse: - if not self.request or not self.request.user.is_authenticated: - return super().handle_no_permission() - return HttpResponseForbidden("You must be a moderator to access this page.") - - -class AdminRequiredMixin(UserPassesTestMixin): - """Require admin or superuser role for access""" - - request: Optional[HttpRequest] = None - - def test_func(self) -> bool: - if not self.request: - return False - user_role = getattr(self.request.user, "role", None) - return self.request.user.is_authenticated and user_role in [ - "ADMIN", - "SUPERUSER", - ] - - def handle_no_permission(self) -> HttpResponse: - if not self.request or not self.request.user.is_authenticated: - return super().handle_no_permission() - return HttpResponseForbidden("You must be an admin to access this page.") - - -class InlineEditMixin: - """Add inline editing context to views""" - - request: Optional[HttpRequest] = None - - def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: - context = super().get_context_data(**kwargs) # type: ignore - if self.request and self.request.user.is_authenticated: - context["can_edit"] = True - user_role = getattr(self.request.user, "role", None) - context["can_auto_approve"] = user_role in [ - "MODERATOR", - "ADMIN", - "SUPERUSER", - ] - - if isinstance(self, DetailView): - obj = self.get_object() # type: ignore - context["pending_edits"] = ( - EditSubmission.objects.filter( - content_type=ContentType.objects.get_for_model(obj.__class__), - object_id=getattr(obj, "id", None), - status="NEW", - ) - .select_related("user") - .order_by("-created_at") - ) - return context - - -class HistoryMixin: - """Add edit history context to views""" - - def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: - context = super().get_context_data(**kwargs) # type: ignore - - # Only add history context for DetailViews - if isinstance(self, DetailView): - obj = self.get_object() # type: ignore - - # Get historical records ordered by date if available - try: - # Use pghistory's get_history method - context["history"] = obj.get_history() - except (AttributeError, TypeError): - context["history"] = [] - - # Get related edit submissions - content_type = ContentType.objects.get_for_model(obj.__class__) - context["edit_submissions"] = ( - EditSubmission.objects.filter( - content_type=content_type, - object_id=getattr(obj, "id", None), - ) - .exclude(status="NEW") - .select_related("user", "handled_by") - .order_by("-created_at") - ) - - return context diff --git a/moderation/models.py b/moderation/models.py deleted file mode 100644 index 28b716f3..00000000 --- a/moderation/models.py +++ /dev/null @@ -1,328 +0,0 @@ -from typing import Any, Dict, Optional, Type, Union -from django.db import models -from django.contrib.contenttypes.fields import GenericForeignKey -from django.contrib.contenttypes.models import ContentType -from django.conf import settings -from django.utils import timezone -from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist -from django.contrib.auth.base_user import AbstractBaseUser -from django.contrib.auth.models import AnonymousUser -import pghistory -from core.history import TrackedModel - -UserType = Union[AbstractBaseUser, AnonymousUser] - - -@pghistory.track() # Track all changes by default -class EditSubmission(TrackedModel): - STATUS_CHOICES = [ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ] - - SUBMISSION_TYPE_CHOICES = [ - ("EDIT", "Edit Existing"), - ("CREATE", "Create New"), - ] - - # Who submitted the edit - user = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, - related_name="edit_submissions", - ) - - # What is being edited (Park or Ride) - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField( - null=True, blank=True - ) # Null for new objects - content_object = GenericForeignKey("content_type", "object_id") - - # Type of submission - submission_type = models.CharField( - max_length=10, choices=SUBMISSION_TYPE_CHOICES, default="EDIT" - ) - - # The actual changes/data - changes = models.JSONField( - help_text="JSON representation of the changes or new object data" - ) - - # Moderator's edited version of changes before approval - moderator_changes = models.JSONField( - null=True, - blank=True, - help_text="Moderator's edited version of the changes before approval", - ) - - # Metadata - reason = models.TextField(help_text="Why this edit/addition is needed") - source = models.TextField( - blank=True, help_text="Source of information (if applicable)" - ) - status = models.CharField(max_length=20, choices=STATUS_CHOICES, default="PENDING") - created_at = models.DateTimeField(auto_now_add=True) - - # Review details - handled_by = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.SET_NULL, - null=True, - blank=True, - related_name="handled_submissions", - ) - handled_at = models.DateTimeField(null=True, blank=True) - notes = models.TextField( - blank=True, help_text="Notes from the moderator about this submission" - ) - - class Meta: - ordering = ["-created_at"] - indexes = [ - models.Index(fields=["content_type", "object_id"]), - models.Index(fields=["status"]), - ] - - def __str__(self) -> str: - action = "creation" if self.submission_type == "CREATE" else "edit" - if model_class := self.content_type.model_class(): - target = self.content_object or model_class.__name__ - else: - target = "Unknown" - return f"{action} by {self.user.username} on {target}" - - def _resolve_foreign_keys(self, data: Dict[str, Any]) -> Dict[str, Any]: - """Convert foreign key IDs to model instances""" - if not (model_class := self.content_type.model_class()): - raise ValueError("Could not resolve model class") - - resolved_data = data.copy() - - for field_name, value in data.items(): - try: - if ( - (field := model_class._meta.get_field(field_name)) - and isinstance(field, models.ForeignKey) - and value is not None - ): - if related_model := field.related_model: - resolved_data[field_name] = related_model.objects.get(id=value) - except (FieldDoesNotExist, ObjectDoesNotExist): - continue - - return resolved_data - - def _prepare_model_data( - self, data: Dict[str, Any], model_class: Type[models.Model] - ) -> Dict[str, Any]: - """Prepare data for model creation/update by filtering out auto-generated fields""" - prepared_data = data.copy() - - # Remove fields that are auto-generated or handled by the model's save - # method - auto_fields = {"created_at", "updated_at", "slug"} - for field in auto_fields: - prepared_data.pop(field, None) - - # Set default values for required fields if not provided - for field in model_class._meta.fields: - if not field.auto_created and not field.blank and not field.null: - if field.name not in prepared_data and field.has_default(): - prepared_data[field.name] = field.get_default() - - return prepared_data - - def _check_duplicate_name( - self, model_class: Type[models.Model], name: str - ) -> Optional[models.Model]: - """Check if an object with the same name already exists""" - try: - return model_class.objects.filter(name=name).first() - except BaseException: - return None - - def approve(self, user: UserType) -> Optional[models.Model]: - """Approve the submission and apply the changes""" - if not (model_class := self.content_type.model_class()): - raise ValueError("Could not resolve model class") - - try: - # Use moderator_changes if available, otherwise use original - # changes - changes_to_apply = ( - self.moderator_changes - if self.moderator_changes is not None - else self.changes - ) - - resolved_data = self._resolve_foreign_keys(changes_to_apply) - prepared_data = self._prepare_model_data(resolved_data, model_class) - - # For CREATE submissions, check for duplicates by name - if self.submission_type == "CREATE" and "name" in prepared_data: - if existing_obj := self._check_duplicate_name( - model_class, prepared_data["name"] - ): - self.status = "REJECTED" - self.handled_by = user # type: ignore - self.handled_at = timezone.now() - self.notes = f"A { - model_class.__name__} with the name '{ - prepared_data['name']}' already exists (ID: { - existing_obj.id})" - self.save() - raise ValueError(self.notes) - - self.status = "APPROVED" - self.handled_by = user # type: ignore - self.handled_at = timezone.now() - - if self.submission_type == "CREATE": - # Create new object - obj = model_class(**prepared_data) - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - obj.full_clean() - obj.save() - # Update object_id after creation - self.object_id = getattr(obj, "id", None) - else: - # Apply changes to existing object - if not (obj := self.content_object): - raise ValueError("Content object not found") - for field, value in prepared_data.items(): - setattr(obj, field, value) - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - obj.full_clean() - obj.save() - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - self.full_clean() - self.save() - return obj - except Exception as e: - if ( - self.status != "REJECTED" - ): # Don't override if already rejected due to duplicate - self.status = "PENDING" # Reset status if approval failed - self.save() - raise ValueError(f"Error approving submission: {str(e)}") from e - - def reject(self, user: UserType) -> None: - """Reject the submission""" - self.status = "REJECTED" - self.handled_by = user # type: ignore - self.handled_at = timezone.now() - self.save() - - def escalate(self, user: UserType) -> None: - """Escalate the submission to admin""" - self.status = "ESCALATED" - self.handled_by = user # type: ignore - self.handled_at = timezone.now() - self.save() - - -@pghistory.track() # Track all changes by default -class PhotoSubmission(TrackedModel): - STATUS_CHOICES = [ - ("PENDING", "Pending"), - ("APPROVED", "Approved"), - ("REJECTED", "Rejected"), - ("ESCALATED", "Escalated"), - ] - - # Who submitted the photo - user = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, - related_name="photo_submissions", - ) - - # What the photo is for (Park or Ride) - content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE) - object_id = models.PositiveIntegerField() - content_object = GenericForeignKey("content_type", "object_id") - - # The photo itself - photo = models.ImageField(upload_to="submissions/photos/") - caption = models.CharField(max_length=255, blank=True) - date_taken = models.DateField(null=True, blank=True) - - # Metadata - status = models.CharField(max_length=20, choices=STATUS_CHOICES, default="PENDING") - created_at = models.DateTimeField(auto_now_add=True) - - # Review details - handled_by = models.ForeignKey( - settings.AUTH_USER_MODEL, - on_delete=models.SET_NULL, - null=True, - blank=True, - related_name="handled_photos", - ) - handled_at = models.DateTimeField(null=True, blank=True) - notes = models.TextField( - blank=True, - help_text="Notes from the moderator about this photo submission", - ) - - class Meta: - ordering = ["-created_at"] - indexes = [ - models.Index(fields=["content_type", "object_id"]), - models.Index(fields=["status"]), - ] - - def __str__(self) -> str: - return f"Photo submission by { - self.user.username} for { - self.content_object}" - - def approve(self, moderator: UserType, notes: str = "") -> None: - """Approve the photo submission""" - from media.models import Photo - - self.status = "APPROVED" - self.handled_by = moderator # type: ignore - self.handled_at = timezone.now() - self.notes = notes - - # Create the approved photo - Photo.objects.create( - uploaded_by=self.user, - content_type=self.content_type, - object_id=self.object_id, - image=self.photo, - caption=self.caption, - is_approved=True, - ) - - self.save() - - def reject(self, moderator: UserType, notes: str) -> None: - """Reject the photo submission""" - self.status = "REJECTED" - self.handled_by = moderator # type: ignore - self.handled_at = timezone.now() - self.notes = notes - self.save() - - def auto_approve(self) -> None: - """Auto-approve submissions from moderators""" - # Get user role safely - user_role = getattr(self.user, "role", None) - - # If user is moderator or above, auto-approve - if user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]: - self.approve(self.user) - - def escalate(self, moderator: UserType, notes: str = "") -> None: - """Escalate the photo submission to admin""" - self.status = "ESCALATED" - self.handled_by = moderator # type: ignore - self.handled_at = timezone.now() - self.notes = notes - self.save() diff --git a/moderation/selectors.py b/moderation/selectors.py deleted file mode 100644 index 9524ab1f..00000000 --- a/moderation/selectors.py +++ /dev/null @@ -1,278 +0,0 @@ -""" -Selectors for moderation-related data retrieval. -Following Django styleguide pattern for separating data access from business logic. -""" - -from typing import Optional, Dict, Any -from django.db.models import QuerySet, Count -from django.utils import timezone -from datetime import timedelta -from django.contrib.auth.models import User - -from .models import EditSubmission - - -def pending_submissions_for_review( - *, content_type: Optional[str] = None, limit: int = 50 -) -> QuerySet[EditSubmission]: - """ - Get pending submissions that need moderation review. - - Args: - content_type: Optional filter by content type name - limit: Maximum number of submissions to return - - Returns: - QuerySet of pending submissions ordered by submission date - """ - queryset = ( - EditSubmission.objects.filter(status="PENDING") - .select_related("submitted_by", "content_type") - .prefetch_related("content_object") - ) - - if content_type: - queryset = queryset.filter(content_type__model=content_type.lower()) - - return queryset.order_by("submitted_at")[:limit] - - -def submissions_by_user( - *, user_id: int, status: Optional[str] = None -) -> QuerySet[EditSubmission]: - """ - Get submissions created by a specific user. - - Args: - user_id: ID of the user who submitted - status: Optional filter by submission status - - Returns: - QuerySet of user's submissions - """ - queryset = EditSubmission.objects.filter(submitted_by_id=user_id).select_related( - "content_type", "handled_by" - ) - - if status: - queryset = queryset.filter(status=status) - - return queryset.order_by("-submitted_at") - - -def submissions_handled_by_moderator( - *, moderator_id: int, days: int = 30 -) -> QuerySet[EditSubmission]: - """ - Get submissions handled by a specific moderator in the last N days. - - Args: - moderator_id: ID of the moderator - days: Number of days to look back - - Returns: - QuerySet of submissions handled by the moderator - """ - cutoff_date = timezone.now() - timedelta(days=days) - - return ( - EditSubmission.objects.filter( - handled_by_id=moderator_id, handled_at__gte=cutoff_date - ) - .select_related("submitted_by", "content_type") - .order_by("-handled_at") - ) - - -def recent_submissions(*, days: int = 7) -> QuerySet[EditSubmission]: - """ - Get recent submissions from the last N days. - - Args: - days: Number of days to look back - - Returns: - QuerySet of recent submissions - """ - cutoff_date = timezone.now() - timedelta(days=days) - - return ( - EditSubmission.objects.filter(submitted_at__gte=cutoff_date) - .select_related("submitted_by", "content_type", "handled_by") - .order_by("-submitted_at") - ) - - -def submissions_by_content_type( - *, content_type: str, status: Optional[str] = None -) -> QuerySet[EditSubmission]: - """ - Get submissions for a specific content type. - - Args: - content_type: Name of the content type (e.g., 'park', 'ride') - status: Optional filter by submission status - - Returns: - QuerySet of submissions for the content type - """ - queryset = EditSubmission.objects.filter( - content_type__model=content_type.lower() - ).select_related("submitted_by", "handled_by") - - if status: - queryset = queryset.filter(status=status) - - return queryset.order_by("-submitted_at") - - -def moderation_queue_summary() -> Dict[str, Any]: - """ - Get summary statistics for the moderation queue. - - Returns: - Dictionary containing queue statistics - """ - pending_count = EditSubmission.objects.filter(status="PENDING").count() - approved_today = EditSubmission.objects.filter( - status="APPROVED", handled_at__date=timezone.now().date() - ).count() - rejected_today = EditSubmission.objects.filter( - status="REJECTED", handled_at__date=timezone.now().date() - ).count() - - # Submissions by content type - submissions_by_type = ( - EditSubmission.objects.filter(status="PENDING") - .values("content_type__model") - .annotate(count=Count("id")) - .order_by("-count") - ) - - return { - "pending_count": pending_count, - "approved_today": approved_today, - "rejected_today": rejected_today, - "submissions_by_type": list(submissions_by_type), - } - - -def moderation_statistics_summary( - *, days: int = 30, moderator: Optional[User] = None -) -> Dict[str, Any]: - """ - Get comprehensive moderation statistics for a time period. - - Args: - days: Number of days to analyze - moderator: Optional filter by specific moderator - - Returns: - Dictionary containing detailed moderation statistics - """ - cutoff_date = timezone.now() - timedelta(days=days) - - base_queryset = EditSubmission.objects.filter(submitted_at__gte=cutoff_date) - - if moderator: - handled_queryset = base_queryset.filter(handled_by=moderator) - else: - handled_queryset = base_queryset - - total_submissions = base_queryset.count() - pending_submissions = base_queryset.filter(status="PENDING").count() - approved_submissions = handled_queryset.filter(status="APPROVED").count() - rejected_submissions = handled_queryset.filter(status="REJECTED").count() - - # Response time analysis (only for handled submissions) - handled_with_times = ( - handled_queryset.exclude(handled_at__isnull=True) - .extra( - select={ - "response_hours": "EXTRACT(EPOCH FROM (handled_at - submitted_at)) / 3600" - } - ) - .values_list("response_hours", flat=True) - ) - - avg_response_time = None - if handled_with_times: - avg_response_time = sum(handled_with_times) / len(handled_with_times) - - return { - "period_days": days, - "total_submissions": total_submissions, - "pending_submissions": pending_submissions, - "approved_submissions": approved_submissions, - "rejected_submissions": rejected_submissions, - "approval_rate": ( - (approved_submissions / (approved_submissions + rejected_submissions) * 100) - if (approved_submissions + rejected_submissions) > 0 - else 0 - ), - "average_response_time_hours": avg_response_time, - "moderator": moderator.username if moderator else None, - } - - -def submissions_needing_attention(*, hours: int = 24) -> QuerySet[EditSubmission]: - """ - Get pending submissions that have been waiting for more than N hours. - - Args: - hours: Number of hours threshold for attention - - Returns: - QuerySet of submissions needing attention - """ - cutoff_time = timezone.now() - timedelta(hours=hours) - - return ( - EditSubmission.objects.filter(status="PENDING", submitted_at__lte=cutoff_time) - .select_related("submitted_by", "content_type") - .order_by("submitted_at") - ) - - -def top_contributors(*, days: int = 30, limit: int = 10) -> QuerySet[User]: - """ - Get users who have submitted the most content in the last N days. - - Args: - days: Number of days to analyze - limit: Maximum number of users to return - - Returns: - QuerySet of top contributing users - """ - cutoff_date = timezone.now() - timedelta(days=days) - - return ( - User.objects.filter(edit_submissions__submitted_at__gte=cutoff_date) - .annotate(submission_count=Count("edit_submissions")) - .filter(submission_count__gt=0) - .order_by("-submission_count")[:limit] - ) - - -def moderator_workload_summary(*, days: int = 30) -> Dict[str, Any]: - """ - Get workload distribution among moderators. - - Args: - days: Number of days to analyze - - Returns: - Dictionary containing moderator workload statistics - """ - cutoff_date = timezone.now() - timedelta(days=days) - - moderator_stats = ( - User.objects.filter(handled_submissions__handled_at__gte=cutoff_date) - .annotate(handled_count=Count("handled_submissions")) - .filter(handled_count__gt=0) - .order_by("-handled_count") - .values("username", "handled_count") - ) - - return {"period_days": days, "moderator_stats": list(moderator_stats)} diff --git a/moderation/services.py b/moderation/services.py deleted file mode 100644 index b79a3323..00000000 --- a/moderation/services.py +++ /dev/null @@ -1,230 +0,0 @@ -""" -Services for moderation functionality. -Following Django styleguide pattern for business logic encapsulation. -""" - -from typing import Optional, Dict, Any, Union -from django.db import transaction -from django.utils import timezone -from django.contrib.auth.models import User -from django.db.models import QuerySet - -from .models import EditSubmission - - -class ModerationService: - """Service for handling content moderation workflows.""" - - @staticmethod - def approve_submission( - *, submission_id: int, moderator: User, notes: Optional[str] = None - ) -> Union[object, None]: - """ - Approve a content submission and apply changes. - - Args: - submission_id: ID of the submission to approve - moderator: User performing the approval - notes: Optional notes about the approval - - Returns: - The created/updated object or None if approval failed - - Raises: - EditSubmission.DoesNotExist: If submission doesn't exist - ValidationError: If submission data is invalid - ValueError: If submission cannot be processed - """ - with transaction.atomic(): - submission = EditSubmission.objects.select_for_update().get( - id=submission_id - ) - - if submission.status != "PENDING": - raise ValueError(f"Submission {submission_id} is not pending approval") - - try: - # Call the model's approve method which handles the business - # logic - obj = submission.approve(moderator) - - # Add moderator notes if provided - if notes: - if submission.notes: - submission.notes += f"\n[Moderator]: {notes}" - else: - submission.notes = f"[Moderator]: {notes}" - submission.save() - - return obj - - except Exception as e: - # Mark as rejected on any error - submission.status = "REJECTED" - submission.handled_by = moderator - submission.handled_at = timezone.now() - submission.notes = f"Approval failed: {str(e)}" - submission.save() - raise - - @staticmethod - def reject_submission( - *, submission_id: int, moderator: User, reason: str - ) -> EditSubmission: - """ - Reject a content submission. - - Args: - submission_id: ID of the submission to reject - moderator: User performing the rejection - reason: Reason for rejection - - Returns: - Updated submission object - - Raises: - EditSubmission.DoesNotExist: If submission doesn't exist - ValueError: If submission cannot be rejected - """ - with transaction.atomic(): - submission = EditSubmission.objects.select_for_update().get( - id=submission_id - ) - - if submission.status != "PENDING": - raise ValueError(f"Submission {submission_id} is not pending review") - - submission.status = "REJECTED" - submission.handled_by = moderator - submission.handled_at = timezone.now() - submission.notes = f"Rejected: {reason}" - - # Call full_clean before saving - CRITICAL STYLEGUIDE FIX - submission.full_clean() - submission.save() - - return submission - - @staticmethod - def create_edit_submission( - *, - content_object: object, - changes: Dict[str, Any], - submitter: User, - submission_type: str = "UPDATE", - notes: Optional[str] = None, - ) -> EditSubmission: - """ - Create a new edit submission for moderation. - - Args: - content_object: The object being edited - changes: Dictionary of field changes - submitter: User submitting the changes - submission_type: Type of submission ("CREATE" or "UPDATE") - notes: Optional notes about the submission - - Returns: - Created EditSubmission object - - Raises: - ValidationError: If submission data is invalid - """ - submission = EditSubmission( - content_object=content_object, - changes=changes, - submitted_by=submitter, - submission_type=submission_type, - notes=notes or "", - ) - - # Call full_clean before saving - CRITICAL STYLEGUIDE FIX - submission.full_clean() - submission.save() - - return submission - - @staticmethod - def update_submission_changes( - *, - submission_id: int, - moderator_changes: Dict[str, Any], - moderator: User, - ) -> EditSubmission: - """ - Update submission with moderator changes before approval. - - Args: - submission_id: ID of the submission to update - moderator_changes: Dictionary of moderator modifications - moderator: User making the changes - - Returns: - Updated submission object - - Raises: - EditSubmission.DoesNotExist: If submission doesn't exist - ValueError: If submission cannot be modified - """ - with transaction.atomic(): - submission = EditSubmission.objects.select_for_update().get( - id=submission_id - ) - - if submission.status != "PENDING": - raise ValueError(f"Submission {submission_id} is not pending review") - - submission.moderator_changes = moderator_changes - - # Add note about moderator changes - note = f"[Moderator changes by {moderator.username}]" - if submission.notes: - submission.notes += f"\n{note}" - else: - submission.notes = note - - # Call full_clean before saving - CRITICAL STYLEGUIDE FIX - submission.full_clean() - submission.save() - - return submission - - @staticmethod - def get_pending_submissions_for_moderator( - *, - moderator: User, - content_type: Optional[str] = None, - limit: Optional[int] = None, - ) -> QuerySet: - """ - Get pending submissions for a moderator to review. - - Args: - moderator: The moderator user - content_type: Optional filter by content type - limit: Maximum number of submissions to return - - Returns: - QuerySet of pending submissions - """ - from .selectors import pending_submissions_for_review - - return pending_submissions_for_review(content_type=content_type, limit=limit) - - @staticmethod - def get_submission_statistics( - *, days: int = 30, moderator: Optional[User] = None - ) -> Dict[str, Any]: - """ - Get moderation statistics for a time period. - - Args: - days: Number of days to analyze - moderator: Optional filter by specific moderator - - Returns: - Dictionary containing moderation statistics - """ - from .selectors import moderation_statistics_summary - - return moderation_statistics_summary(days=days, moderator=moderator) diff --git a/moderation/templatetags/moderation_tags.py b/moderation/templatetags/moderation_tags.py deleted file mode 100644 index e66a9f5a..00000000 --- a/moderation/templatetags/moderation_tags.py +++ /dev/null @@ -1,69 +0,0 @@ -from django import template -from django.contrib.contenttypes.models import ContentType -from typing import Optional, Dict, Any, List, Union - -register = template.Library() - - -@register.filter -def get_object_name(value: Optional[int], model_path: str) -> Optional[str]: - """Get object name from ID and model path.""" - if not value or not model_path or "." not in model_path: - return None - - app_label, model = model_path.split(".") - try: - content_type = ContentType.objects.get( - app_label=app_label.lower(), model=model.lower() - ) - model_class = content_type.model_class() - if not model_class: - return None - - obj = model_class.objects.filter(id=value).first() - return str(obj) if obj else None - except Exception: - return None - - -@register.filter -def get_category_display(value: Optional[str]) -> Optional[str]: - """Get display value for ride category.""" - if not value: - return None - - categories = { - "RC": "Roller Coaster", - "DR": "Dark Ride", - "FR": "Flat Ride", - "WR": "Water Ride", - "TR": "Transport", - "OT": "Other", - } - return categories.get(value) - - -@register.filter -def get_park_area_name(value: Optional[int], park_id: Optional[int]) -> Optional[str]: - """Get park area name from ID and park ID.""" - if not value or not park_id: - return None - - try: - from parks.models import ParkArea - - area = ParkArea.objects.filter(id=value, park_id=park_id).first() - return str(area) if area else None - except Exception: - return None - - -@register.filter -def get_item( - dictionary: Optional[Dict[str, Any]], key: Optional[Union[str, int]] -) -> List[Any]: - """Get item from dictionary by key.""" - if not dictionary or not isinstance(dictionary, dict) or not key: - return [] - - return dictionary.get(str(key), []) diff --git a/moderation/tests.py b/moderation/tests.py deleted file mode 100644 index fb5c8c3b..00000000 --- a/moderation/tests.py +++ /dev/null @@ -1,349 +0,0 @@ -from django.test import TestCase, Client -from django.contrib.auth import get_user_model -from django.contrib.auth.models import AnonymousUser -from django.contrib.contenttypes.models import ContentType -from django.core.files.uploadedfile import SimpleUploadedFile -from django.http import JsonResponse, HttpRequest -from .models import EditSubmission -from .mixins import ( - EditSubmissionMixin, - PhotoSubmissionMixin, - ModeratorRequiredMixin, - AdminRequiredMixin, - InlineEditMixin, - HistoryMixin, -) -from parks.models import Company as Operator -from django.views.generic import DetailView -from django.test import RequestFactory -import json - -User = get_user_model() - - -class TestView( - EditSubmissionMixin, - PhotoSubmissionMixin, - InlineEditMixin, - HistoryMixin, - DetailView, -): - model = Operator - template_name = "test.html" - pk_url_kwarg = "pk" - slug_url_kwarg = "slug" - - def get_context_data(self, **kwargs): - if not hasattr(self, "object"): - self.object = self.get_object() - return super().get_context_data(**kwargs) - - def setup(self, request: HttpRequest, *args, **kwargs): - super().setup(request, *args, **kwargs) - self.request = request - - -class ModerationMixinsTests(TestCase): - def setUp(self): - self.client = Client() - self.factory = RequestFactory() - - # Create users with different roles - self.user = User.objects.create_user( - username="testuser", - email="test@example.com", - password="testpass123", - ) - self.moderator = User.objects.create_user( - username="moderator", - email="moderator@example.com", - password="modpass123", - role="MODERATOR", - ) - self.admin = User.objects.create_user( - username="admin", - email="admin@example.com", - password="adminpass123", - role="ADMIN", - ) - - # Create test company - self.operator = Operator.objects.create( - name="Test Operator", - website="http://example.com", - description="Test Description", - ) - - def test_edit_submission_mixin_unauthenticated(self): - """Test edit submission when not logged in""" - view = TestView() - request = self.factory.post(f"/test/{self.operator.pk}/") - request.user = AnonymousUser() - view.setup(request, pk=self.operator.pk) - view.kwargs = {"pk": self.operator.pk} - response = view.handle_edit_submission(request, {}) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 403) - - def test_edit_submission_mixin_no_changes(self): - """Test edit submission with no changes""" - view = TestView() - request = self.factory.post( - f"/test/{self.operator.pk}/", - data=json.dumps({}), - content_type="application/json", - ) - request.user = self.user - view.setup(request, pk=self.operator.pk) - view.kwargs = {"pk": self.operator.pk} - response = view.post(request) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 400) - - def test_edit_submission_mixin_invalid_json(self): - """Test edit submission with invalid JSON""" - view = TestView() - request = self.factory.post( - f"/test/{self.operator.pk}/", - data="invalid json", - content_type="application/json", - ) - request.user = self.user - view.setup(request, pk=self.operator.pk) - view.kwargs = {"pk": self.operator.pk} - response = view.post(request) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 400) - - def test_edit_submission_mixin_regular_user(self): - """Test edit submission as regular user""" - view = TestView() - request = self.factory.post(f"/test/{self.operator.pk}/") - request.user = self.user - view.setup(request, pk=self.operator.pk) - view.kwargs = {"pk": self.operator.pk} - changes = {"name": "New Name"} - response = view.handle_edit_submission( - request, changes, "Test reason", "Test source" - ) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 200) - data = json.loads(response.content.decode()) - self.assertFalse(data["auto_approved"]) - - def test_edit_submission_mixin_moderator(self): - """Test edit submission as moderator""" - view = TestView() - request = self.factory.post(f"/test/{self.operator.pk}/") - request.user = self.moderator - view.setup(request, pk=self.operator.pk) - view.kwargs = {"pk": self.operator.pk} - changes = {"name": "New Name"} - response = view.handle_edit_submission( - request, changes, "Test reason", "Test source" - ) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 200) - data = json.loads(response.content.decode()) - self.assertTrue(data["auto_approved"]) - - def test_photo_submission_mixin_unauthenticated(self): - """Test photo submission when not logged in""" - view = TestView() - view.kwargs = {"pk": self.operator.pk} - view.object = self.operator - - request = self.factory.post( - f"/test/{self.operator.pk}/", data={}, format="multipart" - ) - request.user = AnonymousUser() - view.setup(request, pk=self.operator.pk) - response = view.handle_photo_submission(request) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 403) - - def test_photo_submission_mixin_no_photo(self): - """Test photo submission with no photo""" - view = TestView() - view.kwargs = {"pk": self.operator.pk} - view.object = self.operator - - request = self.factory.post( - f"/test/{self.operator.pk}/", data={}, format="multipart" - ) - request.user = self.user - view.setup(request, pk=self.operator.pk) - response = view.handle_photo_submission(request) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 400) - - def test_photo_submission_mixin_regular_user(self): - """Test photo submission as regular user""" - view = TestView() - view.kwargs = {"pk": self.operator.pk} - view.object = self.operator - - # Create a test photo file - photo = SimpleUploadedFile( - "test.gif", - b"GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;", - content_type="image/gif", - ) - - request = self.factory.post( - f"/test/{self.operator.pk}/", - data={ - "photo": photo, - "caption": "Test Photo", - "date_taken": "2024-01-01", - }, - format="multipart", - ) - request.user = self.user - view.setup(request, pk=self.operator.pk) - - response = view.handle_photo_submission(request) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 200) - data = json.loads(response.content.decode()) - self.assertFalse(data["auto_approved"]) - - def test_photo_submission_mixin_moderator(self): - """Test photo submission as moderator""" - view = TestView() - view.kwargs = {"pk": self.operator.pk} - view.object = self.operator - - # Create a test photo file - photo = SimpleUploadedFile( - "test.gif", - b"GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;", - content_type="image/gif", - ) - - request = self.factory.post( - f"/test/{self.operator.pk}/", - data={ - "photo": photo, - "caption": "Test Photo", - "date_taken": "2024-01-01", - }, - format="multipart", - ) - request.user = self.moderator - view.setup(request, pk=self.operator.pk) - - response = view.handle_photo_submission(request) - self.assertIsInstance(response, JsonResponse) - self.assertEqual(response.status_code, 200) - data = json.loads(response.content.decode()) - self.assertTrue(data["auto_approved"]) - - def test_moderator_required_mixin(self): - """Test moderator required mixin""" - - class TestModeratorView(ModeratorRequiredMixin): - pass - - view = TestModeratorView() - - # Test unauthenticated user - request = self.factory.get("/test/") - request.user = AnonymousUser() - view.request = request - self.assertFalse(view.test_func()) - - # Test regular user - request.user = self.user - view.request = request - self.assertFalse(view.test_func()) - - # Test moderator - request.user = self.moderator - view.request = request - self.assertTrue(view.test_func()) - - # Test admin - request.user = self.admin - view.request = request - self.assertTrue(view.test_func()) - - def test_admin_required_mixin(self): - """Test admin required mixin""" - - class TestAdminView(AdminRequiredMixin): - pass - - view = TestAdminView() - - # Test unauthenticated user - request = self.factory.get("/test/") - request.user = AnonymousUser() - view.request = request - self.assertFalse(view.test_func()) - - # Test regular user - request.user = self.user - view.request = request - self.assertFalse(view.test_func()) - - # Test moderator - request.user = self.moderator - view.request = request - self.assertFalse(view.test_func()) - - # Test admin - request.user = self.admin - view.request = request - self.assertTrue(view.test_func()) - - def test_inline_edit_mixin(self): - """Test inline edit mixin""" - view = TestView() - view.kwargs = {"pk": self.operator.pk} - view.object = self.operator - - # Test unauthenticated user - request = self.factory.get(f"/test/{self.operator.pk}/") - request.user = AnonymousUser() - view.setup(request, pk=self.operator.pk) - context = view.get_context_data() - self.assertNotIn("can_edit", context) - - # Test regular user - request.user = self.user - view.setup(request, pk=self.operator.pk) - context = view.get_context_data() - self.assertTrue(context["can_edit"]) - self.assertFalse(context["can_auto_approve"]) - - # Test moderator - request.user = self.moderator - view.setup(request, pk=self.operator.pk) - context = view.get_context_data() - self.assertTrue(context["can_edit"]) - self.assertTrue(context["can_auto_approve"]) - - def test_history_mixin(self): - """Test history mixin""" - view = TestView() - view.kwargs = {"pk": self.operator.pk} - view.object = self.operator - request = self.factory.get(f"/test/{self.operator.pk}/") - request.user = self.user - view.setup(request, pk=self.operator.pk) - - # Create some edit submissions - EditSubmission.objects.create( - user=self.user, - content_type=ContentType.objects.get_for_model(Operator), - object_id=getattr(self.operator, "id", None), - submission_type="EDIT", - changes={"name": "New Name"}, - status="APPROVED", - ) - - context = view.get_context_data() - self.assertIn("history", context) - self.assertIn("edit_submissions", context) - self.assertEqual(len(context["edit_submissions"]), 1) diff --git a/moderation/urls.py b/moderation/urls.py deleted file mode 100644 index 024bd736..00000000 --- a/moderation/urls.py +++ /dev/null @@ -1,58 +0,0 @@ -from django.urls import path -from django.shortcuts import redirect -from django.urls import reverse_lazy -from . import views - -app_name = "moderation" - - -def redirect_to_dashboard(request): - return redirect(reverse_lazy("moderation:dashboard")) - - -urlpatterns = [ - # Root URL redirects to dashboard - path("", redirect_to_dashboard), - # Dashboard and Submissions - path("dashboard/", views.DashboardView.as_view(), name="dashboard"), - path("submissions/", views.submission_list, name="submission_list"), - # Search endpoints - path("search/parks/", views.search_parks, name="search_parks"), - path( - "search/ride-models/", - views.search_ride_models, - name="search_ride_models", - ), - # Submission Actions - path( - "submissions//edit/", - views.edit_submission, - name="edit_submission", - ), - path( - "submissions//approve/", - views.approve_submission, - name="approve_submission", - ), - path( - "submissions//reject/", - views.reject_submission, - name="reject_submission", - ), - path( - "submissions//escalate/", - views.escalate_submission, - name="escalate_submission", - ), - # Photo Submissions - path( - "photos//approve/", - views.approve_photo, - name="approve_photo", - ), - path( - "photos//reject/", - views.reject_photo, - name="reject_photo", - ), -] diff --git a/moderation/views.py b/moderation/views.py deleted file mode 100644 index 8c1180fc..00000000 --- a/moderation/views.py +++ /dev/null @@ -1,429 +0,0 @@ -from django.views.generic import ListView -from django.shortcuts import get_object_or_404, render -from django.http import HttpResponse, HttpRequest -from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin -from django.contrib.auth.decorators import login_required -from django.db.models import QuerySet -from django.core.exceptions import PermissionDenied -from typing import Optional, Any, Dict, List, Tuple, cast -from django.core.serializers.json import DjangoJSONEncoder -import json -from accounts.models import User - -from .models import EditSubmission, PhotoSubmission -from parks.models import Park, ParkArea -from rides.models import RideModel - -MODERATOR_ROLES = ["MODERATOR", "ADMIN", "SUPERUSER"] - - -class ModeratorRequiredMixin(UserPassesTestMixin): - request: HttpRequest - - def test_func(self) -> bool: - """Check if user has moderator permissions.""" - user = cast(User, self.request.user) - return user.is_authenticated and ( - user.role in MODERATOR_ROLES or user.is_superuser - ) - - def handle_no_permission(self) -> HttpResponse: - if not self.request.user.is_authenticated: - return super().handle_no_permission() - raise PermissionDenied("You do not have moderator permissions.") - - -def get_filtered_queryset( - request: HttpRequest, status: str, submission_type: str -) -> QuerySet: - """Get filtered queryset based on request parameters.""" - if submission_type == "photo": - return PhotoSubmission.objects.filter(status=status).order_by("-created_at") - - queryset = EditSubmission.objects.filter(status=status).order_by("-created_at") - - if type_filter := request.GET.get("type"): - queryset = queryset.filter(submission_type=type_filter) - - if content_type := request.GET.get("content_type"): - queryset = queryset.filter(content_type__model=content_type) - - return queryset - - -def get_context_data(request: HttpRequest, queryset: QuerySet) -> Dict[str, Any]: - """Get common context data for views.""" - park_areas_by_park: Dict[int, List[Tuple[int, str]]] = {} - - if isinstance(queryset.first(), EditSubmission): - for submission in queryset: - if ( - submission.content_type.model == "park" - and isinstance(submission.changes, dict) - and "park" in submission.changes - ): - park_id = submission.changes["park"] - if park_id not in park_areas_by_park: - areas = ParkArea.objects.filter(park_id=park_id) - park_areas_by_park[park_id] = [ - (area.pk, str(area)) for area in areas - ] - - return { - "submissions": queryset, - "user": request.user, - "parks": [(park.pk, str(park)) for park in Park.objects.all()], - "ride_models": [(model.pk, str(model)) for model in RideModel.objects.all()], - "owners": [ - (user.pk, str(user)) - for user in User.objects.filter(role__in=["OWNER", "ADMIN", "SUPERUSER"]) - ], - "park_areas_by_park": park_areas_by_park, - } - - -@login_required -def search_parks(request: HttpRequest) -> HttpResponse: - """HTMX endpoint for searching parks in moderation dashboard""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - query = request.GET.get("q", "").strip() - submission_id = request.GET.get("submission_id") - - parks = Park.objects.all().order_by("name") - if query: - parks = parks.filter(name__icontains=query) - parks = parks[:10] - - return render( - request, - "moderation/partials/park_search_results.html", - {"parks": parks, "search_term": query, "submission_id": submission_id}, - ) - - -@login_required -def search_ride_models(request: HttpRequest) -> HttpResponse: - """HTMX endpoint for searching ride models in moderation dashboard""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - query = request.GET.get("q", "").strip() - submission_id = request.GET.get("submission_id") - manufacturer_id = request.GET.get("manufacturer") - - queryset = RideModel.objects.all() - if manufacturer_id: - queryset = queryset.filter(manufacturer_id=manufacturer_id) - if query: - queryset = queryset.filter(name__icontains=query) - queryset = queryset.order_by("name")[:10] - - return render( - request, - "moderation/partials/ride_model_search_results.html", - { - "ride_models": queryset, - "search_term": query, - "submission_id": submission_id, - }, - ) - - -class DashboardView(LoginRequiredMixin, ModeratorRequiredMixin, ListView): - template_name = "moderation/dashboard.html" - context_object_name = "submissions" - paginate_by = 10 - - def get_template_names(self) -> List[str]: - if self.request.headers.get("HX-Request"): - return ["moderation/partials/dashboard_content.html"] - return [self.template_name] - - def get_queryset(self) -> QuerySet: - status = self.request.GET.get("status", "PENDING") - submission_type = self.request.GET.get("submission_type", "") - return get_filtered_queryset(self.request, status, submission_type) - - -@login_required -def submission_list(request: HttpRequest) -> HttpResponse: - """View for submission list with filters""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - status = request.GET.get("status", "PENDING") - submission_type = request.GET.get("submission_type", "") - - queryset = get_filtered_queryset(request, status, submission_type) - - # Process location data for park submissions - for submission in queryset: - if submission.content_type.model == "park" and isinstance( - submission.changes, dict - ): - # Extract location fields into a location object - location_fields = [ - "latitude", - "longitude", - "street_address", - "city", - "state", - "postal_code", - "country", - ] - location_data = { - field: submission.changes.get(field) for field in location_fields - } - # Add location data back as a single object - submission.changes["location"] = location_data - - context = get_context_data(request, queryset) - - template_name = ( - "moderation/partials/dashboard_content.html" - if request.headers.get("HX-Request") - else "moderation/dashboard.html" - ) - - return render(request, template_name, context) - - -@login_required -def edit_submission(request: HttpRequest, submission_id: int) -> HttpResponse: - """HTMX endpoint for editing a submission""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - submission = get_object_or_404(EditSubmission, id=submission_id) - - if request.method != "POST": - return HttpResponse("Invalid request method", status=405) - - notes = request.POST.get("notes") - if not notes: - return HttpResponse("Notes are required when editing a submission", status=400) - - try: - edited_changes = dict(submission.changes) if submission.changes else {} - - # Update stats if present - if "stats" in edited_changes: - edited_stats = {} - for key in edited_changes["stats"]: - if new_value := request.POST.get(f"stats.{key}"): - edited_stats[key] = new_value - edited_changes["stats"] = edited_stats - - # Update location fields if present - if submission.content_type.model == "park": - location_fields = [ - "latitude", - "longitude", - "street_address", - "city", - "state", - "postal_code", - "country", - ] - location_data = {} - for field in location_fields: - if new_value := request.POST.get(field): - if field in ["latitude", "longitude"]: - try: - location_data[field] = float(new_value) - except ValueError: - return HttpResponse( - f"Invalid value for {field}", status=400 - ) - else: - location_data[field] = new_value - if location_data: - edited_changes.update(location_data) - - # Update other fields - for field in edited_changes: - if field == "stats" or field in [ - "latitude", - "longitude", - "street_address", - "city", - "state", - "postal_code", - "country", - ]: - continue - - if new_value := request.POST.get(field): - if field in ["size_acres"]: - try: - edited_changes[field] = float(new_value) - except ValueError: - return HttpResponse(f"Invalid value for {field}", status=400) - else: - edited_changes[field] = new_value - - # Convert to JSON-serializable format - json_changes = json.loads(json.dumps(edited_changes, cls=DjangoJSONEncoder)) - submission.moderator_changes = json_changes - submission.notes = notes - submission.save() - - # Process location data for display - if submission.content_type.model == "park": - location_fields = [ - "latitude", - "longitude", - "street_address", - "city", - "state", - "postal_code", - "country", - ] - location_data = { - field: json_changes.get(field) for field in location_fields - } - # Add location data back as a single object - json_changes["location"] = location_data - submission.changes = json_changes - - context = get_context_data( - request, EditSubmission.objects.filter(id=submission_id) - ) - return render(request, "moderation/partials/submission_list.html", context) - - except Exception as e: - return HttpResponse(str(e), status=400) - - -@login_required -def approve_submission(request: HttpRequest, submission_id: int) -> HttpResponse: - """HTMX endpoint for approving a submission""" - user = cast(User, request.user) - submission = get_object_or_404(EditSubmission, id=submission_id) - - if not ( - (submission.status != "ESCALATED" and user.role in MODERATOR_ROLES) - or user.role in ["ADMIN", "SUPERUSER"] - or user.is_superuser - ): - return HttpResponse("Insufficient permissions", status=403) - - try: - submission.approve(user) - _update_submission_notes(submission, request.POST.get("notes")) - - status = request.GET.get("status", "PENDING") - submission_type = request.GET.get("submission_type", "") - queryset = get_filtered_queryset(request, status, submission_type) - - return render( - request, - "moderation/partials/dashboard_content.html", - { - "submissions": queryset, - "user": request.user, - }, - ) - except ValueError as e: - return HttpResponse(str(e), status=400) - - -@login_required -def reject_submission(request: HttpRequest, submission_id: int) -> HttpResponse: - """HTMX endpoint for rejecting a submission""" - user = cast(User, request.user) - submission = get_object_or_404(EditSubmission, id=submission_id) - - if not ( - (submission.status != "ESCALATED" and user.role in MODERATOR_ROLES) - or user.role in ["ADMIN", "SUPERUSER"] - or user.is_superuser - ): - return HttpResponse("Insufficient permissions", status=403) - - submission.reject(user) - _update_submission_notes(submission, request.POST.get("notes")) - - status = request.GET.get("status", "PENDING") - submission_type = request.GET.get("submission_type", "") - queryset = get_filtered_queryset(request, status, submission_type) - context = get_context_data(request, queryset) - - return render(request, "moderation/partials/submission_list.html", context) - - -@login_required -def escalate_submission(request: HttpRequest, submission_id: int) -> HttpResponse: - """HTMX endpoint for escalating a submission""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - submission = get_object_or_404(EditSubmission, id=submission_id) - if submission.status == "ESCALATED": - return HttpResponse("Submission is already escalated", status=400) - - submission.escalate(user) - _update_submission_notes(submission, request.POST.get("notes")) - - status = request.GET.get("status", "PENDING") - submission_type = request.GET.get("submission_type", "") - queryset = get_filtered_queryset(request, status, submission_type) - - return render( - request, - "moderation/partials/dashboard_content.html", - { - "submissions": queryset, - "user": request.user, - }, - ) - - -@login_required -def approve_photo(request: HttpRequest, submission_id: int) -> HttpResponse: - """HTMX endpoint for approving a photo submission""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - submission = get_object_or_404(PhotoSubmission, id=submission_id) - try: - submission.approve(user, request.POST.get("notes", "")) - return render( - request, - "moderation/partials/photo_submission.html", - {"submission": submission}, - ) - except Exception as e: - return HttpResponse(str(e), status=400) - - -@login_required -def reject_photo(request: HttpRequest, submission_id: int) -> HttpResponse: - """HTMX endpoint for rejecting a photo submission""" - user = cast(User, request.user) - if not (user.role in MODERATOR_ROLES or user.is_superuser): - return HttpResponse(status=403) - - submission = get_object_or_404(PhotoSubmission, id=submission_id) - submission.reject(user, request.POST.get("notes", "")) - - return render( - request, - "moderation/partials/photo_submission.html", - {"submission": submission}, - ) - - -def _update_submission_notes(submission: EditSubmission, notes: Optional[str]) -> None: - """Update submission notes if provided.""" - if notes: - submission.notes = notes - submission.save() diff --git a/parks/__init__.py b/parks/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/parks/admin.py b/parks/admin.py deleted file mode 100644 index 6b7abb4c..00000000 --- a/parks/admin.py +++ /dev/null @@ -1,223 +0,0 @@ -from django.contrib import admin -from django.contrib.gis.admin import GISModelAdmin -from .models import Park, ParkArea, ParkLocation, Company, CompanyHeadquarters - - -class ParkLocationInline(admin.StackedInline): - """Inline admin for ParkLocation""" - - model = ParkLocation - extra = 0 - fields = ( - ("city", "state", "country"), - "street_address", - "postal_code", - "point", - ("highway_exit", "best_arrival_time"), - "parking_notes", - "seasonal_notes", - ("osm_id", "osm_type"), - ) - - -class ParkLocationAdmin(GISModelAdmin): - """Admin for standalone ParkLocation management""" - - list_display = ( - "park", - "city", - "state", - "country", - "latitude", - "longitude", - ) - list_filter = ("country", "state") - search_fields = ( - "park__name", - "city", - "state", - "country", - "street_address", - ) - readonly_fields = ("latitude", "longitude", "coordinates") - fieldsets = ( - ("Park", {"fields": ("park",)}), - ( - "Address", - { - "fields": ( - "street_address", - "city", - "state", - "country", - "postal_code", - ) - }, - ), - ( - "Geographic Coordinates", - { - "fields": ("point", "latitude", "longitude", "coordinates"), - "description": "Set coordinates by clicking on the map or entering latitude/longitude", - }, - ), - ( - "Travel Information", - { - "fields": ( - "highway_exit", - "best_arrival_time", - "parking_notes", - "seasonal_notes", - ), - "classes": ("collapse",), - }, - ), - ( - "OpenStreetMap Integration", - {"fields": ("osm_id", "osm_type"), "classes": ("collapse",)}, - ), - ) - - def latitude(self, obj): - return obj.latitude - - latitude.short_description = "Latitude" - - def longitude(self, obj): - return obj.longitude - - longitude.short_description = "Longitude" - - -class ParkAdmin(admin.ModelAdmin): - list_display = ( - "name", - "formatted_location", - "status", - "operator", - "property_owner", - "created_at", - "updated_at", - ) - list_filter = ("status", "location__country", "location__state") - search_fields = ( - "name", - "description", - "location__city", - "location__state", - "location__country", - ) - readonly_fields = ("created_at", "updated_at") - prepopulated_fields = {"slug": ("name",)} - inlines = [ParkLocationInline] - - def formatted_location(self, obj): - """Display formatted location string""" - return obj.formatted_location - - formatted_location.short_description = "Location" - - -class ParkAreaAdmin(admin.ModelAdmin): - list_display = ("name", "park", "created_at", "updated_at") - list_filter = ("park",) - search_fields = ("name", "description", "park__name") - readonly_fields = ("created_at", "updated_at") - prepopulated_fields = {"slug": ("name",)} - - -class CompanyHeadquartersInline(admin.StackedInline): - """Inline admin for CompanyHeadquarters""" - - model = CompanyHeadquarters - extra = 0 - fields = ( - ("city", "state_province", "country"), - "street_address", - "postal_code", - "mailing_address", - ) - - -class CompanyHeadquartersAdmin(admin.ModelAdmin): - """Admin for standalone CompanyHeadquarters management""" - - list_display = ( - "company", - "location_display", - "city", - "country", - "created_at", - ) - list_filter = ("country", "state_province") - search_fields = ( - "company__name", - "city", - "state_province", - "country", - "street_address", - ) - readonly_fields = ("created_at", "updated_at") - fieldsets = ( - ("Company", {"fields": ("company",)}), - ( - "Address", - { - "fields": ( - "street_address", - "city", - "state_province", - "country", - "postal_code", - ) - }, - ), - ( - "Additional Information", - {"fields": ("mailing_address",), "classes": ("collapse",)}, - ), - ( - "Metadata", - {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}, - ), - ) - - -class CompanyAdmin(admin.ModelAdmin): - """Enhanced Company admin with headquarters inline""" - - list_display = ( - "name", - "roles_display", - "headquarters_location", - "website", - "founded_year", - ) - list_filter = ("roles",) - search_fields = ("name", "description") - readonly_fields = ("created_at", "updated_at") - prepopulated_fields = {"slug": ("name",)} - inlines = [CompanyHeadquartersInline] - - def roles_display(self, obj): - """Display roles as a formatted string""" - return ", ".join(obj.roles) if obj.roles else "No roles" - - roles_display.short_description = "Roles" - - def headquarters_location(self, obj): - """Display headquarters location if available""" - if hasattr(obj, "headquarters"): - return obj.headquarters.location_display - return "No headquarters" - - headquarters_location.short_description = "Headquarters" - - -# Register the models with their admin classes -admin.site.register(Park, ParkAdmin) -admin.site.register(ParkArea, ParkAreaAdmin) -admin.site.register(ParkLocation, ParkLocationAdmin) -admin.site.register(Company, CompanyAdmin) -admin.site.register(CompanyHeadquarters, CompanyHeadquartersAdmin) diff --git a/parks/api/__init__.py b/parks/api/__init__.py deleted file mode 100644 index 2dad4f9f..00000000 --- a/parks/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Parks API module diff --git a/parks/api/serializers.py b/parks/api/serializers.py deleted file mode 100644 index 78856394..00000000 --- a/parks/api/serializers.py +++ /dev/null @@ -1,304 +0,0 @@ -""" -Serializers for Parks API following Django styleguide patterns. -Separates Input and Output serializers for clear boundaries. -""" - -from rest_framework import serializers -from ..models import Park - - -class ParkLocationOutputSerializer(serializers.Serializer): - """Output serializer for park location data.""" - - latitude = serializers.SerializerMethodField() - longitude = serializers.SerializerMethodField() - city = serializers.SerializerMethodField() - state = serializers.SerializerMethodField() - country = serializers.SerializerMethodField() - formatted_address = serializers.SerializerMethodField() - - def get_latitude(self, obj): - if hasattr(obj, "location") and obj.location: - return obj.location.latitude - return None - - def get_longitude(self, obj): - if hasattr(obj, "location") and obj.location: - return obj.location.longitude - return None - - def get_city(self, obj): - if hasattr(obj, "location") and obj.location: - return obj.location.city - return None - - def get_state(self, obj): - if hasattr(obj, "location") and obj.location: - return obj.location.state - return None - - def get_country(self, obj): - if hasattr(obj, "location") and obj.location: - return obj.location.country - return None - - def get_formatted_address(self, obj): - if hasattr(obj, "location") and obj.location: - return obj.location.formatted_address - return "" - - -class CompanyOutputSerializer(serializers.Serializer): - """Output serializer for company data.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - roles = serializers.ListField(child=serializers.CharField()) - - -class ParkAreaOutputSerializer(serializers.Serializer): - """Output serializer for park area data.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - description = serializers.CharField() - - -class ParkListOutputSerializer(serializers.Serializer): - """Output serializer for park list view.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - status = serializers.CharField() - description = serializers.CharField() - - # Statistics - average_rating = serializers.DecimalField( - max_digits=3, decimal_places=2, allow_null=True - ) - coaster_count = serializers.IntegerField(allow_null=True) - ride_count = serializers.IntegerField(allow_null=True) - - # Location (simplified for list view) - location = ParkLocationOutputSerializer(allow_null=True) - - # Operator info - operator = CompanyOutputSerializer() - - # Metadata - created_at = serializers.DateTimeField() - updated_at = serializers.DateTimeField() - - -class ParkDetailOutputSerializer(serializers.Serializer): - """Output serializer for park detail view.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - status = serializers.CharField() - description = serializers.CharField() - - # Details - opening_date = serializers.DateField(allow_null=True) - closing_date = serializers.DateField(allow_null=True) - operating_season = serializers.CharField() - size_acres = serializers.DecimalField( - max_digits=10, decimal_places=2, allow_null=True - ) - website = serializers.URLField() - - # Statistics - average_rating = serializers.DecimalField( - max_digits=3, decimal_places=2, allow_null=True - ) - coaster_count = serializers.IntegerField(allow_null=True) - ride_count = serializers.IntegerField(allow_null=True) - - # Location (full details) - location = ParkLocationOutputSerializer(allow_null=True) - - # Companies - operator = CompanyOutputSerializer() - property_owner = CompanyOutputSerializer(allow_null=True) - - # Areas - areas = ParkAreaOutputSerializer(many=True) - - # Metadata - created_at = serializers.DateTimeField() - updated_at = serializers.DateTimeField() - - -class ParkCreateInputSerializer(serializers.Serializer): - """Input serializer for creating parks.""" - - name = serializers.CharField(max_length=255) - description = serializers.CharField(allow_blank=True, default="") - status = serializers.ChoiceField(choices=Park.STATUS_CHOICES, default="OPERATING") - - # Optional details - opening_date = serializers.DateField(required=False, allow_null=True) - closing_date = serializers.DateField(required=False, allow_null=True) - operating_season = serializers.CharField( - max_length=255, required=False, allow_blank=True - ) - size_acres = serializers.DecimalField( - max_digits=10, decimal_places=2, required=False, allow_null=True - ) - website = serializers.URLField(required=False, allow_blank=True) - - # Required operator - operator_id = serializers.IntegerField() - - # Optional property owner - property_owner_id = serializers.IntegerField(required=False, allow_null=True) - - def validate(self, data): - """Cross-field validation.""" - opening_date = data.get("opening_date") - closing_date = data.get("closing_date") - - if opening_date and closing_date and closing_date < opening_date: - raise serializers.ValidationError( - "Closing date cannot be before opening date" - ) - - return data - - -class ParkUpdateInputSerializer(serializers.Serializer): - """Input serializer for updating parks.""" - - name = serializers.CharField(max_length=255, required=False) - description = serializers.CharField(allow_blank=True, required=False) - status = serializers.ChoiceField(choices=Park.STATUS_CHOICES, required=False) - - # Optional details - opening_date = serializers.DateField(required=False, allow_null=True) - closing_date = serializers.DateField(required=False, allow_null=True) - operating_season = serializers.CharField( - max_length=255, required=False, allow_blank=True - ) - size_acres = serializers.DecimalField( - max_digits=10, decimal_places=2, required=False, allow_null=True - ) - website = serializers.URLField(required=False, allow_blank=True) - - # Companies - operator_id = serializers.IntegerField(required=False) - property_owner_id = serializers.IntegerField(required=False, allow_null=True) - - def validate(self, data): - """Cross-field validation.""" - opening_date = data.get("opening_date") - closing_date = data.get("closing_date") - - if opening_date and closing_date and closing_date < opening_date: - raise serializers.ValidationError( - "Closing date cannot be before opening date" - ) - - return data - - -class ParkFilterInputSerializer(serializers.Serializer): - """Input serializer for park filtering and search.""" - - # Search - search = serializers.CharField(required=False, allow_blank=True) - - # Status filter - status = serializers.MultipleChoiceField( - choices=Park.STATUS_CHOICES, required=False - ) - - # Location filters - country = serializers.CharField(required=False, allow_blank=True) - state = serializers.CharField(required=False, allow_blank=True) - city = serializers.CharField(required=False, allow_blank=True) - - # Rating filter - min_rating = serializers.DecimalField( - max_digits=3, - decimal_places=2, - required=False, - min_value=1, - max_value=10, - ) - - # Size filter - min_size_acres = serializers.DecimalField( - max_digits=10, decimal_places=2, required=False, min_value=0 - ) - max_size_acres = serializers.DecimalField( - max_digits=10, decimal_places=2, required=False, min_value=0 - ) - - # Company filters - operator_id = serializers.IntegerField(required=False) - property_owner_id = serializers.IntegerField(required=False) - - # Ordering - ordering = serializers.ChoiceField( - choices=[ - "name", - "-name", - "opening_date", - "-opening_date", - "average_rating", - "-average_rating", - "coaster_count", - "-coaster_count", - "created_at", - "-created_at", - ], - required=False, - default="name", - ) - - -class ParkReviewOutputSerializer(serializers.Serializer): - """Output serializer for park reviews.""" - - id = serializers.IntegerField() - rating = serializers.IntegerField() - title = serializers.CharField() - content = serializers.CharField() - visit_date = serializers.DateField() - created_at = serializers.DateTimeField() - - # User info (limited for privacy) - user = serializers.SerializerMethodField() - - def get_user(self, obj): - return { - "username": obj.user.username, - "display_name": obj.user.get_full_name() or obj.user.username, - } - - -class ParkStatsOutputSerializer(serializers.Serializer): - """Output serializer for park statistics.""" - - total_parks = serializers.IntegerField() - operating_parks = serializers.IntegerField() - closed_parks = serializers.IntegerField() - under_construction = serializers.IntegerField() - - # Averages - average_rating = serializers.DecimalField( - max_digits=3, decimal_places=2, allow_null=True - ) - average_coaster_count = serializers.DecimalField( - max_digits=5, decimal_places=2, allow_null=True - ) - - # Top countries - top_countries = serializers.ListField(child=serializers.DictField()) - - # Recently added - recently_added_count = serializers.IntegerField() diff --git a/parks/api/urls.py b/parks/api/urls.py deleted file mode 100644 index ecfcca65..00000000 --- a/parks/api/urls.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -URL configuration for Parks API following Django styleguide patterns. -""" - -from django.urls import path, include -from rest_framework.routers import DefaultRouter - -from .views import ( - ParkListApi, - ParkDetailApi, - ParkCreateApi, - ParkUpdateApi, - ParkDeleteApi, - ParkApi, -) - -app_name = "parks_api" - -# Option 1: Separate ViewSets for each operation (more explicit) -router_separate = DefaultRouter() -router_separate.register(r"list", ParkListApi, basename="park-list") -router_separate.register(r"detail", ParkDetailApi, basename="park-detail") -router_separate.register(r"create", ParkCreateApi, basename="park-create") -router_separate.register(r"update", ParkUpdateApi, basename="park-update") -router_separate.register(r"delete", ParkDeleteApi, basename="park-delete") - -# Option 2: Unified ViewSet (more conventional DRF) -router_unified = DefaultRouter() -router_unified.register(r"parks", ParkApi, basename="park") - -# Use unified approach for cleaner URLs -urlpatterns = [ - path("v1/", include(router_unified.urls)), -] - -# Alternative manual URL patterns for more control -urlpatterns_manual = [ - # List and create - path( - "v1/parks/", - ParkApi.as_view({"get": "list", "post": "create"}), - name="park-list", - ), - # Stats endpoint - path("v1/parks/stats/", ParkApi.as_view({"get": "stats"}), name="park-stats"), - # Detail operations - path( - "v1/parks//", - ParkApi.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="park-detail", - ), - # Park reviews - path( - "v1/parks//reviews/", - ParkApi.as_view({"get": "reviews"}), - name="park-reviews", - ), -] diff --git a/parks/api/views.py b/parks/api/views.py deleted file mode 100644 index 96b55da5..00000000 --- a/parks/api/views.py +++ /dev/null @@ -1,295 +0,0 @@ -""" -Parks API views following Django styleguide patterns. -Uses ClassNameApi naming convention and proper Input/Output serializers. -""" - -from rest_framework.decorators import action -from rest_framework.request import Request -from rest_framework.response import Response -from rest_framework.viewsets import GenericViewSet -from rest_framework.permissions import ( - IsAuthenticated, - IsAuthenticatedOrReadOnly, -) -from django_filters.rest_framework import DjangoFilterBackend -from rest_framework.filters import SearchFilter, OrderingFilter - -from core.api.mixins import ( - CreateApiMixin, - UpdateApiMixin, - ListApiMixin, - RetrieveApiMixin, - DestroyApiMixin, -) -from ..selectors import ( - park_list_with_stats, - park_detail_optimized, - park_reviews_for_park, - park_statistics, -) -from ..services import ParkService -from .serializers import ( - ParkListOutputSerializer, - ParkDetailOutputSerializer, - ParkCreateInputSerializer, - ParkUpdateInputSerializer, - ParkFilterInputSerializer, - ParkReviewOutputSerializer, - ParkStatsOutputSerializer, -) - - -class ParkListApi(ListApiMixin, GenericViewSet): - """ - API endpoint for listing parks with filtering and search. - - GET /api/v1/parks/ - """ - - permission_classes = [IsAuthenticatedOrReadOnly] - filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] - search_fields = ["name", "description"] - ordering_fields = [ - "name", - "opening_date", - "average_rating", - "coaster_count", - "created_at", - ] - ordering = ["name"] - - OutputSerializer = ParkListOutputSerializer - FilterSerializer = ParkFilterInputSerializer - - def get_queryset(self): - """Use selector to get optimized queryset.""" - # Parse filter parameters - filter_serializer = self.FilterSerializer(data=self.request.query_params) - filter_serializer.is_valid(raise_exception=True) - filters = filter_serializer.validated_data - - return park_list_with_stats(filters=filters) - - @action(detail=False, methods=["get"]) - def stats(self, request: Request) -> Response: - """ - Get park statistics. - - GET /api/v1/parks/stats/ - """ - stats = park_statistics() - serializer = ParkStatsOutputSerializer(stats) - - return self.create_response( - data=serializer.data, - metadata={"cache_duration": 3600}, # 1 hour cache hint - ) - - -class ParkDetailApi(RetrieveApiMixin, GenericViewSet): - """ - API endpoint for retrieving individual park details. - - GET /api/v1/parks/{id}/ - """ - - permission_classes = [IsAuthenticatedOrReadOnly] - lookup_field = "slug" - - OutputSerializer = ParkDetailOutputSerializer - - def get_object(self): - """Use selector for optimized detail query.""" - slug = self.kwargs.get("slug") - return park_detail_optimized(slug=slug) - - @action(detail=True, methods=["get"]) - def reviews(self, request: Request, slug: str = None) -> Response: - """ - Get reviews for a specific park. - - GET /api/v1/parks/{slug}/reviews/ - """ - park = self.get_object() - reviews = park_reviews_for_park(park_id=park.id, limit=50) - - serializer = ParkReviewOutputSerializer(reviews, many=True) - - return self.create_response( - data=serializer.data, - metadata={"total_reviews": len(reviews), "park_name": park.name}, - ) - - -class ParkCreateApi(CreateApiMixin, GenericViewSet): - """ - API endpoint for creating parks. - - POST /api/v1/parks/create/ - """ - - permission_classes = [IsAuthenticated] - - InputSerializer = ParkCreateInputSerializer - OutputSerializer = ParkDetailOutputSerializer - - def perform_create(self, **validated_data): - """Create park using service layer.""" - return ParkService.create_park(**validated_data) - - -class ParkUpdateApi(UpdateApiMixin, RetrieveApiMixin, GenericViewSet): - """ - API endpoint for updating parks. - - PUT /api/v1/parks/{slug}/update/ - PATCH /api/v1/parks/{slug}/update/ - """ - - permission_classes = [IsAuthenticated] - lookup_field = "slug" - - InputSerializer = ParkUpdateInputSerializer - OutputSerializer = ParkDetailOutputSerializer - - def get_object(self): - """Use selector for optimized detail query.""" - slug = self.kwargs.get("slug") - return park_detail_optimized(slug=slug) - - def perform_update(self, instance, **validated_data): - """Update park using service layer.""" - return ParkService.update_park(park_id=instance.id, **validated_data) - - -class ParkDeleteApi(DestroyApiMixin, RetrieveApiMixin, GenericViewSet): - """ - API endpoint for deleting parks. - - DELETE /api/v1/parks/{slug}/delete/ - """ - - permission_classes = [IsAuthenticated] # TODO: Add staff/admin permission - lookup_field = "slug" - - def get_object(self): - """Use selector for optimized detail query.""" - slug = self.kwargs.get("slug") - return park_detail_optimized(slug=slug) - - def perform_destroy(self, instance): - """Delete park using service layer.""" - ParkService.delete_park(park_id=instance.id) - - -# Unified API ViewSet (alternative approach) -class ParkApi( - CreateApiMixin, - UpdateApiMixin, - ListApiMixin, - RetrieveApiMixin, - DestroyApiMixin, - GenericViewSet, -): - """ - Unified API endpoint for parks with all CRUD operations. - - GET /api/v1/parks/ - List parks - POST /api/v1/parks/ - Create park - GET /api/v1/parks/{slug}/ - Get park detail - PUT /api/v1/parks/{slug}/ - Update park - PATCH /api/v1/parks/{slug}/ - Partial update park - DELETE /api/v1/parks/{slug}/ - Delete park - """ - - permission_classes = [IsAuthenticatedOrReadOnly] - lookup_field = "slug" - filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter] - search_fields = ["name", "description"] - ordering_fields = [ - "name", - "opening_date", - "average_rating", - "coaster_count", - "created_at", - ] - ordering = ["name"] - - # Serializers for different operations - InputSerializer = ParkCreateInputSerializer # Used for create - UpdateInputSerializer = ParkUpdateInputSerializer # Used for update - OutputSerializer = ParkDetailOutputSerializer # Used for retrieve - ListOutputSerializer = ParkListOutputSerializer # Used for list - FilterSerializer = ParkFilterInputSerializer - - def get_queryset(self): - """Use selector to get optimized queryset.""" - if self.action == "list": - # Parse filter parameters for list view - filter_serializer = self.FilterSerializer(data=self.request.query_params) - filter_serializer.is_valid(raise_exception=True) - filters = filter_serializer.validated_data - return park_list_with_stats(**filters) - - # For detail views, this won't be used since we override get_object - return [] - - def get_object(self): - """Use selector for optimized detail query.""" - slug = self.kwargs.get("slug") - return park_detail_optimized(slug=slug) - - def get_output_serializer(self, *args, **kwargs): - """Return appropriate output serializer based on action.""" - if self.action == "list": - return self.ListOutputSerializer(*args, **kwargs) - return self.OutputSerializer(*args, **kwargs) - - def get_input_serializer(self, *args, **kwargs): - """Return appropriate input serializer based on action.""" - if self.action in ["update", "partial_update"]: - return self.UpdateInputSerializer(*args, **kwargs) - return self.InputSerializer(*args, **kwargs) - - def perform_create(self, **validated_data): - """Create park using service layer.""" - return ParkService.create_park(**validated_data) - - def perform_update(self, instance, **validated_data): - """Update park using service layer.""" - return ParkService.update_park(park_id=instance.id, **validated_data) - - def perform_destroy(self, instance): - """Delete park using service layer.""" - ParkService.delete_park(park_id=instance.id) - - @action(detail=False, methods=["get"]) - def stats(self, request: Request) -> Response: - """ - Get park statistics. - - GET /api/v1/parks/stats/ - """ - stats = park_statistics() - serializer = ParkStatsOutputSerializer(stats) - - return self.create_response( - data=serializer.data, metadata={"cache_duration": 3600} - ) - - @action(detail=True, methods=["get"]) - def reviews(self, request: Request, slug: str = None) -> Response: - """ - Get reviews for a specific park. - - GET /api/v1/parks/{slug}/reviews/ - """ - park = self.get_object() - reviews = park_reviews_for_park(park_id=park.id, limit=50) - - serializer = ParkReviewOutputSerializer(reviews, many=True) - - return self.create_response( - data=serializer.data, - metadata={"total_reviews": len(reviews), "park_name": park.name}, - ) diff --git a/parks/apps.py b/parks/apps.py deleted file mode 100644 index 97939f89..00000000 --- a/parks/apps.py +++ /dev/null @@ -1,9 +0,0 @@ -from django.apps import AppConfig - - -class ParksConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "parks" - - def ready(self): - import parks.signals # noqa: F401 - Register signals diff --git a/parks/filters.py b/parks/filters.py deleted file mode 100644 index b468a625..00000000 --- a/parks/filters.py +++ /dev/null @@ -1,390 +0,0 @@ -from django.core.exceptions import ValidationError -from django.utils.translation import gettext_lazy as _ -from django.db import models -from django.contrib.gis.geos import Point -from django.contrib.gis.measure import Distance -from django_filters import ( - NumberFilter, - ModelChoiceFilter, - DateFromToRangeFilter, - ChoiceFilter, - FilterSet, - CharFilter, - BooleanFilter, - OrderingFilter, -) -from .models import Park, Company -from .querysets import get_base_park_queryset -import requests - - -def validate_positive_integer(value): - """Validate that a value is a positive integer""" - try: - value = float(value) - if not value.is_integer() or value < 0: - raise ValidationError(_("Value must be a positive integer")) - return int(value) - except (TypeError, ValueError): - raise ValidationError(_("Invalid number format")) - - -class ParkFilter(FilterSet): - """Filter set for parks with search and validation capabilities""" - - class Meta: - model = Park - fields = [] - - # Search field with better description - search = CharFilter( - method="filter_search", - label=_("Search Parks"), - help_text=_("Search by park name, description, or location"), - ) - - # Status filter with clearer label - status = ChoiceFilter( - field_name="status", - choices=Park.STATUS_CHOICES, - empty_label=_("Any status"), - label=_("Operating Status"), - help_text=_("Filter parks by their current operating status"), - ) - - # Operator filters with helpful descriptions - operator = ModelChoiceFilter( - field_name="operator", - queryset=Company.objects.filter(roles__contains=["OPERATOR"]), - empty_label=_("Any operator"), - label=_("Operating Company"), - help_text=_("Filter parks by their operating company"), - ) - has_operator = BooleanFilter( - method="filter_has_operator", - label=_("Operator Status"), - help_text=_("Show parks with or without an operating company"), - ) - - # Ride and attraction filters - min_rides = NumberFilter( - field_name="ride_count", - lookup_expr="gte", - validators=[validate_positive_integer], - label=_("Minimum Rides"), - help_text=_("Show parks with at least this many rides"), - ) - min_coasters = NumberFilter( - field_name="coaster_count", - lookup_expr="gte", - validators=[validate_positive_integer], - label=_("Minimum Roller Coasters"), - help_text=_("Show parks with at least this many roller coasters"), - ) - - # Size filter - min_size = NumberFilter( - field_name="size_acres", - lookup_expr="gte", - validators=[validate_positive_integer], - label=_("Minimum Size (acres)"), - help_text=_("Show parks of at least this size in acres"), - ) - - # Opening date filter with better label - opening_date = DateFromToRangeFilter( - field_name="opening_date", - label=_("Opening Date Range"), - help_text=_("Filter parks by their opening date"), - ) - - # Location-based filters - location_search = CharFilter( - method="filter_location_search", - label=_("Location Search"), - help_text=_("Search by city, state, country, or address"), - ) - - near_location = CharFilter( - method="filter_near_location", - label=_("Near Location"), - help_text=_("Find parks near a specific location"), - ) - - radius_km = NumberFilter( - method="filter_radius", - label=_("Radius (km)"), - help_text=_("Search radius in kilometers (use with 'Near Location')"), - ) - - country_filter = CharFilter( - method="filter_country", - label=_("Country"), - help_text=_("Filter parks by country"), - ) - - state_filter = CharFilter( - method="filter_state", - label=_("State/Region"), - help_text=_("Filter parks by state or region"), - ) - - # Practical filter fields that people actually use - park_type = ChoiceFilter( - method="filter_park_type", - label=_("Park Type"), - help_text=_("Filter by popular park categories"), - choices=[ - ("disney", _("Disney Parks")), - ("universal", _("Universal Parks")), - ("six_flags", _("Six Flags")), - ("cedar_fair", _("Cedar Fair")), - ("independent", _("Independent Parks")), - ], - empty_label=_("All parks"), - ) - - has_coasters = BooleanFilter( - method="filter_has_coasters", - label=_("Has Roller Coasters"), - help_text=_("Show only parks with roller coasters"), - ) - - min_rating = ChoiceFilter( - method="filter_min_rating", - label=_("Minimum Rating"), - help_text=_("Show parks with at least this rating"), - choices=[ - ("3", _("3+ stars")), - ("4", _("4+ stars")), - ("4.5", _("4.5+ stars")), - ], - empty_label=_("Any rating"), - ) - - big_parks_only = BooleanFilter( - method="filter_big_parks", - label=_("Major Parks Only"), - help_text=_("Show only large theme parks (10+ rides)"), - ) - - # Simple, useful ordering - ordering = OrderingFilter( - fields=( - ("name", "name"), - ("average_rating", "rating"), - ("coaster_count", "coasters"), - ("ride_count", "rides"), - ), - field_labels={ - "name": _("Name (A-Z)"), - "-name": _("Name (Z-A)"), - "-average_rating": _("Highest Rated"), - "-coaster_count": _("Most Coasters"), - "-ride_count": _("Most Rides"), - }, - label=_("Sort by"), - ) - - def filter_search(self, queryset, name, value): - """Custom search implementation""" - if not value: - return queryset - - search_fields = [ - "name__icontains", - "description__icontains", - "location__city__icontains", - "location__state__icontains", - "location__country__icontains", - ] - - queries = [models.Q(**{field: value}) for field in search_fields] - query = queries.pop() - for item in queries: - query |= item - - return queryset.filter(query).distinct() - - def filter_has_operator(self, queryset, name, value): - """Filter parks based on whether they have an operator""" - return queryset.filter(operator__isnull=not value) - - @property - def qs(self): - """ - Override qs property to ensure we always use base queryset with annotations - """ - if not hasattr(self, "_qs"): - # Start with optimized base queryset - base_qs = ( - get_base_park_queryset() - .select_related("operator", "property_owner", "location") - .prefetch_related("photos", "rides__manufacturer") - ) - - if not self.is_bound: - self._qs = base_qs - return self._qs - - if not self.form.is_valid(): - self._qs = base_qs.none() - return self._qs - - self._qs = base_qs - for name, value in self.form.cleaned_data.items(): - if value in [None, "", 0] and name not in [ - "has_operator", - "has_coasters", - "big_parks_only", - ]: - continue - self._qs = self.filters[name].filter(self._qs, value) - self._qs = self._qs.distinct() - return self._qs - - def filter_location_search(self, queryset, name, value): - """Filter parks by location fields""" - if not value: - return queryset - - location_query = ( - models.Q(location__city__icontains=value) - | models.Q(location__state__icontains=value) - | models.Q(location__country__icontains=value) - | models.Q(location__street_address__icontains=value) - ) - - return queryset.filter(location_query).distinct() - - def filter_near_location(self, queryset, name, value): - """Filter parks near a specific location using geocoding""" - if not value: - return queryset - - # Try to geocode the location - coordinates = self._geocode_location(value) - if not coordinates: - return queryset - - lat, lng = coordinates - point = Point(lng, lat, srid=4326) - - # Get radius from form data, default to 50km - radius = self.data.get("radius_km", 50) - try: - radius = float(radius) - except (ValueError, TypeError): - radius = 50 - - # Filter by distance - distance = Distance(km=radius) - return ( - queryset.filter(location__point__distance_lte=(point, distance)) - .annotate(distance=models.Value(0, output_field=models.FloatField())) - .order_by("distance") - .distinct() - ) - - def filter_radius(self, queryset, name, value): - """Radius filter - handled by filter_near_location""" - return queryset - - def filter_country(self, queryset, name, value): - """Filter parks by country""" - if not value: - return queryset - return queryset.filter(location__country__icontains=value).distinct() - - def filter_state(self, queryset, name, value): - """Filter parks by state/region""" - if not value: - return queryset - return queryset.filter(location__state__icontains=value).distinct() - - def filter_park_type(self, queryset, name, value): - """Filter parks by popular company/brand""" - if not value: - return queryset - - # Map common park types to operator name patterns - type_filters = { - "disney": models.Q(operator__name__icontains="Disney"), - "universal": models.Q(operator__name__icontains="Universal"), - "six_flags": models.Q(operator__name__icontains="Six Flags"), - "cedar_fair": models.Q(operator__name__icontains="Cedar Fair") - | models.Q(operator__name__icontains="Cedar Point") - | models.Q(operator__name__icontains="Kings Island") - | models.Q(operator__name__icontains="Canada's Wonderland"), - "independent": ~( - models.Q(operator__name__icontains="Disney") - | models.Q(operator__name__icontains="Universal") - | models.Q(operator__name__icontains="Six Flags") - | models.Q(operator__name__icontains="Cedar Fair") - | models.Q(operator__name__icontains="Cedar Point") - ), - } - - if value in type_filters: - return queryset.filter(type_filters[value]) - - return queryset - - def filter_has_coasters(self, queryset, name, value): - """Filter parks based on whether they have roller coasters""" - if value is None: - return queryset - - if value: - return queryset.filter(coaster_count__gt=0) - else: - return queryset.filter( - models.Q(coaster_count__isnull=True) | models.Q(coaster_count=0) - ) - - def filter_min_rating(self, queryset, name, value): - """Filter parks by minimum rating""" - if not value: - return queryset - - try: - min_rating = float(value) - return queryset.filter(average_rating__gte=min_rating) - except (ValueError, TypeError): - return queryset - - def filter_big_parks(self, queryset, name, value): - """Filter to show only major parks with many rides""" - if not value: - return queryset - - return queryset.filter(ride_count__gte=10) - - def _geocode_location(self, location_string): - """ - Geocode a location string using OpenStreetMap Nominatim. - Returns (lat, lng) tuple or None if geocoding fails. - """ - try: - response = requests.get( - "https://nominatim.openstreetmap.org/search", - params={ - "q": location_string, - "format": "json", - "limit": 1, - "countrycodes": "us,ca,gb,fr,de,es,it,jp,au", # Popular countries - }, - headers={"User-Agent": "ThrillWiki/1.0"}, - timeout=5, - ) - - if response.status_code == 200: - data = response.json() - if data: - result = data[0] - return float(result["lat"]), float(result["lon"]) - except Exception: - # Silently fail geocoding - just return None - pass - - return None diff --git a/parks/forms.py b/parks/forms.py deleted file mode 100644 index 39d023d2..00000000 --- a/parks/forms.py +++ /dev/null @@ -1,368 +0,0 @@ -from django import forms -from decimal import Decimal, InvalidOperation, ROUND_DOWN -from autocomplete import AutocompleteWidget -from .models import Park -from .models.location import ParkLocation -from .querysets import get_base_park_queryset - - -class ParkAutocomplete(forms.Form): - """Autocomplete for searching parks. - - Features: - - Name-based search with partial matching - - Prefetches related owner data - - Applies standard park queryset filtering - - Includes park status and location in results - """ - - model = Park - search_attrs = ["name"] # We'll match on park names - - def get_search_results(self, search): - """Return search results with related data.""" - return ( - get_base_park_queryset() - .filter(name__icontains=search) - .select_related("operator", "property_owner") - .order_by("name") - ) - - def format_result(self, park): - """Format each park result with status and location.""" - location = park.formatted_location - location_text = f" • {location}" if location else "" - return { - "key": str(park.pk), - "label": park.name, - "extra": f"{park.get_status_display()}{location_text}", - } - - -class ParkSearchForm(forms.Form): - """Form for searching parks with autocomplete.""" - - park = forms.ModelChoiceField( - queryset=Park.objects.all(), - required=False, - widget=AutocompleteWidget( - ac_class=ParkAutocomplete, - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Search parks...", - }, - ), - ) - - -class ParkForm(forms.ModelForm): - """Form for creating and updating Park objects with location support""" - - # Location fields - latitude = forms.DecimalField( - max_digits=9, - decimal_places=6, - required=False, - widget=forms.HiddenInput(), - ) - longitude = forms.DecimalField( - max_digits=10, - decimal_places=6, - required=False, - widget=forms.HiddenInput(), - ) - street_address = forms.CharField( - max_length=255, - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - ) - city = forms.CharField( - max_length=255, - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - ) - state = forms.CharField( - max_length=255, - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - ) - country = forms.CharField( - max_length=255, - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - ) - postal_code = forms.CharField( - max_length=20, - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - ) - - class Meta: - model = Park - fields = [ - "name", - "description", - "operator", - "property_owner", - "status", - "opening_date", - "closing_date", - "operating_season", - "size_acres", - "website", - # Location fields handled separately - "latitude", - "longitude", - "street_address", - "city", - "state", - "country", - "postal_code", - ] - widgets = { - "name": forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - "description": forms.Textarea( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-textarea " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "rows": 2, - } - ), - "operator": forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - "property_owner": forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - "status": forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ) - } - ), - "opening_date": forms.DateInput( - attrs={ - "type": "date", - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - } - ), - "closing_date": forms.DateInput( - attrs={ - "type": "date", - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - } - ), - "operating_season": forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "e.g., Year-round, Summer only, etc.", - } - ), - "size_acres": forms.NumberInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "step": "0.01", - "min": "0", - } - ), - "website": forms.URLInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "https://example.com", - } - ), - } - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - # Pre-fill location fields if editing existing park - if self.instance and self.instance.pk and self.instance.location.exists(): - location = self.instance.location.first() - self.fields["latitude"].initial = location.latitude - self.fields["longitude"].initial = location.longitude - self.fields["street_address"].initial = location.street_address - self.fields["city"].initial = location.city - self.fields["state"].initial = location.state - self.fields["country"].initial = location.country - self.fields["postal_code"].initial = location.postal_code - - def clean_latitude(self): - latitude = self.cleaned_data.get("latitude") - if latitude is not None: - try: - # Convert to Decimal for precise handling - latitude = Decimal(str(latitude)) - # Round to exactly 6 decimal places - latitude = latitude.quantize(Decimal("0.000001"), rounding=ROUND_DOWN) - - # Validate range - if latitude < -90 or latitude > 90: - raise forms.ValidationError( - "Latitude must be between -90 and 90 degrees." - ) - - # Convert to string to preserve exact decimal places - return str(latitude) - except (InvalidOperation, TypeError) as e: - raise forms.ValidationError("Invalid latitude value.") from e - return latitude - - def clean_longitude(self): - longitude = self.cleaned_data.get("longitude") - if longitude is not None: - try: - # Convert to Decimal for precise handling - longitude = Decimal(str(longitude)) - # Round to exactly 6 decimal places - longitude = longitude.quantize(Decimal("0.000001"), rounding=ROUND_DOWN) - - # Validate range - if longitude < -180 or longitude > 180: - raise forms.ValidationError( - "Longitude must be between -180 and 180 degrees." - ) - - # Convert to string to preserve exact decimal places - return str(longitude) - except (InvalidOperation, TypeError) as e: - raise forms.ValidationError("Invalid longitude value.") from e - return longitude - - def save(self, commit=True): - park = super().save(commit=False) - - # Prepare location data - location_data = { - "name": park.name, - "location_type": "park", - "latitude": self.cleaned_data.get("latitude"), - "longitude": self.cleaned_data.get("longitude"), - "street_address": self.cleaned_data.get("street_address"), - "city": self.cleaned_data.get("city"), - "state": self.cleaned_data.get("state"), - "country": self.cleaned_data.get("country"), - "postal_code": self.cleaned_data.get("postal_code"), - } - - # Handle location: update if exists, create if not - try: - park_location = park.location - # Update existing location - for key, value in location_data.items(): - if key in ["latitude", "longitude"] and value: - continue # Handle coordinates separately - if hasattr(park_location, key): - setattr(park_location, key, value) - - # Handle coordinates if provided - if "latitude" in location_data and "longitude" in location_data: - if location_data["latitude"] and location_data["longitude"]: - park_location.set_coordinates( - float(location_data["latitude"]), - float(location_data["longitude"]), - ) - park_location.save() - except ParkLocation.DoesNotExist: - # Create new ParkLocation - coordinates_data = {} - if "latitude" in location_data and "longitude" in location_data: - if location_data["latitude"] and location_data["longitude"]: - coordinates_data = { - "latitude": float(location_data["latitude"]), - "longitude": float(location_data["longitude"]), - } - - # Remove coordinate fields from location_data for creation - creation_data = { - k: v - for k, v in location_data.items() - if k not in ["latitude", "longitude"] - } - creation_data.setdefault("country", "USA") - - park_location = ParkLocation.objects.create(park=park, **creation_data) - - if coordinates_data: - park_location.set_coordinates( - coordinates_data["latitude"], coordinates_data["longitude"] - ) - park_location.save() - - if commit: - park.save() - - return park diff --git a/parks/location_utils.py b/parks/location_utils.py deleted file mode 100644 index 88e2e385..00000000 --- a/parks/location_utils.py +++ /dev/null @@ -1,55 +0,0 @@ -from decimal import Decimal, ROUND_DOWN, InvalidOperation - - -def normalize_coordinate(value, max_digits, decimal_places): - """Normalize coordinate to have exactly 6 decimal places""" - try: - if value is None: - return None - - # Convert to Decimal for precise handling - value = Decimal(str(value)) - # Round to exactly 6 decimal places - value = value.quantize(Decimal("0.000001"), rounding=ROUND_DOWN) - - return float(value) - except (TypeError, ValueError, InvalidOperation): - return None - - -def get_english_name(tags): - """Extract English name from OSM tags, falling back to default name""" - # Try name:en first - if "name:en" in tags: - return tags["name:en"] - # Then try int_name (international name) - if "int_name" in tags: - return tags["int_name"] - # Fall back to default name - return tags.get("name") - - -def normalize_osm_result(result): - """Normalize OpenStreetMap result to use English names and normalized coordinates""" - # Normalize coordinates - result["lat"] = normalize_coordinate(float(result["lat"]), 9, 6) - result["lon"] = normalize_coordinate(float(result["lon"]), 10, 6) - - # Get address details - address = result.get("address", {}) - - # Normalize place names to English where possible - if "namedetails" in result: - # For main display name - result["display_name"] = get_english_name(result["namedetails"]) - - # For address components - if "city" in address and "city_tags" in result: - address["city"] = get_english_name(result["city_tags"]) - if "state" in address and "state_tags" in result: - address["state"] = get_english_name(result["state_tags"]) - if "country" in address and "country_tags" in result: - address["country"] = get_english_name(result["country_tags"]) - - result["address"] = address - return result diff --git a/parks/management/commands/create_sample_data.py b/parks/management/commands/create_sample_data.py deleted file mode 100644 index 3a3b8574..00000000 --- a/parks/management/commands/create_sample_data.py +++ /dev/null @@ -1,322 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import transaction - -# Import models from both apps -from parks.models import Company as ParkCompany -from rides.models import ( - Company as RideCompany, -) - - -class Command(BaseCommand): - help = "Creates comprehensive sample data for the ThrillWiki theme park application" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.created_companies = {} - self.created_parks = {} - self.created_rides = {} - - def handle(self, *args, **options): - self.stdout.write("Starting sample data creation...") - - try: - with transaction.atomic(): - self.create_companies() - self.create_parks() - self.create_ride_models() - self.create_rides() - self.create_park_areas() - self.create_reviews() - - self.stdout.write( - self.style.SUCCESS("Successfully created comprehensive sample data!") - ) - self.print_summary() - - except Exception as e: - self.stdout.write(self.style.ERROR(f"Error creating sample data: {e}")) - raise - - def create_companies(self): - """Create companies with different roles following entity relationship rules""" - self.stdout.write("Creating companies...") - - # Park operators and property owners (using parks.models.Company) - park_operators_data = [ - { - "name": "The Walt Disney Company", - "slug": "walt-disney-company", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "World's largest entertainment company and theme park operator.", - "website": "https://www.disney.com/", - "founded_year": 1923, - }, - { - "name": "Universal Parks & Resorts", - "slug": "universal-parks-resorts", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "Division of Comcast NBCUniversal, operating major theme parks worldwide.", - "website": "https://www.universalparks.com/", - "founded_year": 1964, - }, - { - "name": "Six Flags Entertainment Corporation", - "slug": "six-flags-entertainment", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "World's largest regional theme park company.", - "website": "https://www.sixflags.com/", - "founded_year": 1961, - }, - { - "name": "Cedar Fair Entertainment Company", - "slug": "cedar-fair-entertainment", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "One of North America's largest operators of regional amusement parks.", - "website": "https://www.cedarfair.com/", - "founded_year": 1983, - }, - { - "name": "Herschend Family Entertainment", - "slug": "herschend-family-entertainment", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "Largest family-owned themed attractions corporation in the United States.", - "website": "https://www.hfecorp.com/", - "founded_year": 1950, - }, - { - "name": "SeaWorld Parks & Entertainment", - "slug": "seaworld-parks-entertainment", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "Theme park and entertainment company focusing on nature-based themes.", - "website": "https://www.seaworldentertainment.com/", - "founded_year": 1959, - }, - { - "name": "Merlin Entertainments", - "slug": "merlin-entertainments", - "roles": ["OPERATOR", "PROPERTY_OWNER"], - "description": "European theme park operator with LEGOLAND and Madame Tussauds brands.", - "website": "https://www.merlinentertainments.com/", - "founded_year": 1998, - }, - ] - - for company_data in park_operators_data: - company, created = ParkCompany.objects.get_or_create( - slug=company_data["slug"], defaults=company_data - ) - self.created_companies[company.slug] = company - self.stdout.write( - f' { - "Created" if created else "Found"} park company: { - company.name}' - ) - - # Ride manufacturers and designers (using rides.models.Company) - ride_companies_data = [ - { - "name": "Bolliger & Mabillard", - "slug": "bolliger-mabillard", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "Swiss roller coaster manufacturer known for inverted and diving coasters.", - "website": "https://www.bolliger-mabillard.com/", - "founded_date": "1988-01-01", - }, - { - "name": "Intamin Amusement Rides", - "slug": "intamin-amusement-rides", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "Liechtenstein-based manufacturer of roller coasters and thrill rides.", - "website": "https://www.intamin.com/", - "founded_date": "1967-01-01", - }, - { - "name": "Arrow Dynamics", - "slug": "arrow-dynamics", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "American manufacturer known for corkscrew coasters and mine trains.", - "website": "https://en.wikipedia.org/wiki/Arrow_Dynamics", - "founded_date": "1946-01-01", - }, - { - "name": "Vekoma Rides Manufacturing", - "slug": "vekoma-rides-manufacturing", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "Dutch manufacturer of roller coasters and family rides.", - "website": "https://www.vekoma.com/", - "founded_date": "1926-01-01", - }, - { - "name": "Rocky Mountain Construction", - "slug": "rocky-mountain-construction", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "American manufacturer specializing in I-Box track and Raptor track coasters.", - "website": "https://www.rockymtnconstruction.com/", - "founded_date": "2001-01-01", - }, - { - "name": "Mack Rides", - "slug": "mack-rides", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "German manufacturer known for water rides and powered coasters.", - "website": "https://www.mack-rides.com/", - "founded_date": "1780-01-01", - }, - { - "name": "Chance Rides", - "slug": "chance-rides", - "roles": ["MANUFACTURER"], - "description": "American manufacturer of thrill rides and amusement park equipment.", - "website": "https://www.chancerides.com/", - "founded_date": "1961-01-01", - }, - { - "name": "S&S Worldwide", - "slug": "s-s-worldwide", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "American manufacturer known for drop towers and 4D free-fly coasters.", - "website": "https://www.s-s.com/", - "founded_date": "1990-01-01", - }, - { - "name": "Zierer Rides", - "slug": "zierer-rides", - "roles": ["MANUFACTURER"], - "description": "German manufacturer of kiddie rides and family coasters.", - "website": "https://www.zierer.com/", - "founded_date": "1950-01-01", - }, - { - "name": "Gerstlauer", - "slug": "gerstlauer", - "roles": ["MANUFACTURER", "DESIGNER"], - "description": "German manufacturer known for Euro-Fighter and spinning coasters.", - "website": "https://www.gerstlauer-rides.de/", - "founded_date": "1982-01-01", - }, - ] - - for company_data in ride_companies_data: - company, created = RideCompany.objects.get_or_create( - slug=company_data["slug"], defaults=company_data - ) - self.created_companies[company.slug] = company - self.stdout.write( - f' { - "Created" if created else "Found"} ride company: { - company.name}' - ) - - def create_parks(self): - """Create parks with proper operator relationships""" - self.stdout.write("Creating parks...") - - # TODO: Implement park creation - parks_data defined but not used yet - parks_data = [ # noqa: F841 - { - "name": "Magic Kingdom", - "slug": "magic-kingdom", - "operator_slug": "walt-disney-company", - "property_owner_slug": "walt-disney-company", - "description": "The first theme park at Walt Disney World Resort in Florida, opened in 1971.", - "opening_date": "1971-10-01", - "size_acres": 142, - "website": "https://disneyworld.disney.go.com/destinations/magic-kingdom/", - "location": { - "street_address": "1180 Seven Seas Dr", - "city": "Lake Buena Vista", - "state_province": "Florida", - "country": "USA", - "postal_code": "32830", - "latitude": 28.4177, - "longitude": -81.5812, - }, - }, - { - "name": "Universal Studios Florida", - "slug": "universal-studios-florida", - "operator_slug": "universal-parks-resorts", - "property_owner_slug": "universal-parks-resorts", - "description": "Movie and television-based theme park in Orlando, Florida.", - "opening_date": "1990-06-07", - "size_acres": 108, - "website": "https://www.universalorlando.com/web/en/us/theme-parks/universal-studios-florida", - "location": { - "street_address": "6000 Universal Blvd", - "city": "Orlando", - "state_province": "Florida", - "country": "USA", - "postal_code": "32819", - "latitude": 28.4749, - "longitude": -81.4687, - }, - }, - { - "name": "Cedar Point", - "slug": "cedar-point", - "operator_slug": "cedar-fair-entertainment", - "property_owner_slug": "cedar-fair-entertainment", - "description": 'Known as the "Roller Coaster Capital of the World".', - "opening_date": "1870-06-01", - "size_acres": 364, - "website": "https://www.cedarpoint.com/", - "location": { - "street_address": "1 Cedar Point Dr", - "city": "Sandusky", - "state_province": "Ohio", - "country": "USA", - "postal_code": "44870", - "latitude": 41.4822, - "longitude": -82.6835, - }, - }, - { - "name": "Six Flags Magic Mountain", - "slug": "six-flags-magic-mountain", - "operator_slug": "six-flags-entertainment", - "property_owner_slug": "six-flags-entertainment", - "description": "Known for its world-record 19 roller coasters.", - "opening_date": "1971-05-29", - "size_acres": 262, - "website": "https://www.sixflags.com/magicmountain", - "location": { - "street_address": "26101 Magic Mountain Pkwy", - "city": "Valencia", - "state_province": "California", - "country": "USA", - "postal_code": "91355", - "latitude": 34.4253, - "longitude": -118.5971, - }, - }, - { - "name": "Europa-Park", - "slug": "europa-park", - "operator_slug": "merlin-entertainments", - "property_owner_slug": "merlin-entertainments", - "description": "One of the most popular theme parks in Europe, located in Germany.", - "opening_date": "1975-07-12", - "size_acres": 234, - "website": "https://www.europapark.de/", - "location": { - "street_address": "Europa-Park-Straße 2", - "city": "Rust", - "state_province": "Baden-Württemberg", - "country": "Germany", - "postal_code": "77977", - "latitude": 48.2667, - "longitude": 7.7167, - }, - }, - { - "name": "Alton Towers", - "slug": "alton-towers", - "operator_slug": "merlin-entertainments", - "property_owner_slug": "merlin-entertainments", - "description": "Major theme park and former country estate in Staffordshire, England.", - "opening_date": "1980-04-23", - "size_acres": 500, - # Add other fields as needed - }, - ] diff --git a/parks/management/commands/fix_migrations.py b/parks/management/commands/fix_migrations.py deleted file mode 100644 index 6bec9c84..00000000 --- a/parks/management/commands/fix_migrations.py +++ /dev/null @@ -1,36 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import connection - - -class Command(BaseCommand): - help = "Fix migration history" - - def handle(self, *args, **options): - with connection.cursor() as cursor: - # Drop existing historical tables - cursor.execute( - """ - DROP TABLE IF EXISTS parks_historicalpark CASCADE; - DROP TABLE IF EXISTS parks_historicalparkarea CASCADE; - """ - ) - - # Delete all existing parks migrations - cursor.execute( - """ - DELETE FROM django_migrations - WHERE app = 'parks'; - """ - ) - - # Insert the new initial migration - cursor.execute( - """ - INSERT INTO django_migrations (app, name, applied) - VALUES ('parks', '0001_initial', NOW()); - """ - ) - - self.stdout.write( - self.style.SUCCESS("Successfully fixed migration history") - ) diff --git a/parks/management/commands/seed_data.json b/parks/management/commands/seed_data.json deleted file mode 100644 index 6eb95571..00000000 --- a/parks/management/commands/seed_data.json +++ /dev/null @@ -1,351 +0,0 @@ -{ - "parks": [ - { - "name": "Walt Disney World Magic Kingdom", - "location": "Orlando, Florida", - "country": "US", - "opening_date": "1971-10-01", - "status": "OPERATING", - "description": "The most visited theme park in the world, Magic Kingdom is Walt Disney World's first theme park.", - "website": "https://disneyworld.disney.go.com/destinations/magic-kingdom/", - "owner": "The Walt Disney Company", - "size_acres": "142.00", - "photos": [ - "https://images.unsplash.com/photo-1524008279394-3aed4643b30b" - ], - "rides": [ - { - "name": "Space Mountain", - "category": "RC", - "opening_date": "1975-01-15", - "status": "OPERATING", - "manufacturer": "Walt Disney Imagineering", - "description": "A high-speed roller coaster in the dark through space.", - "photos": [ - "https://images.unsplash.com/photo-1536768139911-e290a59011e4" - ], - "stats": { - "height_ft": "183.00", - "length_ft": "3196.00", - "speed_mph": "27.00", - "inversions": 0, - "ride_time_seconds": 180 - } - }, - { - "name": "Big Thunder Mountain Railroad", - "category": "RC", - "opening_date": "1980-09-23", - "status": "OPERATING", - "manufacturer": "Walt Disney Imagineering", - "description": "A mine train roller coaster through the Old West.", - "photos": [ - "https://images.unsplash.com/photo-1513889961551-628c1e5e2ee9" - ], - "stats": { - "height_ft": "104.00", - "length_ft": "2671.00", - "speed_mph": "30.00", - "inversions": 0, - "ride_time_seconds": 197 - } - }, - { - "name": "Seven Dwarfs Mine Train", - "category": "RC", - "opening_date": "2014-05-28", - "status": "OPERATING", - "manufacturer": "Vekoma", - "description": "A family roller coaster featuring unique swinging cars.", - "photos": [ - "https://images.unsplash.com/photo-1590144662036-33bf0ebd2c7f" - ], - "stats": { - "height_ft": "112.00", - "length_ft": "2000.00", - "speed_mph": "34.00", - "inversions": 0, - "ride_time_seconds": 180 - } - }, - { - "name": "Haunted Mansion", - "category": "DR", - "opening_date": "1971-10-01", - "status": "OPERATING", - "manufacturer": "Walt Disney Imagineering", - "description": "A dark ride through a haunted estate.", - "photos": [ - "https://images.unsplash.com/photo-1597466599360-3b9775841aec" - ] - }, - { - "name": "Pirates of the Caribbean", - "category": "DR", - "opening_date": "1973-12-15", - "status": "OPERATING", - "manufacturer": "Walt Disney Imagineering", - "description": "A boat ride through pirate-filled Caribbean waters.", - "photos": [ - "https://images.unsplash.com/photo-1506126799754-92bc47fc5d78" - ] - } - ] - }, - { - "name": "Cedar Point", - "location": "Sandusky, Ohio", - "country": "US", - "opening_date": "1870-06-01", - "status": "OPERATING", - "description": "Known as the Roller Coaster Capital of the World.", - "website": "https://www.cedarpoint.com", - "owner": "Cedar Fair", - "size_acres": "364.00", - "photos": [ - "https://images.unsplash.com/photo-1536768139911-e290a59011e4" - ], - "rides": [ - { - "name": "Steel Vengeance", - "category": "RC", - "opening_date": "2018-05-05", - "status": "OPERATING", - "manufacturer": "Rocky Mountain Construction", - "description": "A hybrid roller coaster featuring multiple inversions.", - "photos": [ - "https://images.unsplash.com/photo-1543674892-7d64d45df18b" - ], - "stats": { - "height_ft": "205.00", - "length_ft": "5740.00", - "speed_mph": "74.00", - "inversions": 4, - "ride_time_seconds": 150 - } - }, - { - "name": "Millennium Force", - "category": "RC", - "opening_date": "2000-05-13", - "status": "OPERATING", - "manufacturer": "Intamin", - "description": "A giga coaster with stunning views of Lake Erie.", - "photos": [ - "https://images.unsplash.com/photo-1605559911160-a3d95d213904" - ], - "stats": { - "height_ft": "310.00", - "length_ft": "6595.00", - "speed_mph": "93.00", - "inversions": 0, - "ride_time_seconds": 120 - } - }, - { - "name": "Top Thrill Dragster", - "category": "RC", - "opening_date": "2003-05-04", - "status": "SBNO", - "manufacturer": "Intamin", - "description": "A strata coaster featuring a 420-foot top hat element.", - "photos": [ - "https://images.unsplash.com/photo-1578912996078-305d92249aa6" - ], - "stats": { - "height_ft": "420.00", - "length_ft": "2800.00", - "speed_mph": "120.00", - "inversions": 0, - "ride_time_seconds": 50 - } - }, - { - "name": "Maverick", - "category": "RC", - "opening_date": "2007-05-26", - "status": "OPERATING", - "manufacturer": "Intamin", - "description": "A launched roller coaster with multiple inversions.", - "photos": [ - "https://images.unsplash.com/photo-1581309638082-877cb8132535" - ], - "stats": { - "height_ft": "105.00", - "length_ft": "4450.00", - "speed_mph": "70.00", - "inversions": 2, - "ride_time_seconds": 150 - } - } - ] - }, - { - "name": "Universal's Islands of Adventure", - "location": "Orlando, Florida", - "country": "US", - "opening_date": "1999-05-28", - "status": "OPERATING", - "description": "A theme park featuring cutting-edge technology and thrilling attractions.", - "website": "https://www.universalorlando.com/web/en/us/theme-parks/islands-of-adventure", - "owner": "NBCUniversal", - "size_acres": "110.00", - "photos": [ - "https://images.unsplash.com/photo-1597466599360-3b9775841aec" - ], - "rides": [ - { - "name": "Jurassic World VelociCoaster", - "category": "RC", - "opening_date": "2021-06-10", - "status": "OPERATING", - "manufacturer": "Intamin", - "description": "A high-speed launch coaster featuring velociraptors.", - "photos": [ - "https://images.unsplash.com/photo-1536768139911-e290a59011e4" - ], - "stats": { - "height_ft": "155.00", - "length_ft": "4700.00", - "speed_mph": "70.00", - "inversions": 4, - "ride_time_seconds": 145 - } - }, - { - "name": "Hagrid's Magical Creatures Motorbike Adventure", - "category": "RC", - "opening_date": "2019-06-13", - "status": "OPERATING", - "manufacturer": "Intamin", - "description": "A story coaster through the Forbidden Forest.", - "photos": [ - "https://images.unsplash.com/photo-1513889961551-628c1e5e2ee9" - ], - "stats": { - "height_ft": "65.00", - "length_ft": "5053.00", - "speed_mph": "50.00", - "inversions": 0, - "ride_time_seconds": 180 - } - }, - { - "name": "The Amazing Adventures of Spider-Man", - "category": "DR", - "opening_date": "1999-05-28", - "status": "OPERATING", - "manufacturer": "Oceaneering International", - "description": "A 3D dark ride featuring Spider-Man.", - "photos": [ - "https://images.unsplash.com/photo-1590144662036-33bf0ebd2c7f" - ] - } - ] - }, - { - "name": "Alton Towers", - "location": "Alton, England", - "country": "GB", - "opening_date": "1980-04-04", - "status": "OPERATING", - "description": "The UK's largest theme park, built around a historic stately home.", - "website": "https://www.altontowers.com", - "owner": "Merlin Entertainments", - "size_acres": "910.00", - "photos": [ - "https://images.unsplash.com/photo-1506126799754-92bc47fc5d78" - ], - "rides": [ - { - "name": "Nemesis", - "category": "RC", - "opening_date": "1994-03-19", - "status": "CLOSED", - "manufacturer": "Bolliger & Mabillard", - "description": "An inverted roller coaster through ravines.", - "photos": [ - "https://images.unsplash.com/photo-1543674892-7d64d45df18b" - ], - "stats": { - "height_ft": "43.00", - "length_ft": "2349.00", - "speed_mph": "50.00", - "inversions": 4, - "ride_time_seconds": 80 - } - }, - { - "name": "Oblivion", - "category": "RC", - "opening_date": "1998-03-14", - "status": "OPERATING", - "manufacturer": "Bolliger & Mabillard", - "description": "The world's first vertical drop roller coaster.", - "photos": [ - "https://images.unsplash.com/photo-1605559911160-a3d95d213904" - ], - "stats": { - "height_ft": "65.00", - "length_ft": "1804.00", - "speed_mph": "68.00", - "inversions": 0, - "ride_time_seconds": 100 - } - } - ] - }, - { - "name": "Europa-Park", - "location": "Rust, Germany", - "country": "DE", - "opening_date": "1975-07-12", - "status": "OPERATING", - "description": "Germany's largest theme park, featuring European-themed areas.", - "website": "https://www.europapark.de", - "owner": "Mack Rides", - "size_acres": "235.00", - "photos": [ - "https://images.unsplash.com/photo-1536768139911-e290a59011e4" - ], - "rides": [ - { - "name": "Silver Star", - "category": "RC", - "opening_date": "2002-03-23", - "status": "OPERATING", - "manufacturer": "Bolliger & Mabillard", - "description": "A hypercoaster with stunning views.", - "photos": [ - "https://images.unsplash.com/photo-1536768139911-e290a59011e4" - ], - "stats": { - "height_ft": "239.00", - "length_ft": "4003.00", - "speed_mph": "79.00", - "inversions": 0, - "ride_time_seconds": 180 - } - }, - { - "name": "Blue Fire", - "category": "RC", - "opening_date": "2009-04-04", - "status": "OPERATING", - "manufacturer": "Mack Rides", - "description": "A launched roller coaster with multiple inversions.", - "photos": [ - "https://images.unsplash.com/photo-1513889961551-628c1e5e2ee9" - ], - "stats": { - "height_ft": "125.00", - "length_ft": "3465.00", - "speed_mph": "62.00", - "inversions": 4, - "ride_time_seconds": 150 - } - } - ] - } - ] -} diff --git a/parks/management/commands/seed_initial_data.py b/parks/management/commands/seed_initial_data.py deleted file mode 100644 index 1f787619..00000000 --- a/parks/management/commands/seed_initial_data.py +++ /dev/null @@ -1,334 +0,0 @@ -from django.core.management.base import BaseCommand -from parks.models import Park, ParkArea, ParkLocation, Company as Operator - - -class Command(BaseCommand): - help = "Seeds initial park data with major theme parks worldwide" - - def handle(self, *args, **options): - # Create major theme park companies - companies_data = [ - { - "name": "The Walt Disney Company", - "website": "https://www.disney.com/", - "headquarters": "Burbank, California", - "description": "The world's largest entertainment company and theme park operator.", - }, - { - "name": "Universal Parks & Resorts", - "website": "https://www.universalparks.com/", - "headquarters": "Orlando, Florida", - "description": "A division of Comcast NBCUniversal, operating major theme parks worldwide.", - }, - { - "name": "Six Flags Entertainment Corporation", - "website": "https://www.sixflags.com/", - "headquarters": "Arlington, Texas", - "description": "The world's largest regional theme park company.", - }, - { - "name": "Cedar Fair Entertainment Company", - "website": "https://www.cedarfair.com/", - "headquarters": "Sandusky, Ohio", - "description": "One of North America's largest operators of regional amusement parks.", - }, - { - "name": "Herschend Family Entertainment", - "website": "https://www.hfecorp.com/", - "headquarters": "Atlanta, Georgia", - "description": "The largest family-owned themed attractions corporation in the United States.", - }, - { - "name": "SeaWorld Parks & Entertainment", - "website": "https://www.seaworldentertainment.com/", - "headquarters": "Orlando, Florida", - "description": "Theme park and entertainment company focusing on nature-based themes.", - }, - ] - - companies = {} - for company_data in companies_data: - operator, created = Operator.objects.get_or_create( - name=company_data["name"], defaults=company_data - ) - companies[operator.name] = operator - self.stdout.write( - f'{"Created" if created else "Found"} company: {operator.name}' - ) - - # Create parks with their locations - parks_data = [ - { - "name": "Magic Kingdom", - "company": "The Walt Disney Company", - "description": "The first theme park at Walt Disney World Resort in Florida, opened in 1971.", - "opening_date": "1971-10-01", - "size_acres": 142, - "location": { - "street_address": "1180 Seven Seas Dr", - "city": "Lake Buena Vista", - "state": "Florida", - "country": "United States", - "postal_code": "32830", - "latitude": 28.4177, - "longitude": -81.5812, - }, - "areas": [ - { - "name": "Main Street, U.S.A.", - "description": "Victorian-era themed entrance corridor", - }, - { - "name": "Adventureland", - "description": "Exotic tropical places themed area", - }, - { - "name": "Frontierland", - "description": "American Old West themed area", - }, - { - "name": "Liberty Square", - "description": "Colonial America themed area", - }, - { - "name": "Fantasyland", - "description": "Fairy tale themed area", - }, - { - "name": "Tomorrowland", - "description": "Future themed area", - }, - ], - }, - { - "name": "Universal Studios Florida", - "company": "Universal Parks & Resorts", - "description": "Movie and television-based theme park in Orlando, Florida.", - "opening_date": "1990-06-07", - "size_acres": 108, - "location": { - "street_address": "6000 Universal Blvd", - "city": "Orlando", - "state": "Florida", - "country": "United States", - "postal_code": "32819", - "latitude": 28.4749, - "longitude": -81.4687, - }, - "areas": [ - { - "name": "Production Central", - "description": "Main entrance area with movie-themed attractions", - }, - { - "name": "New York", - "description": "Themed after New York City streets", - }, - { - "name": "San Francisco", - "description": "Themed after San Francisco's waterfront", - }, - { - "name": "The Wizarding World of Harry Potter - Diagon Alley", - "description": "Themed after the Harry Potter series", - }, - { - "name": "Springfield", - "description": "Themed after The Simpsons hometown", - }, - ], - }, - { - "name": "Cedar Point", - "company": "Cedar Fair Entertainment Company", - "description": 'Known as the "Roller Coaster Capital of the World".', - "opening_date": "1870-06-01", - "size_acres": 364, - "location": { - "street_address": "1 Cedar Point Dr", - "city": "Sandusky", - "state": "Ohio", - "country": "United States", - "postal_code": "44870", - "latitude": 41.4822, - "longitude": -82.6835, - }, - "areas": [ - { - "name": "Frontiertown", - "description": "Western-themed area with multiple roller coasters", - }, - { - "name": "Millennium Island", - "description": "Home to the Millennium Force roller coaster", - }, - { - "name": "Cedar Point Shores", - "description": "Waterpark area", - }, - { - "name": "Top Thrill Dragster", - "description": "Area surrounding the iconic launched coaster", - }, - ], - }, - { - "name": "Silver Dollar City", - "company": "Herschend Family Entertainment", - "description": "An 1880s-themed park featuring over 40 rides and attractions.", - "opening_date": "1960-05-01", - "size_acres": 61, - "location": { - "street_address": "399 Silver Dollar City Parkway", - "city": "Branson", - "state": "Missouri", - "country": "United States", - "postal_code": "65616", - "latitude": 36.668497, - "longitude": -93.339074, - }, - "areas": [ - { - "name": "Grand Exposition", - "description": "Home to many family rides and attractions", - }, - { - "name": "Wildfire", - "description": "Named after the famous B&M coaster", - }, - { - "name": "Wilson's Farm", - "description": "Farm-themed attractions and dining", - }, - { - "name": "Riverfront", - "description": "Water-themed attractions area", - }, - { - "name": "The Valley", - "description": "Home to Time Traveler and other major attractions", - }, - ], - }, - { - "name": "Six Flags Magic Mountain", - "company": "Six Flags Entertainment Corporation", - "description": "Known for its world-record 19 roller coasters.", - "opening_date": "1971-05-29", - "size_acres": 262, - "location": { - "street_address": "26101 Magic Mountain Pkwy", - "city": "Valencia", - "state": "California", - "country": "United States", - "postal_code": "91355", - "latitude": 34.4253, - "longitude": -118.5971, - }, - "areas": [ - { - "name": "Six Flags Plaza", - "description": "Main entrance area", - }, - { - "name": "DC Universe", - "description": "DC Comics themed area", - }, - { - "name": "Screampunk District", - "description": "Steampunk themed area", - }, - { - "name": "The Underground", - "description": "Urban themed area", - }, - { - "name": "Goliath Territory", - "description": "Area surrounding the Goliath hypercoaster", - }, - ], - }, - { - "name": "SeaWorld Orlando", - "company": "SeaWorld Parks & Entertainment", - "description": "Marine zoological park combined with thrill rides and shows.", - "opening_date": "1973-12-15", - "size_acres": 200, - "location": { - "street_address": "7007 Sea World Dr", - "city": "Orlando", - "state": "Florida", - "country": "United States", - "postal_code": "32821", - "latitude": 28.4115, - "longitude": -81.4617, - }, - "areas": [ - { - "name": "Sea Harbor", - "description": "Main entrance and shopping area", - }, - { - "name": "Shark Encounter", - "description": "Shark exhibit and themed area", - }, - { - "name": "Antarctica: Empire of the Penguin", - "description": "Penguin-themed area", - }, - { - "name": "Manta", - "description": "Area themed around the Manta flying roller coaster", - }, - { - "name": "Sesame Street Land", - "description": "Kid-friendly area based on Sesame Street", - }, - ], - }, - ] - - # Create parks and their areas - for park_data in parks_data: - company = companies[park_data["company"]] - park, created = Park.objects.get_or_create( - name=park_data["name"], - defaults={ - "description": park_data["description"], - "status": "OPERATING", - "opening_date": park_data["opening_date"], - "size_acres": park_data["size_acres"], - "owner": company, - }, - ) - self.stdout.write(f'{"Created" if created else "Found"} park: {park.name}') - - # Create location for park - if created: - loc_data = park_data["location"] - park_location = ParkLocation.objects.create( - park=park, - street_address=loc_data["street_address"], - city=loc_data["city"], - state=loc_data["state"], - country=loc_data["country"], - postal_code=loc_data["postal_code"], - ) - # Set coordinates using the helper method - park_location.set_coordinates( - loc_data["latitude"], loc_data["longitude"] - ) - park_location.save() - - # Create areas for park - for area_data in park_data["areas"]: - area, created = ParkArea.objects.get_or_create( - name=area_data["name"], - park=park, - defaults={"description": area_data["description"]}, - ) - self.stdout.write( - f'{"Created" if created else "Found"} area: {area.name} in {park.name}' - ) - - self.stdout.write(self.style.SUCCESS("Successfully seeded initial park data")) diff --git a/parks/management/commands/seed_sample_data.py b/parks/management/commands/seed_sample_data.py deleted file mode 100644 index e5ee7445..00000000 --- a/parks/management/commands/seed_sample_data.py +++ /dev/null @@ -1,1173 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db import transaction, connection -import logging - -from parks.models import Company, Park, ParkArea, ParkReview, ParkLocation -from rides.models import ( - Company as RideCompany, - Ride, - RideModel, - RideReview, - RollerCoasterStats, -) -from accounts.models import User - - -class Command(BaseCommand): - help = "Seeds comprehensive sample data for the ThrillWiki theme park application" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.logger = logging.getLogger(__name__) - - def cleanup_existing_data(self): - """Clean up all existing sample data before creating new data""" - self.stdout.write("Cleaning up existing sample data...") - - try: - with transaction.atomic(): - # Count existing data for logging - park_review_count = ParkReview.objects.count() - ride_review_count = RideReview.objects.count() - rollercoaster_stats_count = RollerCoasterStats.objects.count() - ride_count = Ride.objects.count() - ride_model_count = RideModel.objects.count() - park_area_count = ParkArea.objects.count() - park_location_count = ParkLocation.objects.count() - park_count = Park.objects.count() - ride_company_count = RideCompany.objects.count() - company_count = Company.objects.count() - test_user_count = User.objects.filter(username="testuser").count() - - # Log what will be deleted - self.stdout.write(f" Found {park_review_count} park reviews to delete") - self.stdout.write(f" Found {ride_review_count} ride reviews to delete") - self.stdout.write( - f" Found {rollercoaster_stats_count} roller coaster stats to delete" - ) - self.stdout.write(f" Found {ride_count} rides to delete") - self.stdout.write(f" Found {ride_model_count} ride models to delete") - self.stdout.write(f" Found {park_area_count} park areas to delete") - self.stdout.write( - f" Found {park_location_count} park locations to delete" - ) - self.stdout.write(f" Found {park_count} parks to delete") - self.stdout.write( - f" Found {ride_company_count} ride companies to delete" - ) - self.stdout.write(f" Found {company_count} park companies to delete") - self.stdout.write(f" Found {test_user_count} test users to delete") - - # Delete in order to avoid foreign key constraint violations - # Reviews first (they reference other objects) - if park_review_count > 0: - ParkReview.objects.all().delete() - self.stdout.write(f" Deleted {park_review_count} park reviews") - - if ride_review_count > 0: - RideReview.objects.all().delete() - self.stdout.write(f" Deleted {ride_review_count} ride reviews") - - # Roller coaster stats (references Ride) - if rollercoaster_stats_count > 0: - RollerCoasterStats.objects.all().delete() - self.stdout.write( - f" Deleted {rollercoaster_stats_count} roller coaster stats" - ) - - # Rides (references Park, RideCompany, RideModel) - if ride_count > 0: - Ride.objects.all().delete() - self.stdout.write(f" Deleted {ride_count} rides") - - # Ride models (referenced by Ride) - if ride_model_count > 0: - RideModel.objects.all().delete() - self.stdout.write(f" Deleted {ride_model_count} ride models") - - # Park areas (references Park) - if park_area_count > 0: - ParkArea.objects.all().delete() - self.stdout.write(f" Deleted {park_area_count} park areas") - - # Park locations (references Park) - if park_location_count > 0: - ParkLocation.objects.all().delete() - self.stdout.write(f" Deleted {park_location_count} park locations") - - # Parks (referenced by many models) - if park_count > 0: - Park.objects.all().delete() - self.stdout.write(f" Deleted {park_count} parks") - - # Ride companies (referenced by Ride, RideModel) - if ride_company_count > 0: - RideCompany.objects.all().delete() - self.stdout.write(f" Deleted {ride_company_count} ride companies") - - # Park companies (referenced by Park) - if company_count > 0: - Company.objects.all().delete() - self.stdout.write(f" Deleted {company_count} park companies") - - # Only delete test user, not all users - if test_user_count > 0: - User.objects.filter(username="testuser").delete() - self.stdout.write(f" Deleted {test_user_count} test users") - - self.stdout.write( - self.style.SUCCESS("Successfully cleaned up existing sample data!") - ) - - except Exception as e: - self.logger.error( - f"Error during data cleanup: { - str(e)}", - exc_info=True, - ) - self.stdout.write( - self.style.ERROR(f"Failed to clean up existing data: {str(e)}") - ) - raise - - def handle(self, *args, **options): - self.stdout.write("Starting sample data creation...") - - try: - # Check if required tables exist - if not self.check_required_tables(): - self.stdout.write( - self.style.ERROR( - "Required database tables are missing. Please run migrations first." - ) - ) - return - - # Clean up existing data first - self.cleanup_existing_data() - - # Use transaction to ensure data consistency - with transaction.atomic(): - # Create companies with different roles - self.create_companies() - - # Create parks with proper operator relationships - self.create_parks() - - # Create rides with manufacturer and designer relationships - self.create_rides() - - # Add park areas for variety - self.create_park_areas() - - # Add sample reviews for testing - self.create_reviews() - - self.stdout.write( - self.style.SUCCESS("Successfully created comprehensive sample data!") - ) - - except Exception as e: - self.logger.error( - f"Error during sample data creation: {str(e)}", exc_info=True - ) - self.stdout.write( - self.style.ERROR(f"Failed to create sample data: {str(e)}") - ) - raise - - def check_required_tables(self): - """Check if all required tables exist in the database""" - required_models = [ - Company, - Park, - ParkArea, - ParkReview, - ParkLocation, - RideCompany, - Ride, - RideModel, - RideReview, - RollerCoasterStats, - User, - ] - - missing_tables = [] - for model in required_models: - try: - # Check if the table exists by trying to get the table name - table_name = model._meta.db_table - with connection.cursor() as cursor: - cursor.execute(f"SELECT 1 FROM {table_name} LIMIT 1") - except Exception: - missing_tables.append(model._meta.label) - - if missing_tables: - self.stdout.write( - self.style.WARNING( - f'Missing tables for models: {", ".join(missing_tables)}' - ) - ) - return False - - self.stdout.write(self.style.SUCCESS("All required tables exist.")) - return True - - def create_companies(self): - """Create companies with different roles (operators, manufacturers, designers)""" - self.stdout.write("Creating companies...") - - try: - # Park Operators - operators_data = [ - { - "name": "The Walt Disney Company", - "roles": ["OPERATOR"], - "website": "https://www.disney.com/", - "description": "World's largest entertainment company and theme park operator.", - "founded_year": 1923, - }, - { - "name": "Universal Parks & Resorts", - "roles": ["OPERATOR"], - "website": "https://www.universalparks.com/", - "description": "Division of Comcast NBCUniversal, operating major theme parks worldwide.", - "founded_year": 1964, - }, - { - "name": "Six Flags Entertainment Corporation", - "roles": ["OPERATOR"], - "website": "https://www.sixflags.com/", - "description": "World's largest regional theme park company.", - "founded_year": 1961, - }, - { - "name": "Cedar Fair Entertainment Company", - "roles": ["OPERATOR"], - "website": "https://www.cedarfair.com/", - "description": "One of North America's largest operators of regional amusement parks.", - "founded_year": 1983, - }, - { - "name": "Herschend Family Entertainment", - "roles": ["OPERATOR"], - "website": "https://www.hfecorp.com/", - "description": "Largest family-owned themed attractions corporation in the United States.", - "founded_year": 1950, - }, - { - "name": "Europa-Park GmbH & Co. Mack KG", - "roles": ["OPERATOR"], - "website": "https://www.europapark.de/", - "description": "One of Europe's largest theme parks, located in Germany.", - "founded_year": 1975, - }, - ] - - # Ride Manufacturers - manufacturers_data = [ - { - "name": "Bolliger & Mabillard", - "roles": ["MANUFACTURER"], - "website": "https://www.bolliger-mabillard.com/", - "description": "Swiss roller coaster manufacturer known for inverted and hyper coasters.", - "founded_date": "1988-01-01", - }, - { - "name": "Intamin Amusement Rides", - "roles": ["MANUFACTURER"], - "website": "https://www.intamin.com/", - "description": "Liechtenstein-based manufacturer of roller coasters and thrill rides.", - "founded_date": "1967-01-01", - }, - { - "name": "Vekoma Rides Manufacturing", - "roles": ["MANUFACTURER"], - "website": "https://www.vekoma.com/", - "description": "Dutch manufacturer specializing in family and steel roller coasters.", - "founded_date": "1926-01-01", - }, - { - "name": "Arrow Dynamics", - "roles": ["MANUFACTURER"], - "website": "https://www.arrowdynamics.com/", - "description": "American manufacturer known for corkscrew and looping coasters.", - "founded_date": "1946-01-01", - }, - { - "name": "Rocky Mountain Construction", - "roles": ["MANUFACTURER"], - "website": "https://www.rockymtnconstruction.com/", - "description": "American manufacturer known for I-Box track and wooden coasters.", - "founded_date": "2001-01-01", - }, - { - "name": "Mack Rides GmbH & Co KG", - "roles": ["MANUFACTURER"], - "website": "https://www.mack-rides.com/", - "description": "German manufacturer of roller coasters and water rides.", - "founded_date": "1780-01-01", - }, - ] - - # Ride Designers - designers_data = [ - { - "name": "Werner Stengel", - "roles": ["DESIGNER"], - "website": "", - "description": "German roller coaster designer known for complex layouts and inversions.", - }, - { - "name": "Alan Schilke", - "roles": ["DESIGNER"], - "website": "", - "description": "American roller coaster designer known for family-friendly coasters.", - }, - { - "name": "John Pierce", - "roles": ["DESIGNER"], - "website": "", - "description": "American roller coaster designer and engineer.", - }, - { - "name": "The Gravity Group", - "roles": ["DESIGNER"], - "website": "https://www.thegravitygroup.com/", - "description": "American design firm specializing in roller coaster design.", - }, - ] - - # Create companies in parks app (for operators and property owners) - self.park_companies = {} - for data in operators_data: - try: - company, created = Company.objects.get_or_create( - name=data["name"], - defaults={ - "roles": data["roles"], - "website": data["website"], - "description": data["description"], - "founded_year": data["founded_year"], - }, - ) - self.park_companies[data["name"]] = company - self.stdout.write( - f' { - "Created" if created else "Found"} park company: { - company.name}' - ) - except Exception as e: - self.logger.error( - f'Error creating park company {data["name"]}: {str(e)}' - ) - raise - - # Create companies in rides app (for manufacturers and designers) - self.ride_companies = {} - for data in manufacturers_data + designers_data: - try: - company, created = RideCompany.objects.get_or_create( - name=data["name"], - defaults={ - "roles": data["roles"], - "website": data["website"], - "description": data["description"], - "founded_date": data.get("founded_date"), - }, - ) - self.ride_companies[data["name"]] = company - self.stdout.write( - f' { - "Created" if created else "Found"} ride company: { - company.name}' - ) - except Exception as e: - self.logger.error( - f'Error creating ride company {data["name"]}: {str(e)}' - ) - raise - - except Exception as e: - self.logger.error(f"Error in create_companies: {str(e)}") - raise - - def create_parks(self): - """Create parks with proper operator relationships""" - self.stdout.write("Creating parks...") - - try: - parks_data = [ - { - "name": "Magic Kingdom", - "operator": "The Walt Disney Company", - "property_owner": "The Walt Disney Company", - "description": "The first theme park at Walt Disney World Resort in Florida, opened in 1971.", - "opening_date": "1971-10-01", - "size_acres": 142, - "website": "https://disneyworld.disney.go.com/destinations/magic-kingdom/", - "location": { - "street_address": "1180 Seven Seas Dr", - "city": "Lake Buena Vista", - "state": "Florida", - "country": "United States", - "postal_code": "32830", - "latitude": 28.4177, - "longitude": -81.5812, - }, - }, - { - "name": "Universal Studios Florida", - "operator": "Universal Parks & Resorts", - "property_owner": "Universal Parks & Resorts", - "description": "Movie and television-based theme park in Orlando, Florida.", - "opening_date": "1990-06-07", - "size_acres": 108, - "website": "https://www.universalorlando.com/web/en/us/theme-parks/universal-studios-florida", - "location": { - "street_address": "6000 Universal Blvd", - "city": "Orlando", - "state": "Florida", - "country": "United States", - "postal_code": "32819", - "latitude": 28.4749, - "longitude": -81.4687, - }, - }, - { - "name": "Cedar Point", - "operator": "Cedar Fair Entertainment Company", - "property_owner": "Cedar Fair Entertainment Company", - "description": 'Known as the "Roller Coaster Capital of the World".', - "opening_date": "1870-06-01", - "size_acres": 364, - "website": "https://www.cedarpoint.com/", - "location": { - "street_address": "1 Cedar Point Dr", - "city": "Sandusky", - "state": "Ohio", - "country": "United States", - "postal_code": "44870", - "latitude": 41.4822, - "longitude": -82.6835, - }, - }, - { - "name": "Europa-Park", - "operator": "Europa-Park GmbH & Co. Mack KG", - "property_owner": "Europa-Park GmbH & Co. Mack KG", - "description": "One of Europe's largest theme parks, located in Germany.", - "opening_date": "1975-07-12", - "size_acres": 235, - "website": "https://www.europapark.de/", - "location": { - "street_address": "Europa-Park-Straße 2", - "city": "Rust", - "state": "Baden-Württemberg", - "country": "Germany", - "postal_code": "77977", - "latitude": 48.2667, - "longitude": 7.7167, - }, - }, - { - "name": "Six Flags Magic Mountain", - "operator": "Six Flags Entertainment Corporation", - "property_owner": "Six Flags Entertainment Corporation", - "description": "Known for its world-record 19 roller coasters.", - "opening_date": "1971-05-29", - "size_acres": 262, - "website": "https://www.sixflags.com/magicmountain", - "location": { - "street_address": "26101 Magic Mountain Pkwy", - "city": "Valencia", - "state": "California", - "country": "United States", - "postal_code": "91355", - "latitude": 34.4253, - "longitude": -118.5971, - }, - }, - { - "name": "Silver Dollar City", - "operator": "Herschend Family Entertainment", - "property_owner": "Herschend Family Entertainment", - "description": "An 1880s-themed park featuring over 40 rides and attractions.", - "opening_date": "1960-05-01", - "size_acres": 61, - "website": "https://www.silverdollarcity.com/", - "location": { - "street_address": "399 Silver Dollar City Parkway", - "city": "Branson", - "state": "Missouri", - "country": "United States", - "postal_code": "65616", - "latitude": 36.668497, - "longitude": -93.339074, - }, - }, - ] - - self.parks = {} - for park_data in parks_data: - try: - operator = self.park_companies[park_data["operator"]] - property_owner = ( - self.park_companies.get(park_data["property_owner"]) - if park_data["property_owner"] - else None - ) - - park, created = Park.objects.get_or_create( - name=park_data["name"], - defaults={ - "description": park_data["description"], - "status": "OPERATING", - "opening_date": park_data["opening_date"], - "size_acres": park_data["size_acres"], - "website": park_data["website"], - "operator": operator, - "property_owner": property_owner, - }, - ) - self.parks[park_data["name"]] = park - self.stdout.write( - f' { - "Created" if created else "Found"} park: { - park.name}' - ) - - # Create location for park - if created: - try: - loc_data = park_data["location"] - park_location = ParkLocation.objects.create( - park=park, - street_address=loc_data["street_address"], - city=loc_data["city"], - state=loc_data["state"], - country=loc_data["country"], - postal_code=loc_data["postal_code"], - ) - # Set coordinates using the helper method - park_location.set_coordinates( - loc_data["latitude"], loc_data["longitude"] - ) - park_location.save() - except Exception as e: - self.logger.error( - f'Error creating location for park { - park_data["name"]}: { - str(e)}' - ) - raise - - except Exception as e: - self.logger.error( - f'Error creating park {park_data["name"]}: {str(e)}' - ) - raise - - except Exception as e: - self.logger.error(f"Error in create_parks: {str(e)}") - raise - - def create_rides(self): - """Create rides with manufacturer and designer relationships""" - self.stdout.write("Creating rides...") - - try: - # First create some ride models - ride_models_data = [ - { - "name": "Dive Coaster", - "manufacturer": "Bolliger & Mabillard", - "category": "RC", - "description": "Inverted roller coaster with a vertical drop and non-inverting loop", - }, - { - "name": "Hyper Coaster", - "manufacturer": "Bolliger & Mabillard", - "category": "RC", - "description": "Steel roller coaster with heights over 200 feet", - }, - { - "name": "Boomerang", - "manufacturer": "Vekoma Rides Manufacturing", - "category": "RC", - "description": "Shuttle roller coaster that runs forward and backward", - }, - { - "name": "Corkscrew Coaster", - "manufacturer": "Arrow Dynamics", - "category": "RC", - "description": "Early steel coaster design with corkscrew elements", - }, - { - "name": "I-Box Track", - "manufacturer": "Rocky Mountain Construction", - "category": "RC", - "description": "Smooth-riding steel track system for wooden coasters", - }, - { - "name": "Powered Coaster", - "manufacturer": "Mack Rides GmbH & Co KG", - "category": "RC", - "description": "Family-friendly steel roller coaster", - }, - ] - - self.ride_models = {} - for model_data in ride_models_data: - try: - manufacturer = self.ride_companies.get(model_data["manufacturer"]) - model, created = RideModel.objects.get_or_create( - name=model_data["name"], - manufacturer=manufacturer, - defaults={ - "description": model_data["description"], - "category": model_data["category"], - }, - ) - self.ride_models[model_data["name"]] = model - self.stdout.write( - f' { - "Created" if created else "Found"} ride model: { - model.name}' - ) - except Exception as e: - self.logger.error( - f'Error creating ride model { - model_data["name"]}: { - str(e)}' - ) - raise - - # Create rides - rides_data = [ - { - "name": "Millennium Force", - "park": "Cedar Point", - "manufacturer": "Bolliger & Mabillard", - "designer": "Werner Stengel", - "ride_model": "Hyper Coaster", - "category": "RC", - "description": "World's first hyper coaster reaching speeds of 93 mph.", - "opening_date": "2000-05-13", - "coaster_stats": { - "height_ft": 310, - "length_ft": 6595, - "speed_mph": 93, - "inversions": 0, - "ride_time_seconds": 165, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 300, - "launch_type": "CHAIN", - "trains_count": 3, - "cars_per_train": 9, - "seats_per_car": 4, - }, - }, - { - "name": "Top Thrill Dragster", - "park": "Cedar Point", - "manufacturer": "Intamin Amusement Rides", - "designer": "Werner Stengel", - "category": "RC", - "description": "World's first strata coaster reaching 420 feet.", - "opening_date": "2003-05-04", - "coaster_stats": { - "height_ft": 420, - "length_ft": 2800, - "speed_mph": 120, - "inversions": 0, - "ride_time_seconds": 17, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 400, - "launch_type": "HYDRAULIC", - "trains_count": 1, - "cars_per_train": 1, - "seats_per_car": 16, - }, - }, - { - "name": "Silver Star", - "park": "Europa-Park", - "manufacturer": "Bolliger & Mabillard", - "designer": "Werner Stengel", - "ride_model": "Dive Coaster", - "category": "RC", - "description": "Europe's first dive coaster with a 300-foot drop.", - "opening_date": "2002-03-23", - "coaster_stats": { - "height_ft": 239, - "length_ft": 5249, - "speed_mph": 80, - "inversions": 0, - "ride_time_seconds": 240, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 197, - "launch_type": "CHAIN", - "trains_count": 2, - "cars_per_train": 10, - "seats_per_car": 2, - }, - }, - { - "name": "Blue Fire", - "park": "Europa-Park", - "manufacturer": "Mack Rides GmbH & Co KG", - "designer": "John Pierce", - "ride_model": "Powered Coaster", - "category": "RC", - "description": "Launched roller coaster with a 124-foot drop.", - "opening_date": "2009-04-25", - "coaster_stats": { - "height_ft": 124, - "length_ft": 2789, - "speed_mph": 62, - "inversions": 0, - "ride_time_seconds": 120, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 98, - "launch_type": "HYDRAULIC", - "trains_count": 2, - "cars_per_train": 5, - "seats_per_car": 4, - }, - }, - { - "name": "Space Mountain", - "park": "Magic Kingdom", - "manufacturer": "Arrow Dynamics", - "designer": "John Pierce", - "category": "RC", - "description": "Indoor space-themed roller coaster.", - "opening_date": "1975-01-15", - "coaster_stats": { - "height_ft": 183, - "length_ft": 3200, - "speed_mph": 35, - "inversions": 0, - "ride_time_seconds": 180, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 150, - "launch_type": "CHAIN", - "trains_count": 2, - "cars_per_train": 6, - "seats_per_car": 2, - }, - }, - { - "name": "Big Thunder Mountain Railroad", - "park": "Magic Kingdom", - "manufacturer": "Arrow Dynamics", - "designer": "The Gravity Group", - "category": "RC", - "description": "Mine train roller coaster themed as a runaway mining train.", - "opening_date": "1980-11-15", - "coaster_stats": { - "height_ft": 146, - "length_ft": 3280, - "speed_mph": 35, - "inversions": 0, - "ride_time_seconds": 240, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 128, - "launch_type": "CHAIN", - "trains_count": 3, - "cars_per_train": 5, - "seats_per_car": 4, - }, - }, - { - "name": "Maverick", - "park": "Cedar Point", - "manufacturer": "Intamin Amusement Rides", - "designer": "Werner Stengel", - "category": "RC", - "description": "Wild mouse coaster with a 100-foot drop.", - "opening_date": "2007-05-26", - "coaster_stats": { - "height_ft": 105, - "length_ft": 4450, - "speed_mph": 70, - "inversions": 0, - "ride_time_seconds": 180, - "track_material": "STEEL", - "roller_coaster_type": "WILD_MOUSE", - "max_drop_height_ft": 100, - "launch_type": "CHAIN", - "trains_count": 2, - "cars_per_train": 4, - "seats_per_car": 4, - }, - }, - { - "name": "Time Traveler", - "park": "Silver Dollar City", - "manufacturer": "Rocky Mountain Construction", - "designer": "Alan Schilke", - "ride_model": "I-Box Track", - "category": "RC", - "description": "Wooden coaster with steel I-Box track for smooth riding.", - "opening_date": "2018-04-28", - "coaster_stats": { - "height_ft": 165, - "length_ft": 5832, - "speed_mph": 72, - "inversions": 0, - "ride_time_seconds": 240, - "track_material": "HYBRID", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 155, - "launch_type": "CHAIN", - "trains_count": 2, - "cars_per_train": 6, - "seats_per_car": 2, - }, - }, - ] - - self.rides = {} - for ride_data in rides_data: - try: - park = self.parks[ride_data["park"]] - manufacturer = self.ride_companies.get( - ride_data.get("manufacturer") - ) - designer = self.ride_companies.get(ride_data.get("designer")) - ride_model = self.ride_models.get(ride_data.get("ride_model")) - - ride, created = Ride.objects.get_or_create( - name=ride_data["name"], - park=park, - defaults={ - "description": ride_data["description"], - "category": ride_data["category"], - "status": "OPERATING", - "opening_date": ride_data["opening_date"], - "manufacturer": manufacturer, - "designer": designer, - "ride_model": ride_model, - }, - ) - self.rides[ride_data["name"]] = ride - self.stdout.write( - f' { - "Created" if created else "Found"} ride: { - ride.name}' - ) - - # Create roller coaster stats if provided - if created and "coaster_stats" in ride_data: - try: - stats_data = ride_data["coaster_stats"] - RollerCoasterStats.objects.create(ride=ride, **stats_data) - except Exception as e: - self.logger.error( - f'Error creating stats for ride { - ride_data["name"]}: { - str(e)}' - ) - raise - - except Exception as e: - self.logger.error( - f'Error creating ride {ride_data["name"]}: {str(e)}' - ) - raise - - except Exception as e: - self.logger.error(f"Error in create_rides: {str(e)}") - raise - - def create_park_areas(self): - """Add park areas for variety""" - self.stdout.write("Creating park areas...") - - try: - areas_data = [ - { - "park": "Magic Kingdom", - "areas": [ - { - "name": "Main Street, U.S.A.", - "description": "Victorian-era themed entrance corridor", - }, - { - "name": "Adventureland", - "description": "Exotic tropical places themed area", - }, - { - "name": "Frontierland", - "description": "American Old West themed area", - }, - { - "name": "Liberty Square", - "description": "Colonial America themed area", - }, - { - "name": "Fantasyland", - "description": "Fairy tale themed area", - }, - { - "name": "Tomorrowland", - "description": "Future themed area", - }, - ], - }, - { - "park": "Universal Studios Florida", - "areas": [ - { - "name": "Production Central", - "description": "Main entrance area with movie-themed attractions", - }, - { - "name": "New York", - "description": "Themed after New York City streets", - }, - { - "name": "San Francisco", - "description": "Themed after San Francisco's waterfront", - }, - { - "name": "The Wizarding World of Harry Potter - Diagon Alley", - "description": "Themed after the Harry Potter series", - }, - { - "name": "Springfield", - "description": "Themed after The Simpsons hometown", - }, - ], - }, - { - "park": "Cedar Point", - "areas": [ - { - "name": "Frontiertown", - "description": "Western-themed area with multiple roller coasters", - }, - { - "name": "Millennium Island", - "description": "Home to the Millennium Force roller coaster", - }, - { - "name": "Cedar Point Shores", - "description": "Waterpark area", - }, - { - "name": "Top Thrill Dragster", - "description": "Area surrounding the iconic launched coaster", - }, - ], - }, - { - "park": "Europa-Park", - "areas": [ - { - "name": "Germany", - "description": "German-themed area", - }, - { - "name": "France", - "description": "French-themed area", - }, - { - "name": "England", - "description": "English-themed area", - }, - { - "name": "Italy", - "description": "Italian-themed area", - }, - { - "name": "Spain", - "description": "Spanish-themed area", - }, - { - "name": "Portugal", - "description": "Portuguese-themed area", - }, - ], - }, - ] - - for area_group in areas_data: - try: - park = self.parks[area_group["park"]] - for area_data in area_group["areas"]: - area, created = ParkArea.objects.get_or_create( - name=area_data["name"], - park=park, - defaults={ - "description": area_data["description"], - "opening_date": park.opening_date, - }, - ) - self.stdout.write( - f' { - "Created" if created else "Found"} area: { - area.name} in { - park.name}' - ) - except Exception as e: - self.logger.error( - f'Error creating areas for park { - area_group["park"]}: { - str(e)}' - ) - raise - - except Exception as e: - self.logger.error(f"Error in create_park_areas: {str(e)}") - raise - - def create_reviews(self): - """Add sample reviews for testing""" - self.stdout.write("Creating sample reviews...") - - try: - # Create a test user if none exists - test_user, created = User.objects.get_or_create( - username="testuser", - defaults={ - "email": "test@example.com", - "first_name": "Test", - "last_name": "User", - }, - ) - if created: - test_user.set_password("testpass123") - test_user.save() - - # Park reviews - park_reviews_data = [ - { - "park": "Cedar Point", - "rating": 10, - "title": "Best roller coaster park in the world!", - "content": "Cedar Point is absolutely incredible. The Millennium Force is a must-ride. The park is clean, well-maintained, and the staff is friendly. Highly recommend!", - "visit_date": "2023-08-15", - }, - { - "park": "Magic Kingdom", - "rating": 9, - "title": "Magical experience for all ages", - "content": "Disney does it again with Magic Kingdom. The attention to detail is amazing and the shows are spectacular. Space Mountain is a classic.", - "visit_date": "2023-07-20", - }, - { - "park": "Europa-Park", - "rating": 9, - "title": "Europe's best theme park", - "content": "Europa-Park is fantastic! The theming is incredible and the rides are world-class. Silver Star is absolutely breathtaking.", - "visit_date": "2023-06-10", - }, - { - "park": "Universal Studios Florida", - "rating": 8, - "title": "Great movie-themed attractions", - "content": "Universal has some amazing rides, especially in the Harry Potter area. The theming is top-notch and the shows are entertaining.", - "visit_date": "2023-05-05", - }, - ] - - for review_data in park_reviews_data: - try: - park = self.parks[review_data["park"]] - review, created = ParkReview.objects.get_or_create( - park=park, - user=test_user, - defaults={ - "rating": review_data["rating"], - "title": review_data["title"], - "content": review_data["content"], - "visit_date": review_data["visit_date"], - "is_published": True, - }, - ) - self.stdout.write( - f' { - "Created" if created else "Found"} park review: { - review.title}' - ) - except Exception as e: - self.logger.error( - f'Error creating park review for { - review_data["park"]}: { - str(e)}' - ) - raise - - # Ride reviews - ride_reviews_data = [ - { - "ride": "Millennium Force", - "rating": 10, - "title": "The king of roller coasters!", - "content": "Absolutely incredible ride! The first drop is breathtaking and the speed is unreal. A must-experience for any coaster enthusiast.", - "visit_date": "2023-08-15", - }, - { - "ride": "Top Thrill Dragster", - "rating": 9, - "title": "Incredible launch and height", - "content": "The launch is intense and reaching the top of the 420-foot tower is amazing. The view from the top is spectacular!", - "visit_date": "2023-08-16", - }, - { - "ride": "Silver Star", - "rating": 10, - "title": "Best dive coaster in Europe", - "content": "The dive drop is incredible! The theming around the ride is beautiful and the overall experience is fantastic.", - "visit_date": "2023-06-10", - }, - { - "ride": "Space Mountain", - "rating": 8, - "title": "Classic Disney coaster", - "content": "A classic that never gets old. The indoor setting and space theme make it unique. Great for all ages.", - "visit_date": "2023-07-20", - }, - ] - - for review_data in ride_reviews_data: - try: - ride = self.rides[review_data["ride"]] - review, created = RideReview.objects.get_or_create( - ride=ride, - user=test_user, - defaults={ - "rating": review_data["rating"], - "title": review_data["title"], - "content": review_data["content"], - "visit_date": review_data["visit_date"], - "is_published": True, - }, - ) - self.stdout.write( - f' { - "Created" if created else "Found"} ride review: { - review.title}' - ) - except Exception as e: - self.logger.error( - f'Error creating ride review for { - review_data["ride"]}: { - str(e)}' - ) - raise - - self.stdout.write(self.style.SUCCESS("Sample data creation completed!")) - - except Exception as e: - self.logger.error(f"Error in create_reviews: {str(e)}") - raise diff --git a/parks/management/commands/test_location.py b/parks/management/commands/test_location.py deleted file mode 100644 index 426dda78..00000000 --- a/parks/management/commands/test_location.py +++ /dev/null @@ -1,124 +0,0 @@ -from django.core.management.base import BaseCommand -from parks.models import Park, ParkLocation, Company - - -class Command(BaseCommand): - help = "Test ParkLocation model functionality" - - def handle(self, *args, **options): - self.stdout.write("🧪 Testing ParkLocation Model Functionality") - self.stdout.write("=" * 50) - - # Create a test company (operator) - operator, created = Company.objects.get_or_create( - name="Test Theme Parks Inc", - defaults={"slug": "test-theme-parks-inc", "roles": ["OPERATOR"]}, - ) - self.stdout.write(f"✅ Created operator: {operator.name}") - - # Create a test park - park, created = Park.objects.get_or_create( - name="Test Magic Kingdom", - defaults={ - "slug": "test-magic-kingdom", - "description": "A test theme park for location testing", - "operator": operator, - }, - ) - self.stdout.write(f"✅ Created park: {park.name}") - - # Create a park location - location, created = ParkLocation.objects.get_or_create( - park=park, - defaults={ - "street_address": "1313 Disneyland Dr", - "city": "Anaheim", - "state": "California", - "country": "USA", - "postal_code": "92802", - "highway_exit": "I-5 Exit 110B", - "parking_notes": "Large parking structure available", - "seasonal_notes": "Open year-round", - }, - ) - self.stdout.write(f"✅ Created location: {location}") - - # Test coordinate setting - self.stdout.write("\n🔍 Testing coordinate functionality:") - location.set_coordinates(33.8121, -117.9190) # Disneyland coordinates - location.save() - - self.stdout.write(f" Latitude: {location.latitude}") - self.stdout.write(f" Longitude: {location.longitude}") - self.stdout.write(f" Coordinates: {location.coordinates}") - self.stdout.write(f" Formatted Address: {location.formatted_address}") - - # Test Park model integration - self.stdout.write("\n🔍 Testing Park model integration:") - self.stdout.write( - f" Park formatted location: { - park.formatted_location}" - ) - self.stdout.write(f" Park coordinates: {park.coordinates}") - - # Create another location for distance testing - operator2, created = Company.objects.get_or_create( - name="Six Flags Entertainment", - defaults={ - "slug": "six-flags-entertainment", - "roles": ["OPERATOR"], - }, - ) - - park2, created = Park.objects.get_or_create( - name="Six Flags Magic Mountain", - defaults={ - "slug": "six-flags-magic-mountain", - "description": "Another test theme park", - "operator": operator2, - }, - ) - - location2, created = ParkLocation.objects.get_or_create( - park=park2, - defaults={ - "city": "Valencia", - "state": "California", - "country": "USA", - }, - ) - location2.set_coordinates( - 34.4244, -118.5971 - ) # Six Flags Magic Mountain coordinates - location2.save() - - # Test distance calculation - self.stdout.write("\n🔍 Testing distance calculation:") - distance = location.distance_to(location2) - if distance: - self.stdout.write(f" Distance between parks: {distance:.2f} km") - else: - self.stdout.write(" ❌ Distance calculation failed") - - # Test spatial indexing - self.stdout.write("\n🔍 Testing spatial queries:") - try: - from django.contrib.gis.measure import D - from django.contrib.gis.geos import Point - - # Find parks within 100km of a point - # Same as Disneyland - search_point = Point(-117.9190, 33.8121, srid=4326) - nearby_locations = ParkLocation.objects.filter( - point__distance_lte=(search_point, D(km=100)) - ) - self.stdout.write( - f" Found { - nearby_locations.count()} parks within 100km" - ) - for loc in nearby_locations: - self.stdout.write(f" - {loc.park.name} in {loc.city}, {loc.state}") - except Exception as e: - self.stdout.write(f" ⚠️ Spatial queries not fully functional: {e}") - - self.stdout.write("\n✅ ParkLocation model tests completed successfully!") diff --git a/parks/management/commands/update_park_counts.py b/parks/management/commands/update_park_counts.py deleted file mode 100644 index 03ab366d..00000000 --- a/parks/management/commands/update_park_counts.py +++ /dev/null @@ -1,29 +0,0 @@ -from django.core.management.base import BaseCommand -from django.db.models import Q -from parks.models import Park - - -class Command(BaseCommand): - help = "Update total_rides and total_roller_coasters counts for all parks" - - def handle(self, *args, **options): - parks = Park.objects.all() - operating_rides = Q(status="OPERATING") - updated = 0 - - for park in parks: - # Count total operating rides - total_rides = park.rides.filter(operating_rides).count() - - # Count total operating roller coasters - total_coasters = park.rides.filter(operating_rides, category="RC").count() - - # Update park counts - Park.objects.filter(id=park.id).update( - total_rides=total_rides, total_roller_coasters=total_coasters - ) - updated += 1 - - self.stdout.write( - self.style.SUCCESS(f"Successfully updated counts for {updated} parks") - ) diff --git a/parks/managers.py b/parks/managers.py deleted file mode 100644 index f31c3026..00000000 --- a/parks/managers.py +++ /dev/null @@ -1,302 +0,0 @@ -""" -Custom managers and QuerySets for Parks models. -Optimized queries following Django styleguide patterns. -""" - -from django.db.models import Q, Count, Avg, Max, Min, Prefetch - -from core.managers import ( - BaseQuerySet, - BaseManager, - LocationQuerySet, - LocationManager, - ReviewableQuerySet, - ReviewableManager, - StatusQuerySet, - StatusManager, -) - - -class ParkQuerySet(StatusQuerySet, ReviewableQuerySet, LocationQuerySet): - """Optimized QuerySet for Park model.""" - - def with_complete_stats(self): - """Add comprehensive park statistics.""" - return self.annotate( - ride_count_calculated=Count("rides", distinct=True), - coaster_count_calculated=Count( - "rides", - filter=Q(rides__category__in=["RC", "WC"]), - distinct=True, - ), - area_count=Count("areas", distinct=True), - review_count=Count( - "reviews", filter=Q(reviews__is_published=True), distinct=True - ), - average_rating_calculated=Avg( - "reviews__rating", filter=Q(reviews__is_published=True) - ), - latest_ride_opening=Max("rides__opening_date"), - oldest_ride_opening=Min("rides__opening_date"), - ) - - def optimized_for_list(self): - """Optimize for park list display.""" - return ( - self.select_related("operator", "property_owner") - .prefetch_related("location") - .with_complete_stats() - ) - - def optimized_for_detail(self): - """Optimize for park detail display.""" - from rides.models import Ride - from .models import ParkReview - - return self.select_related("operator", "property_owner").prefetch_related( - "location", - "areas", - Prefetch( - "rides", - queryset=Ride.objects.select_related( - "manufacturer", "designer", "ride_model", "park_area" - ).order_by("name"), - ), - Prefetch( - "reviews", - queryset=ParkReview.objects.select_related("user") - .filter(is_published=True) - .order_by("-created_at")[:10], - ), - "photos", - ) - - def by_operator(self, *, operator_id: int): - """Filter parks by operator.""" - return self.filter(operator_id=operator_id) - - def by_property_owner(self, *, owner_id: int): - """Filter parks by property owner.""" - return self.filter(property_owner_id=owner_id) - - def with_minimum_coasters(self, *, min_coasters: int = 5): - """Filter parks with minimum number of coasters.""" - return self.with_complete_stats().filter( - coaster_count_calculated__gte=min_coasters - ) - - def large_parks(self, *, min_acres: float = 100.0): - """Filter for large parks.""" - return self.filter(size_acres__gte=min_acres) - - def seasonal_parks(self): - """Filter for parks with seasonal operation.""" - return self.exclude(operating_season__exact="") - - def for_map_display(self, *, bounds=None): - """Optimize for map display with minimal data.""" - queryset = self.select_related("operator").prefetch_related("location") - - if bounds: - queryset = queryset.within_bounds( - north=bounds.north, - south=bounds.south, - east=bounds.east, - west=bounds.west, - ) - - return queryset.values( - "id", - "name", - "slug", - "status", - "location__latitude", - "location__longitude", - "location__city", - "location__state", - "location__country", - "operator__name", - ) - - def search_autocomplete(self, *, query: str, limit: int = 10): - """Optimized search for autocomplete.""" - return ( - self.filter( - Q(name__icontains=query) - | Q(location__city__icontains=query) - | Q(location__state__icontains=query) - ) - .select_related("operator", "location") - .values( - "id", - "name", - "slug", - "location__city", - "location__state", - "operator__name", - )[:limit] - ) - - -class ParkManager(StatusManager, ReviewableManager, LocationManager): - """Custom manager for Park model.""" - - def get_queryset(self): - return ParkQuerySet(self.model, using=self._db) - - def with_complete_stats(self): - return self.get_queryset().with_complete_stats() - - def optimized_for_list(self): - return self.get_queryset().optimized_for_list() - - def optimized_for_detail(self): - return self.get_queryset().optimized_for_detail() - - def by_operator(self, *, operator_id: int): - return self.get_queryset().by_operator(operator_id=operator_id) - - def large_parks(self, *, min_acres: float = 100.0): - return self.get_queryset().large_parks(min_acres=min_acres) - - def for_map_display(self, *, bounds=None): - return self.get_queryset().for_map_display(bounds=bounds) - - -class ParkAreaQuerySet(BaseQuerySet): - """QuerySet for ParkArea model.""" - - def with_ride_counts(self): - """Add ride count annotations.""" - return self.annotate( - ride_count=Count("rides", distinct=True), - coaster_count=Count( - "rides", - filter=Q(rides__category__in=["RC", "WC"]), - distinct=True, - ), - ) - - def optimized_for_list(self): - """Optimize for area list display.""" - return self.select_related("park").with_ride_counts() - - def by_park(self, *, park_id: int): - """Filter areas by park.""" - return self.filter(park_id=park_id) - - def with_rides(self): - """Filter areas that have rides.""" - return self.filter(rides__isnull=False).distinct() - - -class ParkAreaManager(BaseManager): - """Manager for ParkArea model.""" - - def get_queryset(self): - return ParkAreaQuerySet(self.model, using=self._db) - - def with_ride_counts(self): - return self.get_queryset().with_ride_counts() - - def by_park(self, *, park_id: int): - return self.get_queryset().by_park(park_id=park_id) - - -class ParkReviewQuerySet(ReviewableQuerySet): - """QuerySet for ParkReview model.""" - - def for_park(self, *, park_id: int): - """Filter reviews for a specific park.""" - return self.filter(park_id=park_id) - - def by_user(self, *, user_id: int): - """Filter reviews by user.""" - return self.filter(user_id=user_id) - - def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10): - """Filter reviews by rating range.""" - return self.filter(rating__gte=min_rating, rating__lte=max_rating) - - def optimized_for_display(self): - """Optimize for review display.""" - return self.select_related("user", "park", "moderated_by") - - def recent_reviews(self, *, days: int = 30): - """Get recent reviews.""" - return self.recent(days=days) - - def moderation_required(self): - """Filter reviews requiring moderation.""" - return self.filter(Q(is_published=False) | Q(moderated_at__isnull=True)) - - -class ParkReviewManager(BaseManager): - """Manager for ParkReview model.""" - - def get_queryset(self): - return ParkReviewQuerySet(self.model, using=self._db) - - def for_park(self, *, park_id: int): - return self.get_queryset().for_park(park_id=park_id) - - def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10): - return self.get_queryset().by_rating_range( - min_rating=min_rating, max_rating=max_rating - ) - - def moderation_required(self): - return self.get_queryset().moderation_required() - - -class CompanyQuerySet(BaseQuerySet): - """QuerySet for Company model.""" - - def operators(self): - """Filter for companies that operate parks.""" - return self.filter(roles__contains=["OPERATOR"]) - - def property_owners(self): - """Filter for companies that own park properties.""" - return self.filter(roles__contains=["PROPERTY_OWNER"]) - - def manufacturers(self): - """Filter for ride manufacturers.""" - return self.filter(roles__contains=["MANUFACTURER"]) - - def with_park_counts(self): - """Add park count annotations.""" - return self.annotate( - operated_parks_count=Count("operated_parks", distinct=True), - owned_parks_count=Count("owned_parks", distinct=True), - total_parks_involvement=Count("operated_parks", distinct=True) - + Count("owned_parks", distinct=True), - ) - - def major_operators(self, *, min_parks: int = 5): - """Filter for major park operators.""" - return ( - self.operators() - .with_park_counts() - .filter(operated_parks_count__gte=min_parks) - ) - - def optimized_for_list(self): - """Optimize for company list display.""" - return self.with_park_counts() - - -class CompanyManager(BaseManager): - """Manager for Company model.""" - - def get_queryset(self): - return CompanyQuerySet(self.model, using=self._db) - - def operators(self): - return self.get_queryset().operators() - - def manufacturers(self): - return self.get_queryset().manufacturers() - - def major_operators(self, *, min_parks: int = 5): - return self.get_queryset().major_operators(min_parks=min_parks) diff --git a/parks/migrations/0001_add_filter_indexes.py b/parks/migrations/0001_add_filter_indexes.py deleted file mode 100644 index 36d02ac7..00000000 --- a/parks/migrations/0001_add_filter_indexes.py +++ /dev/null @@ -1,62 +0,0 @@ -# Generated manually for enhanced filtering performance - -from django.db import migrations, models - - -class Migration(migrations.Migration): - atomic = False # Required for CREATE INDEX CONCURRENTLY - - dependencies = [ - ('parks', '0001_initial'), # Adjust this to the latest migration - ] - - operations = [ - # Add indexes for commonly filtered fields - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_status_idx ON parks_park (status);", - reverse_sql="DROP INDEX IF EXISTS parks_park_status_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_operator_id_idx ON parks_park (operator_id);", - reverse_sql="DROP INDEX IF EXISTS parks_park_operator_id_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_average_rating_idx ON parks_park (average_rating);", - reverse_sql="DROP INDEX IF EXISTS parks_park_average_rating_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_size_acres_idx ON parks_park (size_acres);", - reverse_sql="DROP INDEX IF EXISTS parks_park_size_acres_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_coaster_count_idx ON parks_park (coaster_count);", - reverse_sql="DROP INDEX IF EXISTS parks_park_coaster_count_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_ride_count_idx ON parks_park (ride_count);", - reverse_sql="DROP INDEX IF EXISTS parks_park_ride_count_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_updated_at_idx ON parks_park (updated_at);", - reverse_sql="DROP INDEX IF EXISTS parks_park_updated_at_idx;", - ), - # Composite indexes for common filter combinations - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_status_rating_idx ON parks_park (status, average_rating);", - reverse_sql="DROP INDEX IF EXISTS parks_park_status_rating_idx;", - ), - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_operator_status_idx ON parks_park (operator_id, status);", - reverse_sql="DROP INDEX IF EXISTS parks_park_operator_status_idx;", - ), - # Index for parks with coasters (coaster_count > 0) - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_has_coasters_idx ON parks_park (coaster_count) WHERE coaster_count > 0;", - reverse_sql="DROP INDEX IF EXISTS parks_park_has_coasters_idx;", - ), - # Index for big parks (ride_count >= 10) - migrations.RunSQL( - "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_big_parks_idx ON parks_park (ride_count) WHERE ride_count >= 10;", - reverse_sql="DROP INDEX IF EXISTS parks_park_big_parks_idx;", - ), - ] diff --git a/parks/migrations/0001_initial.py b/parks/migrations/0001_initial.py deleted file mode 100644 index 95d0849c..00000000 --- a/parks/migrations/0001_initial.py +++ /dev/null @@ -1,719 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-15 22:01 - -import django.contrib.gis.db.models.fields -import django.contrib.postgres.fields -import django.core.validators -import django.db.models.deletion -import pgtrigger.compiler -import pgtrigger.migrations -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("pghistory", "0007_auto_20250421_0444"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="Company", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(max_length=255, unique=True)), - ( - "roles", - django.contrib.postgres.fields.ArrayField( - base_field=models.CharField( - choices=[ - ("OPERATOR", "Park Operator"), - ("PROPERTY_OWNER", "Property Owner"), - ], - max_length=20, - ), - blank=True, - default=list, - size=None, - ), - ), - ("description", models.TextField(blank=True)), - ("website", models.URLField(blank=True)), - ( - "founded_year", - models.PositiveIntegerField(blank=True, null=True), - ), - ("parks_count", models.IntegerField(default=0)), - ("rides_count", models.IntegerField(default=0)), - ], - options={ - "verbose_name_plural": "Companies", - "ordering": ["name"], - }, - ), - migrations.CreateModel( - name="Park", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(max_length=255, unique=True)), - ("description", models.TextField(blank=True)), - ( - "status", - models.CharField( - choices=[ - ("OPERATING", "Operating"), - ("CLOSED_TEMP", "Temporarily Closed"), - ("CLOSED_PERM", "Permanently Closed"), - ("UNDER_CONSTRUCTION", "Under Construction"), - ("DEMOLISHED", "Demolished"), - ("RELOCATED", "Relocated"), - ], - default="OPERATING", - max_length=20, - ), - ), - ("opening_date", models.DateField(blank=True, null=True)), - ("closing_date", models.DateField(blank=True, null=True)), - ( - "operating_season", - models.CharField(blank=True, max_length=255), - ), - ( - "size_acres", - models.DecimalField( - blank=True, decimal_places=2, max_digits=10, null=True - ), - ), - ("website", models.URLField(blank=True)), - ( - "average_rating", - models.DecimalField( - blank=True, decimal_places=2, max_digits=3, null=True - ), - ), - ("ride_count", models.IntegerField(blank=True, null=True)), - ("coaster_count", models.IntegerField(blank=True, null=True)), - ( - "created_at", - models.DateTimeField(auto_now_add=True, null=True), - ), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "operator", - models.ForeignKey( - help_text="Company that operates this park", - limit_choices_to={"roles__contains": ["OPERATOR"]}, - on_delete=django.db.models.deletion.PROTECT, - related_name="operated_parks", - to="parks.company", - ), - ), - ( - "property_owner", - models.ForeignKey( - blank=True, - help_text="Company that owns the property (if different from operator)", - limit_choices_to={"roles__contains": ["PROPERTY_OWNER"]}, - null=True, - on_delete=django.db.models.deletion.PROTECT, - related_name="owned_parks", - to="parks.company", - ), - ), - ], - options={ - "ordering": ["name"], - }, - ), - migrations.CreateModel( - name="ParkArea", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(max_length=255)), - ("description", models.TextField(blank=True)), - ("opening_date", models.DateField(blank=True, null=True)), - ("closing_date", models.DateField(blank=True, null=True)), - ( - "park", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="areas", - to="parks.park", - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="ParkAreaEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(db_index=False, max_length=255)), - ("description", models.TextField(blank=True)), - ("opening_date", models.DateField(blank=True, null=True)), - ("closing_date", models.DateField(blank=True, null=True)), - ( - "park", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="parks.park", - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="parks.parkarea", - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="ParkEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(db_index=False, max_length=255)), - ("description", models.TextField(blank=True)), - ( - "status", - models.CharField( - choices=[ - ("OPERATING", "Operating"), - ("CLOSED_TEMP", "Temporarily Closed"), - ("CLOSED_PERM", "Permanently Closed"), - ("UNDER_CONSTRUCTION", "Under Construction"), - ("DEMOLISHED", "Demolished"), - ("RELOCATED", "Relocated"), - ], - default="OPERATING", - max_length=20, - ), - ), - ("opening_date", models.DateField(blank=True, null=True)), - ("closing_date", models.DateField(blank=True, null=True)), - ( - "operating_season", - models.CharField(blank=True, max_length=255), - ), - ( - "size_acres", - models.DecimalField( - blank=True, decimal_places=2, max_digits=10, null=True - ), - ), - ("website", models.URLField(blank=True)), - ( - "average_rating", - models.DecimalField( - blank=True, decimal_places=2, max_digits=3, null=True - ), - ), - ("ride_count", models.IntegerField(blank=True, null=True)), - ("coaster_count", models.IntegerField(blank=True, null=True)), - ( - "created_at", - models.DateTimeField(auto_now_add=True, null=True), - ), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "operator", - models.ForeignKey( - db_constraint=False, - help_text="Company that operates this park", - limit_choices_to={"roles__contains": ["OPERATOR"]}, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="parks.company", - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="parks.park", - ), - ), - ( - "property_owner", - models.ForeignKey( - blank=True, - db_constraint=False, - help_text="Company that owns the property (if different from operator)", - limit_choices_to={"roles__contains": ["PROPERTY_OWNER"]}, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="parks.company", - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="ParkLocation", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "point", - django.contrib.gis.db.models.fields.PointField( - blank=True, - help_text="Geographic coordinates (longitude, latitude)", - null=True, - srid=4326, - ), - ), - ( - "street_address", - models.CharField(blank=True, max_length=255), - ), - ("city", models.CharField(db_index=True, max_length=100)), - ("state", models.CharField(db_index=True, max_length=100)), - ("country", models.CharField(default="USA", max_length=100)), - ("postal_code", models.CharField(blank=True, max_length=20)), - ("highway_exit", models.CharField(blank=True, max_length=100)), - ("parking_notes", models.TextField(blank=True)), - ("best_arrival_time", models.TimeField(blank=True, null=True)), - ("seasonal_notes", models.TextField(blank=True)), - ("osm_id", models.BigIntegerField(blank=True, null=True)), - ( - "osm_type", - models.CharField( - blank=True, - help_text="Type of OpenStreetMap object (node, way, or relation)", - max_length=10, - ), - ), - ( - "park", - models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - related_name="location", - to="parks.park", - ), - ), - ], - options={ - "verbose_name": "Park Location", - "verbose_name_plural": "Park Locations", - "ordering": ["park__name"], - }, - ), - migrations.CreateModel( - name="ParkReview", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "rating", - models.PositiveSmallIntegerField( - validators=[ - django.core.validators.MinValueValidator(1), - django.core.validators.MaxValueValidator(10), - ] - ), - ), - ("title", models.CharField(max_length=200)), - ("content", models.TextField()), - ("visit_date", models.DateField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("is_published", models.BooleanField(default=True)), - ("moderation_notes", models.TextField(blank=True)), - ("moderated_at", models.DateTimeField(blank=True, null=True)), - ( - "moderated_by", - models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="moderated_park_reviews", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "park", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="reviews", - to="parks.park", - ), - ), - ( - "user", - models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="park_reviews", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "ordering": ["-created_at"], - }, - ), - migrations.CreateModel( - name="ParkReviewEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ( - "rating", - models.PositiveSmallIntegerField( - validators=[ - django.core.validators.MinValueValidator(1), - django.core.validators.MaxValueValidator(10), - ] - ), - ), - ("title", models.CharField(max_length=200)), - ("content", models.TextField()), - ("visit_date", models.DateField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("is_published", models.BooleanField(default=True)), - ("moderation_notes", models.TextField(blank=True)), - ("moderated_at", models.DateTimeField(blank=True, null=True)), - ( - "moderated_by", - models.ForeignKey( - blank=True, - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ( - "park", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="parks.park", - ), - ), - ( - "pgh_context", - models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - ( - "pgh_obj", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="parks.parkreview", - ), - ), - ( - "user", - models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="CompanyHeadquarters", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "street_address", - models.CharField( - blank=True, - help_text="Mailing address if publicly available", - max_length=255, - ), - ), - ( - "city", - models.CharField( - db_index=True, - help_text="Headquarters city", - max_length=100, - ), - ), - ( - "state_province", - models.CharField( - blank=True, - db_index=True, - help_text="State/Province/Region", - max_length=100, - ), - ), - ( - "country", - models.CharField( - db_index=True, - default="USA", - help_text="Country where headquarters is located", - max_length=100, - ), - ), - ( - "postal_code", - models.CharField( - blank=True, - help_text="ZIP or postal code", - max_length=20, - ), - ), - ( - "mailing_address", - models.TextField( - blank=True, - help_text="Complete mailing address if different from basic address", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ( - "company", - models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - related_name="headquarters", - to="parks.company", - ), - ), - ], - options={ - "verbose_name": "Company Headquarters", - "verbose_name_plural": "Company Headquarters", - "ordering": ["company__name"], - "indexes": [ - models.Index( - fields=["city", "country"], - name="parks_compa_city_cf9a4e_idx", - ) - ], - }, - ), - pgtrigger.migrations.AddTrigger( - model_name="park", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "parks_parkevent" ("average_rating", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_66883", - table="parks_park", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="park", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "parks_parkevent" ("average_rating", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_19f56", - table="parks_park", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="parkarea", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "parks_parkareaevent" ("closing_date", "created_at", "description", "id", "name", "opening_date", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at") VALUES (NEW."closing_date", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_13457", - table="parks_parkarea", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="parkarea", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "parks_parkareaevent" ("closing_date", "created_at", "description", "id", "name", "opening_date", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at") VALUES (NEW."closing_date", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."updated_at"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_6e5aa", - table="parks_parkarea", - when="AFTER", - ), - ), - ), - migrations.AddIndex( - model_name="parklocation", - index=models.Index( - fields=["city", "state"], name="parks_parkl_city_7cc873_idx" - ), - ), - migrations.AlterUniqueTogether( - name="parkreview", - unique_together={("park", "user")}, - ), - pgtrigger.migrations.AddTrigger( - model_name="parkreview", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "parks_parkreviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rating", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_a99bc", - table="parks_parkreview", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="parkreview", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "parks_parkreviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rating", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_0e40d", - table="parks_parkreview", - when="AFTER", - ), - ), - ), - ] diff --git a/parks/migrations/0002_alter_parkarea_unique_together.py b/parks/migrations/0002_alter_parkarea_unique_together.py deleted file mode 100644 index ca7cfacb..00000000 --- a/parks/migrations/0002_alter_parkarea_unique_together.py +++ /dev/null @@ -1,17 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-15 22:05 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ("parks", "0001_initial"), - ] - - operations = [ - migrations.AlterUniqueTogether( - name="parkarea", - unique_together={("park", "slug")}, - ), - ] diff --git a/parks/migrations/0003_add_business_constraints.py b/parks/migrations/0003_add_business_constraints.py deleted file mode 100644 index 60f3dd47..00000000 --- a/parks/migrations/0003_add_business_constraints.py +++ /dev/null @@ -1,129 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-16 17:42 - -import django.db.models.functions.datetime -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("parks", "0002_alter_parkarea_unique_together"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.AddConstraint( - model_name="park", - constraint=models.CheckConstraint( - condition=models.Q( - ("closing_date__isnull", True), - ("opening_date__isnull", True), - ("closing_date__gte", models.F("opening_date")), - _connector="OR", - ), - name="park_closing_after_opening", - violation_error_message="Closing date must be after opening date", - ), - ), - migrations.AddConstraint( - model_name="park", - constraint=models.CheckConstraint( - condition=models.Q( - ("size_acres__isnull", True), - ("size_acres__gt", 0), - _connector="OR", - ), - name="park_size_positive", - violation_error_message="Park size must be positive", - ), - ), - migrations.AddConstraint( - model_name="park", - constraint=models.CheckConstraint( - condition=models.Q( - ("average_rating__isnull", True), - models.Q(("average_rating__gte", 1), ("average_rating__lte", 10)), - _connector="OR", - ), - name="park_rating_range", - violation_error_message="Average rating must be between 1 and 10", - ), - ), - migrations.AddConstraint( - model_name="park", - constraint=models.CheckConstraint( - condition=models.Q( - ("ride_count__isnull", True), - ("ride_count__gte", 0), - _connector="OR", - ), - name="park_ride_count_non_negative", - violation_error_message="Ride count must be non-negative", - ), - ), - migrations.AddConstraint( - model_name="park", - constraint=models.CheckConstraint( - condition=models.Q( - ("coaster_count__isnull", True), - ("coaster_count__gte", 0), - _connector="OR", - ), - name="park_coaster_count_non_negative", - violation_error_message="Coaster count must be non-negative", - ), - ), - migrations.AddConstraint( - model_name="park", - constraint=models.CheckConstraint( - condition=models.Q( - ("coaster_count__isnull", True), - ("ride_count__isnull", True), - ("coaster_count__lte", models.F("ride_count")), - _connector="OR", - ), - name="park_coaster_count_lte_ride_count", - violation_error_message="Coaster count cannot exceed total ride count", - ), - ), - migrations.AddConstraint( - model_name="parkreview", - constraint=models.CheckConstraint( - condition=models.Q(("rating__gte", 1), ("rating__lte", 10)), - name="park_review_rating_range", - violation_error_message="Rating must be between 1 and 10", - ), - ), - migrations.AddConstraint( - model_name="parkreview", - constraint=models.CheckConstraint( - condition=models.Q( - ( - "visit_date__lte", - django.db.models.functions.datetime.Now(), - ) - ), - name="park_review_visit_date_not_future", - violation_error_message="Visit date cannot be in the future", - ), - ), - migrations.AddConstraint( - model_name="parkreview", - constraint=models.CheckConstraint( - condition=models.Q( - models.Q( - ("moderated_at__isnull", True), - ("moderated_by__isnull", True), - ), - models.Q( - ("moderated_at__isnull", False), - ("moderated_by__isnull", False), - ), - _connector="OR", - ), - name="park_review_moderation_consistency", - violation_error_message="Moderated reviews must have both moderator and moderation timestamp", - ), - ), - ] diff --git a/parks/migrations/0004_fix_pghistory_triggers.py b/parks/migrations/0004_fix_pghistory_triggers.py deleted file mode 100644 index 42178a88..00000000 --- a/parks/migrations/0004_fix_pghistory_triggers.py +++ /dev/null @@ -1,110 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-16 17:46 - -import django.contrib.postgres.fields -import django.db.models.deletion -import pgtrigger.compiler -import pgtrigger.migrations -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("parks", "0003_add_business_constraints"), - ("pghistory", "0007_auto_20250421_0444"), - ] - - operations = [ - migrations.CreateModel( - name="CompanyEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(db_index=False, max_length=255)), - ( - "roles", - django.contrib.postgres.fields.ArrayField( - base_field=models.CharField( - choices=[ - ("OPERATOR", "Park Operator"), - ("PROPERTY_OWNER", "Property Owner"), - ], - max_length=20, - ), - blank=True, - default=list, - size=None, - ), - ), - ("description", models.TextField(blank=True)), - ("website", models.URLField(blank=True)), - ( - "founded_year", - models.PositiveIntegerField(blank=True, null=True), - ), - ("parks_count", models.IntegerField(default=0)), - ("rides_count", models.IntegerField(default=0)), - ], - options={ - "abstract": False, - }, - ), - pgtrigger.migrations.AddTrigger( - model_name="company", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "parks_companyevent" ("created_at", "description", "founded_year", "id", "name", "parks_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_year", NEW."id", NEW."name", NEW."parks_count", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_35b57", - table="parks_company", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="company", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "parks_companyevent" ("created_at", "description", "founded_year", "id", "name", "parks_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_year", NEW."id", NEW."name", NEW."parks_count", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_d3286", - table="parks_company", - when="AFTER", - ), - ), - ), - migrations.AddField( - model_name="companyevent", - name="pgh_context", - field=models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - migrations.AddField( - model_name="companyevent", - name="pgh_obj", - field=models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="parks.company", - ), - ), - ] diff --git a/parks/migrations/0005_merge_20250820_2020.py b/parks/migrations/0005_merge_20250820_2020.py deleted file mode 100644 index 54b92f6f..00000000 --- a/parks/migrations/0005_merge_20250820_2020.py +++ /dev/null @@ -1,13 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-21 00:20 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ("parks", "0001_add_filter_indexes"), - ("parks", "0004_fix_pghistory_triggers"), - ] - - operations = [] diff --git a/parks/migrations/__init__.py b/parks/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/parks/models/__init__.py b/parks/models/__init__.py deleted file mode 100644 index c6d9d189..00000000 --- a/parks/models/__init__.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Parks app models with clean import interface. - -This module provides a clean import interface for all parks-related models, -enabling imports like: from parks.models import Park, Operator - -The Company model is aliased as Operator to clarify its role as park operators, -while maintaining backward compatibility through the Company alias. -""" - -from .parks import Park -from .areas import ParkArea -from .location import ParkLocation -from .reviews import ParkReview -from .companies import Company, CompanyHeadquarters - -# Alias Company as Operator for clarity -Operator = Company - -__all__ = [ - # Primary models - "Park", - "ParkArea", - "ParkLocation", - "ParkReview", - # Company models with clear naming - "Operator", - "CompanyHeadquarters", - # Backward compatibility - "Company", # Alias to Operator -] diff --git a/parks/models/areas.py b/parks/models/areas.py deleted file mode 100644 index 1f3db037..00000000 --- a/parks/models/areas.py +++ /dev/null @@ -1,33 +0,0 @@ -from django.db import models -from django.utils.text import slugify -import pghistory - -from core.history import TrackedModel -from .parks import Park - - -@pghistory.track() -class ParkArea(TrackedModel): - - # Import managers - from ..managers import ParkAreaManager - - objects = ParkAreaManager() - id: int # Type hint for Django's automatic id field - park = models.ForeignKey(Park, on_delete=models.CASCADE, related_name="areas") - name = models.CharField(max_length=255) - slug = models.SlugField(max_length=255) - description = models.TextField(blank=True) - opening_date = models.DateField(null=True, blank=True) - closing_date = models.DateField(null=True, blank=True) - - def save(self, *args, **kwargs): - if not self.slug: - self.slug = slugify(self.name) - super().save(*args, **kwargs) - - def __str__(self): - return self.name - - class Meta: - unique_together = ("park", "slug") diff --git a/parks/models/companies.py b/parks/models/companies.py deleted file mode 100644 index cf7fff6b..00000000 --- a/parks/models/companies.py +++ /dev/null @@ -1,135 +0,0 @@ -from django.contrib.postgres.fields import ArrayField -from django.db import models -from django.utils.text import slugify -from core.models import TrackedModel -import pghistory - - -@pghistory.track() -class Company(TrackedModel): - - # Import managers - from ..managers import CompanyManager - - objects = CompanyManager() - - class CompanyRole(models.TextChoices): - OPERATOR = "OPERATOR", "Park Operator" - PROPERTY_OWNER = "PROPERTY_OWNER", "Property Owner" - - name = models.CharField(max_length=255) - slug = models.SlugField(max_length=255, unique=True) - roles = ArrayField( - models.CharField(max_length=20, choices=CompanyRole.choices), - default=list, - blank=True, - ) - description = models.TextField(blank=True) - website = models.URLField(blank=True) - - # Operator-specific fields - founded_year = models.PositiveIntegerField(blank=True, null=True) - parks_count = models.IntegerField(default=0) - rides_count = models.IntegerField(default=0) - - def save(self, *args, **kwargs): - if not self.slug: - self.slug = slugify(self.name) - super().save(*args, **kwargs) - - def __str__(self): - return self.name - - class Meta: - ordering = ["name"] - verbose_name_plural = "Companies" - - -class CompanyHeadquarters(models.Model): - """ - Simple address storage for company headquarters without coordinate tracking. - Focus on human-readable location information for display purposes. - """ - - # Relationships - company = models.OneToOneField( - "Company", on_delete=models.CASCADE, related_name="headquarters" - ) - - # Address Fields (No coordinates needed) - street_address = models.CharField( - max_length=255, - blank=True, - help_text="Mailing address if publicly available", - ) - city = models.CharField( - max_length=100, db_index=True, help_text="Headquarters city" - ) - state_province = models.CharField( - max_length=100, - blank=True, - db_index=True, - help_text="State/Province/Region", - ) - country = models.CharField( - max_length=100, - default="USA", - db_index=True, - help_text="Country where headquarters is located", - ) - postal_code = models.CharField( - max_length=20, blank=True, help_text="ZIP or postal code" - ) - - # Optional mailing address if different or more complete - mailing_address = models.TextField( - blank=True, - help_text="Complete mailing address if different from basic address", - ) - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - @property - def formatted_location(self): - """Returns a formatted address string for display.""" - components = [] - if self.street_address: - components.append(self.street_address) - if self.city: - components.append(self.city) - if self.state_province: - components.append(self.state_province) - if self.postal_code: - components.append(self.postal_code) - if self.country and self.country != "USA": - components.append(self.country) - return ( - ", ".join(components) - if components - else f"{ - self.city}, { - self.country}" - ) - - @property - def location_display(self): - """Simple city, state/country display for compact views.""" - parts = [self.city] - if self.state_province: - parts.append(self.state_province) - elif self.country != "USA": - parts.append(self.country) - return ", ".join(parts) if parts else "Unknown Location" - - def __str__(self): - return f"{self.company.name} Headquarters - {self.location_display}" - - class Meta: - verbose_name = "Company Headquarters" - verbose_name_plural = "Company Headquarters" - ordering = ["company__name"] - indexes = [ - models.Index(fields=["city", "country"]), - ] diff --git a/parks/models/location.py b/parks/models/location.py deleted file mode 100644 index 973c42c3..00000000 --- a/parks/models/location.py +++ /dev/null @@ -1,112 +0,0 @@ -from django.contrib.gis.db import models -from django.contrib.gis.geos import Point - - -class ParkLocation(models.Model): - """ - Represents the geographic location and address of a park, with PostGIS support. - """ - - park = models.OneToOneField( - "parks.Park", on_delete=models.CASCADE, related_name="location" - ) - - # Spatial Data - point = models.PointField( - srid=4326, - null=True, - blank=True, - help_text="Geographic coordinates (longitude, latitude)", - ) - - # Address Fields - street_address = models.CharField(max_length=255, blank=True) - city = models.CharField(max_length=100, db_index=True) - state = models.CharField(max_length=100, db_index=True) - country = models.CharField(max_length=100, default="USA") - postal_code = models.CharField(max_length=20, blank=True) - - # Road Trip Metadata - highway_exit = models.CharField(max_length=100, blank=True) - parking_notes = models.TextField(blank=True) - best_arrival_time = models.TimeField(null=True, blank=True) - seasonal_notes = models.TextField(blank=True) - - # OSM Integration - osm_id = models.BigIntegerField(null=True, blank=True) - osm_type = models.CharField( - max_length=10, - blank=True, - help_text="Type of OpenStreetMap object (node, way, or relation)", - ) - - @property - def latitude(self): - """Return latitude from point field.""" - if self.point: - return self.point.y - return None - - @property - def longitude(self): - """Return longitude from point field.""" - if self.point: - return self.point.x - return None - - @property - def coordinates(self): - """Return (latitude, longitude) tuple.""" - if self.point: - return (self.latitude, self.longitude) - return (None, None) - - @property - def formatted_address(self): - """Return a nicely formatted address string.""" - address_parts = [ - self.street_address, - self.city, - self.state, - self.postal_code, - self.country, - ] - return ", ".join(part for part in address_parts if part) - - def set_coordinates(self, latitude, longitude): - """ - Set the location's point from latitude and longitude coordinates. - Validates coordinate ranges. - """ - if latitude is None or longitude is None: - self.point = None - return - - if not -90 <= latitude <= 90: - raise ValueError("Latitude must be between -90 and 90.") - if not -180 <= longitude <= 180: - raise ValueError("Longitude must be between -180 and 180.") - - self.point = Point(longitude, latitude, srid=4326) - - def distance_to(self, other_location): - """ - Calculate the distance to another ParkLocation instance. - Returns distance in kilometers. - """ - if not self.point or not other_location.point: - return None - # Use geodetic distance calculation which returns meters, convert to km - distance_m = self.point.distance(other_location.point) - return distance_m / 1000.0 - - def __str__(self): - return f"Location for {self.park.name}" - - class Meta: - verbose_name = "Park Location" - verbose_name_plural = "Park Locations" - ordering = ["park__name"] - indexes = [ - models.Index(fields=["city", "state"]), - ] diff --git a/parks/models/parks.py b/parks/models/parks.py deleted file mode 100644 index 321ee01c..00000000 --- a/parks/models/parks.py +++ /dev/null @@ -1,267 +0,0 @@ -from django.db import models -from django.urls import reverse -from django.utils.text import slugify -from django.contrib.contenttypes.fields import GenericRelation -from django.core.exceptions import ValidationError -from typing import Tuple, Optional, Any, TYPE_CHECKING -import pghistory -from media.models import Photo -from core.history import TrackedModel - -if TYPE_CHECKING: - from rides.models import Ride - from . import ParkArea - - -@pghistory.track() -class Park(TrackedModel): - - # Import managers - from ..managers import ParkManager - - objects = ParkManager() - id: int # Type hint for Django's automatic id field - STATUS_CHOICES = [ - ("OPERATING", "Operating"), - ("CLOSED_TEMP", "Temporarily Closed"), - ("CLOSED_PERM", "Permanently Closed"), - ("UNDER_CONSTRUCTION", "Under Construction"), - ("DEMOLISHED", "Demolished"), - ("RELOCATED", "Relocated"), - ] - - name = models.CharField(max_length=255) - slug = models.SlugField(max_length=255, unique=True) - description = models.TextField(blank=True) - status = models.CharField( - max_length=20, choices=STATUS_CHOICES, default="OPERATING" - ) - - # Location relationship - reverse relation from ParkLocation - # location will be available via the 'location' related_name on - # ParkLocation - - # Details - opening_date = models.DateField(null=True, blank=True) - closing_date = models.DateField(null=True, blank=True) - operating_season = models.CharField(max_length=255, blank=True) - size_acres = models.DecimalField( - max_digits=10, decimal_places=2, null=True, blank=True - ) - website = models.URLField(blank=True) - - # Statistics - average_rating = models.DecimalField( - max_digits=3, decimal_places=2, null=True, blank=True - ) - ride_count = models.IntegerField(null=True, blank=True) - coaster_count = models.IntegerField(null=True, blank=True) - - # Relationships - operator = models.ForeignKey( - "Company", - on_delete=models.PROTECT, - related_name="operated_parks", - help_text="Company that operates this park", - limit_choices_to={"roles__contains": ["OPERATOR"]}, - ) - property_owner = models.ForeignKey( - "Company", - on_delete=models.PROTECT, - related_name="owned_parks", - null=True, - blank=True, - help_text="Company that owns the property (if different from operator)", - limit_choices_to={"roles__contains": ["PROPERTY_OWNER"]}, - ) - photos = GenericRelation(Photo, related_query_name="park") - areas: models.Manager["ParkArea"] # Type hint for reverse relation - # Type hint for reverse relation from rides app - rides: models.Manager["Ride"] - - # Metadata - created_at = models.DateTimeField(auto_now_add=True, null=True) - updated_at = models.DateTimeField(auto_now=True) - - class Meta: - ordering = ["name"] - constraints = [ - # Business rule: Closing date must be after opening date - models.CheckConstraint( - name="park_closing_after_opening", - check=models.Q(closing_date__isnull=True) - | models.Q(opening_date__isnull=True) - | models.Q(closing_date__gte=models.F("opening_date")), - violation_error_message="Closing date must be after opening date", - ), - # Business rule: Size must be positive - models.CheckConstraint( - name="park_size_positive", - check=models.Q(size_acres__isnull=True) | models.Q(size_acres__gt=0), - violation_error_message="Park size must be positive", - ), - # Business rule: Rating must be between 1 and 10 - models.CheckConstraint( - name="park_rating_range", - check=models.Q(average_rating__isnull=True) - | (models.Q(average_rating__gte=1) & models.Q(average_rating__lte=10)), - violation_error_message="Average rating must be between 1 and 10", - ), - # Business rule: Counts must be non-negative - models.CheckConstraint( - name="park_ride_count_non_negative", - check=models.Q(ride_count__isnull=True) | models.Q(ride_count__gte=0), - violation_error_message="Ride count must be non-negative", - ), - models.CheckConstraint( - name="park_coaster_count_non_negative", - check=models.Q(coaster_count__isnull=True) - | models.Q(coaster_count__gte=0), - violation_error_message="Coaster count must be non-negative", - ), - # Business rule: Coaster count cannot exceed ride count - models.CheckConstraint( - name="park_coaster_count_lte_ride_count", - check=models.Q(coaster_count__isnull=True) - | models.Q(ride_count__isnull=True) - | models.Q(coaster_count__lte=models.F("ride_count")), - violation_error_message="Coaster count cannot exceed total ride count", - ), - ] - - def __str__(self) -> str: - return self.name - - def save(self, *args: Any, **kwargs: Any) -> None: - from django.contrib.contenttypes.models import ContentType - from core.history import HistoricalSlug - - # Get old instance if it exists - if self.pk: - try: - old_instance = type(self).objects.get(pk=self.pk) - old_name = old_instance.name - old_slug = old_instance.slug - except type(self).DoesNotExist: - old_name = None - old_slug = None - else: - old_name = None - old_slug = None - - # Generate new slug if name has changed or slug is missing - if not self.slug or (old_name and old_name != self.name): - self.slug = slugify(self.name) - - # Save the model - super().save(*args, **kwargs) - - # If slug has changed, save historical record - if old_slug and old_slug != self.slug: - HistoricalSlug.objects.create( - content_type=ContentType.objects.get_for_model(self), - object_id=self.pk, - slug=old_slug, - ) - - def clean(self): - super().clean() - if self.operator and "OPERATOR" not in self.operator.roles: - raise ValidationError({"operator": "Company must have the OPERATOR role."}) - if self.property_owner and "PROPERTY_OWNER" not in self.property_owner.roles: - raise ValidationError( - {"property_owner": "Company must have the PROPERTY_OWNER role."} - ) - - def get_absolute_url(self) -> str: - return reverse("parks:park_detail", kwargs={"slug": self.slug}) - - def get_status_color(self) -> str: - """Get Tailwind color classes for park status""" - status_colors = { - "OPERATING": "bg-green-100 text-green-800", - "CLOSED_TEMP": "bg-yellow-100 text-yellow-800", - "CLOSED_PERM": "bg-red-100 text-red-800", - "UNDER_CONSTRUCTION": "bg-blue-100 text-blue-800", - "DEMOLISHED": "bg-gray-100 text-gray-800", - "RELOCATED": "bg-purple-100 text-purple-800", - } - return status_colors.get(self.status, "bg-gray-100 text-gray-500") - - @property - def formatted_location(self) -> str: - """Get formatted address from ParkLocation if it exists""" - if hasattr(self, "location") and self.location: - return self.location.formatted_address - return "" - - @property - def coordinates(self) -> Optional[Tuple[float, float]]: - """Returns coordinates as a tuple (latitude, longitude)""" - if hasattr(self, "location") and self.location: - return self.location.coordinates - return None - - @classmethod - def get_by_slug(cls, slug: str) -> Tuple["Park", bool]: - """Get park by current or historical slug""" - from django.contrib.contenttypes.models import ContentType - from core.history import HistoricalSlug - - print(f"\nLooking up slug: {slug}") - - try: - park = cls.objects.get(slug=slug) - print(f"Found current park with slug: {slug}") - return park, False - except cls.DoesNotExist: - print(f"No current park found with slug: {slug}") - - # Try historical slugs in HistoricalSlug model - content_type = ContentType.objects.get_for_model(cls) - print(f"Searching HistoricalSlug with content_type: {content_type}") - historical = ( - HistoricalSlug.objects.filter(content_type=content_type, slug=slug) - .order_by("-created_at") - .first() - ) - - if historical: - print( - f"Found historical slug record for object_id: { - historical.object_id}" - ) - try: - park = cls.objects.get(pk=historical.object_id) - print(f"Found park from historical slug: {park.name}") - return park, True - except cls.DoesNotExist: - print("Park not found for historical slug record") - else: - print("No historical slug record found") - - # Try pghistory events - print("Searching pghistory events") - event_model = getattr(cls, "event_model", None) - if event_model: - historical_event = ( - event_model.objects.filter(slug=slug) - .order_by("-pgh_created_at") - .first() - ) - - if historical_event: - print( - f"Found pghistory event for pgh_obj_id: { - historical_event.pgh_obj_id}" - ) - try: - park = cls.objects.get(pk=historical_event.pgh_obj_id) - print(f"Found park from pghistory: {park.name}") - return park, True - except cls.DoesNotExist: - print("Park not found for pghistory event") - else: - print("No pghistory event found") - - raise cls.DoesNotExist("No park found with this slug") diff --git a/parks/models/reviews.py b/parks/models/reviews.py deleted file mode 100644 index 1e9fece8..00000000 --- a/parks/models/reviews.py +++ /dev/null @@ -1,79 +0,0 @@ -from django.db import models -from django.db.models import functions -from django.core.validators import MinValueValidator, MaxValueValidator -from core.history import TrackedModel -import pghistory - - -@pghistory.track() -class ParkReview(TrackedModel): - - # Import managers - from ..managers import ParkReviewManager - - objects = ParkReviewManager() - """ - A review of a park. - """ - park = models.ForeignKey( - "parks.Park", on_delete=models.CASCADE, related_name="reviews" - ) - user = models.ForeignKey( - "accounts.User", on_delete=models.CASCADE, related_name="park_reviews" - ) - rating = models.PositiveSmallIntegerField( - validators=[MinValueValidator(1), MaxValueValidator(10)] - ) - title = models.CharField(max_length=200) - content = models.TextField() - visit_date = models.DateField() - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - # Moderation - is_published = models.BooleanField(default=True) - moderation_notes = models.TextField(blank=True) - moderated_by = models.ForeignKey( - "accounts.User", - on_delete=models.SET_NULL, - null=True, - blank=True, - related_name="moderated_park_reviews", - ) - moderated_at = models.DateTimeField(null=True, blank=True) - - class Meta: - ordering = ["-created_at"] - unique_together = ["park", "user"] - constraints = [ - # Business rule: Rating must be between 1 and 10 (database level - # enforcement) - models.CheckConstraint( - name="park_review_rating_range", - check=models.Q(rating__gte=1) & models.Q(rating__lte=10), - violation_error_message="Rating must be between 1 and 10", - ), - # Business rule: Visit date cannot be in the future - models.CheckConstraint( - name="park_review_visit_date_not_future", - check=models.Q(visit_date__lte=functions.Now()), - violation_error_message="Visit date cannot be in the future", - ), - # Business rule: If moderated, must have moderator and timestamp - models.CheckConstraint( - name="park_review_moderation_consistency", - check=models.Q(moderated_by__isnull=True, moderated_at__isnull=True) - | models.Q( - moderated_by__isnull=False, moderated_at__isnull=False - ), - violation_error_message=( - "Moderated reviews must have both moderator and moderation " - "timestamp" - ), - ), - ] - - def __str__(self): - return f"Review of {self.park.name} by {self.user.username}" diff --git a/parks/querysets.py b/parks/querysets.py deleted file mode 100644 index 86d99c5c..00000000 --- a/parks/querysets.py +++ /dev/null @@ -1,17 +0,0 @@ -from django.db.models import QuerySet, Count, Q -from .models import Park - - -def get_base_park_queryset() -> QuerySet[Park]: - """Get base queryset with all needed annotations and prefetches""" - return ( - Park.objects.select_related("operator", "property_owner", "location") - .prefetch_related("photos", "rides") - .annotate( - current_ride_count=Count("rides", distinct=True), - current_coaster_count=Count( - "rides", filter=Q(rides__category="RC"), distinct=True - ), - ) - .order_by("name") - ) diff --git a/parks/selectors.py b/parks/selectors.py deleted file mode 100644 index 67d8ca81..00000000 --- a/parks/selectors.py +++ /dev/null @@ -1,248 +0,0 @@ -""" -Selectors for park-related data retrieval. -Following Django styleguide pattern for separating data access from business logic. -""" - -from typing import Optional, Dict, Any -from django.db.models import QuerySet, Q, Count, Avg, Prefetch -from django.contrib.gis.geos import Point -from django.contrib.gis.measure import Distance - -from .models import Park, ParkArea, ParkReview -from rides.models import Ride - - -def park_list_with_stats(*, filters: Optional[Dict[str, Any]] = None) -> QuerySet[Park]: - """ - Get parks optimized for list display with basic stats. - - Args: - filters: Optional dictionary of filter parameters - - Returns: - QuerySet of parks with optimized queries - """ - queryset = ( - Park.objects.select_related("operator", "property_owner") - .prefetch_related("location") - .annotate( - ride_count_calculated=Count("rides", distinct=True), - coaster_count_calculated=Count( - "rides", - filter=Q(rides__category__in=["RC", "WC"]), - distinct=True, - ), - average_rating_calculated=Avg("reviews__rating"), - ) - ) - - if filters: - if "status" in filters: - queryset = queryset.filter(status=filters["status"]) - if "operator" in filters: - queryset = queryset.filter(operator=filters["operator"]) - if "country" in filters: - queryset = queryset.filter(location__country=filters["country"]) - if "search" in filters: - search_term = filters["search"] - queryset = queryset.filter( - Q(name__icontains=search_term) | Q(description__icontains=search_term) - ) - - return queryset.order_by("name") - - -def park_detail_optimized(*, slug: str) -> Park: - """ - Get a single park with all related data optimized for detail view. - - Args: - slug: Park slug identifier - - Returns: - Park instance with optimized prefetches - - Raises: - Park.DoesNotExist: If park with slug doesn't exist - """ - return ( - Park.objects.select_related("operator", "property_owner") - .prefetch_related( - "location", - "areas", - Prefetch( - "rides", - queryset=Ride.objects.select_related( - "manufacturer", "designer", "ride_model" - ), - ), - Prefetch( - "reviews", - queryset=ParkReview.objects.select_related("user").filter( - is_published=True - ), - ), - "photos", - ) - .get(slug=slug) - ) - - -def parks_near_location( - *, point: Point, distance_km: float = 50, limit: int = 10 -) -> QuerySet[Park]: - """ - Get parks near a specific geographic location. - - Args: - point: Geographic point (longitude, latitude) - distance_km: Maximum distance in kilometers - limit: Maximum number of results - - Returns: - QuerySet of nearby parks ordered by distance - """ - return ( - Park.objects.filter( - location__coordinates__distance_lte=( - point, - Distance(km=distance_km), - ) - ) - .select_related("operator") - .prefetch_related("location") - .distance(point) - .order_by("distance")[:limit] - ) - - -def park_statistics() -> Dict[str, Any]: - """ - Get overall park statistics for dashboard/analytics. - - Returns: - Dictionary containing park statistics - """ - total_parks = Park.objects.count() - operating_parks = Park.objects.filter(status="OPERATING").count() - total_rides = Ride.objects.count() - total_coasters = Ride.objects.filter(category__in=["RC", "WC"]).count() - - return { - "total_parks": total_parks, - "operating_parks": operating_parks, - "closed_parks": total_parks - operating_parks, - "total_rides": total_rides, - "total_coasters": total_coasters, - "average_rides_per_park": (total_rides / total_parks if total_parks > 0 else 0), - } - - -def parks_by_operator(*, operator_id: int) -> QuerySet[Park]: - """ - Get all parks operated by a specific company. - - Args: - operator_id: Company ID of the operator - - Returns: - QuerySet of parks operated by the company - """ - return ( - Park.objects.filter(operator_id=operator_id) - .select_related("operator") - .prefetch_related("location") - .annotate(ride_count_calculated=Count("rides")) - .order_by("name") - ) - - -def parks_with_recent_reviews(*, days: int = 30) -> QuerySet[Park]: - """ - Get parks that have received reviews in the last N days. - - Args: - days: Number of days to look back for reviews - - Returns: - QuerySet of parks with recent reviews - """ - from django.utils import timezone - from datetime import timedelta - - cutoff_date = timezone.now() - timedelta(days=days) - - return ( - Park.objects.filter( - reviews__created_at__gte=cutoff_date, reviews__is_published=True - ) - .select_related("operator") - .prefetch_related("location") - .annotate( - recent_review_count=Count( - "reviews", filter=Q(reviews__created_at__gte=cutoff_date) - ) - ) - .order_by("-recent_review_count") - .distinct() - ) - - -def park_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet[Park]: - """ - Get parks matching a search query for autocomplete functionality. - - Args: - query: Search string - limit: Maximum number of results - - Returns: - QuerySet of matching parks for autocomplete - """ - return ( - Park.objects.filter( - Q(name__icontains=query) - | Q(location__city__icontains=query) - | Q(location__region__icontains=query) - ) - .select_related("operator") - .prefetch_related("location") - .order_by("name")[:limit] - ) - - -def park_areas_for_park(*, park_slug: str) -> QuerySet[ParkArea]: - """ - Get all areas for a specific park. - - Args: - park_slug: Slug of the park - - Returns: - QuerySet of park areas with related data - """ - return ( - ParkArea.objects.filter(park__slug=park_slug) - .select_related("park") - .prefetch_related("rides") - .annotate(ride_count=Count("rides")) - .order_by("name") - ) - - -def park_reviews_for_park(*, park_id: int, limit: int = 20) -> QuerySet[ParkReview]: - """ - Get reviews for a specific park. - - Args: - park_id: Park ID - limit: Maximum number of reviews to return - - Returns: - QuerySet of park reviews - """ - return ( - ParkReview.objects.filter(park_id=park_id, is_published=True) - .select_related("user", "park") - .order_by("-created_at")[:limit] - ) diff --git a/parks/services.py b/parks/services.py deleted file mode 100644 index 6317f5cf..00000000 --- a/parks/services.py +++ /dev/null @@ -1,323 +0,0 @@ -""" -Services for park-related business logic. -Following Django styleguide pattern for business logic encapsulation. -""" - -from typing import Optional, Dict, Any -from django.db import transaction -from django.db.models import Q -from django.contrib.auth import get_user_model -from django.contrib.auth.models import AbstractBaseUser - -from .models import Park, ParkArea -from location.models import Location - -# Use AbstractBaseUser for type hinting -UserType = AbstractBaseUser -User = get_user_model() - - -class ParkService: - """Service for managing park operations.""" - - @staticmethod - def create_park( - *, - name: str, - description: str = "", - status: str = "OPERATING", - operator_id: Optional[int] = None, - property_owner_id: Optional[int] = None, - opening_date: Optional[str] = None, - closing_date: Optional[str] = None, - operating_season: str = "", - size_acres: Optional[float] = None, - website: str = "", - location_data: Optional[Dict[str, Any]] = None, - created_by: Optional[UserType] = None, - ) -> Park: - """ - Create a new park with validation and location handling. - - Args: - name: Park name - description: Park description - status: Operating status - operator_id: ID of operating company - property_owner_id: ID of property owner company - opening_date: Opening date - closing_date: Closing date - operating_season: Operating season description - size_acres: Park size in acres - website: Park website URL - location_data: Dictionary containing location information - created_by: User creating the park - - Returns: - Created Park instance - - Raises: - ValidationError: If park data is invalid - """ - with transaction.atomic(): - # Create park instance - park = Park( - name=name, - description=description, - status=status, - opening_date=opening_date, - closing_date=closing_date, - operating_season=operating_season, - size_acres=size_acres, - website=website, - ) - - # Set foreign key relationships if provided - if operator_id: - from .models import Company - - park.operator = Company.objects.get(id=operator_id) - - if property_owner_id: - from .models import Company - - park.property_owner = Company.objects.get(id=property_owner_id) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - # Handle location if provided - if location_data: - LocationService.create_park_location(park=park, **location_data) - - return park - - @staticmethod - def update_park( - *, - park_id: int, - updates: Dict[str, Any], - updated_by: Optional[UserType] = None, - ) -> Park: - """ - Update an existing park with validation. - - Args: - park_id: ID of park to update - updates: Dictionary of field updates - updated_by: User performing the update - - Returns: - Updated Park instance - - Raises: - Park.DoesNotExist: If park doesn't exist - ValidationError: If update data is invalid - """ - with transaction.atomic(): - park = Park.objects.select_for_update().get(id=park_id) - - # Apply updates - for field, value in updates.items(): - if hasattr(park, field): - setattr(park, field, value) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - return park - - @staticmethod - def delete_park(*, park_id: int, deleted_by: Optional[UserType] = None) -> bool: - """ - Soft delete a park by setting status to DEMOLISHED. - - Args: - park_id: ID of park to delete - deleted_by: User performing the deletion - - Returns: - True if successfully deleted - - Raises: - Park.DoesNotExist: If park doesn't exist - """ - with transaction.atomic(): - park = Park.objects.select_for_update().get(id=park_id) - park.status = "DEMOLISHED" - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - return True - - @staticmethod - def create_park_area( - *, - park_id: int, - name: str, - description: str = "", - created_by: Optional[UserType] = None, - ) -> ParkArea: - """ - Create a new area within a park. - - Args: - park_id: ID of the parent park - name: Area name - description: Area description - created_by: User creating the area - - Returns: - Created ParkArea instance - - Raises: - Park.DoesNotExist: If park doesn't exist - ValidationError: If area data is invalid - """ - park = Park.objects.get(id=park_id) - - area = ParkArea(park=park, name=name, description=description) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - area.full_clean() - area.save() - - return area - - @staticmethod - def update_park_statistics(*, park_id: int) -> Park: - """ - Recalculate and update park statistics (ride counts, ratings). - - Args: - park_id: ID of park to update statistics for - - Returns: - Updated Park instance with fresh statistics - """ - from rides.models import Ride - from .models import ParkReview - from django.db.models import Count, Avg - - with transaction.atomic(): - park = Park.objects.select_for_update().get(id=park_id) - - # Calculate ride counts - ride_stats = Ride.objects.filter(park=park).aggregate( - total_rides=Count("id"), - coaster_count=Count("id", filter=Q(category__in=["RC", "WC"])), - ) - - # Calculate average rating - avg_rating = ParkReview.objects.filter( - park=park, is_published=True - ).aggregate(avg_rating=Avg("rating"))["avg_rating"] - - # Update park fields - park.ride_count = ride_stats["total_rides"] or 0 - park.coaster_count = ride_stats["coaster_count"] or 0 - park.average_rating = avg_rating - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - return park - - -class LocationService: - """Service for managing location operations.""" - - @staticmethod - def create_park_location( - *, - park: Park, - latitude: Optional[float] = None, - longitude: Optional[float] = None, - street_address: str = "", - city: str = "", - state: str = "", - country: str = "", - postal_code: str = "", - ) -> Location: - """ - Create a location for a park. - - Args: - park: Park instance - latitude: Latitude coordinate - longitude: Longitude coordinate - street_address: Street address - city: City name - state: State/region name - country: Country name - postal_code: Postal/ZIP code - - Returns: - Created Location instance - - Raises: - ValidationError: If location data is invalid - """ - location = Location( - content_object=park, - name=park.name, - location_type="park", - latitude=latitude, - longitude=longitude, - street_address=street_address, - city=city, - state=state, - country=country, - postal_code=postal_code, - ) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - location.full_clean() - location.save() - - return location - - @staticmethod - def update_park_location( - *, park_id: int, location_updates: Dict[str, Any] - ) -> Location: - """ - Update location information for a park. - - Args: - park_id: ID of the park - location_updates: Dictionary of location field updates - - Returns: - Updated Location instance - - Raises: - Location.DoesNotExist: If location doesn't exist - ValidationError: If location data is invalid - """ - with transaction.atomic(): - park = Park.objects.get(id=park_id) - - try: - location = park.location - except Location.DoesNotExist: - # Create location if it doesn't exist - return LocationService.create_park_location( - park=park, **location_updates - ) - - # Apply updates - for field, value in location_updates.items(): - if hasattr(location, field): - setattr(location, field, value) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - location.full_clean() - location.save() - - return location diff --git a/parks/services/__init__.py b/parks/services/__init__.py deleted file mode 100644 index af0b3879..00000000 --- a/parks/services/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .roadtrip import RoadTripService -from .park_management import ParkService, LocationService -from .filter_service import ParkFilterService - -__all__ = ["RoadTripService", "ParkService", "LocationService", "ParkFilterService"] diff --git a/parks/services/filter_service.py b/parks/services/filter_service.py deleted file mode 100644 index 1a122a27..00000000 --- a/parks/services/filter_service.py +++ /dev/null @@ -1,304 +0,0 @@ -""" -Park Filter Service - -Provides filtering functionality, aggregations, and caching for park filters. -This service handles complex filter logic and provides useful filter statistics. -""" - -from typing import Dict, List, Any, Optional -from django.db.models import QuerySet, Count, Q -from django.core.cache import cache -from django.conf import settings -from ..models import Park, Company -from ..querysets import get_base_park_queryset - - -class ParkFilterService: - """ - Service class for handling park filtering operations, aggregations, - and providing filter suggestions based on available data. - """ - - CACHE_TIMEOUT = getattr(settings, "PARK_FILTER_CACHE_TIMEOUT", 300) # 5 minutes - - def __init__(self): - self.cache_prefix = "park_filter" - - def get_filter_counts( - self, base_queryset: Optional[QuerySet] = None - ) -> Dict[str, Any]: - """ - Get counts for various filter options to show users what's available. - - Args: - base_queryset: Optional base queryset to use for calculations - - Returns: - Dictionary containing counts for different filter categories - """ - cache_key = f"{self.cache_prefix}:filter_counts" - cached_result = cache.get(cache_key) - - if cached_result is not None: - return cached_result - - if base_queryset is None: - base_queryset = get_base_park_queryset() - - # Calculate filter counts - filter_counts = { - "total_parks": base_queryset.count(), - "operating_parks": base_queryset.filter(status="OPERATING").count(), - "parks_with_coasters": base_queryset.filter(coaster_count__gt=0).count(), - "big_parks": base_queryset.filter(ride_count__gte=10).count(), - "highly_rated": base_queryset.filter(average_rating__gte=4.0).count(), - "park_types": self._get_park_type_counts(base_queryset), - "top_operators": self._get_top_operators(base_queryset), - "countries": self._get_country_counts(base_queryset), - } - - # Cache the result - cache.set(cache_key, filter_counts, self.CACHE_TIMEOUT) - return filter_counts - - def _get_park_type_counts(self, queryset: QuerySet) -> Dict[str, int]: - """Get counts for different park types based on operator names.""" - return { - "disney": queryset.filter(operator__name__icontains="Disney").count(), - "universal": queryset.filter(operator__name__icontains="Universal").count(), - "six_flags": queryset.filter(operator__name__icontains="Six Flags").count(), - "cedar_fair": queryset.filter( - Q(operator__name__icontains="Cedar Fair") - | Q(operator__name__icontains="Cedar Point") - | Q(operator__name__icontains="Kings Island") - ).count(), - } - - def _get_top_operators( - self, queryset: QuerySet, limit: int = 10 - ) -> List[Dict[str, Any]]: - """Get the top operators by number of parks.""" - return list( - queryset.values("operator__name", "operator__id") - .annotate(park_count=Count("id")) - .filter(park_count__gt=0) - .order_by("-park_count")[:limit] - ) - - def _get_country_counts( - self, queryset: QuerySet, limit: int = 10 - ) -> List[Dict[str, Any]]: - """Get countries with the most parks.""" - return list( - queryset.filter(location__country__isnull=False) - .values("location__country") - .annotate(park_count=Count("id")) - .filter(park_count__gt=0) - .order_by("-park_count")[:limit] - ) - - def get_filter_suggestions(self, query: str) -> Dict[str, List[str]]: - """ - Get filter suggestions based on a search query. - - Args: - query: Search query string - - Returns: - Dictionary with suggestion categories - """ - cache_key = f"{self.cache_prefix}:suggestions:{query.lower()}" - cached_result = cache.get(cache_key) - - if cached_result is not None: - return cached_result - - suggestions = { - "parks": [], - "operators": [], - "locations": [], - } - - if len(query) >= 2: # Only search for queries of 2+ characters - # Park name suggestions - park_names = Park.objects.filter(name__icontains=query).values_list( - "name", flat=True - )[:5] - suggestions["parks"] = list(park_names) - - # Operator suggestions - operator_names = Company.objects.filter( - roles__contains=["OPERATOR"], name__icontains=query - ).values_list("name", flat=True)[:5] - suggestions["operators"] = list(operator_names) - - # Location suggestions (cities and countries) - locations = Park.objects.filter( - Q(location__city__icontains=query) - | Q(location__country__icontains=query) - ).values_list("location__city", "location__country")[:5] - - location_suggestions = [] - for city, country in locations: - if city and city.lower().startswith(query.lower()): - location_suggestions.append(city) - elif country and country.lower().startswith(query.lower()): - location_suggestions.append(country) - - suggestions["locations"] = list(set(location_suggestions))[:5] - - # Cache suggestions for a shorter time - cache.set(cache_key, suggestions, 60) # 1 minute cache - return suggestions - - def get_popular_filters(self) -> Dict[str, Any]: - """ - Get commonly used filter combinations and popular filter values. - - Returns: - Dictionary containing popular filter configurations - """ - cache_key = f"{self.cache_prefix}:popular_filters" - cached_result = cache.get(cache_key) - - if cached_result is not None: - return cached_result - - base_qs = get_base_park_queryset() - - popular_filters = { - "quick_filters": [ - { - "label": "Disney Parks", - "filters": {"park_type": "disney"}, - "count": base_qs.filter(operator__name__icontains="Disney").count(), - }, - { - "label": "Parks with Coasters", - "filters": {"has_coasters": True}, - "count": base_qs.filter(coaster_count__gt=0).count(), - }, - { - "label": "Highly Rated", - "filters": {"min_rating": "4"}, - "count": base_qs.filter(average_rating__gte=4.0).count(), - }, - { - "label": "Major Parks", - "filters": {"big_parks_only": True}, - "count": base_qs.filter(ride_count__gte=10).count(), - }, - ], - "recommended_sorts": [ - {"value": "-average_rating", "label": "Highest Rated"}, - {"value": "-coaster_count", "label": "Most Coasters"}, - {"value": "name", "label": "A-Z"}, - ], - } - - # Cache for longer since these don't change often - cache.set(cache_key, popular_filters, self.CACHE_TIMEOUT * 2) - return popular_filters - - def clear_filter_cache(self) -> None: - """Clear all cached filter data.""" - # Simple cache clearing - delete known keys - cache_keys = [ - f"{self.cache_prefix}:filter_counts", - f"{self.cache_prefix}:popular_filters", - ] - for key in cache_keys: - cache.delete(key) - - def get_filtered_queryset(self, filters: Dict[str, Any]) -> QuerySet: # noqa: C901 - """ - Apply filters to get a filtered queryset with optimizations. - - Args: - filters: Dictionary of filter parameters - - Returns: - Filtered and optimized QuerySet - """ - queryset = ( - get_base_park_queryset() - .select_related("operator", "property_owner", "location") - .prefetch_related("photos", "rides__manufacturer") - ) - - # Apply status filter - if filters.get("status"): - queryset = queryset.filter(status=filters["status"]) - - # Apply park type filter - if filters.get("park_type"): - queryset = self._apply_park_type_filter(queryset, filters["park_type"]) - - # Apply coaster filter - if filters.get("has_coasters"): - queryset = queryset.filter(coaster_count__gt=0) - - # Apply rating filter - if filters.get("min_rating"): - try: - min_rating = float(filters["min_rating"]) - queryset = queryset.filter(average_rating__gte=min_rating) - except (ValueError, TypeError): - pass - - # Apply big parks filter - if filters.get("big_parks_only"): - queryset = queryset.filter(ride_count__gte=10) - - # Apply search - if filters.get("search"): - search_query = filters["search"] - queryset = queryset.filter( - Q(name__icontains=search_query) - | Q(description__icontains=search_query) - | Q(location__city__icontains=search_query) - | Q(location__country__icontains=search_query) - ) - - # Apply location filters - if filters.get("country_filter"): - queryset = queryset.filter( - location__country__icontains=filters["country_filter"] - ) - - if filters.get("state_filter"): - queryset = queryset.filter( - location__state__icontains=filters["state_filter"] - ) - - # Apply ordering - if filters.get("ordering"): - queryset = queryset.order_by(filters["ordering"]) - - return queryset.distinct() - - def _apply_park_type_filter(self, queryset: QuerySet, park_type: str) -> QuerySet: - """Apply park type filter logic.""" - type_filters = { - "disney": Q(operator__name__icontains="Disney"), - "universal": Q(operator__name__icontains="Universal"), - "six_flags": Q(operator__name__icontains="Six Flags"), - "cedar_fair": ( - Q(operator__name__icontains="Cedar Fair") - | Q(operator__name__icontains="Cedar Point") - | Q(operator__name__icontains="Kings Island") - | Q(operator__name__icontains="Canada's Wonderland") - ), - "independent": ~( - Q(operator__name__icontains="Disney") - | Q(operator__name__icontains="Universal") - | Q(operator__name__icontains="Six Flags") - | Q(operator__name__icontains="Cedar Fair") - | Q(operator__name__icontains="Cedar Point") - ), - } - - if park_type in type_filters: - return queryset.filter(type_filters[park_type]) - - return queryset diff --git a/parks/services/park_management.py b/parks/services/park_management.py deleted file mode 100644 index 488206f4..00000000 --- a/parks/services/park_management.py +++ /dev/null @@ -1,322 +0,0 @@ -""" -Services for park-related business logic. -Following Django styleguide pattern for business logic encapsulation. -""" - -from typing import Optional, Dict, Any, TYPE_CHECKING -from django.db import transaction -from django.db.models import Q - -if TYPE_CHECKING: - from django.contrib.auth.models import AbstractUser - -from ..models import Park, ParkArea -from location.models import Location - - -class ParkService: - """Service for managing park operations.""" - - @staticmethod - def create_park( - *, - name: str, - description: str = "", - status: str = "OPERATING", - operator_id: Optional[int] = None, - property_owner_id: Optional[int] = None, - opening_date: Optional[str] = None, - closing_date: Optional[str] = None, - operating_season: str = "", - size_acres: Optional[float] = None, - website: str = "", - location_data: Optional[Dict[str, Any]] = None, - created_by: Optional["AbstractUser"] = None, - ) -> Park: - """ - Create a new park with validation and location handling. - - Args: - name: Park name - description: Park description - status: Operating status - operator_id: ID of operating company - property_owner_id: ID of property owner company - opening_date: Opening date - closing_date: Closing date - operating_season: Operating season description - size_acres: Park size in acres - website: Park website URL - location_data: Dictionary containing location information - created_by: User creating the park - - Returns: - Created Park instance - - Raises: - ValidationError: If park data is invalid - """ - with transaction.atomic(): - # Create park instance - park = Park( - name=name, - description=description, - status=status, - opening_date=opening_date, - closing_date=closing_date, - operating_season=operating_season, - size_acres=size_acres, - website=website, - ) - - # Set foreign key relationships if provided - if operator_id: - from parks.models import Company - - park.operator = Company.objects.get(id=operator_id) - - if property_owner_id: - from parks.models import Company - - park.property_owner = Company.objects.get(id=property_owner_id) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - # Handle location if provided - if location_data: - LocationService.create_park_location(park=park, **location_data) - - return park - - @staticmethod - def update_park( - *, - park_id: int, - updates: Dict[str, Any], - updated_by: Optional["AbstractUser"] = None, - ) -> Park: - """ - Update an existing park with validation. - - Args: - park_id: ID of park to update - updates: Dictionary of field updates - updated_by: User performing the update - - Returns: - Updated Park instance - - Raises: - Park.DoesNotExist: If park doesn't exist - ValidationError: If update data is invalid - """ - with transaction.atomic(): - park = Park.objects.select_for_update().get(id=park_id) - - # Apply updates - for field, value in updates.items(): - if hasattr(park, field): - setattr(park, field, value) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - return park - - @staticmethod - def delete_park( - *, park_id: int, deleted_by: Optional["AbstractUser"] = None - ) -> bool: - """ - Soft delete a park by setting status to DEMOLISHED. - - Args: - park_id: ID of park to delete - deleted_by: User performing the deletion - - Returns: - True if successfully deleted - - Raises: - Park.DoesNotExist: If park doesn't exist - """ - with transaction.atomic(): - park = Park.objects.select_for_update().get(id=park_id) - park.status = "DEMOLISHED" - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - return True - - @staticmethod - def create_park_area( - *, - park_id: int, - name: str, - description: str = "", - created_by: Optional["AbstractUser"] = None, - ) -> ParkArea: - """ - Create a new area within a park. - - Args: - park_id: ID of the parent park - name: Area name - description: Area description - created_by: User creating the area - - Returns: - Created ParkArea instance - - Raises: - Park.DoesNotExist: If park doesn't exist - ValidationError: If area data is invalid - """ - park = Park.objects.get(id=park_id) - - area = ParkArea(park=park, name=name, description=description) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - area.full_clean() - area.save() - - return area - - @staticmethod - def update_park_statistics(*, park_id: int) -> Park: - """ - Recalculate and update park statistics (ride counts, ratings). - - Args: - park_id: ID of park to update statistics for - - Returns: - Updated Park instance with fresh statistics - """ - from rides.models import Ride - from parks.models import ParkReview - from django.db.models import Count, Avg - - with transaction.atomic(): - park = Park.objects.select_for_update().get(id=park_id) - - # Calculate ride counts - ride_stats = Ride.objects.filter(park=park).aggregate( - total_rides=Count("id"), - coaster_count=Count("id", filter=Q(category__in=["RC", "WC"])), - ) - - # Calculate average rating - avg_rating = ParkReview.objects.filter( - park=park, is_published=True - ).aggregate(avg_rating=Avg("rating"))["avg_rating"] - - # Update park fields - park.ride_count = ride_stats["total_rides"] or 0 - park.coaster_count = ride_stats["coaster_count"] or 0 - park.average_rating = avg_rating - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - park.full_clean() - park.save() - - return park - - -class LocationService: - """Service for managing location operations.""" - - @staticmethod - def create_park_location( - *, - park: Park, - latitude: Optional[float] = None, - longitude: Optional[float] = None, - street_address: str = "", - city: str = "", - state: str = "", - country: str = "", - postal_code: str = "", - ) -> Location: - """ - Create a location for a park. - - Args: - park: Park instance - latitude: Latitude coordinate - longitude: Longitude coordinate - street_address: Street address - city: City name - state: State/region name - country: Country name - postal_code: Postal/ZIP code - - Returns: - Created Location instance - - Raises: - ValidationError: If location data is invalid - """ - location = Location( - content_object=park, - name=park.name, - location_type="park", - latitude=latitude, - longitude=longitude, - street_address=street_address, - city=city, - state=state, - country=country, - postal_code=postal_code, - ) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - location.full_clean() - location.save() - - return location - - @staticmethod - def update_park_location( - *, park_id: int, location_updates: Dict[str, Any] - ) -> Location: - """ - Update location information for a park. - - Args: - park_id: ID of the park - location_updates: Dictionary of location field updates - - Returns: - Updated Location instance - - Raises: - Location.DoesNotExist: If location doesn't exist - ValidationError: If location data is invalid - """ - with transaction.atomic(): - park = Park.objects.get(id=park_id) - - try: - location = park.location - except Location.DoesNotExist: - # Create location if it doesn't exist - return LocationService.create_park_location( - park=park, **location_updates - ) - - # Apply updates - for field, value in location_updates.items(): - if hasattr(location, field): - setattr(location, field, value) - - # CRITICAL STYLEGUIDE FIX: Call full_clean before save - location.full_clean() - location.save() - - return location diff --git a/parks/services/roadtrip.py b/parks/services/roadtrip.py deleted file mode 100644 index 25e59cad..00000000 --- a/parks/services/roadtrip.py +++ /dev/null @@ -1,706 +0,0 @@ -""" -Road Trip Service for theme park planning using OpenStreetMap APIs. - -This service provides functionality for: -- Geocoding addresses using Nominatim -- Route calculation using OSRM -- Park discovery along routes -- Multi-park trip planning -- Proper rate limiting and caching -""" - -import time -import math -import logging -import requests -from typing import Dict, List, Tuple, Optional, Any -from dataclasses import dataclass -from itertools import permutations - -from django.conf import settings -from django.core.cache import cache -from django.contrib.gis.geos import Point -from django.contrib.gis.measure import Distance -from parks.models import Park - -logger = logging.getLogger(__name__) - - -@dataclass -class Coordinates: - """Represents latitude and longitude coordinates.""" - - latitude: float - longitude: float - - def to_tuple(self) -> Tuple[float, float]: - """Return as (lat, lon) tuple.""" - return (self.latitude, self.longitude) - - def to_point(self) -> Point: - """Convert to Django Point object.""" - return Point(self.longitude, self.latitude, srid=4326) - - -@dataclass -class RouteInfo: - """Information about a calculated route.""" - - distance_km: float - duration_minutes: int - geometry: Optional[str] = None # Encoded polyline - - @property - def formatted_distance(self) -> str: - """Return formatted distance string.""" - if self.distance_km < 1: - return f"{self.distance_km * 1000:.0f}m" - return f"{self.distance_km:.1f}km" - - @property - def formatted_duration(self) -> str: - """Return formatted duration string.""" - hours = self.duration_minutes // 60 - minutes = self.duration_minutes % 60 - if hours == 0: - return f"{minutes}min" - elif minutes == 0: - return f"{hours}h" - else: - return f"{hours}h {minutes}min" - - -@dataclass -class TripLeg: - """Represents one leg of a multi-park trip.""" - - from_park: "Park" - to_park: "Park" - route: RouteInfo - - @property - def parks_along_route(self) -> List["Park"]: - """Get parks along this route segment.""" - # This would be populated by find_parks_along_route - return [] - - -@dataclass -class RoadTrip: - """Complete road trip with multiple parks.""" - - parks: List["Park"] - legs: List[TripLeg] - total_distance_km: float - total_duration_minutes: int - - @property - def formatted_total_distance(self) -> str: - """Return formatted total distance.""" - return f"{self.total_distance_km:.1f}km" - - @property - def formatted_total_duration(self) -> str: - """Return formatted total duration.""" - hours = self.total_duration_minutes // 60 - minutes = self.total_duration_minutes % 60 - if hours == 0: - return f"{minutes}min" - elif minutes == 0: - return f"{hours}h" - else: - return f"{hours}h {minutes}min" - - -class RateLimiter: - """Simple rate limiter for API requests.""" - - def __init__(self, max_requests_per_second: float = 1.0): - self.max_requests_per_second = max_requests_per_second - self.min_interval = 1.0 / max_requests_per_second - self.last_request_time = 0.0 - - def wait_if_needed(self): - """Wait if necessary to respect rate limits.""" - current_time = time.time() - time_since_last = current_time - self.last_request_time - - if time_since_last < self.min_interval: - wait_time = self.min_interval - time_since_last - time.sleep(wait_time) - - self.last_request_time = time.time() - - -class OSMAPIException(Exception): - """Exception for OSM API related errors.""" - - -class RoadTripService: - """ - Service for planning road trips between theme parks using OpenStreetMap APIs. - """ - - def __init__(self): - self.nominatim_base_url = "https://nominatim.openstreetmap.org" - self.osrm_base_url = "http://router.project-osrm.org/route/v1/driving" - - # Configuration from Django settings - self.cache_timeout = getattr(settings, "ROADTRIP_CACHE_TIMEOUT", 3600 * 24) - self.route_cache_timeout = getattr( - settings, "ROADTRIP_ROUTE_CACHE_TIMEOUT", 3600 * 6 - ) - self.user_agent = getattr( - settings, "ROADTRIP_USER_AGENT", "ThrillWiki Road Trip Planner" - ) - self.request_timeout = getattr(settings, "ROADTRIP_REQUEST_TIMEOUT", 10) - self.max_retries = getattr(settings, "ROADTRIP_MAX_RETRIES", 3) - self.backoff_factor = getattr(settings, "ROADTRIP_BACKOFF_FACTOR", 2) - - # Rate limiter - max_rps = getattr(settings, "ROADTRIP_MAX_REQUESTS_PER_SECOND", 1) - self.rate_limiter = RateLimiter(max_rps) - - # Request session with proper headers - self.session = requests.Session() - self.session.headers.update( - { - "User-Agent": self.user_agent, - "Accept": "application/json", - } - ) - - def _make_request(self, url: str, params: Dict[str, Any]) -> Dict[str, Any]: - """ - Make HTTP request with rate limiting, retries, and error handling. - """ - self.rate_limiter.wait_if_needed() - - for attempt in range(self.max_retries): - try: - response = self.session.get( - url, params=params, timeout=self.request_timeout - ) - response.raise_for_status() - return response.json() - - except requests.exceptions.RequestException as e: - logger.warning(f"Request attempt {attempt + 1} failed: {e}") - - if attempt < self.max_retries - 1: - wait_time = self.backoff_factor**attempt - time.sleep(wait_time) - else: - raise OSMAPIException( - f"Failed to make request after { - self.max_retries} attempts: {e}" - ) - - def geocode_address(self, address: str) -> Optional[Coordinates]: - """ - Convert address to coordinates using Nominatim geocoding service. - - Args: - address: Address string to geocode - - Returns: - Coordinates object or None if geocoding fails - """ - if not address or not address.strip(): - return None - - # Check cache first - cache_key = f"roadtrip:geocode:{hash(address.lower().strip())}" - cached_result = cache.get(cache_key) - if cached_result: - return Coordinates(**cached_result) - - try: - params = { - "q": address.strip(), - "format": "json", - "limit": 1, - "addressdetails": 1, - } - - url = f"{self.nominatim_base_url}/search" - response = self._make_request(url, params) - - if response and len(response) > 0: - result = response[0] - coords = Coordinates( - latitude=float(result["lat"]), - longitude=float(result["lon"]), - ) - - # Cache the result - cache.set( - cache_key, - { - "latitude": coords.latitude, - "longitude": coords.longitude, - }, - self.cache_timeout, - ) - - logger.info( - f"Geocoded '{address}' to { - coords.latitude}, { - coords.longitude}" - ) - return coords - else: - logger.warning(f"No geocoding results for address: {address}") - return None - - except Exception as e: - logger.error(f"Geocoding failed for '{address}': {e}") - return None - - def calculate_route( - self, start_coords: Coordinates, end_coords: Coordinates - ) -> Optional[RouteInfo]: - """ - Calculate route between two coordinate points using OSRM. - - Args: - start_coords: Starting coordinates - end_coords: Ending coordinates - - Returns: - RouteInfo object or None if routing fails - """ - if not start_coords or not end_coords: - return None - - # Check cache first - cache_key = f"roadtrip:route:{ - start_coords.latitude},{ - start_coords.longitude}:{ - end_coords.latitude},{ - end_coords.longitude}" - cached_result = cache.get(cache_key) - if cached_result: - return RouteInfo(**cached_result) - - try: - # Format coordinates for OSRM (lon,lat format) - coords_string = f"{ - start_coords.longitude},{ - start_coords.latitude};{ - end_coords.longitude},{ - end_coords.latitude}" - url = f"{self.osrm_base_url}/{coords_string}" - - params = { - "overview": "full", - "geometries": "polyline", - "steps": "false", - } - - response = self._make_request(url, params) - - if response.get("code") == "Ok" and response.get("routes"): - route_data = response["routes"][0] - - # Distance is in meters, convert to km - distance_km = route_data["distance"] / 1000.0 - # Duration is in seconds, convert to minutes - duration_minutes = int(route_data["duration"] / 60) - - route_info = RouteInfo( - distance_km=distance_km, - duration_minutes=duration_minutes, - geometry=route_data.get("geometry"), - ) - - # Cache the result - cache.set( - cache_key, - { - "distance_km": route_info.distance_km, - "duration_minutes": route_info.duration_minutes, - "geometry": route_info.geometry, - }, - self.route_cache_timeout, - ) - - logger.info( - f"Route calculated: { - route_info.formatted_distance}, { - route_info.formatted_duration}" - ) - return route_info - else: - # Fallback to straight-line distance calculation - logger.warning( - f"OSRM routing failed, falling back to straight-line distance" - ) - return self._calculate_straight_line_route(start_coords, end_coords) - - except Exception as e: - logger.error(f"Route calculation failed: {e}") - # Fallback to straight-line distance - return self._calculate_straight_line_route(start_coords, end_coords) - - def _calculate_straight_line_route( - self, start_coords: Coordinates, end_coords: Coordinates - ) -> RouteInfo: - """ - Calculate straight-line distance as fallback when routing fails. - """ - # Haversine formula for great-circle distance - lat1, lon1 = math.radians(start_coords.latitude), math.radians( - start_coords.longitude - ) - lat2, lon2 = math.radians(end_coords.latitude), math.radians( - end_coords.longitude - ) - - dlat = lat2 - lat1 - dlon = lon2 - lon1 - - a = ( - math.sin(dlat / 2) ** 2 - + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2 - ) - c = 2 * math.asin(math.sqrt(a)) - - # Earth's radius in kilometers - earth_radius_km = 6371.0 - distance_km = earth_radius_km * c - - # Estimate driving time (assume average 80 km/h with 25% extra for - # roads) - estimated_duration_minutes = int((distance_km * 1.25 / 80.0) * 60) - - return RouteInfo( - distance_km=distance_km, - duration_minutes=estimated_duration_minutes, - geometry=None, - ) - - def find_parks_along_route( - self, start_park: "Park", end_park: "Park", max_detour_km: float = 50 - ) -> List["Park"]: - """ - Find parks along a route within specified detour distance. - - Args: - start_park: Starting park - end_park: Ending park - max_detour_km: Maximum detour distance in kilometers - - Returns: - List of parks along the route - """ - from parks.models import Park - - if not hasattr(start_park, "location") or not hasattr(end_park, "location"): - return [] - - if not start_park.location or not end_park.location: - return [] - - start_coords = start_park.coordinates - end_coords = end_park.coordinates - - if not start_coords or not end_coords: - return [] - - start_point = Point(start_coords[1], start_coords[0], srid=4326) # lon, lat - # end_point is not used in this method - we use coordinates directly - - # Find all parks within a reasonable distance from both start and end - max_search_distance = Distance(km=max_detour_km * 2) - - candidate_parks = ( - Park.objects.filter( - location__point__distance_lte=( - start_point, - max_search_distance, - ) - ) - .exclude(id__in=[start_park.id, end_park.id]) - .select_related("location") - ) - - parks_along_route = [] - - for park in candidate_parks: - if not park.location or not park.location.point: - continue - - park_coords = park.coordinates - if not park_coords: - continue - - # Calculate detour distance - detour_distance = self._calculate_detour_distance( - Coordinates(*start_coords), - Coordinates(*end_coords), - Coordinates(*park_coords), - ) - - if detour_distance and detour_distance <= max_detour_km: - parks_along_route.append(park) - - return parks_along_route - - def _calculate_detour_distance( - self, start: Coordinates, end: Coordinates, waypoint: Coordinates - ) -> Optional[float]: - """ - Calculate the detour distance when visiting a waypoint. - """ - try: - # Direct route distance - direct_route = self.calculate_route(start, end) - if not direct_route: - return None - - # Route via waypoint - route_to_waypoint = self.calculate_route(start, waypoint) - route_from_waypoint = self.calculate_route(waypoint, end) - - if not route_to_waypoint or not route_from_waypoint: - return None - - detour_distance = ( - route_to_waypoint.distance_km + route_from_waypoint.distance_km - ) - direct_route.distance_km - return max(0, detour_distance) # Don't return negative detours - - except Exception as e: - logger.error(f"Failed to calculate detour distance: {e}") - return None - - def create_multi_park_trip(self, park_list: List["Park"]) -> Optional[RoadTrip]: - """ - Create optimized multi-park road trip using simple nearest neighbor heuristic. - - Args: - park_list: List of parks to visit - - Returns: - RoadTrip object with optimized route - """ - if len(park_list) < 2: - return None - - # For small numbers of parks, try all permutations - if len(park_list) <= 6: - return self._optimize_trip_exhaustive(park_list) - else: - return self._optimize_trip_nearest_neighbor(park_list) - - def _optimize_trip_exhaustive(self, park_list: List["Park"]) -> Optional[RoadTrip]: - """ - Find optimal route by testing all permutations (for small lists). - """ - best_trip = None - best_distance = float("inf") - - # Try all possible orders (excluding the first park as starting point) - for perm in permutations(park_list[1:]): - ordered_parks = [park_list[0]] + list(perm) - trip = self._create_trip_from_order(ordered_parks) - - if trip and trip.total_distance_km < best_distance: - best_distance = trip.total_distance_km - best_trip = trip - - return best_trip - - def _optimize_trip_nearest_neighbor( - self, park_list: List["Park"] - ) -> Optional[RoadTrip]: - """ - Optimize trip using nearest neighbor heuristic (for larger lists). - """ - if not park_list: - return None - - # Start with the first park - current_park = park_list[0] - ordered_parks = [current_park] - remaining_parks = park_list[1:] - - while remaining_parks: - # Find nearest unvisited park - nearest_park = None - min_distance = float("inf") - - current_coords = current_park.coordinates - if not current_coords: - break - - for park in remaining_parks: - park_coords = park.coordinates - if not park_coords: - continue - - route = self.calculate_route( - Coordinates(*current_coords), Coordinates(*park_coords) - ) - - if route and route.distance_km < min_distance: - min_distance = route.distance_km - nearest_park = park - - if nearest_park: - ordered_parks.append(nearest_park) - remaining_parks.remove(nearest_park) - current_park = nearest_park - else: - break - - return self._create_trip_from_order(ordered_parks) - - def _create_trip_from_order( - self, ordered_parks: List["Park"] - ) -> Optional[RoadTrip]: - """ - Create a RoadTrip object from an ordered list of parks. - """ - if len(ordered_parks) < 2: - return None - - legs = [] - total_distance = 0 - total_duration = 0 - - for i in range(len(ordered_parks) - 1): - from_park = ordered_parks[i] - to_park = ordered_parks[i + 1] - - from_coords = from_park.coordinates - to_coords = to_park.coordinates - - if not from_coords or not to_coords: - continue - - route = self.calculate_route( - Coordinates(*from_coords), Coordinates(*to_coords) - ) - - if route: - legs.append(TripLeg(from_park=from_park, to_park=to_park, route=route)) - total_distance += route.distance_km - total_duration += route.duration_minutes - - if not legs: - return None - - return RoadTrip( - parks=ordered_parks, - legs=legs, - total_distance_km=total_distance, - total_duration_minutes=total_duration, - ) - - def get_park_distances( - self, center_park: "Park", radius_km: float = 100 - ) -> List[Dict[str, Any]]: - """ - Get all parks within radius of a center park with distances. - - Args: - center_park: Center park for search - radius_km: Search radius in kilometers - - Returns: - List of dictionaries with park and distance information - """ - from parks.models import Park - - if not hasattr(center_park, "location") or not center_park.location: - return [] - - center_coords = center_park.coordinates - if not center_coords: - return [] - - center_point = Point(center_coords[1], center_coords[0], srid=4326) # lon, lat - search_distance = Distance(km=radius_km) - - nearby_parks = ( - Park.objects.filter( - location__point__distance_lte=(center_point, search_distance) - ) - .exclude(id=center_park.id) - .select_related("location") - ) - - results = [] - - for park in nearby_parks: - park_coords = park.coordinates - if not park_coords: - continue - - route = self.calculate_route( - Coordinates(*center_coords), Coordinates(*park_coords) - ) - - if route: - results.append( - { - "park": park, - "distance_km": route.distance_km, - "duration_minutes": route.duration_minutes, - "formatted_distance": route.formatted_distance, - "formatted_duration": route.formatted_duration, - } - ) - - # Sort by distance - results.sort(key=lambda x: x["distance_km"]) - - return results - - def geocode_park_if_needed(self, park: "Park") -> bool: - """ - Geocode park location if coordinates are missing. - - Args: - park: Park to geocode - - Returns: - True if geocoding succeeded or wasn't needed, False otherwise - """ - if not hasattr(park, "location") or not park.location: - return False - - location = park.location - - # If we already have coordinates, no need to geocode - if location.point: - return True - - # Build address string for geocoding - address_parts = [ - park.name, - location.street_address, - location.city, - location.state, - location.country, - ] - address = ", ".join(part for part in address_parts if part) - - if not address: - return False - - coords = self.geocode_address(address) - if coords: - location.set_coordinates(coords.latitude, coords.longitude) - location.save() - logger.info( - f"Geocoded park '{ - park.name}' to { - coords.latitude}, { - coords.longitude}" - ) - return True - - return False diff --git a/parks/signals.py b/parks/signals.py deleted file mode 100644 index 414863bd..00000000 --- a/parks/signals.py +++ /dev/null @@ -1,34 +0,0 @@ -from django.db.models.signals import post_save, post_delete -from django.dispatch import receiver -from django.db.models import Q - -from rides.models import Ride -from .models import Park - - -def update_park_ride_counts(park): - """Update ride_count and coaster_count for a park""" - operating_rides = Q(status="OPERATING") - - # Count total operating rides - ride_count = park.rides.filter(operating_rides).count() - - # Count total operating roller coasters - coaster_count = park.rides.filter(operating_rides, category="RC").count() - - # Update park counts - Park.objects.filter(id=park.id).update( - ride_count=ride_count, coaster_count=coaster_count - ) - - -@receiver(post_save, sender=Ride) -def ride_saved(sender, instance, **kwargs): - """Update park counts when a ride is saved""" - update_park_ride_counts(instance.park) - - -@receiver(post_delete, sender=Ride) -def ride_deleted(sender, instance, **kwargs): - """Update park counts when a ride is deleted""" - update_park_ride_counts(instance.park) diff --git a/parks/static/parks/css/search.css b/parks/static/parks/css/search.css deleted file mode 100644 index f6d0cc92..00000000 --- a/parks/static/parks/css/search.css +++ /dev/null @@ -1,411 +0,0 @@ -/* Enhanced Loading states */ -.htmx-request .htmx-indicator { - opacity: 1; -} -.htmx-request.htmx-indicator { - opacity: 1; -} -.htmx-indicator { - opacity: 0; - transition: opacity 200ms ease-in-out; -} - -/* Loading pulse animation */ -@keyframes loading-pulse { - 0%, 100% { opacity: 1; } - 50% { opacity: 0.5; } -} - -.htmx-request { - animation: loading-pulse 1.5s ease-in-out infinite; -} - -/* Results container transitions */ -#park-results { - transition: all 300ms cubic-bezier(0.4, 0, 0.2, 1); -} -.htmx-request #park-results { - opacity: 0.7; - transform: scale(0.98); -} -.htmx-settling #park-results { - opacity: 1; - transform: scale(1); -} - -/* Filter UI Enhancements */ -.quick-filter-btn { - @apply inline-flex items-center px-4 py-2 text-sm font-medium rounded-lg transition-all duration-200 ease-in-out; - @apply focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500; - @apply transform hover:scale-105 active:scale-95; - @apply border border-transparent; -} - -.quick-filter-btn:hover { - box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06); -} - -.filter-count { - @apply text-xs opacity-75 ml-1; -} - -/* Filter Chips Styling */ -.filter-chip { - @apply inline-flex items-center px-3 py-1 rounded-full text-sm bg-blue-100 text-blue-800; - @apply dark:bg-blue-800 dark:text-blue-100 transition-all duration-200; - animation: slideIn 0.3s ease-out; -} - -.filter-chip:hover { - @apply bg-blue-200 dark:bg-blue-700; - transform: translateY(-1px); -} - -.filter-chip .remove-btn { - @apply ml-2 inline-flex items-center justify-center w-4 h-4; - @apply text-blue-600 dark:text-blue-300 hover:text-blue-800 dark:hover:text-blue-100; - @apply focus:outline-none transition-colors duration-150; -} - -.filter-chip .remove-btn:hover { - transform: scale(1.1); -} - -@keyframes slideIn { - from { - opacity: 0; - transform: translateX(-10px); - } - to { - opacity: 1; - transform: translateX(0); - } -} - -/* Enhanced Search Input */ -.search-input { - @apply transition-all duration-200 ease-in-out; -} - -.search-input:focus { - @apply ring-2 ring-blue-500 border-blue-500; - box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.1); -} - -/* Enhanced Form Controls */ -.filter-field select, -.filter-field input[type="text"], -.filter-field input[type="number"], -.filter-field input[type="search"], -.form-field-wrapper input, -.form-field-wrapper select { - @apply transition-all duration-200 ease-in-out; - @apply border-gray-300 dark:border-gray-600 dark:bg-gray-700 dark:text-white; - @apply focus:border-blue-500 focus:ring-blue-500; - @apply rounded-md shadow-sm; -} - -.filter-field select:focus, -.filter-field input:focus, -.form-field-wrapper input:focus, -.form-field-wrapper select:focus { - box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.1); -} - -.filter-field input[type="checkbox"], -.form-field-wrapper input[type="checkbox"] { - @apply rounded transition-colors duration-200; - @apply text-blue-600 focus:ring-blue-500; - @apply border-gray-300 dark:border-gray-600; -} - -/* Enhanced Status Indicators */ -.status-indicator { - @apply inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium; - animation: fadeIn 0.3s ease-out; -} - -.status-indicator.filtered { - @apply bg-blue-100 text-blue-800 dark:bg-blue-800 dark:text-blue-100; -} - -.status-indicator.loading { - @apply bg-yellow-100 text-yellow-800 dark:bg-yellow-800 dark:text-yellow-100; -} - -@keyframes fadeIn { - from { opacity: 0; } - to { opacity: 1; } -} - -/* Grid/List transitions */ -.park-card { - transition: all 300ms cubic-bezier(0.4, 0, 0.2, 1); - position: relative; - background-color: white; - border-radius: 0.75rem; - border: 1px solid #e5e7eb; -} - -/* Grid view styles */ -.park-card[data-view-mode="grid"] { - display: flex; - flex-direction: column; -} -.park-card[data-view-mode="grid"]:hover { - transform: translateY(-4px); - box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04); -} - -/* List view styles */ -.park-card[data-view-mode="list"] { - display: flex; - gap: 1rem; - padding: 1rem; -} -.park-card[data-view-mode="list"]:hover { - background-color: #f9fafb; - transform: translateX(4px); -} - -/* Image containers */ -.park-card .image-container { - position: relative; - overflow: hidden; - border-radius: 0.5rem; -} -.park-card[data-view-mode="grid"] .image-container { - aspect-ratio: 16 / 9; - width: 100%; -} -.park-card[data-view-mode="list"] .image-container { - width: 6rem; - height: 6rem; - flex-shrink: 0; -} - -/* Content */ -.park-card .content { - display: flex; - flex-direction: column; - flex: 1; - min-width: 0; /* Enables text truncation in flex child */ -} - -/* Enhanced Status badges */ -.park-card .status-badge { - transition: all 200ms ease-in-out; - border-radius: 9999px; - font-weight: 500; -} -.park-card:hover .status-badge { - transform: scale(1.05); - box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); -} - -/* Images */ -.park-card img { - transition: transform 300ms ease-in-out; - object-fit: cover; - width: 100%; - height: 100%; -} -.park-card:hover img { - transform: scale(1.08); -} - -/* Placeholders for missing images */ -.park-card .placeholder { - background: linear-gradient(110deg, #ececec 8%, #f5f5f5 18%, #ececec 33%); - background-size: 200% 100%; - animation: shimmer 1.5s linear infinite; - border-radius: 0.5rem; -} - -@keyframes shimmer { - to { - background-position: 200% center; - } -} - -/* Enhanced No Results State */ -.no-results { - @apply text-center py-12; - animation: fadeInUp 0.5s ease-out; -} - -.no-results-icon { - @apply mx-auto w-24 h-24 text-gray-400 dark:text-gray-500 mb-6; - animation: float 3s ease-in-out infinite; -} - -@keyframes fadeInUp { - from { - opacity: 0; - transform: translateY(20px); - } - to { - opacity: 1; - transform: translateY(0); - } -} - -@keyframes float { - 0%, 100% { transform: translateY(0px); } - 50% { transform: translateY(-10px); } -} - -/* Enhanced Buttons */ -.btn-enhanced { - @apply transition-all duration-200 ease-in-out; - @apply transform hover:scale-105 active:scale-95; - @apply focus:outline-none focus:ring-2 focus:ring-offset-2; -} - -.btn-enhanced:hover { - box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06); -} - -/* Tooltip Styles */ -.tooltip { - @apply absolute z-50 px-2 py-1 text-xs font-medium text-white bg-gray-900 rounded shadow-lg; - @apply dark:bg-gray-700 dark:text-gray-200; - animation: tooltipFadeIn 0.2s ease-out; -} - -@keyframes tooltipFadeIn { - from { opacity: 0; transform: scale(0.8); } - to { opacity: 1; transform: scale(1); } -} - -/* Enhanced Dark mode */ -@media (prefers-color-scheme: dark) { - .park-card { - background-color: #1f2937; - border-color: #374151; - } - - .park-card[data-view-mode="list"]:hover { - background-color: #374151; - } - - .park-card .text-gray-900 { - color: #f3f4f6; - } - - .park-card .text-gray-500 { - color: #9ca3af; - } - - .park-card .placeholder { - background: linear-gradient(110deg, #2d3748 8%, #374151 18%, #2d3748 33%); - } - - .search-input { - @apply dark:bg-gray-700 dark:border-gray-600 dark:text-white; - } - - .quick-filter-btn:not(.active) { - @apply dark:bg-gray-700 dark:text-gray-300 dark:hover:bg-gray-600; - } - - /* Enhanced filter panel styling */ - .filter-container { - @apply dark:text-gray-200; - } - - .filter-field label { - @apply dark:text-gray-300; - } -} - -/* Additional enhancements for better visual hierarchy */ -.filter-container h3 { - @apply font-semibold tracking-wide; -} - -.filter-field { - @apply mb-4; -} - -.filter-field label { - @apply font-medium text-sm; -} - -/* Status badge improvements */ -.status-badge { - @apply inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium; - @apply transition-all duration-200; -} - -/* Loading state improvements */ -.htmx-request .filter-container { - @apply pointer-events-none; -} - -.htmx-request .quick-filter-btn { - @apply opacity-75; -} - -/* Mobile Responsive Enhancements */ -@media (max-width: 768px) { - .quick-filter-btn { - @apply text-xs px-2 py-1; - } - - .filter-chip { - @apply text-xs px-2 py-1; - } - - .park-card[data-view-mode="grid"]:hover { - transform: translateY(-2px); - } - - .park-card[data-view-mode="list"]:hover { - transform: none; - } -} - -/* Accessibility Enhancements */ -@media (prefers-reduced-motion: reduce) { - .park-card, - .quick-filter-btn, - .filter-chip, - .btn-enhanced { - transition: none; - animation: none; - } - - .park-card:hover { - transform: none; - } - - .no-results-icon { - animation: none; - } -} - -/* Focus States for Keyboard Navigation */ -.park-card:focus-within { - @apply ring-2 ring-blue-500 ring-offset-2; -} - -.quick-filter-btn:focus, -.filter-chip .remove-btn:focus { - @apply ring-2 ring-blue-500 ring-offset-2; -} - -/* High Contrast Mode Support */ -@media (prefers-contrast: high) { - .park-card { - border-width: 2px; - } - - .filter-chip { - border: 2px solid currentColor; - } - - .quick-filter-btn { - border: 2px solid currentColor; - } -} \ No newline at end of file diff --git a/parks/static/parks/js/search.js b/parks/static/parks/js/search.js deleted file mode 100644 index 07c30c45..00000000 --- a/parks/static/parks/js/search.js +++ /dev/null @@ -1,550 +0,0 @@ -/** - * Enhanced Parks Search and Filter Management - * Provides comprehensive UX improvements for the parks listing page - */ - -class ParkSearchManager { - constructor() { - this.debounceTimers = new Map(); - this.filterState = new Map(); - this.requestCount = 0; - this.lastRequestTime = 0; - - this.init(); - } - - init() { - this.setupEventListeners(); - this.initializeLazyLoading(); - this.setupKeyboardNavigation(); - this.restoreFilterState(); - this.setupPerformanceOptimizations(); - } - - setupEventListeners() { - // Enhanced HTMX request handling - document.addEventListener('htmx:configRequest', (evt) => this.handleConfigRequest(evt)); - document.addEventListener('htmx:beforeRequest', (evt) => this.handleBeforeRequest(evt)); - document.addEventListener('htmx:afterRequest', (evt) => this.handleAfterRequest(evt)); - document.addEventListener('htmx:responseError', (evt) => this.handleResponseError(evt)); - document.addEventListener('htmx:historyRestore', (evt) => this.handleHistoryRestore(evt)); - document.addEventListener('htmx:afterSwap', (evt) => this.handleAfterSwap(evt)); - - // Enhanced form interactions - document.addEventListener('input', (evt) => this.handleInput(evt)); - document.addEventListener('change', (evt) => this.handleChange(evt)); - document.addEventListener('focus', (evt) => this.handleFocus(evt)); - document.addEventListener('blur', (evt) => this.handleBlur(evt)); - - // Search suggestions - document.addEventListener('keydown', (evt) => this.handleKeydown(evt)); - - // Window events - window.addEventListener('beforeunload', () => this.saveFilterState()); - window.addEventListener('resize', this.debounce(() => this.handleResize(), 250)); - } - - handleConfigRequest(evt) { - // Preserve view mode - const parkResults = document.getElementById('park-results'); - if (parkResults) { - const viewMode = parkResults.getAttribute('data-view-mode'); - if (viewMode) { - evt.detail.parameters['view_mode'] = viewMode; - } - } - - // Preserve search terms - const searchInput = document.querySelector('input[name="search"]'); - if (searchInput && searchInput.value) { - evt.detail.parameters['search'] = searchInput.value; - } - - // Add request tracking - evt.detail.parameters['_req_id'] = ++this.requestCount; - this.lastRequestTime = Date.now(); - } - - handleBeforeRequest(evt) { - const target = evt.detail.target; - if (target) { - target.classList.add('htmx-requesting'); - this.showLoadingIndicator(target); - } - - // Disable form elements during request - this.toggleFormElements(false); - - // Track request analytics - this.trackFilterUsage(evt); - } - - handleAfterRequest(evt) { - const target = evt.detail.target; - if (target) { - target.classList.remove('htmx-requesting'); - this.hideLoadingIndicator(target); - } - - // Re-enable form elements - this.toggleFormElements(true); - - // Handle response timing - const responseTime = Date.now() - this.lastRequestTime; - if (responseTime > 3000) { - this.showPerformanceWarning(); - } - } - - handleResponseError(evt) { - this.hideLoadingIndicator(evt.detail.target); - this.toggleFormElements(true); - this.showErrorMessage('Failed to load results. Please try again.'); - } - - handleAfterSwap(evt) { - if (evt.detail.target.id === 'results-container') { - this.initializeLazyLoading(evt.detail.target); - this.updateResultsInfo(evt.detail.target); - this.animateResults(evt.detail.target); - } - } - - handleHistoryRestore(evt) { - const parkResults = document.getElementById('park-results'); - if (parkResults && evt.detail.path) { - const url = new URL(evt.detail.path, window.location.origin); - const viewMode = url.searchParams.get('view_mode'); - if (viewMode) { - parkResults.setAttribute('data-view-mode', viewMode); - } - } - - // Restore filter state from URL - this.restoreFiltersFromURL(evt.detail.path); - } - - handleInput(evt) { - if (evt.target.type === 'search' || evt.target.type === 'text') { - this.debounceInput(evt.target); - } - } - - handleChange(evt) { - if (evt.target.closest('#filter-form')) { - this.updateFilterState(); - this.saveFilterState(); - } - } - - handleFocus(evt) { - if (evt.target.type === 'search') { - this.highlightSearchSuggestions(evt.target); - } - } - - handleBlur(evt) { - if (evt.target.type === 'search') { - // Delay hiding suggestions to allow for clicks - setTimeout(() => this.hideSearchSuggestions(), 150); - } - } - - handleKeydown(evt) { - if (evt.target.type === 'search') { - this.handleSearchKeyboard(evt); - } - } - - handleResize() { - // Responsive adjustments - this.adjustLayoutForViewport(); - } - - debounceInput(input) { - const key = input.name || input.id; - if (this.debounceTimers.has(key)) { - clearTimeout(this.debounceTimers.get(key)); - } - - const delay = input.type === 'search' ? 300 : 500; - const timer = setTimeout(() => { - if (input.form) { - htmx.trigger(input.form, 'change'); - } - this.debounceTimers.delete(key); - }, delay); - - this.debounceTimers.set(key, timer); - } - - handleSearchKeyboard(evt) { - const suggestions = document.getElementById('search-results'); - if (!suggestions) return; - - const items = suggestions.querySelectorAll('[role="option"]'); - let activeIndex = Array.from(items).findIndex(item => - item.classList.contains('active') || item.classList.contains('highlighted') - ); - - switch (evt.key) { - case 'ArrowDown': - evt.preventDefault(); - activeIndex = Math.min(activeIndex + 1, items.length - 1); - this.highlightSuggestion(items, activeIndex); - break; - - case 'ArrowUp': - evt.preventDefault(); - activeIndex = Math.max(activeIndex - 1, -1); - this.highlightSuggestion(items, activeIndex); - break; - - case 'Enter': - if (activeIndex >= 0 && items[activeIndex]) { - evt.preventDefault(); - items[activeIndex].click(); - } - break; - - case 'Escape': - this.hideSearchSuggestions(); - evt.target.blur(); - break; - } - } - - highlightSuggestion(items, activeIndex) { - items.forEach((item, index) => { - item.classList.toggle('active', index === activeIndex); - item.classList.toggle('highlighted', index === activeIndex); - }); - } - - highlightSearchSuggestions(input) { - const suggestions = document.getElementById('search-results'); - if (suggestions && input.value) { - suggestions.style.display = 'block'; - } - } - - hideSearchSuggestions() { - const suggestions = document.getElementById('search-results'); - if (suggestions) { - suggestions.style.display = 'none'; - } - } - - initializeLazyLoading(container = document) { - if (!('IntersectionObserver' in window)) return; - - const imageObserver = new IntersectionObserver((entries) => { - entries.forEach(entry => { - if (entry.isIntersecting) { - const img = entry.target; - if (img.dataset.src) { - img.src = img.dataset.src; - img.removeAttribute('data-src'); - img.classList.add('loaded'); - imageObserver.unobserve(img); - } - } - }); - }, { - threshold: 0.1, - rootMargin: '50px' - }); - - container.querySelectorAll('img[data-src]').forEach(img => { - imageObserver.observe(img); - }); - } - - setupKeyboardNavigation() { - // Tab navigation for filter cards - document.addEventListener('keydown', (evt) => { - if (evt.key === 'Tab' && evt.target.closest('.park-card')) { - this.handleCardNavigation(evt); - } - }); - } - - setupPerformanceOptimizations() { - // Prefetch next page if pagination exists - this.setupPrefetching(); - - // Optimize scroll performance - this.setupScrollOptimization(); - } - - setupPrefetching() { - const nextPageLink = document.querySelector('a[rel="next"]'); - if (nextPageLink && 'IntersectionObserver' in window) { - const observer = new IntersectionObserver((entries) => { - entries.forEach(entry => { - if (entry.isIntersecting) { - this.prefetchPage(nextPageLink.href); - observer.unobserve(entry.target); - } - }); - }); - - const trigger = document.querySelector('.pagination'); - if (trigger) { - observer.observe(trigger); - } - } - } - - setupScrollOptimization() { - let ticking = false; - - window.addEventListener('scroll', () => { - if (!ticking) { - requestAnimationFrame(() => { - this.handleScroll(); - ticking = false; - }); - ticking = true; - } - }); - } - - handleScroll() { - // Show/hide back to top button - const backToTop = document.getElementById('back-to-top'); - if (backToTop) { - backToTop.style.display = window.scrollY > 500 ? 'block' : 'none'; - } - } - - prefetchPage(url) { - const link = document.createElement('link'); - link.rel = 'prefetch'; - link.href = url; - document.head.appendChild(link); - } - - showLoadingIndicator(target) { - // Add subtle loading animation - target.style.transition = 'opacity 0.3s ease-in-out'; - target.style.opacity = '0.7'; - } - - hideLoadingIndicator(target) { - target.style.opacity = '1'; - } - - toggleFormElements(enabled) { - const form = document.getElementById('filter-form'); - if (form) { - const elements = form.querySelectorAll('input, select, button'); - elements.forEach(el => { - el.disabled = !enabled; - }); - } - } - - updateFilterState() { - const form = document.getElementById('filter-form'); - if (!form) return; - - const formData = new FormData(form); - this.filterState.clear(); - - for (const [key, value] of formData.entries()) { - if (value && value !== '') { - this.filterState.set(key, value); - } - } - } - - saveFilterState() { - try { - const state = Object.fromEntries(this.filterState); - localStorage.setItem('parkFilters', JSON.stringify(state)); - } catch (e) { - console.warn('Failed to save filter state:', e); - } - } - - restoreFilterState() { - try { - const saved = localStorage.getItem('parkFilters'); - if (saved) { - const state = JSON.parse(saved); - this.applyFilterState(state); - } - } catch (e) { - console.warn('Failed to restore filter state:', e); - } - } - - restoreFiltersFromURL(path) { - const url = new URL(path, window.location.origin); - const params = new URLSearchParams(url.search); - - const form = document.getElementById('filter-form'); - if (!form) return; - - // Clear existing values - form.reset(); - - // Apply URL parameters - for (const [key, value] of params.entries()) { - const input = form.querySelector(`[name="${key}"]`); - if (input) { - if (input.type === 'checkbox') { - input.checked = value === 'on' || value === 'true'; - } else { - input.value = value; - } - } - } - } - - applyFilterState(state) { - const form = document.getElementById('filter-form'); - if (!form) return; - - Object.entries(state).forEach(([key, value]) => { - const input = form.querySelector(`[name="${key}"]`); - if (input) { - if (input.type === 'checkbox') { - input.checked = value === 'on' || value === 'true'; - } else { - input.value = value; - } - } - }); - } - - updateResultsInfo(container) { - // Update any result count displays - const countElements = container.querySelectorAll('[data-result-count]'); - countElements.forEach(el => { - const count = container.querySelectorAll('.park-card').length; - el.textContent = count; - }); - } - - animateResults(container) { - // Subtle animation for new results - const cards = container.querySelectorAll('.park-card'); - cards.forEach((card, index) => { - card.style.opacity = '0'; - card.style.transform = 'translateY(20px)'; - - setTimeout(() => { - card.style.transition = 'opacity 0.3s ease-out, transform 0.3s ease-out'; - card.style.opacity = '1'; - card.style.transform = 'translateY(0)'; - }, index * 50); - }); - } - - adjustLayoutForViewport() { - const viewport = window.innerWidth; - - // Adjust grid columns based on viewport - const grid = document.querySelector('.park-card-grid'); - if (grid) { - if (viewport < 768) { - grid.style.gridTemplateColumns = '1fr'; - } else if (viewport < 1024) { - grid.style.gridTemplateColumns = 'repeat(2, 1fr)'; - } else { - grid.style.gridTemplateColumns = 'repeat(3, 1fr)'; - } - } - } - - trackFilterUsage(evt) { - // Track which filters are being used for analytics - if (window.gtag) { - const formData = new FormData(evt.detail.elt); - const activeFilters = []; - - for (const [key, value] of formData.entries()) { - if (value && value !== '' && key !== 'csrfmiddlewaretoken') { - activeFilters.push(key); - } - } - - window.gtag('event', 'filter_usage', { - 'filters_used': activeFilters.join(','), - 'filter_count': activeFilters.length - }); - } - } - - showPerformanceWarning() { - // Show a subtle warning for slow responses - const warning = document.createElement('div'); - warning.className = 'fixed top-4 right-4 bg-yellow-100 border border-yellow-400 text-yellow-700 px-4 py-3 rounded z-50'; - warning.innerHTML = ` - Search is taking longer than expected... - - - - - - `; - - document.body.appendChild(warning); - - setTimeout(() => { - if (warning.parentElement) { - warning.remove(); - } - }, 5000); - } - - showErrorMessage(message) { - // Show error message with retry option - const errorDiv = document.createElement('div'); - errorDiv.className = 'fixed top-4 right-4 bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded z-50'; - errorDiv.innerHTML = ` - ${message} - - - - - - - `; - - document.body.appendChild(errorDiv); - - setTimeout(() => { - if (errorDiv.parentElement) { - errorDiv.remove(); - } - }, 10000); - } - - // Utility method for debouncing - debounce(func, wait) { - let timeout; - return function executedFunction(...args) { - const later = () => { - clearTimeout(timeout); - func(...args); - }; - clearTimeout(timeout); - timeout = setTimeout(later, wait); - }; - } -} - -// Initialize the enhanced search manager -document.addEventListener('DOMContentLoaded', () => { - window.parkSearchManager = new ParkSearchManager(); -}); - -// Export for potential module usage -if (typeof module !== 'undefined' && module.exports) { - module.exports = ParkSearchManager; -} \ No newline at end of file diff --git a/parks/templates/parks/park_list.html b/parks/templates/parks/park_list.html deleted file mode 100644 index 77122a71..00000000 --- a/parks/templates/parks/park_list.html +++ /dev/null @@ -1,518 +0,0 @@ -{% extends "core/search/layouts/filtered_list.html" %} -{% load static %} - -{% block title %}Parks - ThrillWiki{% endblock %} - -{% block list_actions %} -
-
-

Parks

- -
- View mode selection - - -
-
- - {% if user.is_authenticated %} - - - - - Add Park - - {% endif %} -
-{% endblock %} - -{% block filter_section %} - -
- {# Enhanced Search Section #} -
-
-
-
-
- - - -
- - - - - - -
- - - - - Searching... -
-
-
- -
- -
-
-
- - {# Active Filter Chips Section #} -
-
-
-

Active Filters

- -
-
- -
-
-
- - {# Filter Panel #} -
-
-
-

Filters

- -
- -
- - {% include "core/search/components/filter_form.html" with filter=filter %} -
-
-
-
- -{# Main Loading Indicator #} -
-
- - - - - Updating results... -
-
- - -{% endblock %} - -{% block results_list %} -
- - {# Results Header with Count and Sort #} -
-
-
-

- Parks - {% if parks %} - - ({{ parks|length }}{% if parks.has_other_pages %} of {{ parks.paginator.count }}{% endif %} found) - - {% endif %} -

- - {# Results status indicator #} - {% if request.GET.search or request.GET.park_type or request.GET.has_coasters or request.GET.min_rating or request.GET.big_parks_only %} - - Filtered - - {% endif %} -
- - {# Sort Options #} -
- - -
-
-
- - {# Results Content #} -
- {% if parks %} - {% include "parks/partials/park_list_item.html" with parks=parks view_mode=view_mode|default:'grid' %} - {% else %} - {# No Results Found #} -
-
- - - -
- -

No parks found

- - {% if request.GET.search %} -

- No parks match your search for "{{ request.GET.search }}". -
Try searching with different keywords or check your spelling. -

- {% elif request.GET.park_type or request.GET.has_coasters or request.GET.min_rating or request.GET.big_parks_only %} -

- No parks match your current filter criteria. -
Try adjusting your filters or removing some restrictions. -

- {% else %} -

- No parks are currently available in the database. -

- {% endif %} - -
- {% if request.GET.search or request.GET.park_type or request.GET.has_coasters or request.GET.min_rating or request.GET.big_parks_only %} - -
- - - - - View all parks - - {% endif %} - - {% if user.is_authenticated %} - - {% endif %} -
-
- {% endif %} -
-
- - -{% endblock %} \ No newline at end of file diff --git a/parks/templates/parks/partials/park_list_item.html b/parks/templates/parks/partials/park_list_item.html deleted file mode 100644 index c5412ed4..00000000 --- a/parks/templates/parks/partials/park_list_item.html +++ /dev/null @@ -1,88 +0,0 @@ -{% load static %} - -{% if error %} -
-
- - - - {{ error }} -
-
-{% else %} -
- {% for park in object_list|default:parks %} -
-
-

- - {{ park.name }} - -

- -
- - {{ park.get_status_display }} - -
- - {% if park.operator %} -
- {{ park.operator.name }} -
- {% endif %} - - {% if park.description %} -

- {{ park.description|truncatewords:20 }} -

- {% endif %} - - {% if park.ride_count or park.coaster_count %} -
- {% if park.ride_count %} - - - - - {{ park.ride_count }} rides - - {% endif %} - {% if park.coaster_count %} - - - - - {{ park.coaster_count }} coasters - - {% endif %} -
- {% endif %} -
-
- {% empty %} -
-
- - - -
-

No parks found

-
- {% if search_query %} -

No parks found matching "{{ search_query }}". Try adjusting your search terms.

- {% else %} -

No parks found matching your criteria. Try adjusting your filters.

- {% endif %} - {% if user.is_authenticated %} -

You can also add a new park.

- {% endif %} -
-
- {% endfor %} -
-{% endif %} \ No newline at end of file diff --git a/parks/templates/parks/partials/park_suggestions.html b/parks/templates/parks/partials/park_suggestions.html deleted file mode 100644 index 08ca105f..00000000 --- a/parks/templates/parks/partials/park_suggestions.html +++ /dev/null @@ -1,30 +0,0 @@ -{% load static %} - -{% if parks %} -
- {% for park in parks %} - - {% endfor %} -
-{% else %} -
- No parks found matching "{{ query }}" -
-{% endif %} \ No newline at end of file diff --git a/parks/templates/parks/partials/search_suggestions.html b/parks/templates/parks/partials/search_suggestions.html deleted file mode 100644 index c8402573..00000000 --- a/parks/templates/parks/partials/search_suggestions.html +++ /dev/null @@ -1,36 +0,0 @@ -{% if suggestions %} -
- {% for park in suggestions %} - {% with location=park.location.first %} - - {% endwith %} - {% endfor %} -
-{% endif %} \ No newline at end of file diff --git a/parks/templatetags/park_tags.py b/parks/templatetags/park_tags.py deleted file mode 100644 index 42c15e64..00000000 --- a/parks/templatetags/park_tags.py +++ /dev/null @@ -1,11 +0,0 @@ -from django import template - -register = template.Library() - - -@register.filter -def has_reviewed_park(user, park): - """Check if a user has reviewed a park""" - if not user.is_authenticated: - return False - return park.reviews.filter(user=user).exists() diff --git a/parks/tests.py b/parks/tests.py deleted file mode 100644 index df80cac6..00000000 --- a/parks/tests.py +++ /dev/null @@ -1,117 +0,0 @@ -from django.test import TestCase, Client -from django.contrib.auth import get_user_model -from parks.models import Park, ParkArea, ParkLocation, Company as Operator - -User = get_user_model() - - -def create_test_location(park: Park) -> ParkLocation: - """Helper function to create a test location""" - park_location = ParkLocation.objects.create( - park=park, - street_address="123 Test St", - city="Test City", - state="TS", - country="Test Country", - postal_code="12345", - ) - # Set coordinates using the helper method - park_location.set_coordinates(34.0522, -118.2437) # latitude, longitude - park_location.save() - return park_location - - -class ParkModelTests(TestCase): - @classmethod - def setUpTestData(cls) -> None: - # Create test user - cls.user = User.objects.create_user( - username="testuser", - email="test@example.com", - password="testpass123", - ) - - # Create test company - cls.operator = Operator.objects.create( - name="Test Company", website="http://example.com" - ) - - # Create test park - cls.park = Park.objects.create( - name="Test Park", - operator=cls.operator, - status="OPERATING", - website="http://testpark.com", - ) - - # Create test location - cls.location = create_test_location(cls.park) - - def test_park_creation(self) -> None: - """Test park instance creation and field values""" - self.assertEqual(self.park.name, "Test Park") - self.assertEqual(self.park.operator, self.operator) - self.assertEqual(self.park.status, "OPERATING") - self.assertEqual(self.park.website, "http://testpark.com") - self.assertTrue(self.park.slug) - - def test_park_str_representation(self) -> None: - """Test string representation of park""" - self.assertEqual(str(self.park), "Test Park") - - def test_park_coordinates(self) -> None: - """Test park coordinates property""" - coords = self.park.coordinates - self.assertIsNotNone(coords) - if coords: - self.assertAlmostEqual(coords[0], 34.0522, places=4) # latitude - self.assertAlmostEqual(coords[1], -118.2437, places=4) # longitude - - def test_park_formatted_location(self) -> None: - """Test park formatted_location property""" - expected = "123 Test St, Test City, TS, 12345, Test Country" - self.assertEqual(self.park.formatted_location, expected) - - -class ParkAreaTests(TestCase): - def setUp(self) -> None: - # Create test company - self.operator = Operator.objects.create( - name="Test Company", website="http://example.com" - ) - - # Create test park - self.park = Park.objects.create( - name="Test Park", operator=self.operator, status="OPERATING" - ) - - # Create test location - self.location = create_test_location(self.park) - - # Create test area - self.area = ParkArea.objects.create( - park=self.park, name="Test Area", description="Test Description" - ) - - def test_area_creation(self) -> None: - """Test park area creation""" - self.assertEqual(self.area.name, "Test Area") - self.assertEqual(self.area.park, self.park) - self.assertTrue(self.area.slug) - - -class ParkViewTests(TestCase): - def setUp(self) -> None: - self.client = Client() - self.user = User.objects.create_user( - username="testuser", - email="test@example.com", - password="testpass123", - ) - self.operator = Operator.objects.create( - name="Test Company", website="http://example.com" - ) - self.park = Park.objects.create( - name="Test Park", operator=self.operator, status="OPERATING" - ) - self.location = create_test_location(self.park) diff --git a/parks/tests_disabled/README.md b/parks/tests_disabled/README.md deleted file mode 100644 index e9e1b376..00000000 --- a/parks/tests_disabled/README.md +++ /dev/null @@ -1,127 +0,0 @@ -# Park Search Tests - -## Overview - -Test suite for the park search functionality including: -- Autocomplete widget integration -- Search form validation -- Filter integration -- HTMX interaction -- View mode persistence - -## Running Tests - -```bash -# Run all park tests -uv run pytest parks/tests/ - -# Run specific search tests -uv run pytest parks/tests/test_search.py - -# Run with coverage -uv run pytest --cov=parks parks/tests/ -``` - -## Test Coverage - -### Search API Tests -- `test_search_json_format`: Validates API response structure -- `test_empty_search_json`: Tests empty search handling -- `test_search_format_validation`: Verifies all required fields and types -- `test_suggestion_limit`: Confirms 8-item result limit - -### Search Functionality Tests -- `test_autocomplete_results`: Validates real-time suggestion filtering -- `test_search_with_filters`: Tests filter integration with search -- `test_partial_match_search`: Verifies partial text matching works - -### UI Integration Tests -- `test_view_mode_persistence`: Ensures view mode is maintained -- `test_empty_search`: Tests default state behavior -- `test_htmx_request_handling`: Validates HTMX interactions - -### Data Format Tests -- Field types validation -- Location formatting -- Status display formatting -- URL generation -- Response structure - -### Frontend Integration -- HTMX partial updates -- Alpine.js state management -- Loading indicators -- View mode persistence -- Keyboard navigation - -### Test Commands -```bash -# Run all park tests -uv run pytest parks/tests/ - -# Run search tests specifically -uv run pytest parks/tests/test_search.py - -# Run with coverage -uv run pytest --cov=parks parks/tests/ -``` - -### Coverage Areas -1. Search Functionality: - - Suggestion generation - - Result filtering - - Partial matching - - Empty state handling - -2. UI Integration: - - HTMX requests - - View mode switching - - Loading states - - Error handling - -3. Performance: - - Result limiting - - Debouncing - - Query optimization - -4. Accessibility: - - ARIA attributes - - Keyboard controls - - Screen reader support - -## Configuration - -Tests use pytest-django and require: -- PostgreSQL database -- HTMX middleware -- Autocomplete app configuration - -## Fixtures - -The test suite uses standard Django test fixtures. No additional fixtures required. - -## Common Issues - -1. Database Errors - - Ensure PostGIS extensions are installed - - Verify database permissions - -2. HTMX Tests - - Use `HTTP_HX_REQUEST` header for HTMX requests - - Check response content for HTMX attributes - -## Adding New Tests - -When adding tests, ensure: -1. Database isolation using `@pytest.mark.django_db` -2. Proper test naming following `test_*` convention -3. Clear test descriptions in docstrings -4. Coverage for both success and failure cases -5. HTMX interaction testing where applicable - -## Future Improvements - -- Add performance benchmarks -- Include accessibility tests -- Add Playwright e2e tests -- Implement geographic search tests \ No newline at end of file diff --git a/parks/tests_disabled/__init__.py b/parks/tests_disabled/__init__.py deleted file mode 100644 index 2aab9a8d..00000000 --- a/parks/tests_disabled/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Parks app test suite diff --git a/parks/tests_disabled/test_filters.py b/parks/tests_disabled/test_filters.py deleted file mode 100644 index 11a369b3..00000000 --- a/parks/tests_disabled/test_filters.py +++ /dev/null @@ -1,223 +0,0 @@ -""" -Tests for park filtering functionality including search, status filtering, -date ranges, and numeric validations. -""" - -from django.test import TestCase -from datetime import date - -from parks.models import Park, ParkLocation, Company -from parks.filters import ParkFilter - -# NOTE: These tests need to be updated to work with the new ParkLocation model -# instead of the generic Location model - - -class ParkFilterTests(TestCase): - @classmethod - def setUpTestData(cls): - """Set up test data for all filter tests""" - # Create operators - cls.operator1 = Company.objects.create( - name="Thrilling Adventures Inc", slug="thrilling-adventures" - ) - cls.operator2 = Company.objects.create( - name="Family Fun Corp", slug="family-fun" - ) - - # Create parks with various attributes for testing all filters - cls.park1 = Park.objects.create( - name="Thrilling Adventures Park", - description="A thrilling park with lots of roller coasters", - status="OPERATING", - operator=cls.operator1, - opening_date=date(2020, 1, 1), - size_acres=100, - ride_count=20, - coaster_count=5, - average_rating=4.5, - ) - ParkLocation.objects.create( - park=cls.park1, - street_address="123 Thrill St", - city="Thrill City", - state="Thrill State", - country="USA", - postal_code="12345", - ) - - cls.park2 = Park.objects.create( - name="Family Fun Park", - description="Family-friendly entertainment and attractions", - status="CLOSED_TEMP", - operator=cls.operator2, - opening_date=date(2015, 6, 15), - size_acres=50, - ride_count=15, - coaster_count=2, - average_rating=4.0, - ) - ParkLocation.objects.create( - park=cls.park2, - street_address="456 Fun St", - city="Fun City", - state="Fun State", - country="Canada", - postal_code="54321", - ) - - # Park with minimal data for edge case testing - cls.park3 = Park.objects.create( - name="Incomplete Park", - status="UNDER_CONSTRUCTION", - operator=cls.operator1, - ) - - def test_text_search(self): - """Test search functionality across different fields""" - # Test name search - queryset = ParkFilter(data={"search": "Thrilling"}).qs - self.assertEqual(queryset.count(), 1) - self.assertIn(self.park1, queryset) - - # Test description search - queryset = ParkFilter(data={"search": "family-friendly"}).qs - self.assertEqual(queryset.count(), 1) - self.assertIn(self.park2, queryset) - - # Test location search - queryset = ParkFilter(data={"search": "Thrill City"}).qs - self.assertEqual(queryset.count(), 1) - self.assertIn(self.park1, queryset) - - # Test combined field search - queryset = ParkFilter(data={"search": "Park"}).qs - self.assertEqual(queryset.count(), 3) - - # Test empty search - queryset = ParkFilter(data={}).qs - self.assertEqual(queryset.count(), 3) - - def test_status_filtering(self): - """Test status filter with various values""" - # Test each status - status_tests = { - "OPERATING": [self.park1], - "CLOSED_TEMP": [self.park2], - "UNDER_CONSTRUCTION": [self.park3], - } - - for status, expected_parks in status_tests.items(): - queryset = ParkFilter(data={"status": status}).qs - self.assertEqual(queryset.count(), len(expected_parks)) - for park in expected_parks: - self.assertIn(park, queryset) - - # Test empty status (should return all) - queryset = ParkFilter(data={}).qs - self.assertEqual(queryset.count(), 3) - - # Test empty string status (should return all) - queryset = ParkFilter(data={"status": ""}).qs - self.assertEqual(queryset.count(), 3) - - # Test invalid status (should return no results) - queryset = ParkFilter(data={"status": "INVALID"}).qs - self.assertEqual(queryset.count(), 0) - - def test_date_range_filtering(self): - """Test date range filter functionality""" - # Test various date range scenarios - test_cases = [ - # Start date only - ({"opening_date_after": "2019-01-01"}, [self.park1]), - # End date only - ({"opening_date_before": "2016-01-01"}, [self.park2]), - # Date range including one park - ( - { - "opening_date_after": "2014-01-01", - "opening_date_before": "2016-01-01", - }, - [self.park2], - ), - # Date range including multiple parks - ( - { - "opening_date_after": "2014-01-01", - "opening_date_before": "2022-01-01", - }, - [self.park1, self.park2], - ), - # Empty filter (should return all) - ({}, [self.park1, self.park2, self.park3]), - # Future date (should return none) - ({"opening_date_after": "2030-01-01"}, []), - ] - - for filter_data, expected_parks in test_cases: - queryset = ParkFilter(data=filter_data).qs - self.assertEqual( - set(queryset), - set(expected_parks), - f"Failed for filter: {filter_data}", - ) - - # Test invalid date formats - invalid_dates = [ - {"opening_date_after": "invalid-date"}, - {"opening_date_before": "2023-13-01"}, # Invalid month - {"opening_date_after": "2023-01-32"}, # Invalid day - {"opening_date_before": "not-a-date"}, - ] - - for invalid_data in invalid_dates: - filter_instance = ParkFilter(data=invalid_data) - self.assertFalse( - filter_instance.is_valid(), - f"Filter should be invalid for data: {invalid_data}", - ) - - def test_numeric_filtering(self): - """Test numeric filters with validation""" - # Test minimum rides filter - test_cases = [ - ({"min_rides": "18"}, [self.park1]), # Only park1 has >= 18 rides - ( - {"min_rides": "10"}, - [self.park1, self.park2], - ), # Both park1 and park2 have >= 10 rides - ( - {"min_rides": "0"}, - [self.park1, self.park2, self.park3], - ), # All parks have >= 0 rides - # No filter should return all - ({}, [self.park1, self.park2, self.park3]), - ] - - for filter_data, expected_parks in test_cases: - queryset = ParkFilter(data=filter_data).qs - self.assertEqual( - set(queryset), - set(expected_parks), - f"Failed for filter: {filter_data}", - ) - - # Test coaster count filter - queryset = ParkFilter(data={"min_coasters": "3"}).qs - self.assertEqual(queryset.count(), 1) - self.assertIn(self.park1, queryset) - - # Test size filter - queryset = ParkFilter(data={"min_size": "75"}).qs - self.assertEqual(queryset.count(), 1) - self.assertIn(self.park1, queryset) - - # Test validation - invalid_values = ["-1", "invalid", "0.5"] - for value in invalid_values: - filter_instance = ParkFilter(data={"min_rides": value}) - self.assertFalse( - filter_instance.is_valid(), - f"Filter should be invalid for value: {value}", - ) diff --git a/parks/tests_disabled/test_models.py b/parks/tests_disabled/test_models.py deleted file mode 100644 index 01dd0339..00000000 --- a/parks/tests_disabled/test_models.py +++ /dev/null @@ -1,176 +0,0 @@ -""" -Tests for park models functionality including CRUD operations, -slug handling, status management, and location integration. -""" - -from django.test import TestCase -from django.db import IntegrityError - -from parks.models import Park, ParkArea, ParkLocation, Company - -# NOTE: These tests need to be updated to work with the new ParkLocation model -# instead of the generic Location model - - -class ParkModelTests(TestCase): - def setUp(self): - """Set up test data""" - self.operator = Company.objects.create(name="Test Company", slug="test-company") - - # Create a basic park - self.park = Park.objects.create( - name="Test Park", - description="A test park", - status="OPERATING", - operator=self.operator, - ) - - # Create location for the park - self.location = ParkLocation.objects.create( - park=self.park, - street_address="123 Test St", - city="Test City", - state="Test State", - country="Test Country", - postal_code="12345", - ) - self.location.set_coordinates(40.7128, -74.0060) - self.location.save() - - def test_park_creation(self): - """Test basic park creation and fields""" - self.assertEqual(self.park.name, "Test Park") - self.assertEqual(self.park.slug, "test-park") - self.assertEqual(self.park.status, "OPERATING") - self.assertEqual(self.park.operator, self.operator) - - def test_slug_generation(self): - """Test automatic slug generation""" - park = Park.objects.create( - name="Another Test Park", - status="OPERATING", - operator=self.operator, - ) - self.assertEqual(park.slug, "another-test-park") - - def test_historical_slug_lookup(self): - """Test finding park by historical slug""" - from django.db import transaction - from django.contrib.contenttypes.models import ContentType - from core.history import HistoricalSlug - - with transaction.atomic(): - # Create initial park with a specific name/slug - park = Park.objects.create( - name="Original Park Name", - description="Test description", - status="OPERATING", - operator=self.operator, - ) - original_slug = park.slug - print(f"\nInitial park created with slug: {original_slug}") - - # Ensure we have a save to trigger history - park.save() - - # Modify name to trigger slug change - park.name = "Updated Park Name" - park.save() - new_slug = park.slug - print(f"Park updated with new slug: {new_slug}") - - # Check HistoricalSlug records - historical_slugs = HistoricalSlug.objects.filter( - content_type=ContentType.objects.get_for_model(Park), - object_id=park.id, - ) - print(f"Historical slug records: {[h.slug for h in historical_slugs]}") - - # Check pghistory records - event_model = getattr(Park, "event_model", None) - if event_model: - historical_records = event_model.objects.filter( - pgh_obj_id=park.id - ).order_by("-pgh_created_at") - print(f"\nPG History records:") - for record in historical_records: - print(f"- Event ID: {record.pgh_id}") - print(f" Name: {record.name}") - print(f" Slug: {record.slug}") - print(f" Created At: {record.pgh_created_at}") - else: - print("\nNo pghistory event model available") - - # Try to find by old slug - found_park, is_historical = Park.get_by_slug(original_slug) - self.assertEqual(found_park.id, park.id) - print( - f"Found park by old slug: { - found_park.slug}, is_historical: {is_historical}" - ) - self.assertTrue(is_historical) - - # Try current slug - found_park, is_historical = Park.get_by_slug(new_slug) - self.assertEqual(found_park.id, park.id) - print( - f"Found park by new slug: { - found_park.slug}, is_historical: {is_historical}" - ) - self.assertFalse(is_historical) - - def test_status_color_mapping(self): - """Test status color class mapping""" - status_tests = { - "OPERATING": "bg-green-100 text-green-800", - "CLOSED_TEMP": "bg-yellow-100 text-yellow-800", - "CLOSED_PERM": "bg-red-100 text-red-800", - "UNDER_CONSTRUCTION": "bg-blue-100 text-blue-800", - "DEMOLISHED": "bg-gray-100 text-gray-800", - "RELOCATED": "bg-purple-100 text-purple-800", - } - - for status, expected_color in status_tests.items(): - self.park.status = status - self.assertEqual(self.park.get_status_color(), expected_color) - - def test_absolute_url(self): - """Test get_absolute_url method""" - expected_url = f"/parks/{self.park.slug}/" - self.assertEqual(self.park.get_absolute_url(), expected_url) - - -class ParkAreaModelTests(TestCase): - def setUp(self): - """Set up test data""" - self.operator = Company.objects.create( - name="Test Company 2", slug="test-company-2" - ) - self.park = Park.objects.create( - name="Test Park", status="OPERATING", operator=self.operator - ) - self.area = ParkArea.objects.create( - park=self.park, name="Test Area", description="A test area" - ) - - def test_area_creation(self): - """Test basic area creation and fields""" - self.assertEqual(self.area.name, "Test Area") - self.assertEqual(self.area.slug, "test-area") - self.assertEqual(self.area.park, self.park) - - def test_unique_together_constraint(self): - """Test unique_together constraint for park and slug""" - from django.db import transaction - - # Try to create area with same slug in same park - with transaction.atomic(): - with self.assertRaises(IntegrityError): - ParkArea.objects.create( - park=self.park, name="Test Area" # Will generate same slug - ) - - # Should be able to use same name in different park - other_park = Park.objects.create(name="Other Park", operator=self.operator) - area = ParkArea.objects.create(park=other_park, name="Test Area") - self.assertEqual(area.slug, "test-area") diff --git a/parks/tests_disabled/test_search.py b/parks/tests_disabled/test_search.py deleted file mode 100644 index a66b2441..00000000 --- a/parks/tests_disabled/test_search.py +++ /dev/null @@ -1,173 +0,0 @@ -import pytest -from django.urls import reverse -from django.test import Client - -from parks.models import Park -from parks.forms import ParkAutocomplete, ParkSearchForm - - -@pytest.mark.django_db -class TestParkSearch: - def test_autocomplete_results(self, client: Client): - """Test that autocomplete returns correct results""" - # Create test parks - park1 = Park.objects.create(name="Test Park") - park2 = Park.objects.create(name="Another Park") - park3 = Park.objects.create(name="Test Garden") - - # Get autocomplete results - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": "Test"}) - - # Check response - assert response.status_code == 200 - content = response.content.decode() - assert park1.name in content - assert park3.name in content - assert park2.name not in content - - def test_search_form_valid(self): - """Test ParkSearchForm validation""" - form = ParkSearchForm(data={}) - assert form.is_valid() - - def test_autocomplete_class(self): - """Test ParkAutocomplete configuration""" - ac = ParkAutocomplete() - assert ac.model == Park - assert "name" in ac.search_attrs - - def test_search_with_filters(self, client: Client): - """Test search works with filters""" - park = Park.objects.create(name="Test Park", status="OPERATING") - - # Search with status filter - url = reverse("parks:park_list") - response = client.get(url, {"park": str(park.pk), "status": "OPERATING"}) - - assert response.status_code == 200 - assert park.name in response.content.decode() - - def test_empty_search(self, client: Client): - """Test empty search returns all parks""" - Park.objects.create(name="Test Park") - Park.objects.create(name="Another Park") - - url = reverse("parks:park_list") - response = client.get(url) - - assert response.status_code == 200 - content = response.content.decode() - assert "Test Park" in content - assert "Another Park" in content - - def test_partial_match_search(self, client: Client): - """Test partial matching in search""" - Park.objects.create(name="Adventure World") - Park.objects.create(name="Water Adventure") - - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": "Adv"}) - - assert response.status_code == 200 - content = response.content.decode() - assert "Adventure World" in content - assert "Water Adventure" in content - - def test_htmx_request_handling(self, client: Client): - """Test HTMX-specific request handling""" - Park.objects.create(name="Test Park") - - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": "Test"}, HTTP_HX_REQUEST="true") - - assert response.status_code == 200 - assert "Test Park" in response.content.decode() - - def test_view_mode_persistence(self, client: Client): - """Test view mode is maintained during search""" - Park.objects.create(name="Test Park") - - url = reverse("parks:park_list") - response = client.get(url, {"park": "Test", "view_mode": "list"}) - - assert response.status_code == 200 - assert 'data-view-mode="list"' in response.content.decode() - - def test_suggestion_limit(self, client: Client): - """Test that suggestions are limited to 8 items""" - # Create 10 parks - for i in range(10): - Park.objects.create(name=f"Test Park {i}") - - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": "Test"}) - - content = response.content.decode() - result_count = content.count("Test Park") - assert result_count == 8 # Verify limit is enforced - - def test_search_json_format(self, client: Client): - """Test that search returns properly formatted JSON""" - park = Park.objects.create( - name="Test Park", - status="OPERATING", - city="Test City", - state="Test State", - ) - - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": "Test"}) - - assert response.status_code == 200 - data = response.json() - assert "results" in data - assert len(data["results"]) == 1 - - result = data["results"][0] - assert result["id"] == str(park.pk) - assert result["name"] == "Test Park" - assert result["status"] == "Operating" - assert result["location"] == park.formatted_location - assert result["url"] == reverse("parks:park_detail", kwargs={"slug": park.slug}) - - def test_empty_search_json(self, client: Client): - """Test empty search returns empty results array""" - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": ""}) - - assert response.status_code == 200 - data = response.json() - assert "results" in data - assert len(data["results"]) == 0 - - def test_search_format_validation(self, client: Client): - """Test that all fields are properly formatted in search results""" - Park.objects.create( - name="Test Park", - status="OPERATING", - city="Test City", - state="Test State", - country="Test Country", - ) - - expected_fields = {"id", "name", "status", "location", "url"} - - url = reverse("parks:suggest_parks") - response = client.get(url, {"search": "Test"}) - data = response.json() - result = data["results"][0] - - # Check all expected fields are present - assert set(result.keys()) == expected_fields - - # Check field types - assert isinstance(result["id"], str) - assert isinstance(result["name"], str) - assert isinstance(result["status"], str) - assert isinstance(result["location"], str) - assert isinstance(result["url"], str) - - # Check formatted location includes city and state - assert "Test City" in result["location"] - assert "Test State" in result["location"] diff --git a/parks/urls.py b/parks/urls.py deleted file mode 100644 index aa913c2d..00000000 --- a/parks/urls.py +++ /dev/null @@ -1,108 +0,0 @@ -from django.urls import path, include -from . import views, views_search -from rides.views import ParkSingleCategoryListView -from .views_roadtrip import ( - RoadTripPlannerView, - CreateTripView, - TripDetailView, - FindParksAlongRouteView, - GeocodeAddressView, - ParkDistanceCalculatorView, -) - -app_name = "parks" - -urlpatterns = [ - # Park views with autocomplete search - path("", views.ParkListView.as_view(), name="park_list"), - path("create/", views.ParkCreateView.as_view(), name="park_create"), - # Add park button endpoint (moved before park detail pattern) - path("add-park-button/", views.add_park_button, name="add_park_button"), - # Location search endpoints - path("search/location/", views.location_search, name="location_search"), - path( - "search/reverse-geocode/", - views.reverse_geocode, - name="reverse_geocode", - ), - # Areas and search endpoints for HTMX - path("areas/", views.get_park_areas, name="get_park_areas"), - path("suggest_parks/", views_search.suggest_parks, name="suggest_parks"), - path("search/", views.search_parks, name="search_parks"), - # Road trip planning URLs - path("roadtrip/", RoadTripPlannerView.as_view(), name="roadtrip_planner"), - path("roadtrip/create/", CreateTripView.as_view(), name="roadtrip_create"), - path( - "roadtrip//", - TripDetailView.as_view(), - name="roadtrip_detail", - ), - # Road trip HTMX endpoints - path( - "roadtrip/htmx/parks-along-route/", - FindParksAlongRouteView.as_view(), - name="roadtrip_htmx_parks_along_route", - ), - path( - "roadtrip/htmx/geocode/", - GeocodeAddressView.as_view(), - name="roadtrip_htmx_geocode", - ), - path( - "roadtrip/htmx/distance/", - ParkDistanceCalculatorView.as_view(), - name="roadtrip_htmx_distance", - ), - # Park detail and related views - path("/", views.ParkDetailView.as_view(), name="park_detail"), - path("/edit/", views.ParkUpdateView.as_view(), name="park_update"), - path("/actions/", views.park_actions, name="park_actions"), - # Area views - path( - "/areas//", - views.ParkAreaDetailView.as_view(), - name="area_detail", - ), - # Park-specific category URLs - path( - "/roller_coasters/", - ParkSingleCategoryListView.as_view(), - {"category": "RC"}, - name="park_roller_coasters", - ), - path( - "/dark_rides/", - ParkSingleCategoryListView.as_view(), - {"category": "DR"}, - name="park_dark_rides", - ), - path( - "/flat_rides/", - ParkSingleCategoryListView.as_view(), - {"category": "FR"}, - name="park_flat_rides", - ), - path( - "/water_rides/", - ParkSingleCategoryListView.as_view(), - {"category": "WR"}, - name="park_water_rides", - ), - path( - "/transports/", - ParkSingleCategoryListView.as_view(), - {"category": "TR"}, - name="park_transports", - ), - path( - "/others/", - ParkSingleCategoryListView.as_view(), - {"category": "OT"}, - name="park_others", - ), - # Include park-specific rides URLs - path( - "/rides/", - include("rides.park_urls", namespace="rides"), - ), -] diff --git a/parks/views.py b/parks/views.py deleted file mode 100644 index 2675414c..00000000 --- a/parks/views.py +++ /dev/null @@ -1,854 +0,0 @@ -from .querysets import get_base_park_queryset -from core.mixins import HTMXFilterableMixin -from .models.location import ParkLocation -from media.models import Photo -from moderation.models import EditSubmission -from moderation.mixins import ( - EditSubmissionMixin, - PhotoSubmissionMixin, - HistoryMixin, -) -from core.views.views import SlugRedirectMixin -from .filters import ParkFilter -from .forms import ParkForm -from .models import Park, ParkArea, ParkReview as Review -from .services import ParkFilterService -from django.http import ( - HttpResponseRedirect, - HttpResponse, - HttpRequest, - JsonResponse, -) -from django.core.exceptions import ObjectDoesNotExist -from django.contrib import messages -from django.contrib.contenttypes.models import ContentType -from django.contrib.auth.mixins import LoginRequiredMixin -from django.db.models import QuerySet -from django.urls import reverse -from django.shortcuts import get_object_or_404, render -from decimal import InvalidOperation -from django.views.generic import DetailView, ListView, CreateView, UpdateView -import requests -from decimal import Decimal, ROUND_DOWN -from typing import Any, Optional, cast, Literal, Dict - -# Constants -PARK_DETAIL_URL = "parks:park_detail" -PARK_LIST_ITEM_TEMPLATE = "parks/partials/park_list_item.html" -REQUIRED_FIELDS_ERROR = ( - "Please correct the errors below. Required fields are marked with an asterisk (*)." -) -ALLOWED_ROLES = ["MODERATOR", "ADMIN", "SUPERUSER"] - - -ViewMode = Literal["grid", "list"] - - -def normalize_osm_result(result: dict) -> dict: - """Normalize OpenStreetMap result to a consistent format with enhanced address details""" # noqa: E501 - from .location_utils import get_english_name, normalize_coordinate - - # Get address details - address = result.get("address", {}) - - # Normalize coordinates - lat = normalize_coordinate(float(result.get("lat")), 9, 6) - lon = normalize_coordinate(float(result.get("lon")), 10, 6) - - # Get English names where possible - name = "" - if "namedetails" in result: - name = get_english_name(result["namedetails"]) - - # Build street address from available components - street_parts = [] - if address.get("house_number"): - street_parts.append(address["house_number"]) - if address.get("road") or address.get("street"): - street_parts.append(address.get("road") or address.get("street")) - elif address.get("pedestrian"): - street_parts.append(address["pedestrian"]) - elif address.get("footway"): - street_parts.append(address["footway"]) - - # Handle additional address components - suburb = address.get("suburb", "") - district = address.get("district", "") - neighborhood = address.get("neighbourhood", "") - - # Build city from available components - city = ( - address.get("city") - or address.get("town") - or address.get("village") - or address.get("municipality") - or "" - ) - - # Get detailed state/region information - state = ( - address.get("state") or address.get("province") or address.get("region") or "" - ) - - # Get postal code with fallbacks - postal_code = address.get("postcode") or address.get("postal_code") or "" - - return { - "display_name": name or result.get("display_name", ""), - "lat": lat, - "lon": lon, - "street": " ".join(street_parts).strip(), - "suburb": suburb, - "district": district, - "neighborhood": neighborhood, - "city": city, - "state": state, - "country": address.get("country", ""), - "postal_code": postal_code, - } - - -def get_view_mode(request: HttpRequest) -> ViewMode: - """Get the current view mode from request, defaulting to grid""" - view_mode = request.GET.get("view_mode", "grid") - return cast(ViewMode, "list" if view_mode == "list" else "grid") - - -def add_park_button(request: HttpRequest) -> HttpResponse: - """Return the add park button partial template""" - return render(request, "parks/partials/add_park_button.html") - - -def park_actions(request: HttpRequest, slug: str) -> HttpResponse: - """Return the park actions partial template""" - park = get_object_or_404(Park, slug=slug) - return render(request, "parks/partials/park_actions.html", {"park": park}) - - -def get_park_areas(request: HttpRequest) -> HttpResponse: - """Return park areas as options for a select element""" - park_id = request.GET.get("park") - if not park_id: - return HttpResponse('') - - try: - park = Park.objects.get(id=park_id) - areas = park.areas.all() - options = [''] - options.extend( - [f'' for area in areas] - ) - return HttpResponse("\n".join(options)) - except Park.DoesNotExist: - return HttpResponse('') - - -def location_search(request: HttpRequest) -> JsonResponse: - """Search for locations using OpenStreetMap Nominatim API""" - query = request.GET.get("q", "") - if not query: - return JsonResponse({"results": []}) - - response = requests.get( - "https://nominatim.openstreetmap.org/search", - params={ - "q": query, - "format": "json", - "addressdetails": 1, - "namedetails": 1, - "accept-language": "en", - "limit": 10, - }, - headers={"User-Agent": "ThrillWiki/1.0"}, - timeout=60, - ) - - if response.status_code == 200: - results = response.json() - normalized_results = [normalize_osm_result(result) for result in results] - valid_results = [ - r - for r in normalized_results - if r["lat"] is not None and r["lon"] is not None - ] - return JsonResponse({"results": valid_results}) - - return JsonResponse({"results": []}) - - -def reverse_geocode(request: HttpRequest) -> JsonResponse: - """Reverse geocode coordinates using OpenStreetMap Nominatim API""" - try: - lat = Decimal(request.GET.get("lat", "")) - lon = Decimal(request.GET.get("lon", "")) - except (TypeError, ValueError, InvalidOperation): - return JsonResponse({"error": "Invalid coordinates"}, status=400) - - if not lat or not lon: - return JsonResponse({"error": "Missing coordinates"}, status=400) - - lat = lat.quantize(Decimal("0.000001"), rounding=ROUND_DOWN) - lon = lon.quantize(Decimal("0.000001"), rounding=ROUND_DOWN) - - if lat < -90 or lat > 90: - return JsonResponse( - {"error": "Latitude must be between -90 and 90"}, status=400 - ) - if lon < -180 or lon > 180: - return JsonResponse( - {"error": "Longitude must be between -180 and 180"}, status=400 - ) - - response = requests.get( - "https://nominatim.openstreetmap.org/reverse", - params={ - "lat": str(lat), - "lon": str(lon), - "format": "json", - "addressdetails": 1, - "namedetails": 1, - "accept-language": "en", - }, - headers={"User-Agent": "ThrillWiki/1.0"}, - timeout=60, - ) - - if response.status_code == 200: - result = response.json() - normalized_result = normalize_osm_result(result) - if normalized_result["lat"] is None or normalized_result["lon"] is None: - return JsonResponse({"error": "Invalid coordinates"}, status=400) - return JsonResponse(normalized_result) - - return JsonResponse({"error": "Geocoding failed"}, status=500) - - -class ParkListView(HTMXFilterableMixin, ListView): - model = Park - template_name = "parks/park_list.html" - context_object_name = "parks" - filter_class = ParkFilter - paginate_by = 20 - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.filter_service = ParkFilterService() - - def get_template_names(self) -> list[str]: - """Return park_list_item.html for HTMX requests""" - if self.request.htmx: - return ["parks/partials/park_list_item.html"] - return [self.template_name] - - def get_view_mode(self) -> ViewMode: - """Get the current view mode (grid or list)""" - return get_view_mode(self.request) - - def get_queryset(self) -> QuerySet[Park]: - """Get optimized queryset with filter service""" - try: - # Use filter service for optimized filtering - filter_params = dict(self.request.GET.items()) - queryset = self.filter_service.get_filtered_queryset(filter_params) - - # Also create filterset for form rendering - self.filterset = self.filter_class(self.request.GET, queryset=queryset) - return self.filterset.qs - except Exception as e: - messages.error(self.request, f"Error loading parks: {str(e)}") - queryset = self.model.objects.none() - self.filterset = self.filter_class(self.request.GET, queryset=queryset) - return queryset - - def get_context_data(self, **kwargs: Any) -> dict[str, Any]: - """Add enhanced context with filter stats and suggestions""" - try: - # Initialize filterset if not exists - if not hasattr(self, "filterset"): - self.filterset = self.filter_class( - self.request.GET, queryset=self.model.objects.none() - ) - - context = super().get_context_data(**kwargs) - - # Add filter service data - filter_counts = self.filter_service.get_filter_counts() - popular_filters = self.filter_service.get_popular_filters() - - context.update( - { - "view_mode": self.get_view_mode(), - "is_search": bool(self.request.GET.get("search")), - "search_query": self.request.GET.get("search", ""), - "filter_counts": filter_counts, - "popular_filters": popular_filters, - "total_results": ( - context.get("paginator").count - if context.get("paginator") - else 0 - ), - } - ) - - # Add filter suggestions for search queries - search_query = self.request.GET.get("search", "") - if search_query: - context["filter_suggestions"] = ( - self.filter_service.get_filter_suggestions(search_query) - ) - - return context - - except Exception as e: - messages.error(self.request, f"Error applying filters: {str(e)}") - # Ensure filterset exists in error case - if not hasattr(self, "filterset"): - self.filterset = self.filter_class( - self.request.GET, queryset=self.model.objects.none() - ) - return { - "filter": self.filterset, - "error": "Unable to apply filters. Please try adjusting your criteria.", - "view_mode": self.get_view_mode(), - "is_search": bool(self.request.GET.get("search")), - "search_query": self.request.GET.get("search", ""), - } - - def _get_clean_filter_params(self) -> Dict[str, Any]: - """Extract and clean filter parameters from request.""" - filter_params = {} - - # Define valid filter fields - valid_filters = { - "status", - "operator", - "park_type", - "has_coasters", - "min_rating", - "big_parks_only", - "ordering", - "search", - } - - for param, value in self.request.GET.items(): - if param in valid_filters and value: - # Skip pagination parameter - if param == "page": - continue - - # Clean and validate the value - filter_params[param] = self._clean_filter_value(param, value) - - return {k: v for k, v in filter_params.items() if v is not None} - - def _clean_filter_value(self, param: str, value: str) -> Optional[Any]: - """Clean and validate a single filter value.""" - if param in ("has_coasters", "big_parks_only"): - # Boolean filters - return value.lower() in ("true", "1", "yes", "on") - elif param == "min_rating": - # Numeric filter - try: - rating = float(value) - if 0 <= rating <= 5: - return str(rating) - except (ValueError, TypeError): - pass # Skip invalid ratings - return None - elif param == "search": - # Search filter - clean_search = value.strip() - return clean_search if clean_search else None - else: - # String filters - return value.strip() - - def _build_filter_query_string(self, filter_params: Dict[str, Any]) -> str: - """Build query string from filter parameters.""" - from urllib.parse import urlencode - - # Convert boolean values to strings for URL - url_params = {} - for key, value in filter_params.items(): - if isinstance(value, bool): - url_params[key] = "true" if value else "false" - else: - url_params[key] = str(value) - - return urlencode(url_params) - - def _get_pagination_urls( - self, page_obj, filter_params: Dict[str, Any] - ) -> Dict[str, str]: - """Generate pagination URLs that preserve filter state.""" - - base_query = self._build_filter_query_string(filter_params) - pagination_urls = {} - - if page_obj.has_previous(): - prev_params = ( - f"{base_query}&page={page_obj.previous_page_number()}" - if base_query - else f"page={page_obj.previous_page_number()}" - ) - pagination_urls["previous_url"] = f"?{prev_params}" - - if page_obj.has_next(): - next_params = ( - f"{base_query}&page={page_obj.next_page_number()}" - if base_query - else f"page={page_obj.next_page_number()}" - ) - pagination_urls["next_url"] = f"?{next_params}" - - # First and last page URLs - if page_obj.number > 1: - first_params = f"{base_query}&page=1" if base_query else "page=1" - pagination_urls["first_url"] = f"?{first_params}" - - if page_obj.number < page_obj.paginator.num_pages: - last_params = ( - f"{base_query}&page={page_obj.paginator.num_pages}" - if base_query - else f"page={page_obj.paginator.num_pages}" - ) - pagination_urls["last_url"] = f"?{last_params}" - - return pagination_urls - - -def search_parks(request: HttpRequest) -> HttpResponse: - """Search parks and return results using park_list_item.html""" - try: - search_query = request.GET.get("search", "").strip() - if not search_query: - return HttpResponse("") - - # Get current view mode from request - current_view_mode = request.GET.get("view_mode", "grid") - park_filter = ParkFilter( - {"search": search_query}, queryset=get_base_park_queryset() - ) - - parks = park_filter.qs - if request.GET.get("quick_search"): - parks = parks[:8] # Limit quick search results - - response = render( - request, - PARK_LIST_ITEM_TEMPLATE, - { - "parks": parks, - "view_mode": current_view_mode, - "search_query": search_query, - "is_search": True, - }, - ) - response["HX-Trigger"] = "searchComplete" - return response - - except Exception as e: - response = render( - request, - PARK_LIST_ITEM_TEMPLATE, - { - "parks": [], - "error": f"Error performing search: {str(e)}", - "is_search": True, - }, - ) - response["HX-Trigger"] = "searchError" - return response - - -class ParkCreateView(LoginRequiredMixin, CreateView): - model = Park - form_class = ParkForm - template_name = "parks/park_form.html" - - def prepare_changes_data(self, cleaned_data: dict[str, Any]) -> dict[str, Any]: - data = cleaned_data.copy() - if data.get("owner"): - data["owner"] = data["owner"].id - if data.get("opening_date"): - data["opening_date"] = data["opening_date"].isoformat() - if data.get("closing_date"): - data["closing_date"] = data["closing_date"].isoformat() - decimal_fields = [ - "latitude", - "longitude", - "size_acres", - "average_rating", - ] - for field in decimal_fields: - if data.get(field): - data[field] = str(data[field]) - return data - - def normalize_coordinates(self, form: ParkForm) -> None: - if form.cleaned_data.get("latitude"): - lat = Decimal(str(form.cleaned_data["latitude"])) - form.cleaned_data["latitude"] = lat.quantize( - Decimal("0.000001"), rounding=ROUND_DOWN - ) - if form.cleaned_data.get("longitude"): - lon = Decimal(str(form.cleaned_data["longitude"])) - form.cleaned_data["longitude"] = lon.quantize( - Decimal("0.000001"), rounding=ROUND_DOWN - ) - - def form_valid(self, form: ParkForm) -> HttpResponse: - self.normalize_coordinates(form) - changes = self.prepare_changes_data(form.cleaned_data) - - submission = EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Park), - submission_type="CREATE", - changes=changes, - reason=self.request.POST.get("reason", ""), - source=self.request.POST.get("source", ""), - ) - - if ( - hasattr(self.request.user, "role") - and getattr(self.request.user, "role", None) in ALLOWED_ROLES - ): - try: - self.object = form.save() - submission.object_id = self.object.id - submission.status = "APPROVED" - submission.handled_by = self.request.user - submission.save() - - if form.cleaned_data.get("latitude") and form.cleaned_data.get( - "longitude" - ): - # Create or update ParkLocation - park_location, created = ParkLocation.objects.get_or_create( - park=self.object, - defaults={ - "street_address": form.cleaned_data.get( - "street_address", "" - ), - "city": form.cleaned_data.get("city", ""), - "state": form.cleaned_data.get("state", ""), - "country": form.cleaned_data.get("country", "USA"), - "postal_code": form.cleaned_data.get("postal_code", ""), - }, - ) - park_location.set_coordinates( - form.cleaned_data["latitude"], - form.cleaned_data["longitude"], - ) - park_location.save() - - photos = self.request.FILES.getlist("photos") - uploaded_count = 0 - for photo_file in photos: - try: - Photo.objects.create( - image=photo_file, - uploaded_by=self.request.user, - content_type=ContentType.objects.get_for_model(Park), - object_id=self.object.id, - ) - uploaded_count += 1 - except Exception as e: - messages.error( - self.request, - f"Error uploading photo { - photo_file.name}: { - str(e)}", - ) - - messages.success( - self.request, - f"Successfully created {self.object.name}. " - f"Added {uploaded_count} photo(s).", - ) - return HttpResponseRedirect(self.get_success_url()) - except Exception as e: - messages.error( - self.request, - f"Error creating park: { - str(e)}. Please check your input and try again.", - ) - return self.form_invalid(form) - - messages.success( - self.request, - "Your park submission has been sent for review. " - "You will be notified when it is approved.", - ) - for field, errors in form.errors.items(): - for error in errors: - messages.error(self.request, f"{field}: {error}") - return super().form_invalid(form) - - def get_success_url(self) -> str: - return reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug}) - - -class ParkUpdateView(LoginRequiredMixin, UpdateView): - model = Park - form_class = ParkForm - template_name = "parks/park_form.html" - - def get_context_data(self, **kwargs: Any) -> dict[str, Any]: - context = super().get_context_data(**kwargs) - context["is_edit"] = True - return context - - def prepare_changes_data(self, cleaned_data: dict[str, Any]) -> dict[str, Any]: - data = cleaned_data.copy() - if data.get("owner"): - data["owner"] = data["owner"].id - if data.get("opening_date"): - data["opening_date"] = data["opening_date"].isoformat() - if data.get("closing_date"): - data["closing_date"] = data["closing_date"].isoformat() - decimal_fields = [ - "latitude", - "longitude", - "size_acres", - "average_rating", - ] - for field in decimal_fields: - if data.get(field): - data[field] = str(data[field]) - return data - - def normalize_coordinates(self, form: ParkForm) -> None: - if form.cleaned_data.get("latitude"): - lat = Decimal(str(form.cleaned_data["latitude"])) - form.cleaned_data["latitude"] = lat.quantize( - Decimal("0.000001"), rounding=ROUND_DOWN - ) - if form.cleaned_data.get("longitude"): - lon = Decimal(str(form.cleaned_data["longitude"])) - form.cleaned_data["longitude"] = lon.quantize( - Decimal("0.000001"), rounding=ROUND_DOWN - ) - - def form_valid(self, form: ParkForm) -> HttpResponse: # noqa: C901 - self.normalize_coordinates(form) - changes = self.prepare_changes_data(form.cleaned_data) - - submission = EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Park), - object_id=self.object.id, - submission_type="EDIT", - changes=changes, - reason=self.request.POST.get("reason", ""), - source=self.request.POST.get("source", ""), - ) - - if ( - hasattr(self.request.user, "role") - and getattr(self.request.user, "role", None) in ALLOWED_ROLES - ): - try: - self.object = form.save() - submission.status = "APPROVED" - submission.handled_by = self.request.user - submission.save() - - location_data = { - "name": self.object.name, - "location_type": "park", - "latitude": form.cleaned_data.get("latitude"), - "longitude": form.cleaned_data.get("longitude"), - "street_address": form.cleaned_data.get("street_address", ""), - "city": form.cleaned_data.get("city", ""), - "state": form.cleaned_data.get("state", ""), - "country": form.cleaned_data.get("country", ""), - "postal_code": form.cleaned_data.get("postal_code", ""), - } - - # Create or update ParkLocation - try: - park_location = self.object.location - # Update existing location - for key, value in location_data.items(): - if key in ["latitude", "longitude"] and value: - continue # Handle coordinates separately - if hasattr(park_location, key): - setattr(park_location, key, value) - - # Handle coordinates if provided - if "latitude" in location_data and "longitude" in location_data: - if location_data["latitude"] and location_data["longitude"]: - park_location.set_coordinates( - float(location_data["latitude"]), - float(location_data["longitude"]), - ) - park_location.save() - except ParkLocation.DoesNotExist: - # Create new ParkLocation - coordinates_data = {} - if "latitude" in location_data and "longitude" in location_data: - if location_data["latitude"] and location_data["longitude"]: - coordinates_data = { - "latitude": float(location_data["latitude"]), - "longitude": float(location_data["longitude"]), - } - - # Remove coordinate fields from location_data for creation - creation_data = { - k: v - for k, v in location_data.items() - if k not in ["latitude", "longitude"] - } - creation_data.setdefault("country", "USA") - - park_location = ParkLocation.objects.create( - park=self.object, **creation_data - ) - - if coordinates_data: - park_location.set_coordinates( - coordinates_data["latitude"], - coordinates_data["longitude"], - ) - park_location.save() - - photos = self.request.FILES.getlist("photos") - uploaded_count = 0 - for photo_file in photos: - try: - Photo.objects.create( - image=photo_file, - uploaded_by=self.request.user, - content_type=ContentType.objects.get_for_model(Park), - object_id=self.object.id, - ) - uploaded_count += 1 - except Exception as e: - messages.error( - self.request, - f"Error uploading photo { - photo_file.name}: { - str(e)}", - ) - - messages.success( - self.request, - f"Successfully updated {self.object.name}. " - f"Added {uploaded_count} new photo(s).", - ) - return HttpResponseRedirect(self.get_success_url()) - except Exception as e: - messages.error( - self.request, - f"Error updating park: { - str(e)}. Please check your input and try again.", - ) - return self.form_invalid(form) - - messages.success( - self.request, - f"Your changes to {self.object.name} have been sent for review. " - "You will be notified when they are approved.", - ) - return HttpResponseRedirect( - reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug}) - ) - - def form_invalid(self, form: ParkForm) -> HttpResponse: - messages.error(self.request, REQUIRED_FIELDS_ERROR) - for field, errors in form.errors.items(): - for error in errors: - messages.error(self.request, f"{field}: {error}") - return super().form_invalid(form) - - def get_success_url(self) -> str: - return reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug}) - - -class ParkDetailView( - SlugRedirectMixin, - EditSubmissionMixin, - PhotoSubmissionMixin, - HistoryMixin, - DetailView, -): - model = Park - template_name = "parks/park_detail.html" - context_object_name = "park" - - def get_object(self, queryset: Optional[QuerySet[Park]] = None) -> Park: - if queryset is None: - queryset = self.get_queryset() - slug = self.kwargs.get(self.slug_url_kwarg) - if slug is None: - raise ObjectDoesNotExist("No slug provided") - park, _ = Park.get_by_slug(slug) - return park - - def get_queryset(self) -> QuerySet[Park]: - return cast( - QuerySet[Park], - super() - .get_queryset() - .prefetch_related( - "rides", "rides__manufacturer", "photos", "areas", "location" - ), - ) - - def get_context_data(self, **kwargs: Any) -> dict[str, Any]: - context = super().get_context_data(**kwargs) - park = cast(Park, self.object) - context["areas"] = park.areas.all() - context["rides"] = park.rides.all().order_by("-status", "name") - - if self.request.user.is_authenticated: - context["has_reviewed"] = Review.objects.filter( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Park), - object_id=park.id, - ).exists() - else: - context["has_reviewed"] = False - - return context - - def get_redirect_url_pattern(self) -> str: - return PARK_DETAIL_URL - - -class ParkAreaDetailView( - SlugRedirectMixin, - EditSubmissionMixin, - PhotoSubmissionMixin, - HistoryMixin, - DetailView, -): - model = ParkArea - template_name = "parks/area_detail.html" - context_object_name = "area" - slug_url_kwarg = "area_slug" - - def get_object(self, queryset: Optional[QuerySet[ParkArea]] = None) -> ParkArea: - if queryset is None: - queryset = self.get_queryset() - park_slug = self.kwargs.get("park_slug") - area_slug = self.kwargs.get("area_slug") - if park_slug is None or area_slug is None: - raise ObjectDoesNotExist("Missing slug") - area, _ = ParkArea.get_by_slug(area_slug) - if area.park.slug != park_slug: - raise ObjectDoesNotExist("Park slug doesn't match") - return area - - def get_context_data(self, **kwargs: Any) -> dict[str, Any]: - context = super().get_context_data(**kwargs) - return context - - def get_redirect_url_pattern(self) -> str: - return PARK_DETAIL_URL - - def get_redirect_url_kwargs(self) -> dict[str, str]: - area = cast(ParkArea, self.object) - return {"park_slug": area.park.slug, "area_slug": area.slug} diff --git a/parks/views_roadtrip.py b/parks/views_roadtrip.py deleted file mode 100644 index 299ddc7f..00000000 --- a/parks/views_roadtrip.py +++ /dev/null @@ -1,480 +0,0 @@ -""" -Road trip planning views for theme parks. -Provides interfaces for creating and managing multi-park road trips. -""" - -import json -from typing import Dict, Any, List -from django.shortcuts import render -from django.http import JsonResponse, HttpRequest, HttpResponse -from django.views.generic import TemplateView, View -from django.urls import reverse - -from .models import Park -from .services.roadtrip import RoadTripService -from core.services.map_service import unified_map_service -from core.services.data_structures import LocationType, MapFilters - -JSON_DECODE_ERROR_MSG = "Invalid JSON data" -PARKS_ALONG_ROUTE_HTML = "parks/partials/parks_along_route.html" - - -class RoadTripViewMixin: - """Mixin providing common functionality for road trip views.""" - - def __init__(self): - super().__init__() - self.roadtrip_service = RoadTripService() - - def get_roadtrip_context(self) -> Dict[str, Any]: - """Get common context data for road trip views.""" - return { - "roadtrip_api_urls": { - "create_trip": "/roadtrip/create/", - "find_parks_along_route": "/roadtrip/htmx/parks-along-route/", - "geocode": "/roadtrip/htmx/geocode/", - }, - "max_parks_per_trip": 10, - "default_detour_km": 50, - "enable_osm_integration": True, - } - - -class RoadTripPlannerView(RoadTripViewMixin, TemplateView): - """ - Main road trip planning interface. - - URL: /roadtrip/ - """ - - template_name = "parks/roadtrip_planner.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context.update(self.get_roadtrip_context(self.request)) - - # Get popular parks for suggestions - popular_parks = ( - Park.objects.filter(status="OPERATING", location__isnull=False) - .select_related("location", "operator") - .order_by("-ride_count")[:20] - ) - - context.update( - { - "page_title": "Road Trip Planner", - "popular_parks": popular_parks, - "countries_with_parks": self._get_countries_with_parks(), - "enable_route_optimization": True, - "show_distance_estimates": True, - } - ) - - return context - - def _get_countries_with_parks(self) -> List[str]: - """Get list of countries that have theme parks.""" - countries = ( - Park.objects.filter(status="OPERATING", location__country__isnull=False) - .values_list("location__country", flat=True) - .distinct() - .order_by("location__country") - ) - return list(countries) - - -class CreateTripView(RoadTripViewMixin, View): - """ - Generate optimized road trip routes. - - URL: /roadtrip/create/ - """ - - def post(self, request: HttpRequest) -> HttpResponse: - """Create a new road trip with optimized routing.""" - try: - data = json.loads(request.body) - - # Parse park IDs - park_ids = data.get("park_ids", []) - if not park_ids or len(park_ids) < 2: - return JsonResponse( - { - "status": "error", - "message": "At least 2 parks are required for a road trip", - }, - status=400, - ) - - if len(park_ids) > 10: - return JsonResponse( - { - "status": "error", - "message": "Maximum 10 parks allowed per trip", - }, - status=400, - ) - - # Get parks - parks = list( - Park.objects.filter( - id__in=park_ids, location__isnull=False - ).select_related("location", "operator") - ) - - if len(parks) != len(park_ids): - return JsonResponse( - { - "status": "error", - "message": "Some parks could not be found or do not have location data", - }, - status=400, - ) - - # Create optimized trip - trip = self.roadtrip_service.create_multi_park_trip(parks) - - if not trip: - return JsonResponse( - { - "status": "error", - "message": "Could not create optimized route for the selected parks", - }, - status=400, - ) - - # Convert trip to dict for JSON response - trip_data = { - "parks": [self._park_to_dict(park) for park in trip.parks], - "legs": [self._leg_to_dict(leg) for leg in trip.legs], - "total_distance_km": trip.total_distance_km, - "total_duration_minutes": trip.total_duration_minutes, - "formatted_total_distance": trip.formatted_total_distance, - "formatted_total_duration": trip.formatted_total_duration, - } - - return JsonResponse( - { - "status": "success", - "data": trip_data, - "trip_url": reverse( - "parks:roadtrip_detail", kwargs={"trip_id": "temp"} - ), - } - ) - - except json.JSONDecodeError: - return JsonResponse( - {"status": "error", "message": JSON_DECODE_ERROR_MSG}, - status=400, - ) - except Exception as e: - return JsonResponse( - { - "status": "error", - "message": f"Failed to create trip: {str(e)}", - }, - status=500, - ) - - def _park_to_dict(self, park: Park) -> Dict[str, Any]: - """Convert park instance to dictionary.""" - return { - "id": park.id, - "name": park.name, - "slug": park.slug, - "formatted_location": getattr(park, "formatted_location", ""), - "coordinates": park.coordinates, - "operator": park.operator.name if park.operator else None, - "ride_count": getattr(park, "ride_count", 0), - "url": reverse("parks:park_detail", kwargs={"slug": park.slug}), - } - - def _leg_to_dict(self, leg) -> Dict[str, Any]: - """Convert trip leg to dictionary.""" - return { - "from_park": self._park_to_dict(leg.from_park), - "to_park": self._park_to_dict(leg.to_park), - "distance_km": leg.route.distance_km, - "duration_minutes": leg.route.duration_minutes, - "formatted_distance": leg.route.formatted_distance, - "formatted_duration": leg.route.formatted_duration, - "geometry": leg.route.geometry, - } - - -class TripDetailView(RoadTripViewMixin, TemplateView): - """ - Show trip details and map. - - URL: /roadtrip// - """ - - template_name = "parks/trip_detail.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context.update(self.get_roadtrip_context(self.request)) - - # For now, this is a placeholder since we don't persist trips - # In a full implementation, you would retrieve the trip from database - trip_id = kwargs.get("trip_id") - - context.update( - { - "page_title": f"Road Trip #{trip_id}", - "trip_id": trip_id, - "message": "Trip details would be loaded here. Currently trips are not persisted.", - } - ) - - return context - - -class FindParksAlongRouteView(RoadTripViewMixin, View): - """ - HTMX endpoint for route-based park discovery. - - URL: /roadtrip/htmx/parks-along-route/ - """ - - def post(self, request: HttpRequest) -> HttpResponse: - """Find parks along a route between two points.""" - try: - data = json.loads(request.body) - - start_park_id = data.get("start_park_id") - end_park_id = data.get("end_park_id") - max_detour_km = min(100, max(10, float(data.get("max_detour_km", 50)))) - - if not start_park_id or not end_park_id: - return render( - request, - PARKS_ALONG_ROUTE_HTML, - {"error": "Start and end parks are required"}, - ) - - # Get start and end parks - try: - start_park = Park.objects.select_related("location").get( - id=start_park_id, location__isnull=False - ) - end_park = Park.objects.select_related("location").get( - id=end_park_id, location__isnull=False - ) - except Park.DoesNotExist: - return render( - request, - PARKS_ALONG_ROUTE_HTML, - {"error": "One or both parks could not be found"}, - ) - - # Find parks along route - parks_along_route = self.roadtrip_service.find_parks_along_route( - start_park, end_park, max_detour_km - ) - - return render( - request, - PARKS_ALONG_ROUTE_HTML, - { - "parks": parks_along_route, - "start_park": start_park, - "end_park": end_park, - "max_detour_km": max_detour_km, - "count": len(parks_along_route), - }, - ) - - except json.JSONDecodeError: - return render( - request, - PARKS_ALONG_ROUTE_HTML, - {"error": JSON_DECODE_ERROR_MSG}, - ) - except Exception as e: - return render(request, PARKS_ALONG_ROUTE_HTML, {"error": str(e)}) - - -class GeocodeAddressView(RoadTripViewMixin, View): - """ - HTMX endpoint for geocoding addresses. - - URL: /roadtrip/htmx/geocode/ - """ - - def post(self, request: HttpRequest) -> HttpResponse: - """Geocode an address and find nearby parks.""" - try: - data = json.loads(request.body) - address = data.get("address", "").strip() - - if not address: - return JsonResponse( - {"status": "error", "message": "Address is required"}, - status=400, - ) - - # Geocode the address - coordinates = self.roadtrip_service.geocode_address(address) - - if not coordinates: - return JsonResponse( - { - "status": "error", - "message": "Could not geocode the provided address", - }, - status=400, - ) - - # Find nearby parks - radius_km = min(200, max(10, float(data.get("radius_km", 100)))) - - # Use map service to find parks near coordinates - from core.services.data_structures import GeoBounds - - # Create a bounding box around the coordinates - lat_delta = radius_km / 111.0 # Rough conversion: 1 degree ≈ 111km - lng_delta = radius_km / (111.0 * abs(coordinates.latitude / 90.0)) - - bounds = GeoBounds( - north=coordinates.latitude + lat_delta, - south=coordinates.latitude - lat_delta, - east=coordinates.longitude + lng_delta, - west=coordinates.longitude - lng_delta, - ) - - map_response = unified_map_service.get_locations_by_bounds( - north=bounds.north, - south=bounds.south, - east=bounds.east, - west=bounds.west, - location_types={LocationType.PARK}, - ) - - return JsonResponse( - { - "status": "success", - "data": { - "coordinates": { - "latitude": coordinates.latitude, - "longitude": coordinates.longitude, - }, - "address": address, - "nearby_parks": [ - loc.to_dict() for loc in map_response.locations[:20] - ], - "radius_km": radius_km, - }, - } - ) - - except json.JSONDecodeError: - return JsonResponse( - {"status": "error", "message": JSON_DECODE_ERROR_MSG}, - status=400, - ) - except Exception as e: - return JsonResponse({"status": "error", "message": str(e)}, status=500) - - -class ParkDistanceCalculatorView(RoadTripViewMixin, View): - """ - HTMX endpoint for calculating distances between parks. - - URL: /roadtrip/htmx/distance/ - """ - - def post(self, request: HttpRequest) -> HttpResponse: - """Calculate distance and duration between two parks.""" - try: - data = json.loads(request.body) - - park1_id = data.get("park1_id") - park2_id = data.get("park2_id") - - if not park1_id or not park2_id: - return JsonResponse( - { - "status": "error", - "message": "Both park IDs are required", - }, - status=400, - ) - - # Get parks - try: - park1 = Park.objects.select_related("location").get( - id=park1_id, location__isnull=False - ) - park2 = Park.objects.select_related("location").get( - id=park2_id, location__isnull=False - ) - except Park.DoesNotExist: - return JsonResponse( - { - "status": "error", - "message": "One or both parks could not be found", - }, - status=400, - ) - - # Calculate route - coords1 = park1.coordinates - coords2 = park2.coordinates - - if not coords1 or not coords2: - return JsonResponse( - { - "status": "error", - "message": "One or both parks do not have coordinate data", - }, - status=400, - ) - - from services.roadtrip import Coordinates - - route = self.roadtrip_service.calculate_route( - Coordinates(*coords1), Coordinates(*coords2) - ) - - if not route: - return JsonResponse( - { - "status": "error", - "message": "Could not calculate route between parks", - }, - status=400, - ) - - return JsonResponse( - { - "status": "success", - "data": { - "distance_km": route.distance_km, - "duration_minutes": route.duration_minutes, - "formatted_distance": route.formatted_distance, - "formatted_duration": route.formatted_duration, - "park1": { - "name": park1.name, - "formatted_location": getattr( - park1, "formatted_location", "" - ), - }, - "park2": { - "name": park2.name, - "formatted_location": getattr( - park2, "formatted_location", "" - ), - }, - }, - } - ) - - except json.JSONDecodeError: - return JsonResponse( - {"status": "error", "message": JSON_DECODE_ERROR_MSG}, - status=400, - ) - except Exception as e: - return JsonResponse({"status": "error", "message": str(e)}, status=500) diff --git a/parks/views_search.py b/parks/views_search.py deleted file mode 100644 index 1b3106e9..00000000 --- a/parks/views_search.py +++ /dev/null @@ -1,59 +0,0 @@ -from django.http import HttpRequest, JsonResponse -from django.views.generic import TemplateView -from django.urls import reverse - -from .filters import ParkFilter -from .forms import ParkSearchForm -from .querysets import get_base_park_queryset - - -class ParkSearchView(TemplateView): - """View for handling park search with autocomplete.""" - - template_name = "parks/park_list.html" - - def get_context_data(self, **kwargs): - context = super().get_context_data(**kwargs) - context["search_form"] = ParkSearchForm(self.request.GET) - - # Initialize filter with current querystring - queryset = get_base_park_queryset() - filter_instance = ParkFilter(self.request.GET, queryset=queryset) - context["filter"] = filter_instance - - # Apply search if park ID selected via autocomplete - park_id = self.request.GET.get("park") - if park_id: - queryset = filter_instance.qs.filter(id=park_id) - else: - queryset = filter_instance.qs - - # Handle view mode - context["view_mode"] = self.request.GET.get("view_mode", "grid") - context["parks"] = queryset - - return context - - -def suggest_parks(request: HttpRequest) -> JsonResponse: - """Return park search suggestions as JSON.""" - query = request.GET.get("search", "").strip() - if not query: - return JsonResponse({"results": []}) - - queryset = get_base_park_queryset() - filter_instance = ParkFilter({"search": query}, queryset=queryset) - parks = filter_instance.qs[:8] # Limit to 8 suggestions - - results = [ - { - "id": str(park.pk), - "name": park.name, - "status": park.get_status_display(), - "location": park.formatted_location or "", - "url": reverse("parks:park_detail", kwargs={"slug": park.slug}), - } - for park in parks - ] - - return JsonResponse({"results": results}) diff --git a/parks/views_update.py b/parks/views_update.py deleted file mode 100644 index 8502fd6d..00000000 --- a/parks/views_update.py +++ /dev/null @@ -1,16 +0,0 @@ -def prepare_changes_data(self, cleaned_data): - data = cleaned_data.copy() - # Convert model instances to IDs for JSON serialization - if data.get("owner"): - data["owner"] = data["owner"].id - # Convert dates to ISO format strings - if data.get("opening_date"): - data["opening_date"] = data["opening_date"].isoformat() - if data.get("closing_date"): - data["closing_date"] = data["closing_date"].isoformat() - # Convert Decimal fields to strings - decimal_fields = ["latitude", "longitude", "size_acres", "average_rating"] - for field in decimal_fields: - if data.get(field): - data[field] = str(data[field]) - return data diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 8831bf34..00000000 --- a/pyproject.toml +++ /dev/null @@ -1,71 +0,0 @@ -[project] -name = "thrillwiki" -version = "0.1.0" -readme = "README.md" -requires-python = ">=3.13" -dependencies = [ - "Django>=5.0", - "djangorestframework>=3.14.0", - "django-cors-headers>=4.3.1", - "django-allauth>=0.60.1", - "django-oauth-toolkit>=3.0.1", - "dj-rest-auth>=7.0.0", - "pyjwt>=2.10.1", - "psycopg2-binary>=2.9.9", - "dj-database-url>=2.3.0", - "requests>=2.32.3", - "django-webpack-loader>=3.1.1", - "python-dotenv>=1.0.1", - "Pillow>=10.2.0", - "django-cleanup>=8.0.0", - "django-filter>=23.5", - "django-htmx>=1.17.2", - "whitenoise>=6.6.0", - "pycountry>=24.6.1", - "black>=24.1.0", - "flake8>=7.1.1", - "pytest>=8.3.4", - "pytest-django>=4.9.0", - "channels>=4.2.0", - "channels-redis>=4.2.1", - "daphne>=4.1.2", - "django-simple-history>=3.5.0", - "django-tailwind-cli>=2.21.1", - "playwright>=1.41.0", - "pytest-playwright>=0.4.3", - "django-pghistory>=3.5.2", - "django-htmx-autocomplete>=1.0.5", - "coverage>=7.9.1", - "poetry>=2.1.3", - "piexif>=1.1.3", - "django-environ>=0.12.0", - "factory-boy>=3.3.3", - "drf-spectacular>=0.27.0", - "django-silk>=5.0.0", - "django-debug-toolbar>=4.0.0", - "nplusone>=1.0.0", - "django-health-check>=3.17.0", - "django-redis>=5.4.0", - "sentry-sdk>=1.40.0", - "python-json-logger>=2.0.7", - "django-cloudflare-images>=0.6.0", - "psutil>=7.0.0", - "django-extensions>=4.1", - "werkzeug>=3.1.3", -] - -[dependency-groups] -dev = [ - "autoflake>=2.3.1", - "autopep8>=2.3.2", - "black>=25.1.0", - "django-stubs>=5.2.2", - "rope>=1.14.0", -] - -[tool.pyright] -stubPath = "stubs" -typeCheckingMode = "basic" - -[tool.pylance] -stubPath = "stubs" diff --git a/requirements.txt b/requirements.txt index edc9d279..6230d7ee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,6 +22,7 @@ django-webpack-loader==3.1.1 # Utils python-dotenv==1.0.1 +django-environ==0.11.2 Pillow==11.1.0 # For image handling django-cleanup==9.0.0 # Automatically delete files piexif==1.1.3 # For image EXIF metadata handling diff --git a/rides/__init__.py b/rides/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rides/admin.py b/rides/admin.py deleted file mode 100644 index 82a3c5fe..00000000 --- a/rides/admin.py +++ /dev/null @@ -1,96 +0,0 @@ -from django.contrib import admin -from django.contrib.gis.admin import GISModelAdmin -from .models.company import Company -from .models.rides import Ride -from .models.location import RideLocation - - -class ManufacturerAdmin(admin.ModelAdmin): - list_display = ("name", "headquarters", "website", "rides_count") - search_fields = ("name",) - - def get_queryset(self, request): - return super().get_queryset(request).filter(roles__contains=["MANUFACTURER"]) - - -class DesignerAdmin(admin.ModelAdmin): - list_display = ("name", "headquarters", "website") - search_fields = ("name",) - - def get_queryset(self, request): - return super().get_queryset(request).filter(roles__contains=["DESIGNER"]) - - -class RideLocationInline(admin.StackedInline): - """Inline admin for RideLocation""" - - model = RideLocation - extra = 0 - fields = ( - "park_area", - "point", - "entrance_notes", - "accessibility_notes", - ) - - -class RideLocationAdmin(GISModelAdmin): - """Admin for standalone RideLocation management""" - - list_display = ("ride", "park_area", "has_coordinates", "created_at") - list_filter = ("park_area", "created_at") - search_fields = ("ride__name", "park_area", "entrance_notes") - readonly_fields = ( - "latitude", - "longitude", - "coordinates", - "created_at", - "updated_at", - ) - fieldsets = ( - ("Ride", {"fields": ("ride",)}), - ( - "Location Information", - { - "fields": ( - "park_area", - "point", - "latitude", - "longitude", - "coordinates", - ), - "description": "Optional coordinates - not all rides need precise location tracking", - }, - ), - ( - "Navigation Notes", - { - "fields": ("entrance_notes", "accessibility_notes"), - }, - ), - ( - "Metadata", - {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}, - ), - ) - - def latitude(self, obj): - return obj.latitude - - latitude.short_description = "Latitude" - - def longitude(self, obj): - return obj.longitude - - longitude.short_description = "Longitude" - - -class RideAdmin(admin.ModelAdmin): - """Enhanced Ride admin with location inline""" - - inlines = [RideLocationInline] - - -admin.site.register(Company) -admin.site.register(Ride, RideAdmin) -admin.site.register(RideLocation, RideLocationAdmin) diff --git a/rides/api/__init__.py b/rides/api/__init__.py deleted file mode 100644 index df3ca819..00000000 --- a/rides/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Rides API module diff --git a/rides/api/serializers.py b/rides/api/serializers.py deleted file mode 100644 index f1b4b8e4..00000000 --- a/rides/api/serializers.py +++ /dev/null @@ -1,345 +0,0 @@ -""" -Serializers for Rides API following Django styleguide patterns. -""" - -from rest_framework import serializers -from ..models import Ride - - -class RideModelOutputSerializer(serializers.Serializer): - """Output serializer for ride model data.""" - - id = serializers.IntegerField() - name = serializers.CharField() - description = serializers.CharField() - category = serializers.CharField() - manufacturer = serializers.SerializerMethodField() - - def get_manufacturer(self, obj): - if obj.manufacturer: - return { - "id": obj.manufacturer.id, - "name": obj.manufacturer.name, - "slug": obj.manufacturer.slug, - } - return None - - -class RideParkOutputSerializer(serializers.Serializer): - """Output serializer for ride's park data.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - - -class RideListOutputSerializer(serializers.Serializer): - """Output serializer for ride list view.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - category = serializers.CharField() - status = serializers.CharField() - description = serializers.CharField() - - # Park info - park = RideParkOutputSerializer() - - # Statistics - average_rating = serializers.DecimalField( - max_digits=3, decimal_places=2, allow_null=True - ) - capacity_per_hour = serializers.IntegerField(allow_null=True) - - # Dates - opening_date = serializers.DateField(allow_null=True) - closing_date = serializers.DateField(allow_null=True) - - # Metadata - created_at = serializers.DateTimeField() - updated_at = serializers.DateTimeField() - - -class RideDetailOutputSerializer(serializers.Serializer): - """Output serializer for ride detail view.""" - - id = serializers.IntegerField() - name = serializers.CharField() - slug = serializers.CharField() - category = serializers.CharField() - status = serializers.CharField() - post_closing_status = serializers.CharField(allow_null=True) - description = serializers.CharField() - - # Park info - park = RideParkOutputSerializer() - park_area = serializers.SerializerMethodField() - - # Dates - opening_date = serializers.DateField(allow_null=True) - closing_date = serializers.DateField(allow_null=True) - status_since = serializers.DateField(allow_null=True) - - # Physical specs - min_height_in = serializers.IntegerField(allow_null=True) - max_height_in = serializers.IntegerField(allow_null=True) - capacity_per_hour = serializers.IntegerField(allow_null=True) - ride_duration_seconds = serializers.IntegerField(allow_null=True) - - # Statistics - average_rating = serializers.DecimalField( - max_digits=3, decimal_places=2, allow_null=True - ) - - # Companies - manufacturer = serializers.SerializerMethodField() - designer = serializers.SerializerMethodField() - - # Model - ride_model = RideModelOutputSerializer(allow_null=True) - - # Metadata - created_at = serializers.DateTimeField() - updated_at = serializers.DateTimeField() - - def get_park_area(self, obj): - if obj.park_area: - return { - "id": obj.park_area.id, - "name": obj.park_area.name, - "slug": obj.park_area.slug, - } - return None - - def get_manufacturer(self, obj): - if obj.manufacturer: - return { - "id": obj.manufacturer.id, - "name": obj.manufacturer.name, - "slug": obj.manufacturer.slug, - } - return None - - def get_designer(self, obj): - if obj.designer: - return { - "id": obj.designer.id, - "name": obj.designer.name, - "slug": obj.designer.slug, - } - return None - - -class RideCreateInputSerializer(serializers.Serializer): - """Input serializer for creating rides.""" - - name = serializers.CharField(max_length=255) - description = serializers.CharField(allow_blank=True, default="") - category = serializers.ChoiceField(choices=Ride.CATEGORY_CHOICES) - status = serializers.ChoiceField(choices=Ride.STATUS_CHOICES, default="OPERATING") - - # Required park - park_id = serializers.IntegerField() - - # Optional area - park_area_id = serializers.IntegerField(required=False, allow_null=True) - - # Optional dates - opening_date = serializers.DateField(required=False, allow_null=True) - closing_date = serializers.DateField(required=False, allow_null=True) - status_since = serializers.DateField(required=False, allow_null=True) - - # Optional specs - min_height_in = serializers.IntegerField( - required=False, allow_null=True, min_value=30, max_value=90 - ) - max_height_in = serializers.IntegerField( - required=False, allow_null=True, min_value=30, max_value=90 - ) - capacity_per_hour = serializers.IntegerField( - required=False, allow_null=True, min_value=1 - ) - ride_duration_seconds = serializers.IntegerField( - required=False, allow_null=True, min_value=1 - ) - - # Optional companies - manufacturer_id = serializers.IntegerField(required=False, allow_null=True) - designer_id = serializers.IntegerField(required=False, allow_null=True) - - # Optional model - ride_model_id = serializers.IntegerField(required=False, allow_null=True) - - def validate(self, data): - """Cross-field validation.""" - # Date validation - opening_date = data.get("opening_date") - closing_date = data.get("closing_date") - - if opening_date and closing_date and closing_date < opening_date: - raise serializers.ValidationError( - "Closing date cannot be before opening date" - ) - - # Height validation - min_height = data.get("min_height_in") - max_height = data.get("max_height_in") - - if min_height and max_height and min_height > max_height: - raise serializers.ValidationError( - "Minimum height cannot be greater than maximum height" - ) - - return data - - -class RideUpdateInputSerializer(serializers.Serializer): - """Input serializer for updating rides.""" - - name = serializers.CharField(max_length=255, required=False) - description = serializers.CharField(allow_blank=True, required=False) - category = serializers.ChoiceField(choices=Ride.CATEGORY_CHOICES, required=False) - status = serializers.ChoiceField(choices=Ride.STATUS_CHOICES, required=False) - post_closing_status = serializers.ChoiceField( - choices=Ride.POST_CLOSING_STATUS_CHOICES, - required=False, - allow_null=True, - ) - - # Park and area - park_id = serializers.IntegerField(required=False) - park_area_id = serializers.IntegerField(required=False, allow_null=True) - - # Dates - opening_date = serializers.DateField(required=False, allow_null=True) - closing_date = serializers.DateField(required=False, allow_null=True) - status_since = serializers.DateField(required=False, allow_null=True) - - # Specs - min_height_in = serializers.IntegerField( - required=False, allow_null=True, min_value=30, max_value=90 - ) - max_height_in = serializers.IntegerField( - required=False, allow_null=True, min_value=30, max_value=90 - ) - capacity_per_hour = serializers.IntegerField( - required=False, allow_null=True, min_value=1 - ) - ride_duration_seconds = serializers.IntegerField( - required=False, allow_null=True, min_value=1 - ) - - # Companies - manufacturer_id = serializers.IntegerField(required=False, allow_null=True) - designer_id = serializers.IntegerField(required=False, allow_null=True) - - # Model - ride_model_id = serializers.IntegerField(required=False, allow_null=True) - - def validate(self, data): - """Cross-field validation.""" - # Date validation - opening_date = data.get("opening_date") - closing_date = data.get("closing_date") - - if opening_date and closing_date and closing_date < opening_date: - raise serializers.ValidationError( - "Closing date cannot be before opening date" - ) - - # Height validation - min_height = data.get("min_height_in") - max_height = data.get("max_height_in") - - if min_height and max_height and min_height > max_height: - raise serializers.ValidationError( - "Minimum height cannot be greater than maximum height" - ) - - return data - - -class RideFilterInputSerializer(serializers.Serializer): - """Input serializer for ride filtering and search.""" - - # Search - search = serializers.CharField(required=False, allow_blank=True) - - # Category filter - category = serializers.MultipleChoiceField( - choices=Ride.CATEGORY_CHOICES, required=False - ) - - # Status filter - status = serializers.MultipleChoiceField( - choices=Ride.STATUS_CHOICES, required=False - ) - - # Park filter - park_id = serializers.IntegerField(required=False) - park_slug = serializers.CharField(required=False, allow_blank=True) - - # Company filters - manufacturer_id = serializers.IntegerField(required=False) - designer_id = serializers.IntegerField(required=False) - - # Rating filter - min_rating = serializers.DecimalField( - max_digits=3, - decimal_places=2, - required=False, - min_value=1, - max_value=10, - ) - - # Height filters - min_height_requirement = serializers.IntegerField(required=False) - max_height_requirement = serializers.IntegerField(required=False) - - # Capacity filter - min_capacity = serializers.IntegerField(required=False) - - # Ordering - ordering = serializers.ChoiceField( - choices=[ - "name", - "-name", - "opening_date", - "-opening_date", - "average_rating", - "-average_rating", - "capacity_per_hour", - "-capacity_per_hour", - "created_at", - "-created_at", - ], - required=False, - default="name", - ) - - -class RideStatsOutputSerializer(serializers.Serializer): - """Output serializer for ride statistics.""" - - total_rides = serializers.IntegerField() - operating_rides = serializers.IntegerField() - closed_rides = serializers.IntegerField() - under_construction = serializers.IntegerField() - - # By category - rides_by_category = serializers.DictField() - - # Averages - average_rating = serializers.DecimalField( - max_digits=3, decimal_places=2, allow_null=True - ) - average_capacity = serializers.DecimalField( - max_digits=8, decimal_places=2, allow_null=True - ) - - # Top manufacturers - top_manufacturers = serializers.ListField(child=serializers.DictField()) - - # Recently added - recently_added_count = serializers.IntegerField() diff --git a/rides/api/urls.py b/rides/api/urls.py deleted file mode 100644 index f1dd521f..00000000 --- a/rides/api/urls.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -URL configuration for Rides API following Django styleguide patterns. -""" - -# Note: We'll create the views file after this -# from .views import RideApi - -app_name = "rides_api" - -# Placeholder for future implementation -urlpatterns = [ - # Will be implemented in next phase - # path('v1/', include(router.urls)), -] diff --git a/rides/apps.py b/rides/apps.py deleted file mode 100644 index 7c2ba65c..00000000 --- a/rides/apps.py +++ /dev/null @@ -1,9 +0,0 @@ -from django.apps import AppConfig - - -class RidesConfig(AppConfig): - default_auto_field = "django.db.models.BigAutoField" - name = "rides" - - def ready(self): - pass diff --git a/rides/events.py b/rides/events.py deleted file mode 100644 index a8b56aff..00000000 --- a/rides/events.py +++ /dev/null @@ -1,75 +0,0 @@ -from typing import Dict - - -def get_ride_display_changes(changes: Dict) -> Dict: - """Returns a human-readable version of the ride changes""" - field_names = { - "name": "Name", - "description": "Description", - "status": "Status", - "post_closing_status": "Post-Closing Status", - "opening_date": "Opening Date", - "closing_date": "Closing Date", - "status_since": "Status Since", - "capacity_per_hour": "Hourly Capacity", - "min_height_in": "Minimum Height", - "max_height_in": "Maximum Height", - "ride_duration_seconds": "Ride Duration", - } - - display_changes = {} - for field, change in changes.items(): - if field in field_names: - old_value = change.get("old", "") - new_value = change.get("new", "") - - # Format specific fields - if field == "status": - from .models import Ride - - choices = dict(Ride.STATUS_CHOICES) - old_value = choices.get(old_value, old_value) - new_value = choices.get(new_value, new_value) - elif field == "post_closing_status": - from .models import Ride - - choices = dict(Ride.POST_CLOSING_STATUS_CHOICES) - old_value = choices.get(old_value, old_value) - new_value = choices.get(new_value, new_value) - - display_changes[field_names[field]] = { - "old": old_value, - "new": new_value, - } - - return display_changes - - -def get_ride_model_display_changes(changes: Dict) -> Dict: - """Returns a human-readable version of the ride model changes""" - field_names = { - "name": "Name", - "description": "Description", - "category": "Category", - } - - display_changes = {} - for field, change in changes.items(): - if field in field_names: - old_value = change.get("old", "") - new_value = change.get("new", "") - - # Format category field - if field == "category": - from .models import CATEGORY_CHOICES - - choices = dict(CATEGORY_CHOICES) - old_value = choices.get(old_value, old_value) - new_value = choices.get(new_value, new_value) - - display_changes[field_names[field]] = { - "old": old_value, - "new": new_value, - } - - return display_changes diff --git a/rides/forms.py b/rides/forms.py deleted file mode 100644 index bdf98f90..00000000 --- a/rides/forms.py +++ /dev/null @@ -1,379 +0,0 @@ -from parks.models import Park, ParkArea -from django import forms -from django.forms import ModelChoiceField -from django.urls import reverse_lazy -from .models.company import Company -from .models.rides import Ride, RideModel - -Manufacturer = Company -Designer = Company - - -class RideForm(forms.ModelForm): - park_search = forms.CharField( - label="Park *", - required=True, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Search for a park...", - "hx-get": "/parks/search/", - "hx-trigger": "click, input delay:200ms", - "hx-target": "#park-search-results", - "name": "q", - "autocomplete": "off", - } - ), - ) - - manufacturer_search = forms.CharField( - label="Manufacturer", - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Search for a manufacturer...", - "hx-get": reverse_lazy("rides:search_companies"), - "hx-trigger": "click, input delay:200ms", - "hx-target": "#manufacturer-search-results", - "name": "q", - "autocomplete": "off", - } - ), - ) - - designer_search = forms.CharField( - label="Designer", - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Search for a designer...", - "hx-get": reverse_lazy("rides:search_companies"), - "hx-trigger": "click, input delay:200ms", - "hx-target": "#designer-search-results", - "name": "q", - "autocomplete": "off", - } - ), - ) - - ride_model_search = forms.CharField( - label="Ride Model", - required=False, - widget=forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Search for a ride model...", - "hx-get": reverse_lazy("rides:search_ride_models"), - "hx-trigger": "click, input delay:200ms", - "hx-target": "#ride-model-search-results", - "hx-include": "[name='manufacturer']", - "name": "q", - "autocomplete": "off", - } - ), - ) - - park = forms.ModelChoiceField( - queryset=Park.objects.all(), - required=True, - label="", - widget=forms.HiddenInput(), - ) - - manufacturer = forms.ModelChoiceField( - queryset=Manufacturer.objects.all(), - required=False, - label="", - widget=forms.HiddenInput(), - ) - - designer = forms.ModelChoiceField( - queryset=Designer.objects.all(), - required=False, - label="", - widget=forms.HiddenInput(), - ) - - ride_model = forms.ModelChoiceField( - queryset=RideModel.objects.all(), - required=False, - label="", - widget=forms.HiddenInput(), - ) - - park_area = ModelChoiceField( - queryset=ParkArea.objects.none(), - required=False, - widget=forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Select an area within the park...", - } - ), - ) - - class Meta: - model = Ride - fields = [ - "name", - "category", - "manufacturer", - "designer", - "ride_model", - "status", - "post_closing_status", - "opening_date", - "closing_date", - "status_since", - "min_height_in", - "max_height_in", - "capacity_per_hour", - "ride_duration_seconds", - "description", - ] - widgets = { - "name": forms.TextInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Official name of the ride", - } - ), - "category": forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "hx-get": reverse_lazy("rides:coaster_fields"), - "hx-target": "#coaster-fields", - "hx-trigger": "change", - "hx-include": "this", - "hx-swap": "innerHTML", - } - ), - "status": forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Current operational status", - "x-model": "status", - "@change": "handleStatusChange", - } - ), - "post_closing_status": forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Status after closing", - "x-show": "status === 'CLOSING'", - } - ), - "opening_date": forms.DateInput( - attrs={ - "type": "date", - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Date when ride first opened", - } - ), - "closing_date": forms.DateInput( - attrs={ - "type": "date", - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Date when ride will close", - "x-show": "['CLOSING', 'SBNO', 'CLOSED_PERM', 'DEMOLISHED', 'RELOCATED'].includes(status)", - ":required": "status === 'CLOSING'", - } - ), - "status_since": forms.DateInput( - attrs={ - "type": "date", - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Date when current status took effect", - } - ), - "min_height_in": forms.NumberInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "min": "0", - "placeholder": "Minimum height requirement in inches", - } - ), - "max_height_in": forms.NumberInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "min": "0", - "placeholder": "Maximum height limit in inches (if applicable)", - } - ), - "capacity_per_hour": forms.NumberInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "min": "0", - "placeholder": "Theoretical hourly ride capacity", - } - ), - "ride_duration_seconds": forms.NumberInput( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "min": "0", - "placeholder": "Total duration of one ride cycle in seconds", - } - ), - "description": forms.Textarea( - attrs={ - "rows": 4, - "class": ( - "w-full border-gray-300 rounded-lg form-textarea " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "General description and notable features of the ride", - } - ), - } - - def __init__(self, *args, **kwargs): - park = kwargs.pop("park", None) - super().__init__(*args, **kwargs) - - # Make category required - self.fields["category"].required = True - - # Clear any default values for date fields - self.fields["opening_date"].initial = None - self.fields["closing_date"].initial = None - self.fields["status_since"].initial = None - - # Move fields to the beginning in desired order - field_order = [ - "park_search", - "park", - "park_area", - "name", - "manufacturer_search", - "manufacturer", - "designer_search", - "designer", - "ride_model_search", - "ride_model", - "category", - "status", - "post_closing_status", - "opening_date", - "closing_date", - "status_since", - "min_height_in", - "max_height_in", - "capacity_per_hour", - "ride_duration_seconds", - "description", - ] - self.order_fields(field_order) - - if park: - # If park is provided, set it as the initial value - self.fields["park"].initial = park - # Hide the park search field since we know the park - del self.fields["park_search"] - # Create new park_area field with park's areas - self.fields["park_area"] = forms.ModelChoiceField( - queryset=park.areas.all(), - required=False, - widget=forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-select " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "placeholder": "Select an area within the park...", - } - ), - ) - else: - # If no park provided, show park search and disable park_area until - # park is selected - self.fields["park_area"].widget.attrs["disabled"] = True - # Initialize park search with current park name if editing - if self.instance and self.instance.pk and self.instance.park: - self.fields["park_search"].initial = self.instance.park.name - self.fields["park"].initial = self.instance.park - - # Initialize manufacturer, designer, and ride model search fields if - # editing - if self.instance and self.instance.pk: - if self.instance.manufacturer: - self.fields["manufacturer_search"].initial = ( - self.instance.manufacturer.name - ) - self.fields["manufacturer"].initial = self.instance.manufacturer - if self.instance.designer: - self.fields["designer_search"].initial = self.instance.designer.name - self.fields["designer"].initial = self.instance.designer - if self.instance.ride_model: - self.fields["ride_model_search"].initial = self.instance.ride_model.name - self.fields["ride_model"].initial = self.instance.ride_model - - -class RideSearchForm(forms.Form): - """Form for searching rides with HTMX autocomplete.""" - - ride = forms.ModelChoiceField( - queryset=Ride.objects.all(), - label="Find a ride", - required=False, - widget=forms.Select( - attrs={ - "class": ( - "w-full border-gray-300 rounded-lg form-input " - "dark:border-gray-600 dark:bg-gray-700 dark:text-white" - ), - "hx-get": reverse_lazy("rides:search"), - "hx-trigger": "change", - "hx-target": "#ride-search-results", - } - ), - ) diff --git a/rides/managers.py b/rides/managers.py deleted file mode 100644 index d5d8d907..00000000 --- a/rides/managers.py +++ /dev/null @@ -1,301 +0,0 @@ -""" -Custom managers and QuerySets for Rides models. -Optimized queries following Django styleguide patterns. -""" - -from typing import Optional, List, Union -from django.db.models import Q, F, Count, Prefetch - -from core.managers import ( - BaseQuerySet, - BaseManager, - ReviewableQuerySet, - ReviewableManager, - StatusQuerySet, - StatusManager, -) - - -class RideQuerySet(StatusQuerySet, ReviewableQuerySet): - """Optimized QuerySet for Ride model.""" - - def by_category(self, *, category: Union[str, List[str]]): - """Filter rides by category.""" - if isinstance(category, list): - return self.filter(category__in=category) - return self.filter(category=category) - - def coasters(self): - """Filter for roller coasters.""" - return self.filter(category__in=["RC", "WC"]) - - def thrill_rides(self): - """Filter for thrill rides.""" - return self.filter(category__in=["RC", "WC", "FR"]) - - def family_friendly(self, *, max_height_requirement: int = 42): - """Filter for family-friendly rides.""" - return self.filter( - Q(min_height_in__lte=max_height_requirement) | Q(min_height_in__isnull=True) - ) - - def by_park(self, *, park_id: int): - """Filter rides by park.""" - return self.filter(park_id=park_id) - - def by_manufacturer(self, *, manufacturer_id: int): - """Filter rides by manufacturer.""" - return self.filter(manufacturer_id=manufacturer_id) - - def by_designer(self, *, designer_id: int): - """Filter rides by designer.""" - return self.filter(designer_id=designer_id) - - def with_capacity_info(self): - """Add capacity-related annotations.""" - return self.annotate( - estimated_daily_capacity=F("capacity_per_hour") - * 10, # Assuming 10 operating hours - duration_minutes=F("ride_duration_seconds") / 60.0, - ) - - def high_capacity(self, *, min_capacity: int = 1000): - """Filter for high-capacity rides.""" - return self.filter(capacity_per_hour__gte=min_capacity) - - def optimized_for_list(self): - """Optimize for ride list display.""" - return self.select_related( - "park", "park_area", "manufacturer", "designer", "ride_model" - ).with_review_stats() - - def optimized_for_detail(self): - """Optimize for ride detail display.""" - from .models import RideReview - - return self.select_related( - "park", - "park_area", - "manufacturer", - "designer", - "ride_model__manufacturer", - ).prefetch_related( - "location", - "rollercoaster_stats", - Prefetch( - "reviews", - queryset=RideReview.objects.select_related("user") - .filter(is_published=True) - .order_by("-created_at")[:10], - ), - "photos", - ) - - def for_map_display(self): - """Optimize for map display.""" - return ( - self.select_related("park", "park_area") - .prefetch_related("location") - .values( - "id", - "name", - "slug", - "category", - "status", - "park__name", - "park__slug", - "park_area__name", - "location__point", - ) - ) - - def search_by_specs( - self, - *, - min_height: Optional[int] = None, - max_height: Optional[int] = None, - min_speed: Optional[float] = None, - inversions: Optional[bool] = None, - ): - """Search rides by physical specifications.""" - queryset = self - - if min_height: - queryset = queryset.filter( - Q(rollercoaster_stats__height_ft__gte=min_height) - | Q(min_height_in__gte=min_height) - ) - - if max_height: - queryset = queryset.filter( - Q(rollercoaster_stats__height_ft__lte=max_height) - | Q(max_height_in__lte=max_height) - ) - - if min_speed: - queryset = queryset.filter(rollercoaster_stats__speed_mph__gte=min_speed) - - if inversions is not None: - if inversions: - queryset = queryset.filter(rollercoaster_stats__inversions__gt=0) - else: - queryset = queryset.filter( - Q(rollercoaster_stats__inversions=0) - | Q(rollercoaster_stats__isnull=True) - ) - - return queryset - - -class RideManager(StatusManager, ReviewableManager): - """Custom manager for Ride model.""" - - def get_queryset(self): - return RideQuerySet(self.model, using=self._db) - - def coasters(self): - return self.get_queryset().coasters() - - def thrill_rides(self): - return self.get_queryset().thrill_rides() - - def family_friendly(self, *, max_height_requirement: int = 42): - return self.get_queryset().family_friendly( - max_height_requirement=max_height_requirement - ) - - def by_park(self, *, park_id: int): - return self.get_queryset().by_park(park_id=park_id) - - def high_capacity(self, *, min_capacity: int = 1000): - return self.get_queryset().high_capacity(min_capacity=min_capacity) - - def optimized_for_list(self): - return self.get_queryset().optimized_for_list() - - def optimized_for_detail(self): - return self.get_queryset().optimized_for_detail() - - -class RideModelQuerySet(BaseQuerySet): - """QuerySet for RideModel model.""" - - def by_manufacturer(self, *, manufacturer_id: int): - """Filter ride models by manufacturer.""" - return self.filter(manufacturer_id=manufacturer_id) - - def by_category(self, *, category: str): - """Filter ride models by category.""" - return self.filter(category=category) - - def with_ride_counts(self): - """Add count of rides using this model.""" - return self.annotate( - ride_count=Count("rides", distinct=True), - operating_rides_count=Count( - "rides", filter=Q(rides__status="OPERATING"), distinct=True - ), - ) - - def popular_models(self, *, min_installations: int = 5): - """Filter for popular ride models.""" - return self.with_ride_counts().filter(ride_count__gte=min_installations) - - def optimized_for_list(self): - """Optimize for model list display.""" - return self.select_related("manufacturer").with_ride_counts() - - -class RideModelManager(BaseManager): - """Manager for RideModel model.""" - - def get_queryset(self): - return RideModelQuerySet(self.model, using=self._db) - - def by_manufacturer(self, *, manufacturer_id: int): - return self.get_queryset().by_manufacturer(manufacturer_id=manufacturer_id) - - def popular_models(self, *, min_installations: int = 5): - return self.get_queryset().popular_models(min_installations=min_installations) - - -class RideReviewQuerySet(ReviewableQuerySet): - """QuerySet for RideReview model.""" - - def for_ride(self, *, ride_id: int): - """Filter reviews for a specific ride.""" - return self.filter(ride_id=ride_id) - - def by_user(self, *, user_id: int): - """Filter reviews by user.""" - return self.filter(user_id=user_id) - - def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10): - """Filter reviews by rating range.""" - return self.filter(rating__gte=min_rating, rating__lte=max_rating) - - def optimized_for_display(self): - """Optimize for review display.""" - return self.select_related("user", "ride", "moderated_by") - - -class RideReviewManager(BaseManager): - """Manager for RideReview model.""" - - def get_queryset(self): - return RideReviewQuerySet(self.model, using=self._db) - - def for_ride(self, *, ride_id: int): - return self.get_queryset().for_ride(ride_id=ride_id) - - def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10): - return self.get_queryset().by_rating_range( - min_rating=min_rating, max_rating=max_rating - ) - - -class RollerCoasterStatsQuerySet(BaseQuerySet): - """QuerySet for RollerCoasterStats model.""" - - def tall_coasters(self, *, min_height_ft: float = 200): - """Filter for tall roller coasters.""" - return self.filter(height_ft__gte=min_height_ft) - - def fast_coasters(self, *, min_speed_mph: float = 60): - """Filter for fast roller coasters.""" - return self.filter(speed_mph__gte=min_speed_mph) - - def with_inversions(self): - """Filter for coasters with inversions.""" - return self.filter(inversions__gt=0) - - def launched_coasters(self): - """Filter for launched coasters.""" - return self.exclude(launch_type="NONE") - - def by_track_type(self, *, track_type: str): - """Filter by track type.""" - return self.filter(track_type=track_type) - - def optimized_for_list(self): - """Optimize for stats list display.""" - return self.select_related("ride", "ride__park") - - -class RollerCoasterStatsManager(BaseManager): - """Manager for RollerCoasterStats model.""" - - def get_queryset(self): - return RollerCoasterStatsQuerySet(self.model, using=self._db) - - def tall_coasters(self, *, min_height_ft: float = 200): - return self.get_queryset().tall_coasters(min_height_ft=min_height_ft) - - def fast_coasters(self, *, min_speed_mph: float = 60): - return self.get_queryset().fast_coasters(min_speed_mph=min_speed_mph) - - def with_inversions(self): - return self.get_queryset().with_inversions() - - def launched_coasters(self): - return self.get_queryset().launched_coasters() diff --git a/rides/migrations/0001_initial.py b/rides/migrations/0001_initial.py deleted file mode 100644 index 8287dda2..00000000 --- a/rides/migrations/0001_initial.py +++ /dev/null @@ -1,741 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-15 21:30 - -import django.contrib.gis.db.models.fields -import django.contrib.postgres.fields -import django.core.validators -import django.db.models.deletion -import pgtrigger.compiler -import pgtrigger.migrations -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("pghistory", "0007_auto_20250421_0444"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.CreateModel( - name="Company", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(max_length=255, unique=True)), - ( - "roles", - django.contrib.postgres.fields.ArrayField( - base_field=models.CharField( - choices=[ - ("MANUFACTURER", "Ride Manufacturer"), - ("DESIGNER", "Ride Designer"), - ("OPERATOR", "Park Operator"), - ("PROPERTY_OWNER", "Property Owner"), - ], - max_length=20, - ), - blank=True, - default=list, - size=None, - ), - ), - ("description", models.TextField(blank=True)), - ("website", models.URLField(blank=True)), - ("founded_date", models.DateField(blank=True, null=True)), - ("rides_count", models.IntegerField(default=0)), - ("coasters_count", models.IntegerField(default=0)), - ], - options={ - "verbose_name_plural": "Companies", - "ordering": ["name"], - }, - ), - migrations.CreateModel( - name="CompanyEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(db_index=False, max_length=255)), - ( - "roles", - django.contrib.postgres.fields.ArrayField( - base_field=models.CharField( - choices=[ - ("MANUFACTURER", "Ride Manufacturer"), - ("DESIGNER", "Ride Designer"), - ("OPERATOR", "Park Operator"), - ("PROPERTY_OWNER", "Property Owner"), - ], - max_length=20, - ), - blank=True, - default=list, - size=None, - ), - ), - ("description", models.TextField(blank=True)), - ("website", models.URLField(blank=True)), - ("founded_date", models.DateField(blank=True, null=True)), - ("rides_count", models.IntegerField(default=0)), - ("coasters_count", models.IntegerField(default=0)), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="Ride", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("slug", models.SlugField(max_length=255)), - ("description", models.TextField(blank=True)), - ( - "category", - models.CharField( - blank=True, - choices=[ - ("", "Select ride type"), - ("RC", "Roller Coaster"), - ("DR", "Dark Ride"), - ("FR", "Flat Ride"), - ("WR", "Water Ride"), - ("TR", "Transport"), - ("OT", "Other"), - ], - default="", - max_length=2, - ), - ), - ( - "status", - models.CharField( - choices=[ - ("", "Select status"), - ("OPERATING", "Operating"), - ("CLOSED_TEMP", "Temporarily Closed"), - ("SBNO", "Standing But Not Operating"), - ("CLOSING", "Closing"), - ("CLOSED_PERM", "Permanently Closed"), - ("UNDER_CONSTRUCTION", "Under Construction"), - ("DEMOLISHED", "Demolished"), - ("RELOCATED", "Relocated"), - ], - default="OPERATING", - max_length=20, - ), - ), - ( - "post_closing_status", - models.CharField( - blank=True, - choices=[ - ("SBNO", "Standing But Not Operating"), - ("CLOSED_PERM", "Permanently Closed"), - ], - help_text="Status to change to after closing date", - max_length=20, - null=True, - ), - ), - ("opening_date", models.DateField(blank=True, null=True)), - ("closing_date", models.DateField(blank=True, null=True)), - ("status_since", models.DateField(blank=True, null=True)), - ( - "min_height_in", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "max_height_in", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "capacity_per_hour", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "ride_duration_seconds", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "average_rating", - models.DecimalField( - blank=True, decimal_places=2, max_digits=3, null=True - ), - ), - ], - options={ - "ordering": ["name"], - }, - ), - migrations.CreateModel( - name="RideLocation", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "point", - django.contrib.gis.db.models.fields.PointField( - blank=True, - help_text="Geographic coordinates for ride location (longitude, latitude)", - null=True, - srid=4326, - ), - ), - ( - "park_area", - models.CharField( - blank=True, - db_index=True, - help_text="Themed area or land within the park (e.g., 'Frontierland', 'Tomorrowland')", - max_length=100, - ), - ), - ( - "notes", - models.TextField(blank=True, help_text="General location notes"), - ), - ( - "entrance_notes", - models.TextField( - blank=True, - help_text="Directions to ride entrance, queue location, or navigation tips", - ), - ), - ( - "accessibility_notes", - models.TextField( - blank=True, - help_text="Information about accessible entrances, wheelchair access, etc.", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ], - options={ - "verbose_name": "Ride Location", - "verbose_name_plural": "Ride Locations", - "ordering": ["ride__name"], - }, - ), - migrations.CreateModel( - name="RideModel", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("name", models.CharField(max_length=255)), - ("description", models.TextField(blank=True)), - ( - "category", - models.CharField( - blank=True, - choices=[ - ("", "Select ride type"), - ("RC", "Roller Coaster"), - ("DR", "Dark Ride"), - ("FR", "Flat Ride"), - ("WR", "Water Ride"), - ("TR", "Transport"), - ("OT", "Other"), - ], - default="", - max_length=2, - ), - ), - ], - options={ - "ordering": ["manufacturer", "name"], - }, - ), - migrations.CreateModel( - name="RideReview", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "rating", - models.PositiveSmallIntegerField( - validators=[ - django.core.validators.MinValueValidator(1), - django.core.validators.MaxValueValidator(10), - ] - ), - ), - ("title", models.CharField(max_length=200)), - ("content", models.TextField()), - ("visit_date", models.DateField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("is_published", models.BooleanField(default=True)), - ("moderation_notes", models.TextField(blank=True)), - ("moderated_at", models.DateTimeField(blank=True, null=True)), - ], - options={ - "ordering": ["-created_at"], - }, - ), - migrations.CreateModel( - name="RideReviewEvent", - fields=[ - ( - "pgh_id", - models.AutoField(primary_key=True, serialize=False), - ), - ("pgh_created_at", models.DateTimeField(auto_now_add=True)), - ("pgh_label", models.TextField(help_text="The event label.")), - ("id", models.BigIntegerField()), - ( - "rating", - models.PositiveSmallIntegerField( - validators=[ - django.core.validators.MinValueValidator(1), - django.core.validators.MaxValueValidator(10), - ] - ), - ), - ("title", models.CharField(max_length=200)), - ("content", models.TextField()), - ("visit_date", models.DateField()), - ("created_at", models.DateTimeField(auto_now_add=True)), - ("updated_at", models.DateTimeField(auto_now=True)), - ("is_published", models.BooleanField(default=True)), - ("moderation_notes", models.TextField(blank=True)), - ("moderated_at", models.DateTimeField(blank=True, null=True)), - ], - options={ - "abstract": False, - }, - ), - migrations.CreateModel( - name="RollerCoasterStats", - fields=[ - ( - "id", - models.BigAutoField( - auto_created=True, - primary_key=True, - serialize=False, - verbose_name="ID", - ), - ), - ( - "height_ft", - models.DecimalField( - blank=True, decimal_places=2, max_digits=6, null=True - ), - ), - ( - "length_ft", - models.DecimalField( - blank=True, decimal_places=2, max_digits=7, null=True - ), - ), - ( - "speed_mph", - models.DecimalField( - blank=True, decimal_places=2, max_digits=5, null=True - ), - ), - ("inversions", models.PositiveIntegerField(default=0)), - ( - "ride_time_seconds", - models.PositiveIntegerField(blank=True, null=True), - ), - ("track_type", models.CharField(blank=True, max_length=255)), - ( - "track_material", - models.CharField( - blank=True, - choices=[ - ("STEEL", "Steel"), - ("WOOD", "Wood"), - ("HYBRID", "Hybrid"), - ], - default="STEEL", - max_length=20, - ), - ), - ( - "roller_coaster_type", - models.CharField( - blank=True, - choices=[ - ("SITDOWN", "Sit Down"), - ("INVERTED", "Inverted"), - ("FLYING", "Flying"), - ("STANDUP", "Stand Up"), - ("WING", "Wing"), - ("DIVE", "Dive"), - ("FAMILY", "Family"), - ("WILD_MOUSE", "Wild Mouse"), - ("SPINNING", "Spinning"), - ("FOURTH_DIMENSION", "4th Dimension"), - ("OTHER", "Other"), - ], - default="SITDOWN", - max_length=20, - ), - ), - ( - "max_drop_height_ft", - models.DecimalField( - blank=True, decimal_places=2, max_digits=6, null=True - ), - ), - ( - "launch_type", - models.CharField( - choices=[ - ("CHAIN", "Chain Lift"), - ("LSM", "LSM Launch"), - ("HYDRAULIC", "Hydraulic Launch"), - ("GRAVITY", "Gravity"), - ("OTHER", "Other"), - ], - default="CHAIN", - max_length=20, - ), - ), - ("train_style", models.CharField(blank=True, max_length=255)), - ( - "trains_count", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "cars_per_train", - models.PositiveIntegerField(blank=True, null=True), - ), - ( - "seats_per_car", - models.PositiveIntegerField(blank=True, null=True), - ), - ], - options={ - "verbose_name": "Roller Coaster Statistics", - "verbose_name_plural": "Roller Coaster Statistics", - }, - ), - pgtrigger.migrations.AddTrigger( - model_name="company", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "rides_companyevent" ("coasters_count", "created_at", "description", "founded_date", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."coasters_count", NEW."created_at", NEW."description", NEW."founded_date", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_e7194", - table="rides_company", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="company", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "rides_companyevent" ("coasters_count", "created_at", "description", "founded_date", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."coasters_count", NEW."created_at", NEW."description", NEW."founded_date", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_456a8", - table="rides_company", - when="AFTER", - ), - ), - ), - migrations.AddField( - model_name="companyevent", - name="pgh_context", - field=models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - migrations.AddField( - model_name="companyevent", - name="pgh_obj", - field=models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="rides.company", - ), - ), - migrations.AddField( - model_name="ride", - name="designer", - field=models.ForeignKey( - blank=True, - limit_choices_to={"roles__contains": ["DESIGNER"]}, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="designed_rides", - to="rides.company", - ), - ), - migrations.AddField( - model_name="ride", - name="manufacturer", - field=models.ForeignKey( - blank=True, - limit_choices_to={"roles__contains": ["MANUFACTURER"]}, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="manufactured_rides", - to="rides.company", - ), - ), - migrations.AddField( - model_name="ride", - name="park", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="rides", - to="parks.park", - ), - ), - migrations.AddField( - model_name="ride", - name="park_area", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="rides", - to="parks.parkarea", - ), - ), - migrations.AddField( - model_name="ridelocation", - name="ride", - field=models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - related_name="ride_location", - to="rides.ride", - ), - ), - migrations.AddField( - model_name="ridemodel", - name="manufacturer", - field=models.ForeignKey( - blank=True, - limit_choices_to={"roles__contains": ["MANUFACTURER"]}, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="ride_models", - to="rides.company", - ), - ), - migrations.AddField( - model_name="ride", - name="ride_model", - field=models.ForeignKey( - blank=True, - help_text="The specific model/type of this ride", - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="rides", - to="rides.ridemodel", - ), - ), - migrations.AddField( - model_name="ridereview", - name="moderated_by", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="moderated_ride_reviews", - to=settings.AUTH_USER_MODEL, - ), - ), - migrations.AddField( - model_name="ridereview", - name="ride", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="reviews", - to="rides.ride", - ), - ), - migrations.AddField( - model_name="ridereview", - name="user", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="ride_reviews", - to=settings.AUTH_USER_MODEL, - ), - ), - migrations.AddField( - model_name="ridereviewevent", - name="moderated_by", - field=models.ForeignKey( - blank=True, - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - migrations.AddField( - model_name="ridereviewevent", - name="pgh_context", - field=models.ForeignKey( - db_constraint=False, - null=True, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - to="pghistory.context", - ), - ), - migrations.AddField( - model_name="ridereviewevent", - name="pgh_obj", - field=models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="events", - to="rides.ridereview", - ), - ), - migrations.AddField( - model_name="ridereviewevent", - name="ride", - field=models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to="rides.ride", - ), - ), - migrations.AddField( - model_name="ridereviewevent", - name="user", - field=models.ForeignKey( - db_constraint=False, - on_delete=django.db.models.deletion.DO_NOTHING, - related_name="+", - related_query_name="+", - to=settings.AUTH_USER_MODEL, - ), - ), - migrations.AddField( - model_name="rollercoasterstats", - name="ride", - field=models.OneToOneField( - on_delete=django.db.models.deletion.CASCADE, - related_name="coaster_stats", - to="rides.ride", - ), - ), - migrations.AddIndex( - model_name="ridelocation", - index=models.Index( - fields=["park_area"], name="rides_ridel_park_ar_26c90c_idx" - ), - ), - migrations.AlterUniqueTogether( - name="ridemodel", - unique_together={("manufacturer", "name")}, - ), - migrations.AlterUniqueTogether( - name="ride", - unique_together={("park", "slug")}, - ), - migrations.AlterUniqueTogether( - name="ridereview", - unique_together={("ride", "user")}, - ), - pgtrigger.migrations.AddTrigger( - model_name="ridereview", - trigger=pgtrigger.compiler.Trigger( - name="insert_insert", - sql=pgtrigger.compiler.UpsertTriggerSql( - func='INSERT INTO "rides_ridereviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "ride_id", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rating", NEW."ride_id", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="INSERT", - pgid="pgtrigger_insert_insert_33237", - table="rides_ridereview", - when="AFTER", - ), - ), - ), - pgtrigger.migrations.AddTrigger( - model_name="ridereview", - trigger=pgtrigger.compiler.Trigger( - name="update_update", - sql=pgtrigger.compiler.UpsertTriggerSql( - condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)", - func='INSERT INTO "rides_ridereviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "ride_id", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rating", NEW."ride_id", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;', - hash="[AWS-SECRET-REMOVED]", - operation="UPDATE", - pgid="pgtrigger_update_update_90298", - table="rides_ridereview", - when="AFTER", - ), - ), - ), - ] diff --git a/rides/migrations/0002_add_business_constraints.py b/rides/migrations/0002_add_business_constraints.py deleted file mode 100644 index 8359bd7f..00000000 --- a/rides/migrations/0002_add_business_constraints.py +++ /dev/null @@ -1,142 +0,0 @@ -# Generated by Django 5.2.5 on 2025-08-16 17:42 - -import django.db.models.functions.datetime -from django.conf import settings -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("parks", "0003_add_business_constraints"), - ("rides", "0001_initial"), - migrations.swappable_dependency(settings.AUTH_USER_MODEL), - ] - - operations = [ - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("closing_date__isnull", True), - ("opening_date__isnull", True), - ("closing_date__gte", models.F("opening_date")), - _connector="OR", - ), - name="ride_closing_after_opening", - violation_error_message="Closing date must be after opening date", - ), - ), - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("min_height_in__isnull", True), - ("max_height_in__isnull", True), - ("min_height_in__lte", models.F("max_height_in")), - _connector="OR", - ), - name="ride_height_requirements_logical", - violation_error_message="Minimum height cannot exceed maximum height", - ), - ), - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("min_height_in__isnull", True), - models.Q(("min_height_in__gte", 30), ("min_height_in__lte", 90)), - _connector="OR", - ), - name="ride_min_height_reasonable", - violation_error_message="Minimum height must be between 30 and 90 inches", - ), - ), - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("max_height_in__isnull", True), - models.Q(("max_height_in__gte", 30), ("max_height_in__lte", 90)), - _connector="OR", - ), - name="ride_max_height_reasonable", - violation_error_message="Maximum height must be between 30 and 90 inches", - ), - ), - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("average_rating__isnull", True), - models.Q(("average_rating__gte", 1), ("average_rating__lte", 10)), - _connector="OR", - ), - name="ride_rating_range", - violation_error_message="Average rating must be between 1 and 10", - ), - ), - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("capacity_per_hour__isnull", True), - ("capacity_per_hour__gt", 0), - _connector="OR", - ), - name="ride_capacity_positive", - violation_error_message="Hourly capacity must be positive", - ), - ), - migrations.AddConstraint( - model_name="ride", - constraint=models.CheckConstraint( - condition=models.Q( - ("ride_duration_seconds__isnull", True), - ("ride_duration_seconds__gt", 0), - _connector="OR", - ), - name="ride_duration_positive", - violation_error_message="Ride duration must be positive", - ), - ), - migrations.AddConstraint( - model_name="ridereview", - constraint=models.CheckConstraint( - condition=models.Q(("rating__gte", 1), ("rating__lte", 10)), - name="ride_review_rating_range", - violation_error_message="Rating must be between 1 and 10", - ), - ), - migrations.AddConstraint( - model_name="ridereview", - constraint=models.CheckConstraint( - condition=models.Q( - ( - "visit_date__lte", - django.db.models.functions.datetime.Now(), - ) - ), - name="ride_review_visit_date_not_future", - violation_error_message="Visit date cannot be in the future", - ), - ), - migrations.AddConstraint( - model_name="ridereview", - constraint=models.CheckConstraint( - condition=models.Q( - models.Q( - ("moderated_at__isnull", True), - ("moderated_by__isnull", True), - ), - models.Q( - ("moderated_at__isnull", False), - ("moderated_by__isnull", False), - ), - _connector="OR", - ), - name="ride_review_moderation_consistency", - violation_error_message="Moderated reviews must have both moderator and moderation timestamp", - ), - ), - ] diff --git a/rides/migrations/__init__.py b/rides/migrations/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rides/models/__init__.py b/rides/models/__init__.py deleted file mode 100644 index e4a4e646..00000000 --- a/rides/models/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Rides app models with clean import interface. - -This module provides a clean import interface for all rides-related models, -enabling imports like: from rides.models import Ride, Manufacturer - -The Company model is aliased as Manufacturer to clarify its role as ride manufacturers, -while maintaining backward compatibility through the Company alias. -""" - -from .rides import Ride, RideModel, RollerCoasterStats, CATEGORY_CHOICES -from .location import RideLocation -from .reviews import RideReview -from .company import Company - -# Alias Company as Manufacturer for clarity -Manufacturer = Company - -__all__ = [ - # Primary models - "Ride", - "RideModel", - "RollerCoasterStats", - "RideLocation", - "RideReview", - # Shared constants - "CATEGORY_CHOICES", - # Company models with clear naming - "Manufacturer", - # Backward compatibility - "Company", # Alias to Manufacturer -] diff --git a/rides/models/company.py b/rides/models/company.py deleted file mode 100644 index 1374e1f7..00000000 --- a/rides/models/company.py +++ /dev/null @@ -1,76 +0,0 @@ -import pghistory -from django.contrib.postgres.fields import ArrayField -from django.db import models -from django.urls import reverse -from django.utils.text import slugify - -from core.history import HistoricalSlug -from core.models import TrackedModel - - -@pghistory.track() -class Company(TrackedModel): - class CompanyRole(models.TextChoices): - MANUFACTURER = "MANUFACTURER", "Ride Manufacturer" - DESIGNER = "DESIGNER", "Ride Designer" - OPERATOR = "OPERATOR", "Park Operator" - PROPERTY_OWNER = "PROPERTY_OWNER", "Property Owner" - - name = models.CharField(max_length=255) - slug = models.SlugField(max_length=255, unique=True) - roles = ArrayField( - models.CharField(max_length=20, choices=CompanyRole.choices), - default=list, - blank=True, - ) - description = models.TextField(blank=True) - website = models.URLField(blank=True) - - # General company info - founded_date = models.DateField(null=True, blank=True) - - # Manufacturer-specific fields - rides_count = models.IntegerField(default=0) - coasters_count = models.IntegerField(default=0) - - def __str__(self): - return self.name - - def save(self, *args, **kwargs): - if not self.slug: - self.slug = slugify(self.name) - super().save(*args, **kwargs) - - def get_absolute_url(self): - # This will need to be updated to handle different roles - return reverse("companies:detail", kwargs={"slug": self.slug}) - return "#" - - @classmethod - def get_by_slug(cls, slug): - """Get company by current or historical slug""" - try: - return cls.objects.get(slug=slug), False - except cls.DoesNotExist: - # Check pghistory first - history_model = cls.get_history_model() - history_entry = ( - history_model.objects.filter(slug=slug) - .order_by("-pgh_created_at") - .first() - ) - if history_entry: - return cls.objects.get(id=history_entry.pgh_obj_id), True - - # Check manual slug history as fallback - try: - historical = HistoricalSlug.objects.get( - content_type__model="company", slug=slug - ) - return cls.objects.get(pk=historical.object_id), True - except (HistoricalSlug.DoesNotExist, cls.DoesNotExist): - raise cls.DoesNotExist("No company found with this slug") - - class Meta: - ordering = ["name"] - verbose_name_plural = "Companies" diff --git a/rides/models/location.py b/rides/models/location.py deleted file mode 100644 index 8590a23a..00000000 --- a/rides/models/location.py +++ /dev/null @@ -1,124 +0,0 @@ -from django.contrib.gis.db import models as gis_models -from django.db import models -from django.contrib.gis.geos import Point - - -class RideLocation(models.Model): - """ - Lightweight location tracking for individual rides within parks. - Optional coordinates with focus on practical navigation information. - """ - - # Relationships - ride = models.OneToOneField( - "rides.Ride", on_delete=models.CASCADE, related_name="ride_location" - ) - - # Optional Spatial Data - keep it simple with single point - point = gis_models.PointField( - srid=4326, - null=True, - blank=True, - help_text="Geographic coordinates for ride location (longitude, latitude)", - ) - - # Park Area Information - park_area = models.CharField( - max_length=100, - blank=True, - db_index=True, - help_text=( - "Themed area or land within the park (e.g., 'Frontierland', 'Tomorrowland')" - ), - ) - - # General notes field to match database schema - notes = models.TextField(blank=True, help_text="General location notes") - - # Navigation and Entrance Information - entrance_notes = models.TextField( - blank=True, - help_text="Directions to ride entrance, queue location, or navigation tips", - ) - - # Accessibility Information - accessibility_notes = models.TextField( - blank=True, - help_text="Information about accessible entrances, wheelchair access, etc.", - ) - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - @property - def latitude(self): - """Return latitude from point field for backward compatibility.""" - if self.point: - return self.point.y - return None - - @property - def longitude(self): - """Return longitude from point field for backward compatibility.""" - if self.point: - return self.point.x - return None - - @property - def coordinates(self): - """Return (latitude, longitude) tuple.""" - if self.point: - return (self.latitude, self.longitude) - return (None, None) - - @property - def has_coordinates(self): - """Check if coordinates are set.""" - return self.point is not None - - def set_coordinates(self, latitude, longitude): - """ - Set the location's point from latitude and longitude coordinates. - Validates coordinate ranges. - """ - if latitude is None or longitude is None: - self.point = None - return - - if not -90 <= latitude <= 90: - raise ValueError("Latitude must be between -90 and 90.") - if not -180 <= longitude <= 180: - raise ValueError("Longitude must be between -180 and 180.") - - self.point = Point(longitude, latitude, srid=4326) - - def distance_to_park_location(self): - """ - Calculate distance to parent park's location if both have coordinates. - Returns distance in kilometers. - """ - if not self.point: - return None - - park_location = getattr(self.ride.park, "location", None) - if not park_location or not park_location.point: - return None - - # Use geodetic distance calculation which returns meters, convert to km - distance_m = self.point.distance(park_location.point) - return distance_m / 1000.0 - - def __str__(self): - area_str = f" in {self.park_area}" if self.park_area else "" - return f"Location for {self.ride.name}{area_str}" - - class Meta: - verbose_name = "Ride Location" - verbose_name_plural = "Ride Locations" - ordering = ["ride__name"] - indexes = [ - models.Index(fields=["park_area"]), - # Spatial index will be created automatically for PostGIS - # PointField - ] diff --git a/rides/models/reviews.py b/rides/models/reviews.py deleted file mode 100644 index b5410839..00000000 --- a/rides/models/reviews.py +++ /dev/null @@ -1,75 +0,0 @@ -from django.db import models -from django.db.models import functions -from django.core.validators import MinValueValidator, MaxValueValidator -from core.history import TrackedModel -import pghistory - - -@pghistory.track() -class RideReview(TrackedModel): - """ - A review of a ride. - """ - - ride = models.ForeignKey( - "rides.Ride", on_delete=models.CASCADE, related_name="reviews" - ) - user = models.ForeignKey( - "accounts.User", on_delete=models.CASCADE, related_name="ride_reviews" - ) - rating = models.PositiveSmallIntegerField( - validators=[MinValueValidator(1), MaxValueValidator(10)] - ) - title = models.CharField(max_length=200) - content = models.TextField() - visit_date = models.DateField() - - # Metadata - created_at = models.DateTimeField(auto_now_add=True) - updated_at = models.DateTimeField(auto_now=True) - - # Moderation - is_published = models.BooleanField(default=True) - moderation_notes = models.TextField(blank=True) - moderated_by = models.ForeignKey( - "accounts.User", - on_delete=models.SET_NULL, - null=True, - blank=True, - related_name="moderated_ride_reviews", - ) - moderated_at = models.DateTimeField(null=True, blank=True) - - class Meta: - ordering = ["-created_at"] - unique_together = ["ride", "user"] - constraints = [ - # Business rule: Rating must be between 1 and 10 (database level - # enforcement) - models.CheckConstraint( - name="ride_review_rating_range", - check=models.Q(rating__gte=1) & models.Q(rating__lte=10), - violation_error_message="Rating must be between 1 and 10", - ), - # Business rule: Visit date cannot be in the future - models.CheckConstraint( - name="ride_review_visit_date_not_future", - check=models.Q(visit_date__lte=functions.Now()), - violation_error_message="Visit date cannot be in the future", - ), - # Business rule: If moderated, must have moderator and timestamp - models.CheckConstraint( - name="ride_review_moderation_consistency", - check=models.Q(moderated_by__isnull=True, moderated_at__isnull=True) - | models.Q( - moderated_by__isnull=False, moderated_at__isnull=False - ), - violation_error_message=( - "Moderated reviews must have both moderator and moderation " - "timestamp" - ), - ), - ] - - def __str__(self): - return f"Review of {self.ride.name} by {self.user.username}" diff --git a/rides/models/rides.py b/rides/models/rides.py deleted file mode 100644 index 1ca8d2e1..00000000 --- a/rides/models/rides.py +++ /dev/null @@ -1,277 +0,0 @@ -from django.db import models -from django.utils.text import slugify -from django.contrib.contenttypes.fields import GenericRelation -from core.models import TrackedModel -from .company import Company - -# Shared choices that will be used by multiple models -CATEGORY_CHOICES = [ - ("", "Select ride type"), - ("RC", "Roller Coaster"), - ("DR", "Dark Ride"), - ("FR", "Flat Ride"), - ("WR", "Water Ride"), - ("TR", "Transport"), - ("OT", "Other"), -] - - -class RideModel(TrackedModel): - """ - Represents a specific model/type of ride that can be manufactured by different - companies. - For example: B&M Dive Coaster, Vekoma Boomerang, etc. - """ - - name = models.CharField(max_length=255) - manufacturer = models.ForeignKey( - Company, - on_delete=models.SET_NULL, - related_name="ride_models", - null=True, - blank=True, - limit_choices_to={"roles__contains": ["MANUFACTURER"]}, - ) - description = models.TextField(blank=True) - category = models.CharField( - max_length=2, choices=CATEGORY_CHOICES, default="", blank=True - ) - - class Meta: - ordering = ["manufacturer", "name"] - unique_together = ["manufacturer", "name"] - - def __str__(self) -> str: - return ( - self.name - if not self.manufacturer - else f"{self.manufacturer.name} {self.name}" - ) - - -class Ride(TrackedModel): - """Model for individual ride installations at parks""" - - STATUS_CHOICES = [ - ("", "Select status"), - ("OPERATING", "Operating"), - ("CLOSED_TEMP", "Temporarily Closed"), - ("SBNO", "Standing But Not Operating"), - ("CLOSING", "Closing"), - ("CLOSED_PERM", "Permanently Closed"), - ("UNDER_CONSTRUCTION", "Under Construction"), - ("DEMOLISHED", "Demolished"), - ("RELOCATED", "Relocated"), - ] - - POST_CLOSING_STATUS_CHOICES = [ - ("SBNO", "Standing But Not Operating"), - ("CLOSED_PERM", "Permanently Closed"), - ] - - name = models.CharField(max_length=255) - slug = models.SlugField(max_length=255) - description = models.TextField(blank=True) - park = models.ForeignKey( - "parks.Park", on_delete=models.CASCADE, related_name="rides" - ) - park_area = models.ForeignKey( - "parks.ParkArea", - on_delete=models.SET_NULL, - related_name="rides", - null=True, - blank=True, - ) - category = models.CharField( - max_length=2, choices=CATEGORY_CHOICES, default="", blank=True - ) - manufacturer = models.ForeignKey( - Company, - on_delete=models.SET_NULL, - null=True, - blank=True, - related_name="manufactured_rides", - limit_choices_to={"roles__contains": ["MANUFACTURER"]}, - ) - designer = models.ForeignKey( - Company, - on_delete=models.SET_NULL, - related_name="designed_rides", - null=True, - blank=True, - limit_choices_to={"roles__contains": ["DESIGNER"]}, - ) - ride_model = models.ForeignKey( - "RideModel", - on_delete=models.SET_NULL, - related_name="rides", - null=True, - blank=True, - help_text="The specific model/type of this ride", - ) - status = models.CharField( - max_length=20, choices=STATUS_CHOICES, default="OPERATING" - ) - post_closing_status = models.CharField( - max_length=20, - choices=POST_CLOSING_STATUS_CHOICES, - null=True, - blank=True, - help_text="Status to change to after closing date", - ) - opening_date = models.DateField(null=True, blank=True) - closing_date = models.DateField(null=True, blank=True) - status_since = models.DateField(null=True, blank=True) - min_height_in = models.PositiveIntegerField(null=True, blank=True) - max_height_in = models.PositiveIntegerField(null=True, blank=True) - capacity_per_hour = models.PositiveIntegerField(null=True, blank=True) - ride_duration_seconds = models.PositiveIntegerField(null=True, blank=True) - average_rating = models.DecimalField( - max_digits=3, decimal_places=2, null=True, blank=True - ) - photos = GenericRelation("media.Photo") - - class Meta: - ordering = ["name"] - unique_together = ["park", "slug"] - constraints = [ - # Business rule: Closing date must be after opening date - models.CheckConstraint( - name="ride_closing_after_opening", - check=models.Q(closing_date__isnull=True) - | models.Q(opening_date__isnull=True) - | models.Q(closing_date__gte=models.F("opening_date")), - violation_error_message="Closing date must be after opening date", - ), - # Business rule: Height requirements must be logical - models.CheckConstraint( - name="ride_height_requirements_logical", - check=models.Q(min_height_in__isnull=True) - | models.Q(max_height_in__isnull=True) - | models.Q(min_height_in__lte=models.F("max_height_in")), - violation_error_message="Minimum height cannot exceed maximum height", - ), - # Business rule: Height requirements must be reasonable (between 30 - # and 90 inches) - models.CheckConstraint( - name="ride_min_height_reasonable", - check=models.Q(min_height_in__isnull=True) - | (models.Q(min_height_in__gte=30) & models.Q(min_height_in__lte=90)), - violation_error_message=( - "Minimum height must be between 30 and 90 inches" - ), - ), - models.CheckConstraint( - name="ride_max_height_reasonable", - check=models.Q(max_height_in__isnull=True) - | (models.Q(max_height_in__gte=30) & models.Q(max_height_in__lte=90)), - violation_error_message=( - "Maximum height must be between 30 and 90 inches" - ), - ), - # Business rule: Rating must be between 1 and 10 - models.CheckConstraint( - name="ride_rating_range", - check=models.Q(average_rating__isnull=True) - | (models.Q(average_rating__gte=1) & models.Q(average_rating__lte=10)), - violation_error_message="Average rating must be between 1 and 10", - ), - # Business rule: Capacity and duration must be positive - models.CheckConstraint( - name="ride_capacity_positive", - check=models.Q(capacity_per_hour__isnull=True) - | models.Q(capacity_per_hour__gt=0), - violation_error_message="Hourly capacity must be positive", - ), - models.CheckConstraint( - name="ride_duration_positive", - check=models.Q(ride_duration_seconds__isnull=True) - | models.Q(ride_duration_seconds__gt=0), - violation_error_message="Ride duration must be positive", - ), - ] - - def __str__(self) -> str: - return f"{self.name} at {self.park.name}" - - def save(self, *args, **kwargs) -> None: - if not self.slug: - self.slug = slugify(self.name) - super().save(*args, **kwargs) - - -class RollerCoasterStats(models.Model): - """Model for tracking roller coaster specific statistics""" - - TRACK_MATERIAL_CHOICES = [ - ("STEEL", "Steel"), - ("WOOD", "Wood"), - ("HYBRID", "Hybrid"), - ] - - COASTER_TYPE_CHOICES = [ - ("SITDOWN", "Sit Down"), - ("INVERTED", "Inverted"), - ("FLYING", "Flying"), - ("STANDUP", "Stand Up"), - ("WING", "Wing"), - ("DIVE", "Dive"), - ("FAMILY", "Family"), - ("WILD_MOUSE", "Wild Mouse"), - ("SPINNING", "Spinning"), - ("FOURTH_DIMENSION", "4th Dimension"), - ("OTHER", "Other"), - ] - - LAUNCH_CHOICES = [ - ("CHAIN", "Chain Lift"), - ("LSM", "LSM Launch"), - ("HYDRAULIC", "Hydraulic Launch"), - ("GRAVITY", "Gravity"), - ("OTHER", "Other"), - ] - - ride = models.OneToOneField( - Ride, on_delete=models.CASCADE, related_name="coaster_stats" - ) - height_ft = models.DecimalField( - max_digits=6, decimal_places=2, null=True, blank=True - ) - length_ft = models.DecimalField( - max_digits=7, decimal_places=2, null=True, blank=True - ) - speed_mph = models.DecimalField( - max_digits=5, decimal_places=2, null=True, blank=True - ) - inversions = models.PositiveIntegerField(default=0) - ride_time_seconds = models.PositiveIntegerField(null=True, blank=True) - track_type = models.CharField(max_length=255, blank=True) - track_material = models.CharField( - max_length=20, - choices=TRACK_MATERIAL_CHOICES, - default="STEEL", - blank=True, - ) - roller_coaster_type = models.CharField( - max_length=20, - choices=COASTER_TYPE_CHOICES, - default="SITDOWN", - blank=True, - ) - max_drop_height_ft = models.DecimalField( - max_digits=6, decimal_places=2, null=True, blank=True - ) - launch_type = models.CharField( - max_length=20, choices=LAUNCH_CHOICES, default="CHAIN" - ) - train_style = models.CharField(max_length=255, blank=True) - trains_count = models.PositiveIntegerField(null=True, blank=True) - cars_per_train = models.PositiveIntegerField(null=True, blank=True) - seats_per_car = models.PositiveIntegerField(null=True, blank=True) - - class Meta: - verbose_name = "Roller Coaster Statistics" - verbose_name_plural = "Roller Coaster Statistics" - - def __str__(self) -> str: - return f"Stats for {self.ride.name}" diff --git a/rides/park_urls.py b/rides/park_urls.py deleted file mode 100644 index 4618c5ec..00000000 --- a/rides/park_urls.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.urls import path -from . import views - -app_name = "rides" - -urlpatterns = [ - # Park-specific list views - path("", views.RideListView.as_view(), name="ride_list"), - path("create/", views.RideCreateView.as_view(), name="ride_create"), - # Park-specific detail views - path("/", views.RideDetailView.as_view(), name="ride_detail"), - path( - "/update/", - views.RideUpdateView.as_view(), - name="ride_update", - ), - path("search/companies/", views.search_companies, name="search_companies"), - # Search endpoints - path("search/models/", views.search_ride_models, name="search_ride_models"), - # HTMX endpoints - path("coaster-fields/", views.show_coaster_fields, name="coaster_fields"), -] diff --git a/rides/selectors.py b/rides/selectors.py deleted file mode 100644 index 21015868..00000000 --- a/rides/selectors.py +++ /dev/null @@ -1,303 +0,0 @@ -""" -Selectors for ride-related data retrieval. -Following Django styleguide pattern for separating data access from business logic. -""" - -from typing import Optional, Dict, Any -from django.db.models import QuerySet, Q, Count, Avg, Prefetch -from django.contrib.gis.geos import Point -from django.contrib.gis.measure import Distance - -from .models import Ride, RideModel, RideReview - - -def ride_list_for_display( - *, filters: Optional[Dict[str, Any]] = None -) -> QuerySet[Ride]: - """ - Get rides optimized for list display with related data. - - Args: - filters: Optional dictionary of filter parameters - - Returns: - QuerySet of rides with optimized queries - """ - queryset = ( - Ride.objects.select_related( - "park", - "park__operator", - "manufacturer", - "designer", - "ride_model", - "park_area", - ) - .prefetch_related("park__location", "location") - .annotate(average_rating_calculated=Avg("reviews__rating")) - ) - - if filters: - if "status" in filters: - queryset = queryset.filter(status=filters["status"]) - if "category" in filters: - queryset = queryset.filter(category=filters["category"]) - if "manufacturer" in filters: - queryset = queryset.filter(manufacturer=filters["manufacturer"]) - if "park" in filters: - queryset = queryset.filter(park=filters["park"]) - if "search" in filters: - search_term = filters["search"] - queryset = queryset.filter( - Q(name__icontains=search_term) - | Q(description__icontains=search_term) - | Q(park__name__icontains=search_term) - ) - - return queryset.order_by("park__name", "name") - - -def ride_detail_optimized(*, slug: str, park_slug: str) -> Ride: - """ - Get a single ride with all related data optimized for detail view. - - Args: - slug: Ride slug identifier - park_slug: Park slug for the ride - - Returns: - Ride instance with optimized prefetches - - Raises: - Ride.DoesNotExist: If ride doesn't exist - """ - return ( - Ride.objects.select_related( - "park", - "park__operator", - "manufacturer", - "designer", - "ride_model", - "park_area", - ) - .prefetch_related( - "park__location", - "location", - Prefetch( - "reviews", - queryset=RideReview.objects.select_related("user").filter( - is_published=True - ), - ), - "photos", - ) - .get(slug=slug, park__slug=park_slug) - ) - - -def rides_by_category(*, category: str) -> QuerySet[Ride]: - """ - Get all rides in a specific category. - - Args: - category: Ride category code - - Returns: - QuerySet of rides in the category - """ - return ( - Ride.objects.filter(category=category) - .select_related("park", "manufacturer", "designer") - .prefetch_related("park__location") - .annotate(average_rating_calculated=Avg("reviews__rating")) - .order_by("park__name", "name") - ) - - -def rides_by_manufacturer(*, manufacturer_id: int) -> QuerySet[Ride]: - """ - Get all rides manufactured by a specific company. - - Args: - manufacturer_id: Company ID of the manufacturer - - Returns: - QuerySet of rides by the manufacturer - """ - return ( - Ride.objects.filter(manufacturer_id=manufacturer_id) - .select_related("park", "manufacturer", "ride_model") - .prefetch_related("park__location") - .annotate(average_rating_calculated=Avg("reviews__rating")) - .order_by("park__name", "name") - ) - - -def rides_by_designer(*, designer_id: int) -> QuerySet[Ride]: - """ - Get all rides designed by a specific company. - - Args: - designer_id: Company ID of the designer - - Returns: - QuerySet of rides by the designer - """ - return ( - Ride.objects.filter(designer_id=designer_id) - .select_related("park", "designer", "ride_model") - .prefetch_related("park__location") - .annotate(average_rating_calculated=Avg("reviews__rating")) - .order_by("park__name", "name") - ) - - -def rides_in_park(*, park_slug: str) -> QuerySet[Ride]: - """ - Get all rides in a specific park. - - Args: - park_slug: Slug of the park - - Returns: - QuerySet of rides in the park - """ - return ( - Ride.objects.filter(park__slug=park_slug) - .select_related("manufacturer", "designer", "ride_model", "park_area") - .prefetch_related("location") - .annotate(average_rating_calculated=Avg("reviews__rating")) - .order_by("park_area__name", "name") - ) - - -def rides_near_location( - *, point: Point, distance_km: float = 50, limit: int = 10 -) -> QuerySet[Ride]: - """ - Get rides near a specific geographic location. - - Args: - point: Geographic point (longitude, latitude) - distance_km: Maximum distance in kilometers - limit: Maximum number of results - - Returns: - QuerySet of nearby rides ordered by distance - """ - return ( - Ride.objects.filter( - park__location__coordinates__distance_lte=( - point, - Distance(km=distance_km), - ) - ) - .select_related("park", "manufacturer") - .prefetch_related("park__location") - .distance(point) - .order_by("distance")[:limit] - ) - - -def ride_models_with_installations() -> QuerySet[RideModel]: - """ - Get ride models that have installations with counts. - - Returns: - QuerySet of ride models with installation counts - """ - return ( - RideModel.objects.annotate(installation_count=Count("rides")) - .filter(installation_count__gt=0) - .select_related("manufacturer") - .order_by("-installation_count", "name") - ) - - -def ride_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet[Ride]: - """ - Get rides matching a search query for autocomplete functionality. - - Args: - query: Search string - limit: Maximum number of results - - Returns: - QuerySet of matching rides for autocomplete - """ - return ( - Ride.objects.filter( - Q(name__icontains=query) - | Q(park__name__icontains=query) - | Q(manufacturer__name__icontains=query) - ) - .select_related("park", "manufacturer") - .prefetch_related("park__location") - .order_by("park__name", "name")[:limit] - ) - - -def rides_with_recent_reviews(*, days: int = 30) -> QuerySet[Ride]: - """ - Get rides that have received reviews in the last N days. - - Args: - days: Number of days to look back for reviews - - Returns: - QuerySet of rides with recent reviews - """ - from django.utils import timezone - from datetime import timedelta - - cutoff_date = timezone.now() - timedelta(days=days) - - return ( - Ride.objects.filter( - reviews__created_at__gte=cutoff_date, reviews__is_published=True - ) - .select_related("park", "manufacturer") - .prefetch_related("park__location") - .annotate( - recent_review_count=Count( - "reviews", filter=Q(reviews__created_at__gte=cutoff_date) - ) - ) - .order_by("-recent_review_count") - .distinct() - ) - - -def ride_statistics_by_category() -> Dict[str, Any]: - """ - Get ride statistics grouped by category. - - Returns: - Dictionary containing ride statistics by category - """ - from .models import CATEGORY_CHOICES - - stats = {} - for category_code, category_name in CATEGORY_CHOICES: - if category_code: # Skip empty choice - count = Ride.objects.filter(category=category_code).count() - stats[category_code] = {"name": category_name, "count": count} - - return stats - - -def rides_by_opening_year(*, year: int) -> QuerySet[Ride]: - """ - Get rides that opened in a specific year. - - Args: - year: The opening year - - Returns: - QuerySet of rides that opened in the specified year - """ - return ( - Ride.objects.filter(opening_date__year=year) - .select_related("park", "manufacturer") - .prefetch_related("park__location") - .order_by("opening_date", "park__name", "name") - ) diff --git a/rides/signals.py b/rides/signals.py deleted file mode 100644 index 38f92148..00000000 --- a/rides/signals.py +++ /dev/null @@ -1,17 +0,0 @@ -from django.db.models.signals import pre_save -from django.dispatch import receiver -from django.utils import timezone -from .models import Ride - - -@receiver(pre_save, sender=Ride) -def handle_ride_status(sender, instance, **kwargs): - """Handle ride status changes based on closing date""" - if instance.closing_date: - today = timezone.now().date() - - # If we've reached the closing date and status is "Closing" - if today >= instance.closing_date and instance.status == "CLOSING": - # Change to the selected post-closing status - instance.status = instance.post_closing_status or "SBNO" - instance.status_since = instance.closing_date diff --git a/rides/templates/rides/partials/company_search_results.html b/rides/templates/rides/partials/company_search_results.html deleted file mode 100644 index 4bcd62d7..00000000 --- a/rides/templates/rides/partials/company_search_results.html +++ /dev/null @@ -1,3 +0,0 @@ -{% for company in companies %} -
{{ company.name }}
-{% endfor %} \ No newline at end of file diff --git a/rides/templates/rides/partials/search_suggestions.html b/rides/templates/rides/partials/search_suggestions.html deleted file mode 100644 index 2259573e..00000000 --- a/rides/templates/rides/partials/search_suggestions.html +++ /dev/null @@ -1,26 +0,0 @@ -{% if suggestions %} -
- {% for suggestion in suggestions %} -
- {% if suggestion.type == 'ride' %} - 🎢 - {{ suggestion.text }} - ({{ suggestion.count }} rides) - {% elif suggestion.type == 'park' %} - 🎪 - {{ suggestion.text }} - {% if suggestion.location %} - {{ suggestion.location }} - {% endif %} - {% elif suggestion.type == 'category' %} - 📂 - {{ suggestion.text }} - ({{ suggestion.count }} rides) - {% endif %} -
- {% endfor %} -
-{% endif %} \ No newline at end of file diff --git a/rides/templates/rides/ride_list.html b/rides/templates/rides/ride_list.html deleted file mode 100644 index cd1a2d15..00000000 --- a/rides/templates/rides/ride_list.html +++ /dev/null @@ -1,214 +0,0 @@ -{% extends "base/base.html" %} -{% load static %} -{% load ride_tags %} - -{% block title %} - {% if park %} - Rides at {{ park.name }} - ThrillWiki - {% else %} - All Rides - ThrillWiki - {% endif %} -{% endblock %} - -{% block content %} -
-
-

- {% if park %} - Rides at {{ park.name }} - {% else %} - All Rides - {% endif %} -

- - {# Search Section #} -
-
- - -
- -
-
- - {# Search Suggestions #} -
-
-
- - {# Quick Filter Buttons #} -
- - - {% for code, name in category_choices %} - - {% endfor %} -
- - {# Active Filter Tags #} -
- {% if request.GET.q %} - - Search: {{ request.GET.q }} - - - {% endif %} - {% if request.GET.category %} - - Category: {{ request.GET.category|get_category_display }} - - - {% endif %} - {% if request.GET.operating %} - - Operating Only - - - {% endif %} -
-
- - {# Results Section #} -
- {% include "rides/partials/ride_list_results.html" %} -
-
-{% endblock %} - -{% block extra_js %} - - - -{% endblock %} \ No newline at end of file diff --git a/rides/templatetags/__init__.py b/rides/templatetags/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/rides/templatetags/ride_tags.py b/rides/templatetags/ride_tags.py deleted file mode 100644 index 96f58316..00000000 --- a/rides/templatetags/ride_tags.py +++ /dev/null @@ -1,31 +0,0 @@ -from django import template -from django.templatetags.static import static -from ..models import CATEGORY_CHOICES - -register = template.Library() - - -@register.simple_tag -def get_ride_placeholder_image(category): - """Return placeholder image based on ride category""" - category_images = { - "RC": "images/placeholders/roller-coaster.jpg", - "DR": "images/placeholders/dark-ride.jpg", - "FR": "images/placeholders/flat-ride.jpg", - "WR": "images/placeholders/water-ride.jpg", - "TR": "images/placeholders/transport.jpg", - "OT": "images/placeholders/other-ride.jpg", - } - return static(category_images.get(category, "images/placeholders/default-ride.jpg")) - - -@register.simple_tag -def get_park_placeholder_image(): - """Return placeholder image for parks""" - return static("images/placeholders/default-park.jpg") - - -@register.filter -def get_category_display(code): - """Convert category code to display name""" - return dict(CATEGORY_CHOICES).get(code, code) diff --git a/rides/tests.py b/rides/tests.py deleted file mode 100644 index a39b155a..00000000 --- a/rides/tests.py +++ /dev/null @@ -1 +0,0 @@ -# Create your tests here. diff --git a/rides/urls.py b/rides/urls.py deleted file mode 100644 index 59d37801..00000000 --- a/rides/urls.py +++ /dev/null @@ -1,64 +0,0 @@ -from django.urls import path -from . import views - -app_name = "rides" - -urlpatterns = [ - # Global list views - path("", views.RideListView.as_view(), name="global_ride_list"), - # Global category views - path( - "roller_coasters/", - views.SingleCategoryListView.as_view(), - {"category": "RC"}, - name="global_roller_coasters", - ), - path( - "dark_rides/", - views.SingleCategoryListView.as_view(), - {"category": "DR"}, - name="global_dark_rides", - ), - path( - "flat_rides/", - views.SingleCategoryListView.as_view(), - {"category": "FR"}, - name="global_flat_rides", - ), - path( - "water_rides/", - views.SingleCategoryListView.as_view(), - {"category": "WR"}, - name="global_water_rides", - ), - path( - "transports/", - views.SingleCategoryListView.as_view(), - {"category": "TR"}, - name="global_transports", - ), - path( - "others/", - views.SingleCategoryListView.as_view(), - {"category": "OT"}, - name="global_others", - ), - # Search endpoints (must come before slug patterns) - path("search/models/", views.search_ride_models, name="search_ride_models"), - path("search/companies/", views.search_companies, name="search_companies"), - # HTMX endpoints (must come before slug patterns) - path("coaster-fields/", views.show_coaster_fields, name="coaster_fields"), - path( - "search-suggestions/", - views.get_search_suggestions, - name="search_suggestions", - ), - # Park-specific URLs - path("create/", views.RideCreateView.as_view(), name="ride_create"), - path("/", views.RideDetailView.as_view(), name="ride_detail"), - path( - "/update/", - views.RideUpdateView.as_view(), - name="ride_update", - ), -] diff --git a/rides/views.py b/rides/views.py deleted file mode 100644 index 603c19a3..00000000 --- a/rides/views.py +++ /dev/null @@ -1,453 +0,0 @@ -from django.views.generic import DetailView, ListView, CreateView, UpdateView -from django.shortcuts import get_object_or_404, render -from django.urls import reverse -from django.db.models import Q -from django.contrib.auth.mixins import LoginRequiredMixin -from django.contrib.contenttypes.models import ContentType -from django.http import HttpRequest, HttpResponse, Http404 -from django.db.models import Count -from .models import Ride, RideModel, CATEGORY_CHOICES, Company -from .forms import RideForm, RideSearchForm -from parks.models import Park -from moderation.mixins import EditSubmissionMixin, HistoryMixin -from moderation.models import EditSubmission - - -class ParkContextRequired: - """Mixin to require park context for views""" - - def dispatch(self, request, *args, **kwargs): - if "park_slug" not in self.kwargs: - raise Http404("Park context is required") - return super().dispatch(request, *args, **kwargs) - - -def show_coaster_fields(request: HttpRequest) -> HttpResponse: - """Show roller coaster specific fields based on category selection""" - category = request.GET.get("category") - if category != "RC": # Only show for roller coasters - return HttpResponse("") - return render(request, "rides/partials/coaster_fields.html") - - -class RideDetailView(HistoryMixin, DetailView): - """View for displaying ride details""" - - model = Ride - template_name = "rides/ride_detail.html" - slug_url_kwarg = "ride_slug" - - def get_queryset(self): - """Get ride for the specific park if park_slug is provided""" - queryset = ( - Ride.objects.all() - .select_related("park", "ride_model", "ride_model__manufacturer") - .prefetch_related("photos") - ) - - if "park_slug" in self.kwargs: - queryset = queryset.filter(park__slug=self.kwargs["park_slug"]) - - return queryset - - def get_context_data(self, **kwargs): - """Add context data""" - context = super().get_context_data(**kwargs) - if "park_slug" in self.kwargs: - context["park_slug"] = self.kwargs["park_slug"] - context["park"] = self.object.park - - return context - - -class RideCreateView(LoginRequiredMixin, ParkContextRequired, CreateView): - """View for creating a new ride""" - - model = Ride - form_class = RideForm - template_name = "rides/ride_form.html" - - def get_success_url(self): - """Get URL to redirect to after successful creation""" - return reverse( - "parks:rides:ride_detail", - kwargs={ - "park_slug": self.park.slug, - "ride_slug": self.object.slug, - }, - ) - - def get_form_kwargs(self): - """Pass park to the form""" - kwargs = super().get_form_kwargs() - self.park = get_object_or_404(Park, slug=self.kwargs["park_slug"]) - kwargs["park"] = self.park - return kwargs - - def get_context_data(self, **kwargs): - """Add park and park_slug to context""" - context = super().get_context_data(**kwargs) - context["park"] = self.park - context["park_slug"] = self.park.slug - context["is_edit"] = False - return context - - def form_valid(self, form): - """Handle form submission including new items""" - # Check for new manufacturer - manufacturer_name = form.cleaned_data.get("manufacturer_search") - if manufacturer_name and not form.cleaned_data.get("manufacturer"): - EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Company), - submission_type="CREATE", - changes={"name": manufacturer_name, "roles": ["MANUFACTURER"]}, - ) - - # Check for new designer - designer_name = form.cleaned_data.get("designer_search") - if designer_name and not form.cleaned_data.get("designer"): - EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Company), - submission_type="CREATE", - changes={"name": designer_name, "roles": ["DESIGNER"]}, - ) - - # Check for new ride model - ride_model_name = form.cleaned_data.get("ride_model_search") - manufacturer = form.cleaned_data.get("manufacturer") - if ride_model_name and not form.cleaned_data.get("ride_model") and manufacturer: - EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(RideModel), - submission_type="CREATE", - changes={ - "name": ride_model_name, - "manufacturer": manufacturer.id, - }, - ) - - return super().form_valid(form) - - -class RideUpdateView( - LoginRequiredMixin, ParkContextRequired, EditSubmissionMixin, UpdateView -): - """View for updating an existing ride""" - - model = Ride - form_class = RideForm - template_name = "rides/ride_form.html" - slug_url_kwarg = "ride_slug" - - def get_success_url(self): - """Get URL to redirect to after successful update""" - return reverse( - "parks:rides:ride_detail", - kwargs={ - "park_slug": self.park.slug, - "ride_slug": self.object.slug, - }, - ) - - def get_queryset(self): - """Get ride for the specific park""" - return Ride.objects.filter(park__slug=self.kwargs["park_slug"]) - - def get_form_kwargs(self): - """Pass park to the form""" - kwargs = super().get_form_kwargs() - self.park = get_object_or_404(Park, slug=self.kwargs["park_slug"]) - kwargs["park"] = self.park - return kwargs - - def get_context_data(self, **kwargs): - """Add park and park_slug to context""" - context = super().get_context_data(**kwargs) - context["park"] = self.park - context["park_slug"] = self.park.slug - context["is_edit"] = True - return context - - def form_valid(self, form): - """Handle form submission including new items""" - # Check for new manufacturer - manufacturer_name = form.cleaned_data.get("manufacturer_search") - if manufacturer_name and not form.cleaned_data.get("manufacturer"): - EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Company), - submission_type="CREATE", - changes={"name": manufacturer_name, "roles": ["MANUFACTURER"]}, - ) - - # Check for new designer - designer_name = form.cleaned_data.get("designer_search") - if designer_name and not form.cleaned_data.get("designer"): - EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(Company), - submission_type="CREATE", - changes={"name": designer_name, "roles": ["DESIGNER"]}, - ) - - # Check for new ride model - ride_model_name = form.cleaned_data.get("ride_model_search") - manufacturer = form.cleaned_data.get("manufacturer") - if ride_model_name and not form.cleaned_data.get("ride_model") and manufacturer: - EditSubmission.objects.create( - user=self.request.user, - content_type=ContentType.objects.get_for_model(RideModel), - submission_type="CREATE", - changes={ - "name": ride_model_name, - "manufacturer": manufacturer.id, - }, - ) - - return super().form_valid(form) - - -class RideListView(ListView): - """View for displaying a list of rides""" - - model = Ride - template_name = "rides/ride_list.html" - context_object_name = "rides" - - def get_queryset(self): - """Get filtered rides based on search and filters""" - queryset = ( - Ride.objects.all() - .select_related("park", "ride_model", "ride_model__manufacturer") - .prefetch_related("photos") - ) - - # Park filter - if "park_slug" in self.kwargs: - self.park = get_object_or_404(Park, slug=self.kwargs["park_slug"]) - queryset = queryset.filter(park=self.park) - - # Search term handling - search = self.request.GET.get("q", "").strip() - if search: - # Split search terms for more flexible matching - search_terms = search.split() - search_query = Q() - - for term in search_terms: - term_query = ( - Q(name__icontains=term) - | Q(park__name__icontains=term) - | Q(description__icontains=term) - ) - search_query &= term_query - - queryset = queryset.filter(search_query) - - # Category filter - category = self.request.GET.get("category") - if category and category != "all": - queryset = queryset.filter(category=category) - - # Operating status filter - if self.request.GET.get("operating") == "true": - queryset = queryset.filter(status="operating") - - return queryset - - def get_template_names(self): - """Return appropriate template based on request type""" - if self.request.htmx: - return ["rides/partials/ride_list_results.html"] - return [self.template_name] - - def get_context_data(self, **kwargs): - """Add park and category choices to context""" - context = super().get_context_data(**kwargs) - if hasattr(self, "park"): - context["park"] = self.park - context["park_slug"] = self.kwargs["park_slug"] - context["category_choices"] = CATEGORY_CHOICES - return context - - -class SingleCategoryListView(ListView): - """View for displaying rides of a specific category""" - - model = Ride - template_name = "rides/park_category_list.html" - context_object_name = "rides" - - def get_queryset(self): - """Get rides filtered by category and optionally by park""" - category = self.kwargs.get("category") - queryset = Ride.objects.filter(category=category).select_related( - "park", "ride_model", "ride_model__manufacturer" - ) - - if "park_slug" in self.kwargs: - self.park = get_object_or_404(Park, slug=self.kwargs["park_slug"]) - queryset = queryset.filter(park=self.park) - - return queryset - - def get_context_data(self, **kwargs): - """Add park and category information to context""" - context = super().get_context_data(**kwargs) - if hasattr(self, "park"): - context["park"] = self.park - context["park_slug"] = self.kwargs["park_slug"] - context["category"] = dict(CATEGORY_CHOICES).get(self.kwargs["category"]) - return context - - -# Alias for parks app to maintain backward compatibility -ParkSingleCategoryListView = SingleCategoryListView - - -def search_companies(request: HttpRequest) -> HttpResponse: - """Search companies and return results for HTMX""" - query = request.GET.get("q", "").strip() - role = request.GET.get("role", "").upper() - - companies = Company.objects.all().order_by("name") - if role: - companies = companies.filter(roles__contains=[role]) - if query: - companies = companies.filter(name__icontains=query) - companies = companies[:10] - - return render( - request, - "rides/partials/company_search_results.html", - {"companies": companies, "search_term": query}, - ) - - -def search_ride_models(request: HttpRequest) -> HttpResponse: - """Search ride models and return results for HTMX""" - query = request.GET.get("q", "").strip() - manufacturer_id = request.GET.get("manufacturer") - - # Show all ride models on click, filter on input - ride_models = RideModel.objects.select_related("manufacturer").order_by("name") - if query: - ride_models = ride_models.filter(name__icontains=query) - if manufacturer_id: - ride_models = ride_models.filter(manufacturer_id=manufacturer_id) - ride_models = ride_models[:10] - - return render( - request, - "rides/partials/ride_model_search_results.html", - { - "ride_models": ride_models, - "search_term": query, - "manufacturer_id": manufacturer_id, - }, - ) - - -def get_search_suggestions(request: HttpRequest) -> HttpResponse: - """Get smart search suggestions for rides - - Returns suggestions including: - - Common matching ride names - - Matching parks - - Matching categories - """ - query = request.GET.get("q", "").strip().lower() - suggestions = [] - - if query: - # Get common ride names - matching_names = ( - Ride.objects.filter(name__icontains=query) - .values("name") - .annotate(count=Count("id")) - .order_by("-count")[:3] - ) - - for match in matching_names: - suggestions.append( - { - "type": "ride", - "text": match["name"], - "count": match["count"], - } - ) - - # Get matching parks - matching_parks = Park.objects.filter( - Q(name__icontains=query) | Q(location__city__icontains=query) - )[:3] - - for park in matching_parks: - suggestions.append( - { - "type": "park", - "text": park.name, - "location": park.location.city if park.location else None, - } - ) - - # Add category matches - for code, name in CATEGORY_CHOICES: - if query in name.lower(): - ride_count = Ride.objects.filter(category=code).count() - suggestions.append( - { - "type": "category", - "code": code, - "text": name, - "count": ride_count, - } - ) - - return render( - request, - "rides/partials/search_suggestions.html", - {"suggestions": suggestions, "query": query}, - ) - - -class RideSearchView(ListView): - """View for ride search functionality with HTMX support.""" - - model = Ride - template_name = "search/partials/ride_search_results.html" - context_object_name = "rides" - paginate_by = 20 - - def get_queryset(self): - """Get filtered rides based on search form.""" - queryset = Ride.objects.select_related("park").order_by("name") - - # Process search form - form = RideSearchForm(self.request.GET) - if form.is_valid(): - ride = form.cleaned_data.get("ride") - if ride: - # If specific ride selected, return just that ride - queryset = queryset.filter(id=ride.id) - else: - # If no specific ride, filter by search term - search_term = self.request.GET.get("ride", "").strip() - if search_term: - queryset = queryset.filter(name__icontains=search_term) - - return queryset - - def get_template_names(self): - """Return appropriate template based on request type.""" - if self.request.htmx: - return ["search/partials/ride_search_results.html"] - return ["search/ride_search.html"] - - def get_context_data(self, **kwargs): - """Add search form to context.""" - context = super().get_context_data(**kwargs) - context["search_form"] = RideSearchForm(self.request.GET) - return context diff --git a/scripts/README.md b/scripts/README.md deleted file mode 100644 index ffb3abc3..00000000 --- a/scripts/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# ThrillWiki Development Scripts - -## Development Server Script - -The `dev_server.sh` script sets up all necessary environment variables and starts the Django development server with proper configuration. - -### Usage - -```bash -# From the project root directory -./scripts/dev_server.sh - -# Or from anywhere -/path/to/thrillwiki_django_no_react/scripts/dev_server.sh -``` - -### What the script does - -1. **Environment Setup**: Sets all required environment variables for local development -2. **Directory Creation**: Creates necessary directories (logs, profiles, media, etc.) -3. **Database Migrations**: Runs pending migrations automatically -4. **Superuser Creation**: Creates a development superuser (admin/admin) if none exists -5. **Static Files**: Collects static files for the application -6. **Tailwind CSS**: Builds Tailwind CSS if npm is available -7. **System Checks**: Runs Django system checks -8. **Server Start**: Starts the Django development server on `http://localhost:8000` - -### Environment Variables Set - -The script automatically sets these environment variables: - -- `DJANGO_SETTINGS_MODULE=config.django.local` -- `DEBUG=True` -- `SECRET_KEY=` -- `ALLOWED_HOSTS=localhost,127.0.0.1,0.0.0.0` -- `DATABASE_URL=postgis://thrillwiki_user:thrillwiki_pass@localhost:5432/thrillwiki_db` -- `CACHE_URL=locmemcache://` -- `CORS_ALLOW_ALL_ORIGINS=True` -- GeoDjango library paths for macOS -- And many more... - -### Prerequisites - -1. **PostgreSQL with PostGIS**: Make sure PostgreSQL with PostGIS extension is running -2. **Database**: Create the database `thrillwiki_db` with user `thrillwiki_user` -3. **uv**: The script uses `uv` to run Django commands -4. **Virtual Environment**: The script will activate `.venv` if it exists - -### Database Setup - -If you need to set up the database: - -```bash -# Install PostgreSQL and PostGIS (macOS with Homebrew) -brew install postgresql postgis - -# Start PostgreSQL -brew services start postgresql - -# Create database and user -psql postgres -c "CREATE USER thrillwiki_user WITH PASSWORD 'thrillwiki_pass';" -psql postgres -c "CREATE DATABASE thrillwiki_db OWNER thrillwiki_user;" -psql -d thrillwiki_db -c "CREATE EXTENSION postgis;" -psql -d thrillwiki_db -c "GRANT ALL PRIVILEGES ON DATABASE thrillwiki_db TO thrillwiki_user;" -``` - -### Access Points - -Once the server is running, you can access: - -- **Main Application**: http://localhost:8000 -- **Admin Interface**: http://localhost:8000/admin/ (admin/admin) -- **Django Silk Profiler**: http://localhost:8000/silk/ -- **API Documentation**: http://localhost:8000/api/docs/ -- **API Redoc**: http://localhost:8000/api/redoc/ - -### Stopping the Server - -Press `Ctrl+C` to stop the development server. - -### Troubleshooting - -1. **Database Connection Issues**: Ensure PostgreSQL is running and the database exists -2. **GeoDjango Library Issues**: Adjust `GDAL_LIBRARY_PATH` and `GEOS_LIBRARY_PATH` if needed -3. **Permission Issues**: Make sure the script is executable with `chmod +x scripts/dev_server.sh` -4. **Virtual Environment**: Ensure your virtual environment is set up with all dependencies - -### Customization - -You can modify the script to: -- Change default database credentials -- Adjust library paths for your system -- Add additional environment variables -- Modify the development server port or host diff --git a/scripts/ci-start.sh b/scripts/ci-start.sh deleted file mode 100755 index fcd33664..00000000 --- a/scripts/ci-start.sh +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/bash - -# ThrillWiki Local CI Start Script -# This script starts the Django development server following project requirements - -set -e # Exit on any error - -# Configuration -PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -LOG_DIR="$PROJECT_DIR/logs" -PID_FILE="$LOG_DIR/django.pid" -LOG_FILE="$LOG_DIR/django.log" - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Logging function -log() { - echo -e "${BLUE}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" -} - -log_success() { - echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" -} - -log_warning() { - echo -e "${YELLOW}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" -} - -log_error() { - echo -e "${RED}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" -} - -# Create logs directory if it doesn't exist -mkdir -p "$LOG_DIR" - -# Change to project directory -cd "$PROJECT_DIR" - -log "Starting ThrillWiki CI deployment..." - -# Check if UV is installed -if ! command -v uv &> /dev/null; then - log_error "UV is not installed. Please install UV first." - exit 1 -fi - -# Stop any existing Django processes on port 8000 -log "Stopping any existing Django processes on port 8000..." -if lsof -ti :8000 >/dev/null 2>&1; then - lsof -ti :8000 | xargs kill -9 2>/dev/null || true - log_success "Stopped existing processes" -else - log "No existing processes found on port 8000" -fi - -# Clean up Python cache files -log "Cleaning up Python cache files..." -find . -type d -name "__pycache__" -exec rm -r {} + 2>/dev/null || true -log_success "Cache files cleaned" - -# Install/update dependencies -log "Installing/updating dependencies with UV..." -uv sync --no-dev || { - log_error "Failed to sync dependencies" - exit 1 -} - -# Run database migrations -log "Running database migrations..." -uv run manage.py migrate || { - log_error "Database migrations failed" - exit 1 -} - -# Collect static files -log "Collecting static files..." -uv run manage.py collectstatic --noinput || { - log_warning "Static file collection failed, continuing anyway" -} - -# Start the development server -log "Starting Django development server with Tailwind..." -log "Server will be available at: http://localhost:8000" -log "Press Ctrl+C to stop the server" - -# Start server and capture PID -uv run manage.py tailwind runserver 0.0.0.0:8000 & -SERVER_PID=$! - -# Save PID to file -echo $SERVER_PID > "$PID_FILE" - -log_success "Django server started with PID: $SERVER_PID" -log "Server logs are being written to: $LOG_FILE" - -# Wait for server to start -sleep 3 - -# Check if server is running -if kill -0 $SERVER_PID 2>/dev/null; then - log_success "Server is running successfully!" - - # Monitor the process - wait $SERVER_PID -else - log_error "Server failed to start" - rm -f "$PID_FILE" - exit 1 -fi - -# Cleanup on exit -cleanup() { - log "Shutting down server..." - if [ -f "$PID_FILE" ]; then - PID=$(cat "$PID_FILE") - if kill -0 $PID 2>/dev/null; then - kill $PID - log_success "Server stopped" - fi - rm -f "$PID_FILE" - fi -} - -trap cleanup EXIT INT TERM \ No newline at end of file diff --git a/scripts/create_initial_data.py b/scripts/create_initial_data.py deleted file mode 100644 index a93d6f85..00000000 --- a/scripts/create_initial_data.py +++ /dev/null @@ -1,108 +0,0 @@ -from django.utils import timezone -from parks.models import Park, ParkLocation -from rides.models import Ride, RideModel, RollerCoasterStats -from rides.models import Manufacturer - -# Create Cedar Point -park, _ = Park.objects.get_or_create( - name="Cedar Point", - slug="cedar-point", - defaults={ - "description": ( - "Cedar Point is a 364-acre amusement park located on a Lake Erie " - "peninsula in Sandusky, Ohio." - ), - "website": "https://www.cedarpoint.com", - "size_acres": 364, - "opening_date": timezone.datetime( - 1870, 1, 1 - ).date(), # Cedar Point opened in 1870 - }, -) - -# Create location for Cedar Point -location, _ = ParkLocation.objects.get_or_create( - park=park, - defaults={ - "street_address": "1 Cedar Point Dr", - "city": "Sandusky", - "state": "OH", - "postal_code": "44870", - "country": "USA", - }, -) -# Set coordinates using the helper method -location.set_coordinates(-82.6839, 41.4822) # longitude, latitude -location.save() - -# Create Intamin as manufacturer -bm, _ = Manufacturer.objects.get_or_create( - name="Intamin", - slug="intamin", - defaults={ - "description": ( - "Intamin Amusement Rides is a design company known for creating " - "some of the most thrilling and innovative roller coasters in the world." - ), - "website": "https://www.intaminworldwide.com", - }, -) - -# Create Giga Coaster model -giga_model, _ = RideModel.objects.get_or_create( - name="Giga Coaster", - manufacturer=bm, - defaults={ - "description": ( - "A roller coaster type characterized by a height between 300–399 feet " - "and a complete circuit." - ), - "category": "RC", # Roller Coaster - }, -) - -# Create Millennium Force -millennium, _ = Ride.objects.get_or_create( - name="Millennium Force", - slug="millennium-force", - defaults={ - "description": ( - "Millennium Force is a steel roller coaster located at Cedar Point " - "amusement park in Sandusky, Ohio. It was built by Intamin of " - "Switzerland and opened on May 13, 2000 as the world's first giga " - "coaster, a class of roller coasters having a height between 300 " - "and 399 feet and a complete circuit." - ), - "park": park, - "category": "RC", - "manufacturer": bm, - "ride_model": giga_model, - "status": "OPERATING", - "opening_date": timezone.datetime(2000, 5, 13).date(), - "min_height_in": 48, # 48 inches minimum height - "capacity_per_hour": 1300, - "ride_duration_seconds": 120, # 2 minutes - }, -) - -# Create stats for Millennium Force -RollerCoasterStats.objects.get_or_create( - ride=millennium, - defaults={ - "height_ft": 310, - "length_ft": 6595, - "speed_mph": 93, - "inversions": 0, - "ride_time_seconds": 120, - "track_material": "STEEL", - "roller_coaster_type": "SITDOWN", - "max_drop_height_ft": 300, - "launch_type": "CHAIN", - "train_style": "Open-air stadium seating", - "trains_count": 3, - "cars_per_train": 9, - "seats_per_car": 4, - }, -) - -print("Initial data created successfully!") diff --git a/scripts/dev_server.sh b/scripts/dev_server.sh deleted file mode 100755 index 3fc96f31..00000000 --- a/scripts/dev_server.sh +++ /dev/null @@ -1,147 +0,0 @@ -#!/bin/bash - -# ThrillWiki Development Server Script -# This script sets up the proper environment variables and runs the Django development server - -set -e # Exit on any error - -echo "🚀 Starting ThrillWiki Development Server..." - -# Change to the project directory (parent of scripts folder) -cd "$(dirname "$0")/.." - -# Set Django environment to local development -export DJANGO_SETTINGS_MODULE="config.django.local" - -# Core Django settings -export DEBUG="True" -export SECRET_KEY="django-insecure-dev-key-not-for-production-$(openssl rand -base64 32 | tr -d "=+/" | cut -c1-25)" - -# Allowed hosts for development -export ALLOWED_HOSTS="localhost,127.0.0.1,0.0.0.0" - -# CSRF trusted origins for development -export CSRF_TRUSTED_ORIGINS="http://localhost:8000,http://127.0.0.1:8000,https://127.0.0.1:8000" - -# Database configuration (PostgreSQL with PostGIS) -export DATABASE_URL="postgis://thrillwiki_user:thrillwiki@localhost:5432/thrillwiki_test_db" - -# Cache configuration (use locmem for development if Redis not available) -export CACHE_URL="locmemcache://" -export REDIS_URL="redis://127.0.0.1:6379/1" - -# CORS settings for API development -export CORS_ALLOW_ALL_ORIGINS="True" -export CORS_ALLOWED_ORIGINS="" - -# Email configuration for development (console backend) -export EMAIL_URL="consolemail://" - -# GeoDjango library paths for macOS (adjust if needed) -export GDAL_LIBRARY_PATH="/opt/homebrew/lib/libgdal.dylib" -export GEOS_LIBRARY_PATH="/opt/homebrew/lib/libgeos_c.dylib" - -# API rate limiting (generous for development) -export API_RATE_LIMIT_PER_MINUTE="1000" -export API_RATE_LIMIT_PER_HOUR="10000" - -# Cache settings -export CACHE_MIDDLEWARE_SECONDS="1" # Very short cache for development -export CACHE_MIDDLEWARE_KEY_PREFIX="thrillwiki_dev" - -# Social auth settings (you can set these if you have them) -# export GOOGLE_OAUTH2_CLIENT_ID="" -# export GOOGLE_OAUTH2_CLIENT_SECRET="" -# export DISCORD_CLIENT_ID="" -# export DISCORD_CLIENT_SECRET="" - -# Create necessary directories -echo "📁 Creating necessary directories..." -mkdir -p logs -mkdir -p profiles -mkdir -p media -mkdir -p staticfiles -mkdir -p static/css - -# Check if virtual environment is activated -if [[ -z "$VIRTUAL_ENV" ]] && [[ -d ".venv" ]]; then - echo "🔧 Activating virtual environment..." - source .venv/bin/activate -fi - -# Run database migrations if needed -echo "🗄️ Checking database migrations..." -if uv run manage.py migrate --check 2>/dev/null; then - echo "✅ Database migrations are up to date" -else - echo "🔄 Running database migrations..." - uv run manage.py migrate --noinput -fi -echo "Resetting database..." -if uv run manage.py seed_sample_data 2>/dev/null; then - echo "Seeding complete!" -else - echo "Seeding test data to database..." - uv run manage.py seed_sample_data -fi - -# Create superuser if it doesn't exist -echo "👤 Checking for superuser..." -if ! uv run manage.py shell -c "from django.contrib.auth import get_user_model; User = get_user_model(); exit(0 if User.objects.filter(is_superuser=True).exists() else 1)" 2>/dev/null; then - echo "👤 Creating development superuser (admin/admin)..." - uv run manage.py shell -c " -from django.contrib.auth import get_user_model -User = get_user_model() -if not User.objects.filter(username='admin').exists(): - User.objects.create_superuser('admin', 'admin@example.com', 'admin') - print('Created superuser: admin/admin') -else: - print('Superuser already exists') -" -fi - -# Collect static files for development -echo "📦 Collecting static files..." -uv run manage.py collectstatic --noinput --clear - -# Build Tailwind CSS -if command -v npm &> /dev/null; then - echo "🎨 Building Tailwind CSS..." - uv run manage.py tailwind build -else - echo "⚠️ npm not found, skipping Tailwind CSS build" -fi - -# Run system checks -echo "🔍 Running system checks..." -if uv run manage.py check; then - echo "✅ System checks passed" -else - echo "❌ System checks failed, but continuing..." -fi - -# Display environment info -echo "" -echo "🌍 Development Environment:" -echo " - Settings Module: $DJANGO_SETTINGS_MODULE" -echo " - Debug Mode: $DEBUG" -echo " - Database: PostgreSQL with PostGIS" -echo " - Cache: Local memory cache" -echo " - Admin URL: http://localhost:8000/admin/" -echo " - Admin User: admin / admin" -echo " - Silk Profiler: http://localhost:8000/silk/" -echo " - Debug Toolbar: Available on debug pages" -echo " - API Documentation: http://localhost:8000/api/docs/" -echo "" - -# Start the development server -echo "🌟 Starting Django development server on http://localhost:8000" -echo "Press Ctrl+C to stop the server" -echo "" - -# Use runserver_plus if django-extensions is available, otherwise use standard runserver -if uv run python -c "import django_extensions" 2>/dev/null; then - exec uv run manage.py runserver_plus 0.0.0.0:8000 -else - exec uv run manage.py runserver 0.0.0.0:8000 -fi diff --git a/scripts/github-auth.py b/scripts/github-auth.py deleted file mode 100755 index f07982f0..00000000 --- a/scripts/github-auth.py +++ /dev/null @@ -1,234 +0,0 @@ -#!/usr/bin/env python3 -""" -GitHub OAuth Device Flow Authentication for ThrillWiki CI/CD -This script implements GitHub's device flow to securely obtain access tokens. -""" - -import sys -import time -import requests -import argparse -from pathlib import Path - -# GitHub OAuth App Configuration -CLIENT_ID = "Iv23liOX5Hp75AxhUvIe" -TOKEN_FILE = ".github-token" - - -def parse_response(response): - """Parse HTTP response and handle errors.""" - if response.status_code in [200, 201]: - return response.json() - elif response.status_code == 401: - print("You are not authorized. Run the `login` command.") - sys.exit(1) - else: - print(f"HTTP {response.status_code}: {response.text}") - sys.exit(1) - - -def request_device_code(): - """Request a device code from GitHub.""" - url = "https://github.com/login/device/code" - data = {"client_id": CLIENT_ID} - headers = {"Accept": "application/json"} - - response = requests.post(url, data=data, headers=headers) - return parse_response(response) - - -def request_token(device_code): - """Request an access token using the device code.""" - url = "https://github.com/login/oauth/access_token" - data = { - "client_id": CLIENT_ID, - "device_code": device_code, - "grant_type": "urn:ietf:params:oauth:grant-type:device_code", - } - headers = {"Accept": "application/json"} - - response = requests.post(url, data=data, headers=headers) - return parse_response(response) - - -def poll_for_token(device_code, interval): - """Poll GitHub for the access token after user authorization.""" - print("Waiting for authorization...") - - while True: - response = request_token(device_code) - error = response.get("error") - access_token = response.get("access_token") - - if error: - if error == "authorization_pending": - # User hasn't entered the code yet - print(".", end="", flush=True) - time.sleep(interval) - continue - elif error == "slow_down": - # Polling too fast - time.sleep(interval + 5) - continue - elif error == "expired_token": - print("\nThe device code has expired. Please run `login` again.") - sys.exit(1) - elif error == "access_denied": - print("\nLogin cancelled by user.") - sys.exit(1) - else: - print(f"\nError: {response}") - sys.exit(1) - - # Success! Save the token - token_path = Path(TOKEN_FILE) - token_path.write_text(access_token) - token_path.chmod(0o600) # Read/write for owner only - - print(f"\nToken saved to {TOKEN_FILE}") - break - - -def login(): - """Initiate the GitHub OAuth device flow login process.""" - print("Starting GitHub authentication...") - - device_response = request_device_code() - verification_uri = device_response["verification_uri"] - user_code = device_response["user_code"] - device_code = device_response["device_code"] - interval = device_response["interval"] - - print(f"\nPlease visit: {verification_uri}") - print(f"and enter code: {user_code}") - print("\nWaiting for you to complete authorization in your browser...") - - poll_for_token(device_code, interval) - print("Successfully authenticated!") - return True - - -def whoami(): - """Display information about the authenticated user.""" - token_path = Path(TOKEN_FILE) - - if not token_path.exists(): - print("You are not authorized. Run the `login` command.") - sys.exit(1) - - try: - token = token_path.read_text().strip() - except Exception as e: - print(f"Error reading token: {e}") - print("You may need to run the `login` command again.") - sys.exit(1) - - url = "https://api.github.com/user" - headers = { - "Accept": "application/vnd.github+json", - "Authorization": f"Bearer {token}", - } - - response = requests.get(url, headers=headers) - user_data = parse_response(response) - - print(f"You are authenticated as: {user_data['login']}") - print(f"Name: {user_data.get('name', 'Not set')}") - print(f"Email: {user_data.get('email', 'Not public')}") - - return user_data - - -def get_token(): - """Get the current access token if available.""" - token_path = Path(TOKEN_FILE) - - if not token_path.exists(): - return None - - try: - return token_path.read_text().strip() - except Exception: - return None - - -def validate_token(): - """Validate that the current token is still valid.""" - token = get_token() - if not token: - return False - - url = "https://api.github.com/user" - headers = { - "Accept": "application/vnd.github+json", - "Authorization": f"Bearer {token}", - } - - try: - response = requests.get(url, headers=headers) - return response.status_code == 200 - except Exception: - return False - - -def ensure_authenticated(): - """Ensure user is authenticated, prompting login if necessary.""" - if validate_token(): - return get_token() - - print("GitHub authentication required.") - login() - return get_token() - - -def logout(): - """Remove the stored access token.""" - token_path = Path(TOKEN_FILE) - - if token_path.exists(): - token_path.unlink() - print("Successfully logged out.") - else: - print("You are not currently logged in.") - - -def main(): - """Main CLI interface.""" - parser = argparse.ArgumentParser( - description="GitHub OAuth authentication for ThrillWiki CI/CD" - ) - parser.add_argument( - "command", - choices=["login", "logout", "whoami", "token", "validate"], - help="Command to execute", - ) - - if len(sys.argv) == 1: - parser.print_help() - sys.exit(1) - - args = parser.parse_args() - - if args.command == "login": - login() - elif args.command == "logout": - logout() - elif args.command == "whoami": - whoami() - elif args.command == "token": - token = get_token() - if token: - print(token) - else: - print("No token available. Run `login` first.") - sys.exit(1) - elif args.command == "validate": - if validate_token(): - print("Token is valid.") - else: - print("Token is invalid or missing.") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/scripts/setup-vm-ci.sh b/scripts/setup-vm-ci.sh deleted file mode 100755 index 20544002..00000000 --- a/scripts/setup-vm-ci.sh +++ /dev/null @@ -1,268 +0,0 @@ -#!/bin/bash - -# ThrillWiki VM CI Setup Script -# This script helps set up the VM deployment system - -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -log() { - echo -e "${BLUE}[SETUP]${NC} $1" -} - -log_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" -} - -log_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" -} - -log_error() { - echo -e "${RED}[ERROR]${NC} $1" -} - -# Configuration prompts -prompt_config() { - log "Setting up ThrillWiki VM CI/CD system..." - echo - - read -p "Enter your VM IP address: " VM_IP - read -p "Enter your VM username (default: ubuntu): " VM_USER - VM_USER=${VM_USER:-ubuntu} - - read -p "Enter your GitHub repository URL: " REPO_URL - read -p "Enter your GitHub webhook secret: " WEBHOOK_SECRET - - read -p "Enter local webhook port (default: 9000): " WEBHOOK_PORT - WEBHOOK_PORT=${WEBHOOK_PORT:-9000} - - read -p "Enter VM project path (default: /home/$VM_USER/thrillwiki): " VM_PROJECT_PATH - VM_PROJECT_PATH=${VM_PROJECT_PATH:-/home/$VM_USER/thrillwiki} -} - -# Create SSH key -setup_ssh() { - log "Setting up SSH keys..." - - local ssh_key_path="$HOME/.ssh/thrillwiki_vm" - - if [ ! -f "$ssh_key_path" ]; then - ssh-keygen -t rsa -b 4096 -f "$ssh_key_path" -N "" - log_success "SSH key generated: $ssh_key_path" - - log "Please copy the following public key to your VM:" - echo "---" - cat "$ssh_key_path.pub" - echo "---" - echo - log "Run this on your VM:" - echo "mkdir -p ~/.ssh && echo '$(cat "$ssh_key_path.pub")' >> ~/.ssh/***REMOVED*** && chmod 600 ~/.ssh/***REMOVED***" - echo - read -p "Press Enter when you've added the key to your VM..." - else - log "SSH key already exists: $ssh_key_path" - fi - - # Test SSH connection - log "Testing SSH connection..." - if ssh -i "$ssh_key_path" -o ConnectTimeout=5 -o StrictHostKeyChecking=no "$VM_USER@$VM_IP" "echo 'SSH connection successful'"; then - log_success "SSH connection test passed" - else - log_error "SSH connection test failed" - exit 1 - fi -} - -# Create environment file -create_env_file() { - log "Creating webhook environment file..." - - cat > ***REMOVED***.webhook << EOF -# ThrillWiki Webhook Configuration -WEBHOOK_PORT=$WEBHOOK_PORT -WEBHOOK_SECRET=$WEBHOOK_SECRET -VM_HOST=$VM_IP -VM_PORT=22 -VM_USER=$VM_USER -VM_KEY_PATH=$HOME/.ssh/thrillwiki_vm -VM_PROJECT_PATH=$VM_PROJECT_PATH -REPO_URL=$REPO_URL -DEPLOY_BRANCH=main -EOF - - log_success "Environment file created: ***REMOVED***.webhook" -} - -# Setup VM -setup_vm() { - log "Setting up VM environment..." - - local ssh_key_path="$HOME/.ssh/thrillwiki_vm" - - # Create setup script for VM - cat > /tmp/vm_setup.sh << 'EOF' -#!/bin/bash -set -e - -echo "Setting up VM for ThrillWiki deployment..." - -# Update system -sudo apt update - -# Install required packages -sudo apt install -y git curl build-essential python3-pip lsof - -# Install UV if not present -if ! command -v uv &> /dev/null; then - echo "Installing UV..." - curl -LsSf https://astral.sh/uv/install.sh | sh - source ~/.cargo/env -fi - -# Clone repository if not present -if [ ! -d "thrillwiki" ]; then - echo "Cloning repository..." - git clone REPO_URL_PLACEHOLDER thrillwiki -fi - -cd thrillwiki - -# Install dependencies -uv sync - -# Create directories -mkdir -p logs backups - -# Make scripts executable -chmod +x scripts/*.sh - -echo "VM setup completed successfully!" -EOF - - # Replace placeholder with actual repo URL - sed -i.bak "s|REPO_URL_PLACEHOLDER|$REPO_URL|g" /tmp/vm_setup.sh - - # Copy and execute setup script on VM - scp -i "$ssh_key_path" /tmp/vm_setup.sh "$VM_USER@$VM_IP:/tmp/" - ssh -i "$ssh_key_path" "$VM_USER@$VM_IP" "bash /tmp/vm_setup.sh" - - log_success "VM setup completed" - - # Cleanup - rm /tmp/vm_setup.sh /tmp/vm_setup.sh.bak -} - -# Install systemd services -setup_services() { - log "Setting up systemd services on VM..." - - local ssh_key_path="$HOME/.ssh/thrillwiki_vm" - - # Copy service files and install them - ssh -i "$ssh_key_path" "$VM_USER@$VM_IP" << EOF -cd thrillwiki - -# Update service files with correct paths -sed -i 's|/home/ubuntu|/home/$VM_USER|g' scripts/systemd/*.service -sed -i 's|ubuntu|$VM_USER|g' scripts/systemd/*.service - -# Install services -sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/ -sudo cp scripts/systemd/thrillwiki-webhook.service /etc/systemd/system/ - -# Reload and enable services -sudo systemctl daemon-reload -sudo systemctl enable thrillwiki.service - -echo "Services installed successfully!" -EOF - - log_success "Systemd services installed" -} - -# Test deployment -test_deployment() { - log "Testing VM deployment..." - - local ssh_key_path="$HOME/.ssh/thrillwiki_vm" - - ssh -i "$ssh_key_path" "$VM_USER@$VM_IP" << EOF -cd thrillwiki -./scripts/vm-deploy.sh -EOF - - log_success "Deployment test completed" -} - -# Start webhook listener -start_webhook() { - log "Starting webhook listener..." - - if [ -f "***REMOVED***.webhook" ]; then - log "Webhook configuration found. You can start the webhook listener with:" - echo " source ***REMOVED***.webhook && python3 scripts/webhook-listener.py" - echo - log "Or run it in the background:" - echo " nohup python3 scripts/webhook-listener.py > logs/webhook.log 2>&1 &" - else - log_error "Webhook configuration not found!" - exit 1 - fi -} - -# GitHub webhook instructions -github_instructions() { - log "GitHub Webhook Setup Instructions:" - echo - echo "1. Go to your GitHub repository: $REPO_URL" - echo "2. Navigate to Settings → Webhooks" - echo "3. Click 'Add webhook'" - echo "4. Configure:" - echo " - Payload URL: http://YOUR_PUBLIC_IP:$WEBHOOK_PORT/webhook" - echo " - Content type: application/json" - echo " - Secret: $WEBHOOK_SECRET" - echo " - Events: Just the push event" - echo "5. Click 'Add webhook'" - echo - log_warning "Make sure port $WEBHOOK_PORT is open on your firewall!" -} - -# Main setup flow -main() { - log "ThrillWiki VM CI/CD Setup" - echo "==========================" - echo - - # Create logs directory - mkdir -p logs - - # Get configuration - prompt_config - - # Setup steps - setup_ssh - create_env_file - setup_vm - setup_services - test_deployment - - # Final instructions - echo - log_success "Setup completed successfully!" - echo - start_webhook - echo - github_instructions - - log "Setup log saved to: logs/setup.log" -} - -# Run main function and log output -main "$@" 2>&1 | tee logs/setup.log \ No newline at end of file diff --git a/scripts/systemd/thrillwiki-automation.env.example b/scripts/systemd/thrillwiki-automation.env.example deleted file mode 100644 index 1c1d84c3..00000000 --- a/scripts/systemd/thrillwiki-automation.env.example +++ /dev/null @@ -1,296 +0,0 @@ -# ThrillWiki Automation Service Environment Configuration -# Copy this file to thrillwiki-automation***REMOVED*** and customize for your environment -# -# Security Note: This file should have restricted permissions (600) as it may contain -# sensitive information like GitHub Personal Access Tokens - -# [AWS-SECRET-REMOVED]==================================== -# PROJECT CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Base project directory (usually auto-detected) -# PROJECT_DIR=/home/ubuntu/thrillwiki - -# Service name for systemd integration -# SERVICE_NAME=thrillwiki - -# [AWS-SECRET-REMOVED]==================================== -# GITHUB REPOSITORY CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# GitHub repository remote name -# GITHUB_REPO=origin - -# Branch to pull from -# GITHUB_BRANCH=main - -# GitHub Personal Access Token (PAT) - Required for private repositories -# Generate at: https://github.com/settings/tokens -# Required permissions: repo (Full control of private repositories) -# GITHUB_TOKEN=ghp_your_personal_access_token_here - -# GitHub token file location (alternative to GITHUB_TOKEN) -# GITHUB_TOKEN_FILE=/home/ubuntu/thrillwiki/.github-pat -GITHUB_PAT_FILE=/home/ubuntu/thrillwiki/.github-pat - -# [AWS-SECRET-REMOVED]==================================== -# AUTOMATION TIMING CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Repository pull interval in seconds (default: 300 = 5 minutes) -# PULL_INTERVAL=300 - -# Health check interval in seconds (default: 60 = 1 minute) -# HEALTH_CHECK_INTERVAL=60 - -# Server startup timeout in seconds (default: 120 = 2 minutes) -# STARTUP_TIMEOUT=120 - -# Restart delay after failure in seconds (default: 10) -# RESTART_DELAY=10 - -# [AWS-SECRET-REMOVED]==================================== -# LOGGING CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Log directory (default: project_dir/logs) -# LOG_DIR=/home/ubuntu/thrillwiki/logs - -# Log file path -# LOG_[AWS-SECRET-REMOVED]proof-automation.log - -# Maximum log file size in bytes (default: 10485760 = 10MB) -# MAX_LOG_SIZE=10485760 - -# Lock file location to prevent multiple instances -# LOCK_FILE=/tmp/thrillwiki-bulletproof.lock - -# [AWS-SECRET-REMOVED]==================================== -# DEVELOPMENT SERVER CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Server host address (default: 0.0.0.0 for all interfaces) -# SERVER_HOST=0.0.0.0 - -# Server port (default: 8000) -# SERVER_PORT=8000 - -# [AWS-SECRET-REMOVED]==================================== -# DEPLOYMENT CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Deployment preset (dev, prod, demo, testing) -# DEPLOYMENT_PRESET=dev - -# Repository URL for deployment -# GITHUB_REPO_URL=https://github.com/username/repository.git - -# Repository branch for deployment -# GITHUB_REPO_BRANCH=main - -# Enable Django project setup during deployment -# DJANGO_PROJECT_SETUP=true - -# Skip GitHub authentication setup -# SKIP_GITHUB_SETUP=false - -# Skip repository configuration -# SKIP_REPO_CONFIG=false - -# Skip systemd service setup -# SKIP_SERVICE_SETUP=false - -# Force deployment even if target exists -# FORCE_DEPLOY=false - -# Remote deployment user -# REMOTE_USER=ubuntu - -# Remote deployment host -# REMOTE_HOST= - -# Remote deployment port -# REMOTE_PORT=22 - -# Remote deployment path -# REMOTE_PATH=/home/ubuntu/thrillwiki - -# [AWS-SECRET-REMOVED]==================================== -# DJANGO CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# Django settings module -# DJANGO_SETTINGS_MODULE=thrillwiki.settings - -# Python path -# PYTHONPATH=/home/ubuntu/thrillwiki - -# UV executable path (for systems where UV is not in standard PATH) -# UV_EXECUTABLE=/home/ubuntu/.local/bin/uv - -# Django development server command (used by bulletproof automation) -# DJANGO_RUNSERVER_CMD=uv run manage.py tailwind runserver - -# Enable development server auto-cleanup (kills processes on port 8000) -# AUTO_CLEANUP_PROCESSES=true - -# [AWS-SECRET-REMOVED]==================================== -# ADVANCED CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# GitHub authentication script location -# GITHUB_AUTH_[AWS-SECRET-REMOVED]ithub-auth.py - -# Enable verbose logging (true/false) -# VERBOSE_LOGGING=false - -# Enable debug mode for troubleshooting (true/false) -# DEBUG_MODE=false - -# Custom git remote URL (overrides GITHUB_REPO if set) -# CUSTOM_GIT_REMOTE=https://github.com/username/repository.git - -# Email notifications for critical failures (requires email configuration) -# NOTIFICATION_EMAIL=admin@example.com - -# Maximum consecutive failures before alerting (default: 5) -# MAX_CONSECUTIVE_FAILURES=5 - -# Enable automatic dependency updates (true/false, default: true) -# AUTO_UPDATE_DEPENDENCIES=true - -# Enable automatic migrations on code changes (true/false, default: true) -# AUTO_MIGRATE=true - -# Enable automatic static file collection (true/false, default: true) -# AUTO_COLLECTSTATIC=true - -# [AWS-SECRET-REMOVED]==================================== -# SECURITY CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# GitHub authentication method (token|ssh|https) -# Default: token (uses GITHUB_TOKEN or GITHUB_TOKEN_FILE) -# GITHUB_AUTH_METHOD=token - -# SSH key path for git operations (when using ssh auth method) -# SSH_KEY_PATH=/home/ubuntu/.ssh/***REMOVED*** - -# Git user configuration for commits -# GIT_USER_NAME="ThrillWiki Automation" -# GIT_USER_EMAIL="automation@thrillwiki.local" - -# [AWS-SECRET-REMOVED]==================================== -# MONITORING AND HEALTH CHECKS -# [AWS-SECRET-REMOVED]==================================== - -# Health check URL to verify server is running -# HEALTH_CHECK_URL=http://localhost:8000/health/ - -# Health check timeout in seconds -# HEALTH_CHECK_TIMEOUT=30 - -# Enable system resource monitoring (true/false) -# MONITOR_RESOURCES=true - -# Memory usage threshold for warnings (in MB) -# MEMORY_WARNING_THRESHOLD=1024 - -# CPU usage threshold for warnings (percentage) -# CPU_WARNING_THRESHOLD=80 - -# Disk usage threshold for warnings (percentage) -# DISK_WARNING_THRESHOLD=90 - -# [AWS-SECRET-REMOVED]==================================== -# INTEGRATION SETTINGS -# [AWS-SECRET-REMOVED]==================================== - -# Webhook integration (if using thrillwiki-webhook service) -# WEBHOOK_INTEGRATION=true - -# Slack webhook URL for notifications (optional) -# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook/url - -# Discord webhook URL for notifications (optional) -# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/your/webhook/url - -# [AWS-SECRET-REMOVED]==================================== -# ENVIRONMENT AND SYSTEM CONFIGURATION -# [AWS-SECRET-REMOVED]==================================== - -# System PATH additions (for UV and other tools) -# ADDITIONAL_PATH=/home/ubuntu/.local/bin:/home/ubuntu/.cargo/bin - -# Python environment configuration -# PYTHON_EXECUTABLE=python3 - -# Enable verbose logging for debugging -# VERBOSE_LOGGING=false - -# Debug mode for development -# DEBUG_MODE=false - -# Service restart configuration -# MAX_RESTART_ATTEMPTS=3 -# RESTART_COOLDOWN=300 - -# Health check configuration -# HEALTH_CHECK_URL=http://localhost:8000/health/ -# HEALTH_CHECK_TIMEOUT=30 - -# System resource monitoring -# MONITOR_RESOURCES=true -# MEMORY_WARNING_THRESHOLD=1024 -# CPU_WARNING_THRESHOLD=80 -# DISK_WARNING_THRESHOLD=90 - -# Lock file configuration -# LOCK_FILE=/tmp/thrillwiki-bulletproof.lock - -# GitHub authentication method (token|ssh|https) -# GITHUB_AUTH_METHOD=token - -# SSH key path for git operations (when using ssh auth method) -# SSH_KEY_PATH=/home/ubuntu/.ssh/***REMOVED*** - -# Git user configuration for commits -# GIT_USER_NAME="ThrillWiki Automation" -# GIT_USER_EMAIL="automation@thrillwiki.local" - -# [AWS-SECRET-REMOVED]==================================== -# USAGE EXAMPLES -# [AWS-SECRET-REMOVED]==================================== - -# Example 1: Basic setup with GitHub PAT -# GITHUB_TOKEN=ghp_your_token_here -# PULL_INTERVAL=300 -# AUTO_MIGRATE=true - -# Example 2: Enhanced monitoring setup -# HEALTH_CHECK_INTERVAL=30 -# MONITOR_RESOURCES=true -# NOTIFICATION_EMAIL=admin@thrillwiki.com -# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook - -# Example 3: Development environment with frequent pulls -# PULL_INTERVAL=60 -# DEBUG_MODE=true -# VERBOSE_LOGGING=true -# AUTO_UPDATE_DEPENDENCIES=true - -# [AWS-SECRET-REMOVED]==================================== -# INSTALLATION NOTES -# [AWS-SECRET-REMOVED]==================================== - -# 1. Copy this file: cp thrillwiki-automation***REMOVED***.example thrillwiki-automation***REMOVED*** -# 2. Set secure permissions: chmod 600 thrillwiki-automation***REMOVED*** -# 3. Customize the settings above for your environment -# 4. Enable the service: sudo systemctl enable thrillwiki-automation -# 5. Start the service: sudo systemctl start thrillwiki-automation -# 6. Check status: sudo systemctl status thrillwiki-automation -# 7. View logs: sudo journalctl -u thrillwiki-automation -f - -# For security, ensure only the ubuntu user can read this file: -# sudo chown ubuntu:ubuntu thrillwiki-automation***REMOVED*** -# sudo chmod 600 thrillwiki-automation***REMOVED*** \ No newline at end of file diff --git a/scripts/systemd/thrillwiki-automation.service b/scripts/systemd/thrillwiki-automation.service deleted file mode 100644 index 4fe2b85e..00000000 --- a/scripts/systemd/thrillwiki-automation.service +++ /dev/null @@ -1,106 +0,0 @@ -[Unit] -Description=ThrillWiki Bulletproof Development Automation -Documentation=man:thrillwiki-automation(8) -After=network.target -Wants=network.target -Before=thrillwiki.service -PartOf=thrillwiki.service - -[Service] -Type=simple -User=ubuntu -Group=ubuntu -[AWS-SECRET-REMOVED] -[AWS-SECRET-REMOVED]s/vm/bulletproof-automation.sh -ExecStop=/bin/kill -TERM $MAINPID -ExecReload=/bin/kill -HUP $MAINPID -Restart=always -RestartSec=10 -KillMode=mixed -KillSignal=SIGTERM -TimeoutStopSec=60 -TimeoutStartSec=120 -StartLimitIntervalSec=300 -StartLimitBurst=3 - -# Environment variables - Load from file for security -EnvironmentFile=-[AWS-SECRET-REMOVED]thrillwiki-automation***REMOVED*** -Environment=PROJECT_DIR=/home/ubuntu/thrillwiki -Environment=SERVICE_NAME=thrillwiki-automation -Environment=GITHUB_REPO=origin -Environment=GITHUB_BRANCH=main -Environment=PULL_INTERVAL=300 -Environment=HEALTH_CHECK_INTERVAL=60 -Environment=STARTUP_TIMEOUT=120 -Environment=RESTART_DELAY=10 -Environment=LOG_DIR=/home/ubuntu/thrillwiki/logs -Environment=MAX_LOG_SIZE=10485760 -Environment=SERVER_HOST=0.0.0.0 -Environment=SERVER_PORT=8000 -Environment=PATH=/home/ubuntu/.local/bin:/home/ubuntu/.cargo/bin:/usr/local/bin:/usr/bin:/bin -[AWS-SECRET-REMOVED]llwiki - -# Security settings - Enhanced hardening for automation script -NoNewPrivileges=true -PrivateTmp=true -ProtectSystem=strict -ProtectHome=true -ProtectKernelTunables=true -ProtectKernelModules=true -ProtectControlGroups=true -RestrictSUIDSGID=true -RestrictRealtime=true -RestrictNamespaces=true -LockPersonality=true -MemoryDenyWriteExecute=false -RemoveIPC=true - -# File system permissions - Allow access to necessary directories -ReadWritePaths=/home/ubuntu/thrillwiki -[AWS-SECRET-REMOVED]ogs -[AWS-SECRET-REMOVED]edia -[AWS-SECRET-REMOVED]taticfiles -[AWS-SECRET-REMOVED]ploads -ReadWritePaths=/home/ubuntu/.cache -ReadWritePaths=/tmp -ReadOnlyPaths=/home/ubuntu/.github-pat -ReadOnlyPaths=/home/ubuntu/.ssh -ReadOnlyPaths=/home/ubuntu/.local - -# Resource limits - Appropriate for automation script -LimitNOFILE=65536 -LimitNPROC=1024 -MemoryMax=512M -CPUQuota=50% -TasksMax=256 - -# Timeouts -WatchdogSec=300 - -# Logging configuration -StandardOutput=journal -StandardError=journal -SyslogIdentifier=thrillwiki-automation -SyslogFacility=daemon -SyslogLevel=info -SyslogLevelPrefix=true - -# Enhanced logging for debugging -# Ensure logs are captured and rotated properly -LogsDirectory=thrillwiki-automation -LogsDirectoryMode=0755 -StateDirectory=thrillwiki-automation -StateDirectoryMode=0755 -RuntimeDirectory=thrillwiki-automation -RuntimeDirectoryMode=0755 - -# Capabilities - Minimal required capabilities -CapabilityBoundingSet= -AmbientCapabilities= -PrivateDevices=true -ProtectClock=true -ProtectHostname=true - -[Install] -WantedBy=multi-user.target -Also=thrillwiki.service \ No newline at end of file diff --git a/scripts/systemd/thrillwiki-deployment.service b/scripts/systemd/thrillwiki-deployment.service deleted file mode 100644 index f16acb42..00000000 --- a/scripts/systemd/thrillwiki-deployment.service +++ /dev/null @@ -1,103 +0,0 @@ -[Unit] -Description=ThrillWiki Complete Deployment Automation Service -Documentation=man:thrillwiki-deployment(8) -After=network.target network-online.target -Wants=network-online.target -Before=thrillwiki-smart-deploy.timer -PartOf=thrillwiki-smart-deploy.timer - -[Service] -Type=simple -User=thrillwiki -Group=thrillwiki -[AWS-SECRET-REMOVED]wiki -[AWS-SECRET-REMOVED]ripts/vm/deploy-automation.sh -ExecStop=/bin/kill -TERM $MAINPID -ExecReload=/bin/kill -HUP $MAINPID -Restart=always -RestartSec=30 -KillMode=mixed -KillSignal=SIGTERM -TimeoutStopSec=120 -TimeoutStartSec=180 -StartLimitIntervalSec=600 -StartLimitBurst=3 - -# Environment variables - Load from file for security and preset integration -EnvironmentFile=-[AWS-SECRET-REMOVED]emd/thrillwiki-deployment***REMOVED*** -Environment=PROJECT_DIR=/home/thrillwiki/thrillwiki -Environment=SERVICE_NAME=thrillwiki-deployment -Environment=GITHUB_REPO=origin -Environment=GITHUB_BRANCH=main -Environment=DEPLOYMENT_MODE=automated -Environment=LOG_DIR=/home/thrillwiki/thrillwiki/logs -Environment=MAX_LOG_SIZE=10485760 -Environment=SERVER_HOST=0.0.0.0 -Environment=SERVER_PORT=8000 -Environment=PATH=/home/thrillwiki/.local/bin:/home/thrillwiki/.cargo/bin:/usr/local/bin:/usr/bin:/bin -[AWS-SECRET-REMOVED]thrillwiki - -# Security settings - Enhanced hardening for deployment automation -NoNewPrivileges=true -PrivateTmp=true -ProtectSystem=strict -ProtectHome=true -ProtectKernelTunables=true -ProtectKernelModules=true -ProtectControlGroups=true -RestrictSUIDSGID=true -RestrictRealtime=true -RestrictNamespaces=true -LockPersonality=true -MemoryDenyWriteExecute=false -RemoveIPC=true - -# File system permissions - Allow access to necessary directories -[AWS-SECRET-REMOVED]ki -[AWS-SECRET-REMOVED]ki/logs -[AWS-SECRET-REMOVED]ki/media -[AWS-SECRET-REMOVED]ki/staticfiles -[AWS-SECRET-REMOVED]ki/uploads -ReadWritePaths=/home/thrillwiki/.cache -ReadWritePaths=/tmp -ReadOnlyPaths=/home/thrillwiki/.github-pat -ReadOnlyPaths=/home/thrillwiki/.ssh -ReadOnlyPaths=/home/thrillwiki/.local - -# Resource limits - Appropriate for deployment automation -LimitNOFILE=65536 -LimitNPROC=2048 -MemoryMax=1G -CPUQuota=75% -TasksMax=512 - -# Timeouts and watchdog -WatchdogSec=600 -RuntimeMaxSec=0 - -# Logging configuration -StandardOutput=journal -StandardError=journal -SyslogIdentifier=thrillwiki-deployment -SyslogFacility=daemon -SyslogLevel=info -SyslogLevelPrefix=true - -# Enhanced logging for debugging -LogsDirectory=thrillwiki-deployment -LogsDirectoryMode=0755 -StateDirectory=thrillwiki-deployment -StateDirectoryMode=0755 -RuntimeDirectory=thrillwiki-deployment -RuntimeDirectoryMode=0755 - -# Capabilities - Minimal required capabilities -CapabilityBoundingSet= -AmbientCapabilities= -PrivateDevices=true -ProtectClock=true -ProtectHostname=true - -[Install] -WantedBy=multi-user.target -Also=thrillwiki-smart-deploy.timer \ No newline at end of file diff --git a/scripts/systemd/thrillwiki-smart-deploy.service b/scripts/systemd/thrillwiki-smart-deploy.service deleted file mode 100644 index b7d4721c..00000000 --- a/scripts/systemd/thrillwiki-smart-deploy.service +++ /dev/null @@ -1,76 +0,0 @@ -[Unit] -Description=ThrillWiki Smart Deployment Service -Documentation=man:thrillwiki-smart-deploy(8) -After=network.target thrillwiki-deployment.service -Wants=network.target -PartOf=thrillwiki-smart-deploy.timer - -[Service] -Type=oneshot -User=thrillwiki -Group=thrillwiki -[AWS-SECRET-REMOVED]wiki -[AWS-SECRET-REMOVED]ripts/smart-deploy.sh -TimeoutStartSec=300 -TimeoutStopSec=60 - -# Environment variables - Load from deployment configuration -EnvironmentFile=-[AWS-SECRET-REMOVED]emd/thrillwiki-deployment***REMOVED*** -Environment=PROJECT_DIR=/home/thrillwiki/thrillwiki -Environment=SERVICE_NAME=thrillwiki-smart-deploy -Environment=DEPLOYMENT_MODE=timer -Environment=LOG_DIR=/home/thrillwiki/thrillwiki/logs -Environment=PATH=/home/thrillwiki/.local/bin:/home/thrillwiki/.cargo/bin:/usr/local/bin:/usr/bin:/bin -[AWS-SECRET-REMOVED]thrillwiki - -# Security settings - Inherited from main deployment service -NoNewPrivileges=true -PrivateTmp=true -ProtectSystem=strict -ProtectHome=true -ProtectKernelTunables=true -ProtectKernelModules=true -ProtectControlGroups=true -RestrictSUIDSGID=true -RestrictRealtime=true -RestrictNamespaces=true -LockPersonality=true -MemoryDenyWriteExecute=false -RemoveIPC=true - -# File system permissions -[AWS-SECRET-REMOVED]ki -[AWS-SECRET-REMOVED]ki/logs -[AWS-SECRET-REMOVED]ki/media -[AWS-SECRET-REMOVED]ki/staticfiles -[AWS-SECRET-REMOVED]ki/uploads -ReadWritePaths=/home/thrillwiki/.cache -ReadWritePaths=/tmp -ReadOnlyPaths=/home/thrillwiki/.github-pat -ReadOnlyPaths=/home/thrillwiki/.ssh -ReadOnlyPaths=/home/thrillwiki/.local - -# Resource limits -LimitNOFILE=65536 -LimitNPROC=1024 -MemoryMax=512M -CPUQuota=50% -TasksMax=256 - -# Logging configuration -StandardOutput=journal -StandardError=journal -SyslogIdentifier=thrillwiki-smart-deploy -SyslogFacility=daemon -SyslogLevel=info -SyslogLevelPrefix=true - -# Capabilities -CapabilityBoundingSet= -AmbientCapabilities= -PrivateDevices=true -ProtectClock=true -ProtectHostname=true - -[Install] -WantedBy=multi-user.target \ No newline at end of file diff --git a/scripts/systemd/thrillwiki-smart-deploy.timer b/scripts/systemd/thrillwiki-smart-deploy.timer deleted file mode 100644 index b4f848cf..00000000 --- a/scripts/systemd/thrillwiki-smart-deploy.timer +++ /dev/null @@ -1,17 +0,0 @@ -[Unit] -Description=ThrillWiki Smart Deployment Timer -Documentation=man:thrillwiki-smart-deploy(8) -Requires=thrillwiki-smart-deploy.service -After=thrillwiki-deployment.service - -[Timer] -# Default timer configuration (can be overridden by environment) -OnBootSec=5min -OnUnitActiveSec=5min -Unit=thrillwiki-smart-deploy.service -Persistent=true -RandomizedDelaySec=30sec - -[Install] -WantedBy=timers.target -Also=thrillwiki-smart-deploy.service \ No newline at end of file diff --git a/scripts/systemd/thrillwiki-webhook.service b/scripts/systemd/thrillwiki-webhook.service deleted file mode 100644 index 7864dc68..00000000 --- a/scripts/systemd/thrillwiki-webhook.service +++ /dev/null @@ -1,39 +0,0 @@ -[Unit] -Description=ThrillWiki GitHub Webhook Listener -After=network.target -Wants=network.target - -[Service] -Type=simple -User=ubuntu -Group=ubuntu -[AWS-SECRET-REMOVED] -ExecStart=/usr/bin/python3 /home/ubuntu/thrillwiki/scripts/webhook-listener.py -Restart=always -RestartSec=10 - -# Environment variables -Environment=WEBHOOK_PORT=9000 -Environment=WEBHOOK_SECRET=your_webhook_secret_here -Environment=VM_HOST=localhost -Environment=VM_PORT=22 -Environment=VM_USER=ubuntu -Environment=VM_KEY_PATH=/home/ubuntu/.ssh/***REMOVED*** -Environment=VM_PROJECT_PATH=/home/ubuntu/thrillwiki -Environment=REPO_URL=https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git -Environment=DEPLOY_BRANCH=main - -# Security settings -NoNewPrivileges=true -PrivateTmp=true -ProtectSystem=strict -ProtectHome=true -[AWS-SECRET-REMOVED]ogs - -# Logging -StandardOutput=journal -StandardError=journal -SyslogIdentifier=thrillwiki-webhook - -[Install] -WantedBy=multi-user.target \ No newline at end of file diff --git a/scripts/systemd/thrillwiki.service b/scripts/systemd/thrillwiki.service deleted file mode 100644 index 61255148..00000000 --- a/scripts/systemd/thrillwiki.service +++ /dev/null @@ -1,45 +0,0 @@ -[Unit] -Description=ThrillWiki Django Application -After=network.target postgresql.service -Wants=network.target -Requires=postgresql.service - -[Service] -Type=forking -User=ubuntu -Group=ubuntu -[AWS-SECRET-REMOVED] -[AWS-SECRET-REMOVED]s/ci-start.sh -ExecStop=/bin/kill -TERM $MAINPID -ExecReload=/bin/kill -HUP $MAINPID -[AWS-SECRET-REMOVED]ngo.pid -Restart=always -RestartSec=10 - -# Environment variables -Environment=DJANGO_SETTINGS_MODULE=thrillwiki.settings -[AWS-SECRET-REMOVED]llwiki -Environment=PATH=/home/ubuntu/.cargo/bin:/usr/local/bin:/usr/bin:/bin - -# Security settings -NoNewPrivileges=true -PrivateTmp=true -ProtectSystem=strict -ProtectHome=true -[AWS-SECRET-REMOVED]ogs -[AWS-SECRET-REMOVED]edia -[AWS-SECRET-REMOVED]taticfiles -[AWS-SECRET-REMOVED]ploads - -# Resource limits -LimitNOFILE=65536 -TimeoutStartSec=300 -TimeoutStopSec=30 - -# Logging -StandardOutput=journal -StandardError=journal -SyslogIdentifier=thrillwiki - -[Install] -WantedBy=multi-user.target \ No newline at end of file diff --git a/scripts/test-automation.sh b/scripts/test-automation.sh deleted file mode 100755 index 29da47e0..00000000 --- a/scripts/test-automation.sh +++ /dev/null @@ -1,175 +0,0 @@ -#!/bin/bash - -# ThrillWiki Automation Test Script -# This script validates all automation components without actually running them - -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' - -log() { - echo -e "${BLUE}[TEST]${NC} $1" -} - -log_success() { - echo -e "${GREEN}[✓]${NC} $1" -} - -log_warning() { - echo -e "${YELLOW}[!]${NC} $1" -} - -log_error() { - echo -e "${RED}[✗]${NC} $1" -} - -# Test counters -TESTS_PASSED=0 -TESTS_FAILED=0 -TESTS_TOTAL=0 - -test_case() { - local name="$1" - local command="$2" - - ((TESTS_TOTAL++)) - log "Testing: $name" - - if eval "$command" >/dev/null 2>&1; then - log_success "$name" - ((TESTS_PASSED++)) - else - log_error "$name" - ((TESTS_FAILED++)) - fi -} - -test_case_with_output() { - local name="$1" - local command="$2" - local expected_pattern="$3" - - ((TESTS_TOTAL++)) - log "Testing: $name" - - local output - if output=$(eval "$command" 2>&1); then - if [[ -n "$expected_pattern" && ! "$output" =~ $expected_pattern ]]; then - log_error "$name (unexpected output)" - ((TESTS_FAILED++)) - else - log_success "$name" - ((TESTS_PASSED++)) - fi - else - log_error "$name (command failed)" - ((TESTS_FAILED++)) - fi -} - -log "🧪 Starting ThrillWiki Automation Tests" -echo "======================================" - -# Test 1: File Permissions -log "\n📁 Testing File Permissions..." -test_case "CI start script is executable" "[ -x scripts/ci-start.sh ]" -test_case "VM deploy script is executable" "[ -x scripts/vm-deploy.sh ]" -test_case "Webhook listener is executable" "[ -x scripts/webhook-listener.py ]" -test_case "VM manager is executable" "[ -x scripts/unraid/vm-manager.py ]" -test_case "Complete automation script is executable" "[ -x scripts/unraid/setup-complete-automation.sh ]" - -# Test 2: Script Syntax -log "\n🔍 Testing Script Syntax..." -test_case "CI start script syntax" "bash -n scripts/ci-start.sh" -test_case "VM deploy script syntax" "bash -n scripts/vm-deploy.sh" -test_case "Setup VM CI script syntax" "bash -n scripts/setup-vm-ci.sh" -test_case "Complete automation script syntax" "bash -n scripts/unraid/setup-complete-automation.sh" -test_case "Webhook listener Python syntax" "python3 -m py_compile scripts/webhook-listener.py" -test_case "VM manager Python syntax" "python3 -m py_compile scripts/unraid/vm-manager.py" - -# Test 3: Help Functions -log "\n❓ Testing Help Functions..." -test_case_with_output "VM manager help" "python3 scripts/unraid/vm-manager.py --help" "usage:" -test_case_with_output "Webhook listener help" "python3 scripts/webhook-listener.py --help" "usage:" -test_case_with_output "VM deploy script usage" "scripts/vm-deploy.sh invalid 2>&1" "Usage:" - -# Test 4: Configuration Validation -log "\n⚙️ Testing Configuration Validation..." -test_case_with_output "Webhook listener test mode" "python3 scripts/webhook-listener.py --test" "Configuration validation" - -# Test 5: Directory Structure -log "\n📂 Testing Directory Structure..." -test_case "Scripts directory exists" "[ -d scripts ]" -test_case "Unraid scripts directory exists" "[ -d scripts/unraid ]" -test_case "Systemd directory exists" "[ -d scripts/systemd ]" -test_case "Docs directory exists" "[ -d docs ]" -test_case "Logs directory created" "[ -d logs ]" - -# Test 6: Required Files -log "\n📄 Testing Required Files..." -test_case "ThrillWiki service file exists" "[ -f scripts/systemd/thrillwiki.service ]" -test_case "Webhook service file exists" "[ -f scripts/systemd/thrillwiki-webhook.service ]" -test_case "VM deployment setup doc exists" "[ -f docs/VM_DEPLOYMENT_SETUP.md ]" -test_case "Unraid automation doc exists" "[ -f docs/UNRAID_COMPLETE_AUTOMATION.md ]" -test_case "CI README exists" "[ -f CI_README.md ]" - -# Test 7: Python Dependencies -log "\n🐍 Testing Python Dependencies..." -test_case "Python 3 available" "command -v python3" -test_case "Requests module available" "python3 -c 'import requests'" -test_case "JSON module available" "python3 -c 'import json'" -test_case "OS module available" "python3 -c 'import os'" -test_case "Subprocess module available" "python3 -c 'import subprocess'" - -# Test 8: System Dependencies -log "\n🔧 Testing System Dependencies..." -test_case "SSH command available" "command -v ssh" -test_case "SCP command available" "command -v scp" -test_case "Bash available" "command -v bash" -test_case "Git available" "command -v git" - -# Test 9: UV Package Manager -log "\n📦 Testing UV Package Manager..." -if command -v uv >/dev/null 2>&1; then - log_success "UV package manager is available" - ((TESTS_PASSED++)) - test_case "UV version check" "uv --version" -else - log_warning "UV package manager not found (will be installed during setup)" - ((TESTS_PASSED++)) -fi -((TESTS_TOTAL++)) - -# Test 10: Django Project Structure -log "\n🌟 Testing Django Project Structure..." -test_case "Django manage.py exists" "[ -f manage.py ]" -test_case "Django settings module exists" "[ -f thrillwiki/settings.py ]" -test_case "PyProject.toml exists" "[ -f pyproject.toml ]" - -# Final Results -echo -log "📊 Test Results Summary" -echo "======================" -echo "Total Tests: $TESTS_TOTAL" -echo "Passed: $TESTS_PASSED" -echo "Failed: $TESTS_FAILED" - -if [ $TESTS_FAILED -eq 0 ]; then - echo - log_success "🎉 All tests passed! The automation system is ready." - echo - log "Next steps:" - echo "1. For complete automation: ./scripts/unraid/setup-complete-automation.sh" - echo "2. For manual setup: ./scripts/setup-vm-ci.sh" - echo "3. Read documentation: docs/UNRAID_COMPLETE_AUTOMATION.md" - exit 0 -else - echo - log_error "❌ Some tests failed. Please check the issues above." - exit 1 -fi \ No newline at end of file diff --git a/scripts/unraid/.claude/settings.local.json b/scripts/unraid/.claude/settings.local.json deleted file mode 100644 index d8e549f1..00000000 --- a/scripts/unraid/.claude/settings.local.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "permissions": { - "additionalDirectories": [ - "/Users/talor/thrillwiki_django_no_react" - ], - "allow": [ - "Bash(uv run:*)" - ] - } -} \ No newline at end of file diff --git a/scripts/unraid/README-NON-INTERACTIVE.md b/scripts/unraid/README-NON-INTERACTIVE.md deleted file mode 100644 index e87dab8f..00000000 --- a/scripts/unraid/README-NON-INTERACTIVE.md +++ /dev/null @@ -1,150 +0,0 @@ -# Non-Interactive Mode for ThrillWiki Automation - -The ThrillWiki automation script supports a non-interactive mode (`-y` flag) that allows you to run the entire setup process without any user prompts. This is perfect for: - -- **CI/CD pipelines** -- **Automated deployments** -- **Scripted environments** -- **Remote execution** - -## Prerequisites - -1. **Saved Configuration**: You must have run the script interactively at least once to create the saved configuration file (`.thrillwiki-config`). - -2. **Environment Variables**: Set the required environment variables for sensitive credentials that aren't saved to disk. - -## Required Environment Variables - -### Always Required -- `UNRAID_PASSWORD` - Your Unraid server password - -### Required if GitHub API is enabled -- `GITHUB_TOKEN` - Your GitHub personal access token (if using token auth method) - -### Required if Webhooks are enabled -- `WEBHOOK_SECRET` - Your GitHub webhook secret - -## Usage Examples - -### Basic Non-Interactive Setup -```bash -# Set required credentials -export UNRAID_PASSWORD="your_unraid_password" -export GITHUB_TOKEN="your_github_token" -export WEBHOOK_SECRET="your_webhook_secret" - -# Run in non-interactive mode -./setup-complete-automation.sh -y -``` - -### CI/CD Pipeline Example -```bash -#!/bin/bash -set -e - -# Load credentials from secure environment -export UNRAID_PASSWORD="$UNRAID_CREDS_PASSWORD" -export GITHUB_TOKEN="$GITHUB_API_TOKEN" -export WEBHOOK_SECRET="$WEBHOOK_SECRET_KEY" - -# Deploy with no user interaction -cd scripts/unraid -./setup-complete-automation.sh -y -``` - -### Docker/Container Example -```bash -# Run from container with environment file -docker run --env-file ***REMOVED***.secrets \ - -v $(pwd):/workspace \ - your-automation-container \ - /workspace/scripts/unraid/setup-complete-automation.sh -y -``` - -## Error Handling - -The script will exit with clear error messages if: - -- No saved configuration is found -- Required environment variables are missing -- OAuth tokens have expired (non-interactive mode cannot refresh them) - -### Common Issues - -**❌ No saved configuration** -``` -[ERROR] No saved configuration found. Cannot run in non-interactive mode. -[ERROR] Please run the script without -y flag first to create initial configuration. -``` -**Solution**: Run `./setup-complete-automation.sh` interactively first. - -**❌ Missing password** -``` -[ERROR] UNRAID_PASSWORD environment variable not set. -[ERROR] For non-interactive mode, set: export UNRAID_PASSWORD='your_password' -``` -**Solution**: Set the `UNRAID_PASSWORD` environment variable. - -**❌ Expired OAuth token** -``` -[ERROR] OAuth token expired and cannot refresh in non-interactive mode -[ERROR] Please run without -y flag to re-authenticate with GitHub -``` -**Solution**: Run interactively to refresh OAuth token, or switch to personal access token method. - -## Security Best Practices - -1. **Never commit credentials to version control** -2. **Use secure environment variable storage** (CI/CD secret stores, etc.) -3. **Rotate credentials regularly** -4. **Use minimal required permissions** for tokens -5. **Clear environment variables** after use if needed: - ```bash - unset UNRAID_PASSWORD GITHUB_TOKEN WEBHOOK_SECRET - ``` - -## Advanced Usage - -### Combining with Reset Modes -```bash -# Reset VM only and redeploy non-interactively -export UNRAID_PASSWORD="password" -./setup-complete-automation.sh --reset-vm -y -``` - -### Using with Different Authentication Methods -```bash -# For OAuth method (no GITHUB_TOKEN needed if valid) -export UNRAID_PASSWORD="password" -export WEBHOOK_SECRET="secret" -./setup-complete-automation.sh -y - -# For personal access token method -export UNRAID_PASSWORD="password" -export GITHUB_TOKEN="ghp_xxxx" -export WEBHOOK_SECRET="secret" -./setup-complete-automation.sh -y -``` - -### Environment File Pattern -```bash -# Create ***REMOVED***.automation (don't commit this!) -cat > ***REMOVED***.automation << EOF -UNRAID_PASSWORD=your_password_here -GITHUB_TOKEN=your_token_here -WEBHOOK_SECRET=your_secret_here -EOF - -# Use it -source ***REMOVED***.automation -./setup-complete-automation.sh -y - -# Clean up -rm ***REMOVED***.automation -``` - -## Integration Examples - -See `example-non-interactive.sh` for a complete working example that you can customize for your needs. - -The non-interactive mode makes it easy to integrate ThrillWiki deployment into your existing automation workflows while maintaining security and reliability. diff --git a/scripts/unraid/README-template-deployment.md b/scripts/unraid/README-template-deployment.md deleted file mode 100644 index 9b32e500..00000000 --- a/scripts/unraid/README-template-deployment.md +++ /dev/null @@ -1,385 +0,0 @@ -# ThrillWiki Template-Based VM Deployment - -This guide explains how to use the new **template-based VM deployment** system that dramatically speeds up VM creation by using a pre-configured Ubuntu template instead of autoinstall ISOs. - -## Overview - -### Traditional Approach (Slow) -- Create autoinstall ISO from scratch -- Boot VM from ISO (20-30 minutes) -- Wait for Ubuntu installation -- Configure system packages and dependencies - -### Template Approach (Fast ⚡) -- Copy pre-configured VM disk from template -- Boot VM from template disk (2-5 minutes) -- System is already configured with Ubuntu, packages, and dependencies - -## Prerequisites - -1. **Template VM**: You must have a VM named `thrillwiki-template-ubuntu` on your Unraid server -2. **Template Configuration**: The template should be pre-configured with: - - Ubuntu 24.04 LTS - - Python 3, Git, PostgreSQL, Nginx - - UV package manager (optional but recommended) - - Basic system configuration - -## Template VM Setup - -### Creating the Template VM - -1. **Create the template VM manually** on your Unraid server: - - Name: `thrillwiki-template-ubuntu` - - Install Ubuntu 24.04 LTS - - Configure with 4GB RAM, 2 vCPUs (can be adjusted later) - -2. **Configure the template** by SSH'ing into it and running: - ```bash - # Update system - sudo apt update && sudo apt upgrade -y - - # Install required packages - sudo apt install -y git curl build-essential python3-pip python3-venv - sudo apt install -y postgresql postgresql-contrib nginx - - # Install UV (Python package manager) - curl -LsSf https://astral.sh/uv/install.sh | sh - source ~/.cargo/env - - # Create thrillwiki user with password 'thrillwiki' - sudo useradd -m -s /bin/bash thrillwiki || true - echo 'thrillwiki:thrillwiki' | sudo chpasswd - sudo usermod -aG sudo thrillwiki - - # Setup SSH key for thrillwiki user - # First, generate your SSH key on your Mac: - # ssh-keygen -t rsa -b 4096 -f ~/.ssh/thrillwiki_vm -N "" -C "thrillwiki-template-vm-access" - # Then copy the public key to the template VM: - sudo mkdir -p /home/thrillwiki/.ssh - echo "YOUR_PUBLIC_KEY_FROM_~/.ssh/thrillwiki_vm.pub" | sudo tee /home/thrillwiki/.ssh/***REMOVED*** - sudo chown -R thrillwiki:thrillwiki /home/thrillwiki/.ssh - sudo chmod 700 /home/thrillwiki/.ssh - sudo chmod 600 /home/thrillwiki/.ssh/***REMOVED*** - - # Configure PostgreSQL - sudo systemctl enable postgresql - sudo systemctl start postgresql - - # Configure Nginx - sudo systemctl enable nginx - - # Clean up for template - sudo apt autoremove -y - sudo apt autoclean - history -c && history -w - - # Shutdown template - sudo shutdown now - ``` - -3. **Verify template** is stopped and ready: - ```bash - ./template-utils.sh status # Should show "shut off" - ``` - -## Quick Start - -### Step 0: Set Up SSH Key (First Time Only) - -**IMPORTANT**: Before using template deployment, set up your SSH key: - -```bash -# Generate and configure SSH key -./scripts/unraid/setup-ssh-key.sh - -# Follow the instructions to add the public key to your template VM -``` - -See `TEMPLATE_VM_SETUP.md` for complete template VM setup instructions. - -### Using the Utility Script - -The easiest way to work with template VMs is using the utility script: - -```bash -# Check if template is ready -./template-utils.sh check - -# Get template information -./template-utils.sh info - -# Deploy a new VM from template -./template-utils.sh deploy my-thrillwiki-vm - -# Copy template to new VM (without full deployment) -./template-utils.sh copy my-vm-name - -# List all template-based VMs -./template-utils.sh list -``` - -### Using Python Scripts Directly - -For more control, use the Python scripts: - -```bash -# Set environment variables -export UNRAID_HOST="your.unraid.server.ip" -export UNRAID_USER="root" -export VM_NAME="my-thrillwiki-vm" -export REPO_URL="owner/repository-name" - -# Deploy VM from template -python3 main_template.py deploy - -# Just create VM without ThrillWiki setup -python3 main_template.py setup - -# Get VM status and IP -python3 main_template.py status -python3 main_template.py ip - -# Manage template -python3 main_template.py template info -python3 main_template.py template check -``` - -## File Structure - -### New Template-Based Files - -``` -scripts/unraid/ -├── template_manager.py # Template VM management -├── vm_manager_template.py # Template-based VM manager -├── main_template.py # Template deployment orchestrator -├── template-utils.sh # Quick utility commands -├── deploy-thrillwiki-template.sh # Optimized deployment script -├── thrillwiki-vm-template-simple.xml # VM XML without autoinstall ISO -└── README-template-deployment.md # This documentation -``` - -### Original Files (Still Available) - -``` -scripts/unraid/ -├── main.py # Original autoinstall approach -├── vm_manager.py # Original VM manager -├── deploy-thrillwiki.sh # Original deployment script -└── thrillwiki-vm-template.xml # Original XML with autoinstall -``` - -## Commands Reference - -### Template Management - -```bash -# Check template status -./template-utils.sh status -python3 template_manager.py check - -# Get template information -./template-utils.sh info -python3 template_manager.py info - -# List VMs created from template -./template-utils.sh list -python3 template_manager.py list - -# Update template instructions -./template-utils.sh update -python3 template_manager.py update -``` - -### VM Deployment - -```bash -# Complete deployment (VM + ThrillWiki) -./template-utils.sh deploy VM_NAME -python3 main_template.py deploy - -# VM setup only -python3 main_template.py setup - -# Individual operations -python3 main_template.py create -python3 main_template.py start -python3 main_template.py stop -python3 main_template.py delete -``` - -### VM Information - -```bash -# Get VM status -python3 main_template.py status - -# Get VM IP and connection info -python3 main_template.py ip - -# Get detailed VM information -python3 main_template.py info -``` - -## Environment Variables - -Configure these in your `***REMOVED***.unraid` file or export them: - -```bash -# Required -UNRAID_HOST="192.168.1.100" # Your Unraid server IP -UNRAID_USER="root" # Unraid SSH user -VM_NAME="thrillwiki-vm" # Name for new VM - -# Optional VM Configuration -VM_MEMORY="4096" # Memory in MB -VM_VCPUS="2" # Number of vCPUs -VM_DISK_SIZE="50" # Disk size in GB (for reference) -VM_IP="dhcp" # IP configuration (dhcp or static IP) - -# ThrillWiki Configuration -REPO_URL="owner/repository-name" # GitHub repository -GITHUB_TOKEN="ghp_xxxxx" # GitHub token (optional) -``` - -## Advantages of Template Approach - -### Speed ⚡ -- **VM Creation**: 2-5 minutes vs 20-30 minutes -- **Boot Time**: Instant boot vs full Ubuntu installation -- **Total Deployment**: ~10 minutes vs ~45 minutes - -### Reliability 🔒 -- **Pre-tested**: Template is already configured and tested -- **Consistent**: All VMs start from identical base -- **No Installation Failures**: No autoinstall ISO issues - -### Efficiency 💾 -- **Disk Space**: Copy-on-write QCOW2 format -- **Network**: No ISO downloads during deployment -- **Resources**: Less CPU usage during creation - -## Troubleshooting - -### Template Not Found -``` -❌ Template VM disk not found at: /mnt/user/domains/thrillwiki-template-ubuntu/vdisk1.qcow2 -``` - -**Solution**: Create the template VM first or verify the path. - -### Template VM Running -``` -⚠️ Template VM is currently running! -``` - -**Solution**: Stop the template VM before creating new instances: -```bash -ssh root@unraid-host "virsh shutdown thrillwiki-template-ubuntu" -``` - -### SSH Connection Issues -``` -❌ Cannot connect to Unraid server -``` - -**Solutions**: -1. Verify `UNRAID_HOST` is correct -2. Ensure SSH key authentication is set up -3. Check network connectivity - -### Template Disk Corruption - -If template VM gets corrupted: -1. Start template VM and fix issues -2. Or recreate template VM from scratch -3. Update template: `./template-utils.sh update` - -## Template Maintenance - -### Updating the Template - -Periodically update your template: - -1. **Start template VM** on Unraid -2. **SSH into template** and update: - ```bash - sudo apt update && sudo apt upgrade -y - sudo apt autoremove -y && sudo apt autoclean - - # Update UV if installed - ~/.cargo/bin/uv --version - - # Clear history - history -c && history -w - ``` -3. **Shutdown template VM** -4. **Verify update**: `./template-utils.sh check` - -### Template Best Practices - -- Keep template VM stopped when not maintaining it -- Update template monthly or before major deployments -- Test template by creating a test VM before important deployments -- Document any custom configurations in the template - -## Migration Guide - -### From Autoinstall to Template - -1. **Create your template VM** following the setup guide above -2. **Test template deployment**: - ```bash - ./template-utils.sh deploy test-vm - ``` -3. **Update your automation scripts** to use template approach -4. **Keep autoinstall scripts** as backup for special cases - -### Switching Between Approaches - -You can use both approaches as needed: - -```bash -# Template-based (fast) -python3 main_template.py deploy - -# Autoinstall-based (traditional) -python3 main.py setup -``` - -## Integration with CI/CD - -The template approach integrates perfectly with your existing CI/CD: - -```bash -# In your automation scripts -export UNRAID_HOST="your-server" -export VM_NAME="thrillwiki-$(date +%s)" -export REPO_URL="your-org/thrillwiki" - -# Deploy quickly -./scripts/unraid/template-utils.sh deploy "$VM_NAME" - -# VM is ready in minutes instead of 30+ minutes -``` - -## FAQ - -**Q: Can I use both template and autoinstall approaches?** -A: Yes! Keep both. Use template for speed, autoinstall for special configurations. - -**Q: How much disk space does template copying use?** -A: QCOW2 copy-on-write format means copies only store differences, saving space. - -**Q: What if I need different Ubuntu versions?** -A: Create multiple template VMs (e.g., `thrillwiki-template-ubuntu-22`, `thrillwiki-template-ubuntu-24`). - -**Q: Can I customize the template VM configuration?** -A: Yes! The template VM is just a regular VM. Customize it as needed. - -**Q: Is this approach secure?** -A: Yes. Each VM gets a fresh copy and can be configured independently. - ---- - -This template-based approach should make your VM deployments much faster and more reliable! 🚀 diff --git a/scripts/unraid/README.md b/scripts/unraid/README.md deleted file mode 100644 index b2b8cf17..00000000 --- a/scripts/unraid/README.md +++ /dev/null @@ -1,131 +0,0 @@ -# ThrillWiki Unraid VM Automation - -This directory contains scripts and configuration files for automating the creation and deployment of ThrillWiki VMs on Unraid servers using Ubuntu autoinstall. - -## Files - -- **`vm-manager.py`** - Main VM management script with direct kernel boot support -- **`thrillwiki-vm-template.xml`** - VM XML configuration template for libvirt -- **`cloud-init-template.yaml`** - Ubuntu autoinstall configuration template -- **`validate-autoinstall.py`** - Validation script for autoinstall configuration - -## Key Features - -### Direct Kernel Boot Approach -The system now uses direct kernel boot instead of GRUB-based boot for maximum reliability: - -1. **Kernel Extraction**: Automatically extracts Ubuntu kernel and initrd files from the ISO -2. **Direct Boot**: VM boots directly using extracted kernel with explicit autoinstall parameters -3. **Reliable Autoinstall**: Kernel cmdline explicitly specifies `autoinstall ds=nocloud-net;s=cdrom:/` - -### Schema-Compliant Configuration -The autoinstall configuration has been validated against Ubuntu's official schema: - -- ✅ Proper network configuration structure -- ✅ Correct storage layout specification -- ✅ Valid shutdown configuration -- ✅ Schema-compliant field types and values - -## Usage - -### Environment Variables -Set these environment variables before running: - -```bash -export UNRAID_HOST="your-unraid-server" -export UNRAID_USER="root" -export UNRAID_PASSWORD="your-password" -export SSH_PUBLIC_KEY="your-ssh-public-key" -export REPO_URL="https://github.com/your-username/thrillwiki.git" -export VM_IP="192.168.20.20" # or "dhcp" for DHCP -export VM_GATEWAY="192.168.20.1" -``` - -### Basic Operations - -```bash -# Create and configure VM -./vm-manager.py create - -# Start the VM -./vm-manager.py start - -# Check VM status -./vm-manager.py status - -# Get VM IP address -./vm-manager.py ip - -# Complete setup (create + start + get IP) -./vm-manager.py setup - -# Stop the VM -./vm-manager.py stop - -# Delete VM and all files -./vm-manager.py delete -``` - -### Configuration Validation - -```bash -# Validate autoinstall configuration -./validate-autoinstall.py -``` - -## How It Works - -### VM Creation Process - -1. **Extract Kernel**: Mount Ubuntu ISO and extract `vmlinuz` and `initrd` from `/casper/` -2. **Create Cloud-Init ISO**: Generate configuration ISO with autoinstall settings -3. **Generate VM XML**: Create libvirt VM configuration with direct kernel boot -4. **Define VM**: Register VM as persistent domain in libvirt - -### Boot Process - -1. **Direct Kernel Boot**: VM starts using extracted kernel and initrd directly -2. **Autoinstall Trigger**: Kernel cmdline forces Ubuntu installer into autoinstall mode -3. **Cloud-Init Data**: NoCloud datasource provides configuration from CD-ROM -4. **Automated Setup**: Ubuntu installs and configures ThrillWiki automatically - -### Network Configuration - -The system supports both static IP and DHCP configurations: - -- **Static IP**: Set `VM_IP` to desired IP address (e.g., "192.168.20.20") -- **DHCP**: Set `VM_IP` to "dhcp" for automatic IP assignment - -## Troubleshooting - -### VM Console Access -Connect to VM console to monitor autoinstall progress: -```bash -ssh root@unraid-server -virsh console thrillwiki-vm -``` - -### Check VM Logs -View autoinstall logs inside the VM: -```bash -# After VM is accessible -ssh ubuntu@vm-ip -sudo journalctl -u cloud-init -tail -f /var/log/cloud-init.log -``` - -### Validation Errors -If autoinstall validation fails, check: -1. YAML syntax in `cloud-init-template.yaml` -2. Required fields according to Ubuntu schema -3. Proper data types for configuration values - -## Architecture Benefits - -1. **Reliable Boot**: Direct kernel boot eliminates GRUB-related issues -2. **Schema Compliance**: Configuration validated against official Ubuntu schema -3. **Predictable Behavior**: Explicit kernel parameters ensure consistent autoinstall -4. **Clean Separation**: VM configuration, cloud-init, and kernel files are properly organized -5. **Easy Maintenance**: Modular design allows independent updates of components - -This implementation provides a robust, schema-compliant solution for automated ThrillWiki deployment on Unraid VMs. diff --git a/scripts/unraid/TEMPLATE_VM_SETUP.md b/scripts/unraid/TEMPLATE_VM_SETUP.md deleted file mode 100644 index 941b957c..00000000 --- a/scripts/unraid/TEMPLATE_VM_SETUP.md +++ /dev/null @@ -1,245 +0,0 @@ -# Template VM Setup Instructions - -## Prerequisites for Template-Based Deployment - -Before using the template-based deployment system, you need to: - -1. **Create the template VM** named `thrillwiki-template-ubuntu` on your Unraid server -2. **Configure SSH access** with your public key -3. **Set up the template** with all required software - -## Step 1: Create Template VM on Unraid - -1. Create a new VM on your Unraid server: - - **Name**: `thrillwiki-template-ubuntu` - - **OS**: Ubuntu 24.04 LTS - - **Memory**: 4GB (you can adjust this later for instances) - - **vCPUs**: 2 (you can adjust this later for instances) - - **Disk**: 50GB (sufficient for template) - -2. Install Ubuntu 24.04 LTS using standard installation - -## Step 2: Configure Template VM - -SSH into your template VM and run the following setup: - -### Create thrillwiki User -```bash -# Create the thrillwiki user with password 'thrillwiki' -sudo useradd -m -s /bin/bash thrillwiki -echo 'thrillwiki:thrillwiki' | sudo chpasswd -sudo usermod -aG sudo thrillwiki - -# Switch to thrillwiki user for remaining setup -sudo su - thrillwiki -``` - -### Set Up SSH Access -**IMPORTANT**: Add your SSH public key to the template VM: - -```bash -# Create .ssh directory -mkdir -p ~/.ssh -chmod 700 ~/.ssh - -# Add your public key (replace with your actual public key) -echo "YOUR_PUBLIC_KEY_HERE" >> ~/.ssh/***REMOVED*** -chmod 600 ~/.ssh/***REMOVED*** -``` - -**To get your public key** (run this on your Mac): -```bash -# Generate key if it doesn't exist -if [ ! -f ~/.ssh/thrillwiki_vm ]; then - ssh-keygen -t rsa -b 4096 -f ~/.ssh/thrillwiki_vm -N "" -C "thrillwiki-template-vm-access" -fi - -# Show your public key to copy -cat ~/.ssh/thrillwiki_vm.pub -``` - -Copy this public key and paste it into the template VM's ***REMOVED*** file. - -### Install Required Software -```bash -# Update system -sudo apt update && sudo apt upgrade -y - -# Install essential packages -sudo apt install -y \ - git curl wget build-essential \ - python3 python3-pip python3-venv python3-dev \ - postgresql postgresql-contrib postgresql-client \ - nginx \ - htop tree vim nano \ - software-properties-common - -# Install UV (Python package manager) -curl -LsSf https://astral.sh/uv/install.sh | sh -source ~/.cargo/env - -# Add UV to PATH permanently -echo 'export PATH="$HOME/.cargo/bin:$PATH"' >> ~/.bashrc - -# Configure PostgreSQL -sudo systemctl enable postgresql -sudo systemctl start postgresql - -# Create database user and database -sudo -u postgres createuser thrillwiki -sudo -u postgres createdb thrillwiki -sudo -u postgres psql -c "ALTER USER thrillwiki WITH PASSWORD 'thrillwiki';" -sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO thrillwiki;" - -# Configure Nginx -sudo systemctl enable nginx - -# Create ThrillWiki directories -mkdir -p ~/thrillwiki ~/logs ~/backups - -# Set up basic environment -echo "export DJANGO_SETTINGS_MODULE=thrillwiki.settings" >> ~/.bashrc -echo "export DATABASE_URL=[DATABASE-URL-REMOVED] >> ~/.bashrc -``` - -### Pre-install Common Python Packages (Optional) -```bash -# Create a base virtual environment with common packages -cd ~ -python3 -m venv base_venv -source base_venv/bin/activate -pip install --upgrade pip - -# Install common Django packages -pip install \ - django \ - psycopg2-binary \ - gunicorn \ - whitenoise \ - python-decouple \ - pillow \ - requests - -deactivate -``` - -### Clean Up Template -```bash -# Clean package cache -sudo apt autoremove -y -sudo apt autoclean - -# Clear bash history -history -c -history -w - -# Clear any temporary files -sudo find /tmp -type f -delete -sudo find /var/tmp -type f -delete - -# Shutdown the template VM -sudo shutdown now -``` - -## Step 3: Verify Template Setup - -After the template VM shuts down, verify it's ready: - -```bash -# From your Mac, check the template -cd /path/to/your/thrillwiki/project -./scripts/unraid/template-utils.sh check -``` - -## Step 4: Test Template Deployment - -Create a test VM from the template: - -```bash -# Deploy a test VM -./scripts/unraid/template-utils.sh deploy test-thrillwiki-vm - -# Check if it worked -ssh thrillwiki@ "echo 'Template VM working!'" -``` - -## Template VM Configuration Summary - -Your template VM should now have: - -- ✅ **Username**: `thrillwiki` (password: `thrillwiki`) -- ✅ **SSH Access**: Your public key in `/home/thrillwiki/.ssh/***REMOVED***` -- ✅ **Python**: Python 3 with UV package manager -- ✅ **Database**: PostgreSQL with `thrillwiki` user and database -- ✅ **Web Server**: Nginx installed and enabled -- ✅ **Directories**: `~/thrillwiki`, `~/logs`, `~/backups` ready - -## SSH Configuration on Your Mac - -The automation scripts will set this up, but you can also configure manually: - -```bash -# Add to ~/.ssh/config -cat >> ~/.ssh/config << EOF - -# ThrillWiki Template VM -Host thrillwiki-vm - HostName %h - User thrillwiki - IdentityFile ~/.ssh/thrillwiki_vm - StrictHostKeyChecking no - UserKnownHostsFile /dev/null -EOF -``` - -## Next Steps - -Once your template is set up: - -1. **Run the automation setup**: - ```bash - ./scripts/unraid/setup-template-automation.sh - ``` - -2. **Deploy VMs quickly**: - ```bash - ./scripts/unraid/template-utils.sh deploy my-vm-name - ``` - -3. **Enjoy 5-10x faster deployments** (2-5 minutes instead of 20-30 minutes!) - -## Troubleshooting - -### SSH Access Issues -```bash -# Test SSH access to template (when it's running for updates) -ssh -i ~/.ssh/thrillwiki_vm thrillwiki@TEMPLATE_VM_IP - -# If access fails, check: -# 1. Template VM is running -# 2. Public key is in ***REMOVED*** -# 3. Permissions are correct (700 for .ssh, 600 for ***REMOVED***) -``` - -### Template VM Updates -```bash -# Start template VM on Unraid -# SSH in and update: -sudo apt update && sudo apt upgrade -y -~/.cargo/bin/uv --version # Check UV is still working - -# Clean up and shutdown -sudo apt autoremove -y && sudo apt autoclean -history -c && history -w -sudo shutdown now -``` - -### Permission Issues -```bash -# If you get permission errors, ensure thrillwiki user owns everything -sudo chown -R thrillwiki:thrillwiki /home/thrillwiki/ -sudo chmod 700 /home/thrillwiki/.ssh -sudo chmod 600 /home/thrillwiki/.ssh/***REMOVED*** -``` - -Your template is now ready for lightning-fast VM deployments! ⚡ diff --git a/scripts/unraid/autoinstall-user-data.yaml b/scripts/unraid/autoinstall-user-data.yaml deleted file mode 100644 index 60ff8671..00000000 --- a/scripts/unraid/autoinstall-user-data.yaml +++ /dev/null @@ -1,206 +0,0 @@ -#cloud-config -autoinstall: - # version is an Autoinstall required field. - version: 1 - - # Install Ubuntu server packages and ThrillWiki dependencies - packages: - - ubuntu-server - - curl - - wget - - git - - python3 - - python3-pip - - python3-venv - - nginx - - postgresql - - postgresql-contrib - - redis-server - - nodejs - - npm - - build-essential - - ufw - - fail2ban - - htop - - tree - - vim - - tmux - - qemu-guest-agent - - # User creation - identity: - realname: 'ThrillWiki Admin' - username: thrillwiki - # Default [PASSWORD-REMOVED] (change after login) - password: '$6$rounds=4096$saltsalt$[AWS-SECRET-REMOVED]AzpI8g8T14F8VnhXo0sUkZV2NV6/.c77tHgVi34DgbPu.' - hostname: thrillwiki-vm - - locale: en_US.UTF-8 - keyboard: - layout: us - - package_update: true - package_upgrade: true - - # Use direct storage layout (no LVM) - storage: - swap: - size: 0 - layout: - name: direct - - # SSH configuration - ssh: - allow-pw: true - install-server: true - authorized-keys: - - {SSH_PUBLIC_KEY} - - # Network configuration - will be replaced with proper config - network: - version: 2 - ethernets: - enp1s0: - dhcp4: true - dhcp-identifier: mac - - # Commands to run after installation - late-commands: - # Update GRUB - - curtin in-target -- update-grub - - # Enable and start services - - curtin in-target -- systemctl enable qemu-guest-agent - - curtin in-target -- systemctl enable postgresql - - curtin in-target -- systemctl enable redis-server - - curtin in-target -- systemctl enable nginx - - # Configure PostgreSQL - - curtin in-target -- sudo -u postgres createuser -s thrillwiki - - curtin in-target -- sudo -u postgres createdb thrillwiki_db - - curtin in-target -- sudo -u postgres psql -c "ALTER USER thrillwiki PASSWORD 'thrillwiki123';" - - # Configure firewall - - curtin in-target -- ufw allow OpenSSH - - curtin in-target -- ufw allow 'Nginx Full' - - curtin in-target -- ufw --force enable - - # Clone ThrillWiki repository if provided - - curtin in-target -- bash -c 'if [ -n "{GITHUB_REPO}" ]; then cd /home/thrillwiki && git clone "{GITHUB_REPO}" thrillwiki-app && chown -R thrillwiki:thrillwiki thrillwiki-app; fi' - - # Create deployment script - - curtin in-target -- tee /home/thrillwiki/deploy-thrillwiki.sh << 'EOF' -#!/bin/bash -set -e - -echo "=== ThrillWiki Deployment Script ===" - -# Check if repo was cloned -if [ ! -d "/home/thrillwiki/thrillwiki-app" ]; then - echo "Repository not found. Please clone your ThrillWiki repository:" - echo "git clone YOUR_REPO_URL thrillwiki-app" - exit 1 -fi - -cd /home/thrillwiki/thrillwiki-app - -# Create virtual environment -python3 -m venv venv -source venv/bin/activate - -# Install Python dependencies -if [ -f "requirements.txt" ]; then - pip install -r requirements.txt -else - echo "Warning: requirements.txt not found" -fi - -# Install Django if not in requirements -pip install django psycopg2-binary redis celery gunicorn - -# Set up environment variables -cat > ***REMOVED*** << 'ENVEOF' -DEBUG=False -SECRET_KEY=your-secret-key-change-this -DATABASE_URL=[DATABASE-URL-REMOVED] -REDIS_URL=redis://localhost:6379/0 -ALLOWED_HOSTS=localhost,127.0.0.1,thrillwiki-vm -ENVEOF - -# Run Django setup commands -if [ -f "manage.py" ]; then - python manage.py collectstatic --noinput - python manage.py migrate - echo "from django.contrib.auth import get_user_model; User = get_user_model(); User.objects.create_superuser('admin', 'admin@thrillwiki.com', 'thrillwiki123') if not User.objects.filter(username='admin').exists() else None" | python manage.py shell -fi - -# Configure Nginx -sudo tee /etc/nginx/sites-available/thrillwiki << 'NGINXEOF' -server { - listen 80; - server_name _; - - location /static/ { - alias /home/thrillwiki/thrillwiki-app/staticfiles/; - } - - location /media/ { - alias /home/thrillwiki/thrillwiki-app/media/; - } - - location / { - proxy_pass http://127.0.0.1:8000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} -NGINXEOF - -# Enable Nginx site -sudo ln -sf /etc/nginx/sites-available/thrillwiki /etc/nginx/sites-enabled/ -sudo rm -f /etc/nginx/sites-enabled/default -sudo systemctl reload nginx - -# Create systemd service for Django -sudo tee /etc/systemd/system/thrillwiki.service << 'SERVICEEOF' -[Unit] -Description=ThrillWiki Django App -After=network.target - -[Service] -User=thrillwiki -Group=thrillwiki -[AWS-SECRET-REMOVED]wiki-app -[AWS-SECRET-REMOVED]wiki-app/venv/bin -ExecStart=/home/thrillwiki/thrillwiki-app/venv/bin/gunicorn --workers 3 --bind 127.0.0.1:8000 thrillwiki.wsgi:application -Restart=always - -[Install] -WantedBy=multi-user.target -SERVICEEOF - -# Enable and start ThrillWiki service -sudo systemctl daemon-reload -sudo systemctl enable thrillwiki -sudo systemctl start thrillwiki - -echo "=== ThrillWiki deployment complete! ===" -echo "Access your application at: http://$(hostname -I | awk '{print $1}')" -echo "Django Admin: http://$(hostname -I | awk '{print $1}')/admin" -echo "Default superuser: admin / thrillwiki123" -echo "" -echo "Important: Change default passwords!" -EOF - - # Make deployment script executable - - curtin in-target -- chmod +x /home/thrillwiki/deploy-thrillwiki.sh - - curtin in-target -- chown thrillwiki:thrillwiki /home/thrillwiki/deploy-thrillwiki.sh - - # Clean up - - curtin in-target -- apt-get autoremove -y - - curtin in-target -- apt-get autoclean - - # Reboot after installation - shutdown: reboot diff --git a/scripts/unraid/cloud-init-template.yaml b/scripts/unraid/cloud-init-template.yaml deleted file mode 100644 index 2ac6a66c..00000000 --- a/scripts/unraid/cloud-init-template.yaml +++ /dev/null @@ -1,62 +0,0 @@ -#cloud-config -# Ubuntu autoinstall configuration -autoinstall: - version: 1 - locale: en_US.UTF-8 - keyboard: - layout: us - network: - version: 2 - ethernets: - ens3: - dhcp4: true - enp1s0: - dhcp4: true - eth0: - dhcp4: true - ssh: - install-server: true - authorized-keys: - - {SSH_PUBLIC_KEY} - allow-pw: false - storage: - layout: - name: lvm - identity: - hostname: thrillwiki-vm - username: ubuntu - password: "$6$rounds=4096$salt$hash" # disabled - ssh key only - packages: - - openssh-server - - curl - - git - - python3 - - python3-pip - - python3-venv - - build-essential - - postgresql - - postgresql-contrib - - nginx - - nodejs - - npm - - wget - - ca-certificates - - openssl - - dnsutils - - net-tools - early-commands: - - systemctl stop ssh - late-commands: - # Enable sudo for ubuntu user - - echo 'ubuntu ALL=(ALL) NOPASSWD:ALL' > /target/etc/sudoers.d/ubuntu - # Install uv Python package manager - - chroot /target su - ubuntu -c 'curl -LsSf https://astral.sh/uv/install.sh | sh || pip3 install uv' - # Add uv to PATH - - chroot /target su - ubuntu -c 'echo "export PATH=\$HOME/.cargo/bin:\$PATH" >> /home/ubuntu/.bashrc' - # Clone ThrillWiki repository - - chroot /target su - ubuntu -c 'cd /home/ubuntu && git clone {GITHUB_REPO} thrillwiki' - # Setup systemd service for ThrillWiki - - systemctl enable postgresql - - systemctl enable nginx - - shutdown: reboot diff --git a/scripts/unraid/deploy-thrillwiki-template.sh b/scripts/unraid/deploy-thrillwiki-template.sh deleted file mode 100644 index a16c4c55..00000000 --- a/scripts/unraid/deploy-thrillwiki-template.sh +++ /dev/null @@ -1,451 +0,0 @@ -#!/bin/bash -# -# ThrillWiki Template-Based Deployment Script -# Optimized for VMs deployed from templates that already have basic setup -# - -# Function to log messages with timestamp -log() { - echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a /home/ubuntu/thrillwiki-deploy.log -} - -# Function to check if a command exists -command_exists() { - command -v "$1" >/dev/null 2>&1 -} - -# Function to wait for network connectivity -wait_for_network() { - log "Waiting for network connectivity..." - local max_attempts=20 # Reduced from 30 since template VMs boot faster - local attempt=1 - while [ $attempt -le $max_attempts ]; do - if curl -s --connect-timeout 5 https://github.com >/dev/null 2>&1; then - log "Network connectivity confirmed" - return 0 - fi - log "Network attempt $attempt/$max_attempts failed, retrying in 5 seconds..." - sleep 5 # Reduced from 10 since template VMs should have faster networking - attempt=$((attempt + 1)) - done - log "WARNING: Network connectivity check failed after $max_attempts attempts" - return 1 -} - -# Function to update system packages (lighter since template should be recent) -update_system() { - log "Updating system packages..." - - # Quick update - template should already have most packages - sudo apt update || log "WARNING: apt update failed" - - # Only upgrade security packages to save time - sudo apt list --upgradable 2>/dev/null | grep -q security && { - log "Installing security updates..." - sudo apt upgrade -y --with-new-pkgs -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" || log "WARNING: Security updates failed" - } || log "No security updates needed" -} - -# Function to setup Python environment with template optimizations -setup_python_env() { - log "Setting up Python environment..." - - # Check if uv is already available (should be in template) - export PATH="/home/ubuntu/.cargo/bin:$PATH" - - if command_exists uv; then - log "Using existing uv installation from template" - uv --version - else - log "Installing uv (not found in template)..." - if wait_for_network; then - curl -LsSf --connect-timeout 30 --retry 2 --retry-delay 5 https://astral.sh/uv/install.sh | sh - export PATH="/home/ubuntu/.cargo/bin:$PATH" - else - log "WARNING: Network not available, falling back to pip" - fi - fi - - # Setup virtual environment - if command_exists uv; then - log "Creating virtual environment with uv..." - if uv venv .venv && source .venv/bin/activate; then - if uv sync; then - log "Successfully set up environment with uv" - return 0 - else - log "uv sync failed, falling back to pip" - fi - else - log "uv venv failed, falling back to pip" - fi - fi - - # Fallback to pip with venv - log "Setting up environment with pip and venv" - if python3 -m venv .venv && source .venv/bin/activate; then - pip install --upgrade pip || log "WARNING: Failed to upgrade pip" - - # Try different dependency installation methods - if [ -f pyproject.toml ]; then - log "Installing dependencies from pyproject.toml" - if pip install -e . || pip install .; then - log "Successfully installed dependencies from pyproject.toml" - return 0 - else - log "Failed to install from pyproject.toml" - fi - fi - - if [ -f requirements.txt ]; then - log "Installing dependencies from requirements.txt" - if pip install -r requirements.txt; then - log "Successfully installed dependencies from requirements.txt" - return 0 - else - log "Failed to install from requirements.txt" - fi - fi - - # Last resort: install common Django packages - log "Installing basic Django packages as fallback" - pip install django psycopg2-binary gunicorn || log "WARNING: Failed to install basic packages" - else - log "ERROR: Failed to create virtual environment" - return 1 - fi -} - -# Function to setup database (should already exist in template) -setup_database() { - log "Setting up PostgreSQL database..." - - # Check if PostgreSQL is already running (should be in template) - if sudo systemctl is-active --quiet postgresql; then - log "PostgreSQL is already running" - else - log "Starting PostgreSQL service..." - sudo systemctl start postgresql || { - log "Failed to start PostgreSQL, trying alternative methods" - sudo service postgresql start || { - log "ERROR: Could not start PostgreSQL" - return 1 - } - } - fi - - # Check if database and user already exist (may be in template) - if sudo -u postgres psql -lqt | cut -d \| -f 1 | grep -qw thrillwiki_production; then - log "Database 'thrillwiki_production' already exists" - else - log "Creating database 'thrillwiki_production'..." - sudo -u postgres createdb thrillwiki_production || { - log "ERROR: Failed to create database" - return 1 - } - fi - - # Create/update database user - if sudo -u postgres psql -c "SELECT 1 FROM pg_user WHERE usename = 'ubuntu'" | grep -q 1; then - log "Database user 'ubuntu' already exists" - else - sudo -u postgres createuser ubuntu || log "WARNING: Failed to create user (may already exist)" - fi - - # Grant permissions - sudo -u postgres psql -c "ALTER USER ubuntu WITH SUPERUSER;" || { - log "WARNING: Failed to grant superuser privileges, trying alternative permissions" - sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE thrillwiki_production TO ubuntu;" || log "WARNING: Failed to grant database privileges" - } - - log "Database setup completed" -} - -# Function to run Django commands with fallbacks -run_django_commands() { - log "Running Django management commands..." - - # Ensure we're in the virtual environment - if [ ! -d ".venv" ] || ! source .venv/bin/activate; then - log "WARNING: Virtual environment not found or failed to activate" - # Try to run without venv activation - fi - - # Function to run a Django command with fallbacks - run_django_cmd() { - local cmd="$1" - local description="$2" - - log "Running: $description" - - # Try uv run first - if command_exists uv && uv run manage.py $cmd; then - log "Successfully ran '$cmd' with uv" - return 0 - fi - - # Try python in venv - if python manage.py $cmd; then - log "Successfully ran '$cmd' with python" - return 0 - fi - - # Try python3 - if python3 manage.py $cmd; then - log "Successfully ran '$cmd' with python3" - return 0 - fi - - log "WARNING: Failed to run '$cmd'" - return 1 - } - - # Run migrations - run_django_cmd "migrate" "Database migrations" || log "WARNING: Database migration failed" - - # Collect static files - run_django_cmd "collectstatic --noinput" "Static files collection" || log "WARNING: Static files collection failed" - - # Build Tailwind CSS (if available) - if run_django_cmd "tailwind build" "Tailwind CSS build"; then - log "Tailwind CSS built successfully" - else - log "Tailwind CSS build not available or failed - this is optional" - fi -} - -# Function to setup systemd services (may already exist in template) -setup_services() { - log "Setting up systemd services..." - - # Check if systemd service files exist - if [ -f scripts/systemd/thrillwiki.service ]; then - log "Copying ThrillWiki systemd service..." - sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/ || { - log "Failed to copy thrillwiki.service, creating basic service" - create_basic_service - } - else - log "Systemd service file not found, creating basic service" - create_basic_service - fi - - # Copy webhook service if available - if [ -f scripts/systemd/thrillwiki-webhook.service ]; then - sudo cp scripts/systemd/thrillwiki-webhook.service /etc/systemd/system/ || { - log "Failed to copy webhook service, skipping" - } - else - log "Webhook service file not found, skipping" - fi - - # Update service files with correct paths - if [ -f /etc/systemd/system/thrillwiki.service ]; then - sudo sed -i "s|/opt/thrillwiki|/home/ubuntu/thrillwiki|g" /etc/systemd/system/thrillwiki.service - sudo sed -i "s|User=thrillwiki|User=ubuntu|g" /etc/systemd/system/thrillwiki.service - fi - - if [ -f /etc/systemd/system/thrillwiki-webhook.service ]; then - sudo sed -i "s|/opt/thrillwiki|/home/ubuntu/thrillwiki|g" /etc/systemd/system/thrillwiki-webhook.service - sudo sed -i "s|User=thrillwiki|User=ubuntu|g" /etc/systemd/system/thrillwiki-webhook.service - fi - - # Reload systemd and start services - sudo systemctl daemon-reload - - # Enable and start main service - if sudo systemctl enable thrillwiki 2>/dev/null; then - log "ThrillWiki service enabled" - if sudo systemctl start thrillwiki; then - log "ThrillWiki service started successfully" - else - log "WARNING: Failed to start ThrillWiki service" - sudo systemctl status thrillwiki --no-pager || true - fi - else - log "WARNING: Failed to enable ThrillWiki service" - fi - - # Try to start webhook service if it exists - if [ -f /etc/systemd/system/thrillwiki-webhook.service ]; then - sudo systemctl enable thrillwiki-webhook 2>/dev/null && sudo systemctl start thrillwiki-webhook || { - log "WARNING: Failed to start webhook service" - } - fi -} - -# Function to create a basic systemd service if none exists -create_basic_service() { - log "Creating basic systemd service..." - - sudo tee /etc/systemd/system/thrillwiki.service > /dev/null << 'SERVICE_EOF' -[Unit] -Description=ThrillWiki Django Application -After=network.target postgresql.service -Wants=postgresql.service - -[Service] -Type=exec -User=ubuntu -Group=ubuntu -[AWS-SECRET-REMOVED] -[AWS-SECRET-REMOVED]/.venv/bin:/home/ubuntu/.cargo/bin:/usr/local/bin:/usr/bin:/bin -ExecStart=/home/ubuntu/thrillwiki/.venv/bin/python manage.py runserver 0.0.0.0:8000 -Restart=always -RestartSec=3 - -[Install] -WantedBy=multi-user.target -SERVICE_EOF - - log "Basic systemd service created" -} - -# Function to setup web server (may already be configured in template) -setup_webserver() { - log "Setting up web server..." - - # Check if nginx is installed and running - if command_exists nginx; then - if ! sudo systemctl is-active --quiet nginx; then - log "Starting nginx..." - sudo systemctl start nginx || log "WARNING: Failed to start nginx" - fi - - # Create basic nginx config if none exists - if [ ! -f /etc/nginx/sites-available/thrillwiki ]; then - log "Creating nginx configuration..." - sudo tee /etc/nginx/sites-available/thrillwiki > /dev/null << 'NGINX_EOF' -server { - listen 80; - server_name _; - - location /static/ { - alias /home/ubuntu/thrillwiki/staticfiles/; - } - - location /media/ { - alias /home/ubuntu/thrillwiki/media/; - } - - location / { - proxy_pass http://127.0.0.1:8000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} -NGINX_EOF - - # Enable the site - sudo ln -sf /etc/nginx/sites-available/thrillwiki /etc/nginx/sites-enabled/ || log "WARNING: Failed to enable nginx site" - sudo nginx -t && sudo systemctl reload nginx || log "WARNING: nginx configuration test failed" - else - log "nginx configuration already exists" - fi - else - log "nginx not installed, ThrillWiki will run on port 8000 directly" - fi -} - -# Main deployment function -main() { - log "Starting ThrillWiki template-based deployment..." - - # Shorter wait time since template VMs boot faster - log "Waiting for system to be ready..." - sleep 10 - - # Wait for network - wait_for_network || log "WARNING: Network check failed, continuing anyway" - - # Clone or update repository - log "Setting up ThrillWiki repository..." - export GITHUB_TOKEN=$(cat /home/ubuntu/.github-token 2>/dev/null || echo "") - - # Get the GitHub repository from environment or parameter - GITHUB_REPO="${1:-}" - if [ -z "$GITHUB_REPO" ]; then - log "ERROR: GitHub repository not specified" - return 1 - fi - - if [ -d "/home/ubuntu/thrillwiki" ]; then - log "ThrillWiki directory already exists, updating..." - cd /home/ubuntu/thrillwiki - git pull || log "WARNING: Failed to update repository" - else - if [ -n "$GITHUB_TOKEN" ]; then - log "Cloning with GitHub token..." - git clone https://$GITHUB_TOKEN@github.com/$GITHUB_REPO /home/ubuntu/thrillwiki || { - log "Failed to clone with token, trying without..." - git clone https://github.com/$GITHUB_REPO /home/ubuntu/thrillwiki || { - log "ERROR: Failed to clone repository" - return 1 - } - } - else - log "Cloning without GitHub token..." - git clone https://github.com/$GITHUB_REPO /home/ubuntu/thrillwiki || { - log "ERROR: Failed to clone repository" - return 1 - } - fi - cd /home/ubuntu/thrillwiki - fi - - # Update system (lighter for template VMs) - update_system - - # Setup Python environment - setup_python_env || { - log "ERROR: Failed to set up Python environment" - return 1 - } - - # Setup environment file - log "Setting up environment configuration..." - if [ -f ***REMOVED***.example ]; then - cp ***REMOVED***.example ***REMOVED*** || log "WARNING: Failed to copy ***REMOVED***.example" - fi - - # Update ***REMOVED*** with production settings - { - echo "DEBUG=False" - echo "DATABASE_URL=postgresql://ubuntu@localhost/thrillwiki_production" - echo "ALLOWED_HOSTS=*" - echo "STATIC_[AWS-SECRET-REMOVED]" - } >> ***REMOVED*** - - # Setup database - setup_database || { - log "ERROR: Database setup failed" - return 1 - } - - # Run Django commands - run_django_commands - - # Setup systemd services - setup_services - - # Setup web server - setup_webserver - - log "ThrillWiki template-based deployment completed!" - log "Application should be available at http://$(hostname -I | awk '{print $1}'):8000" - log "Logs are available at /home/ubuntu/thrillwiki-deploy.log" -} - -# Run main function and capture any errors -main "$@" 2>&1 | tee -a /home/ubuntu/thrillwiki-deploy.log -exit_code=${PIPESTATUS[0]} - -if [ $exit_code -eq 0 ]; then - log "Template-based deployment completed successfully!" -else - log "Template-based deployment completed with errors (exit code: $exit_code)" -fi - -exit $exit_code diff --git a/scripts/unraid/deploy-thrillwiki.sh b/scripts/unraid/deploy-thrillwiki.sh deleted file mode 100755 index 45a6d65c..00000000 --- a/scripts/unraid/deploy-thrillwiki.sh +++ /dev/null @@ -1,467 +0,0 @@ -#!/bin/bash - -# Function to log messages with timestamp -log() { - echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" | tee -a /home/ubuntu/thrillwiki-deploy.log -} - -# Function to check if a command exists -command_exists() { - command -v "$1" >/dev/null 2>&1 -} - -# Function to wait for network connectivity -wait_for_network() { - log "Waiting for network connectivity..." - local max_attempts=30 - local attempt=1 - while [ $attempt -le $max_attempts ]; do - if curl -s --connect-timeout 5 https://github.com >/dev/null 2>&1; then - log "Network connectivity confirmed" - return 0 - fi - log "Network attempt $attempt/$max_attempts failed, retrying in 10 seconds..." - sleep 10 - attempt=$((attempt + 1)) - done - log "WARNING: Network connectivity check failed after $max_attempts attempts" - return 1 -} - -# Function to install uv if not available -install_uv() { - log "Checking for uv installation..." - export PATH="/home/ubuntu/.cargo/bin:$PATH" - - if command_exists uv; then - log "uv is already available" - return 0 - fi - - log "Installing uv..." - - # Wait for network connectivity first - wait_for_network || { - log "Network not available, skipping uv installation" - return 1 - } - - # Try to install uv with multiple attempts - local max_attempts=3 - local attempt=1 - while [ $attempt -le $max_attempts ]; do - log "uv installation attempt $attempt/$max_attempts" - - if curl -LsSf --connect-timeout 30 --retry 2 --retry-delay 5 https://astral.sh/uv/install.sh | sh; then - # Reload PATH - export PATH="/home/ubuntu/.cargo/bin:$PATH" - if command_exists uv; then - log "uv installed successfully" - return 0 - else - log "uv installation completed but command not found, checking PATH..." - # Try to source the shell profile to get updated PATH - if [ -f /home/ubuntu/.bashrc ]; then - source /home/ubuntu/.bashrc 2>/dev/null || true - fi - if [ -f /home/ubuntu/.cargo/env ]; then - source /home/ubuntu/.cargo/env 2>/dev/null || true - fi - export PATH="/home/ubuntu/.cargo/bin:$PATH" - if command_exists uv; then - log "uv is now available after PATH update" - return 0 - fi - fi - fi - - log "uv installation attempt $attempt failed" - attempt=$((attempt + 1)) - [ $attempt -le $max_attempts ] && sleep 10 - done - - log "Failed to install uv after $max_attempts attempts, will use pip fallback" - return 1 -} - -# Function to setup Python environment with fallbacks -setup_python_env() { - log "Setting up Python environment..." - - # Try to install uv first if not available - install_uv - - export PATH="/home/ubuntu/.cargo/bin:$PATH" - - # Try uv first - if command_exists uv; then - log "Using uv for Python environment management" - if uv venv .venv && source .venv/bin/activate; then - if uv sync; then - log "Successfully set up environment with uv" - return 0 - else - log "uv sync failed, falling back to pip" - fi - else - log "uv venv failed, falling back to pip" - fi - else - log "uv not available, using pip" - fi - - # Fallback to pip with venv - log "Setting up environment with pip and venv" - if python3 -m venv .venv && source .venv/bin/activate; then - pip install --upgrade pip || log "WARNING: Failed to upgrade pip" - - # Try different dependency installation methods - if [ -f pyproject.toml ]; then - log "Installing dependencies from pyproject.toml" - if pip install -e . || pip install .; then - log "Successfully installed dependencies from pyproject.toml" - return 0 - else - log "Failed to install from pyproject.toml" - fi - fi - - if [ -f requirements.txt ]; then - log "Installing dependencies from requirements.txt" - if pip install -r requirements.txt; then - log "Successfully installed dependencies from requirements.txt" - return 0 - else - log "Failed to install from requirements.txt" - fi - fi - - # Last resort: install common Django packages - log "Installing basic Django packages as fallback" - pip install django psycopg2-binary gunicorn || log "WARNING: Failed to install basic packages" - else - log "ERROR: Failed to create virtual environment" - return 1 - fi -} - -# Function to setup database with fallbacks -setup_database() { - log "Setting up PostgreSQL database..." - - # Ensure PostgreSQL is running - if ! sudo systemctl is-active --quiet postgresql; then - log "Starting PostgreSQL service..." - sudo systemctl start postgresql || { - log "Failed to start PostgreSQL, trying alternative methods" - sudo service postgresql start || { - log "ERROR: Could not start PostgreSQL" - return 1 - } - } - fi - - # Create database user and database with error handling - if sudo -u postgres createuser ubuntu 2>/dev/null || sudo -u postgres psql -c "SELECT 1 FROM pg_user WHERE usename = 'ubuntu'" | grep -q 1; then - log "Database user 'ubuntu' created or already exists" - else - log "ERROR: Failed to create database user" - return 1 - fi - - if sudo -u postgres createdb thrillwiki_production 2>/dev/null || sudo -u postgres psql -lqt | cut -d \| -f 1 | grep -qw thrillwiki_production; then - log "Database 'thrillwiki_production' created or already exists" - else - log "ERROR: Failed to create database" - return 1 - fi - - # Grant permissions - sudo -u postgres psql -c "ALTER USER ubuntu WITH SUPERUSER;" || { - log "WARNING: Failed to grant superuser privileges, trying alternative permissions" - sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE thrillwiki_production TO ubuntu;" || log "WARNING: Failed to grant database privileges" - } - - log "Database setup completed" -} - -# Function to run Django commands with fallbacks -run_django_commands() { - log "Running Django management commands..." - - # Ensure we're in the virtual environment - if [ ! -d ".venv" ] || ! source .venv/bin/activate; then - log "WARNING: Virtual environment not found or failed to activate" - # Try to run without venv activation - fi - - # Function to run a Django command with fallbacks - run_django_cmd() { - local cmd="$1" - local description="$2" - - log "Running: $description" - - # Try uv run first - if command_exists uv && uv run manage.py $cmd; then - log "Successfully ran '$cmd' with uv" - return 0 - fi - - # Try python in venv - if python manage.py $cmd; then - log "Successfully ran '$cmd' with python" - return 0 - fi - - # Try python3 - if python3 manage.py $cmd; then - log "Successfully ran '$cmd' with python3" - return 0 - fi - - log "WARNING: Failed to run '$cmd'" - return 1 - } - - # Run migrations - run_django_cmd "migrate" "Database migrations" || log "WARNING: Database migration failed" - - # Collect static files - run_django_cmd "collectstatic --noinput" "Static files collection" || log "WARNING: Static files collection failed" - - # Build Tailwind CSS (if available) - if run_django_cmd "tailwind build" "Tailwind CSS build"; then - log "Tailwind CSS built successfully" - else - log "Tailwind CSS build not available or failed - this is optional" - fi -} - -# Function to setup systemd services with fallbacks -setup_services() { - log "Setting up systemd services..." - - # Check if systemd service files exist - if [ -f scripts/systemd/thrillwiki.service ]; then - sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/ || { - log "Failed to copy thrillwiki.service, creating basic service" - create_basic_service - } - else - log "Systemd service file not found, creating basic service" - create_basic_service - fi - - if [ -f scripts/systemd/thrillwiki-webhook.service ]; then - sudo cp scripts/systemd/thrillwiki-webhook.service /etc/systemd/system/ || { - log "Failed to copy webhook service, skipping" - } - else - log "Webhook service file not found, skipping" - fi - - # Update service files with correct paths - if [ -f /etc/systemd/system/thrillwiki.service ]; then - sudo sed -i "s|/opt/thrillwiki|/home/ubuntu/thrillwiki|g" /etc/systemd/system/thrillwiki.service - sudo sed -i "s|User=thrillwiki|User=ubuntu|g" /etc/systemd/system/thrillwiki.service - fi - - if [ -f /etc/systemd/system/thrillwiki-webhook.service ]; then - sudo sed -i "s|/opt/thrillwiki|/home/ubuntu/thrillwiki|g" /etc/systemd/system/thrillwiki-webhook.service - sudo sed -i "s|User=thrillwiki|User=ubuntu|g" /etc/systemd/system/thrillwiki-webhook.service - fi - - # Reload systemd and start services - sudo systemctl daemon-reload - - if sudo systemctl enable thrillwiki 2>/dev/null; then - log "ThrillWiki service enabled" - if sudo systemctl start thrillwiki; then - log "ThrillWiki service started successfully" - else - log "WARNING: Failed to start ThrillWiki service" - sudo systemctl status thrillwiki --no-pager || true - fi - else - log "WARNING: Failed to enable ThrillWiki service" - fi - - # Try to start webhook service if it exists - if [ -f /etc/systemd/system/thrillwiki-webhook.service ]; then - sudo systemctl enable thrillwiki-webhook 2>/dev/null && sudo systemctl start thrillwiki-webhook || { - log "WARNING: Failed to start webhook service" - } - fi -} - -# Function to create a basic systemd service if none exists -create_basic_service() { - log "Creating basic systemd service..." - - sudo tee /etc/systemd/system/thrillwiki.service > /dev/null << 'SERVICE_EOF' -[Unit] -Description=ThrillWiki Django Application -After=network.target postgresql.service -Wants=postgresql.service - -[Service] -Type=exec -User=ubuntu -Group=ubuntu -[AWS-SECRET-REMOVED] -[AWS-SECRET-REMOVED]/.venv/bin:/home/ubuntu/.cargo/bin:/usr/local/bin:/usr/bin:/bin -ExecStart=/home/ubuntu/thrillwiki/.venv/bin/python manage.py runserver 0.0.0.0:8000 -Restart=always -RestartSec=3 - -[Install] -WantedBy=multi-user.target -SERVICE_EOF - - log "Basic systemd service created" -} - -# Function to setup web server (nginx) with fallbacks -setup_webserver() { - log "Setting up web server..." - - # Check if nginx is installed and running - if command_exists nginx; then - if ! sudo systemctl is-active --quiet nginx; then - log "Starting nginx..." - sudo systemctl start nginx || log "WARNING: Failed to start nginx" - fi - - # Create basic nginx config if none exists - if [ ! -f /etc/nginx/sites-available/thrillwiki ]; then - log "Creating nginx configuration..." - sudo tee /etc/nginx/sites-available/thrillwiki > /dev/null << 'NGINX_EOF' -server { - listen 80; - server_name _; - - location /static/ { - alias /home/ubuntu/thrillwiki/staticfiles/; - } - - location /media/ { - alias /home/ubuntu/thrillwiki/media/; - } - - location / { - proxy_pass http://127.0.0.1:8000; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - proxy_set_header X-Forwarded-Proto $scheme; - } -} -NGINX_EOF - - # Enable the site - sudo ln -sf /etc/nginx/sites-available/thrillwiki /etc/nginx/sites-enabled/ || log "WARNING: Failed to enable nginx site" - sudo nginx -t && sudo systemctl reload nginx || log "WARNING: nginx configuration test failed" - fi - else - log "nginx not installed, ThrillWiki will run on port 8000 directly" - fi -} - -# Main deployment function -main() { - log "Starting ThrillWiki deployment..." - - # Wait for system to be ready - log "Waiting for system to be ready..." - sleep 30 - - # Wait for network - wait_for_network || log "WARNING: Network check failed, continuing anyway" - - # Clone repository - log "Cloning ThrillWiki repository..." - export GITHUB_TOKEN=$(cat /home/ubuntu/.github-token 2>/dev/null || echo "") - - # Get the GitHub repository from environment or parameter - GITHUB_REPO="${1:-}" - if [ -z "$GITHUB_REPO" ]; then - log "ERROR: GitHub repository not specified" - return 1 - fi - - if [ -d "/home/ubuntu/thrillwiki" ]; then - log "ThrillWiki directory already exists, updating..." - cd /home/ubuntu/thrillwiki - git pull || log "WARNING: Failed to update repository" - else - if [ -n "$GITHUB_TOKEN" ]; then - log "Cloning with GitHub token..." - git clone https://$GITHUB_TOKEN@github.com/$GITHUB_REPO /home/ubuntu/thrillwiki || { - log "Failed to clone with token, trying without..." - git clone https://github.com/$GITHUB_REPO /home/ubuntu/thrillwiki || { - log "ERROR: Failed to clone repository" - return 1 - } - } - else - log "Cloning without GitHub token..." - git clone https://github.com/$GITHUB_REPO /home/ubuntu/thrillwiki || { - log "ERROR: Failed to clone repository" - return 1 - } - fi - cd /home/ubuntu/thrillwiki - fi - - # Setup Python environment - setup_python_env || { - log "ERROR: Failed to set up Python environment" - return 1 - } - - # Setup environment file - log "Setting up environment configuration..." - if [ -f ***REMOVED***.example ]; then - cp ***REMOVED***.example ***REMOVED*** || log "WARNING: Failed to copy ***REMOVED***.example" - fi - - # Update ***REMOVED*** with production settings - { - echo "DEBUG=False" - echo "DATABASE_URL=postgresql://ubuntu@localhost/thrillwiki_production" - echo "ALLOWED_HOSTS=*" - echo "STATIC_[AWS-SECRET-REMOVED]" - } >> ***REMOVED*** - - # Setup database - setup_database || { - log "ERROR: Database setup failed" - return 1 - } - - # Run Django commands - run_django_commands - - # Setup systemd services - setup_services - - # Setup web server - setup_webserver - - log "ThrillWiki deployment completed!" - log "Application should be available at http://$(hostname -I | awk '{print $1}'):8000" - log "Logs are available at /home/ubuntu/thrillwiki-deploy.log" -} - -# Run main function and capture any errors -main "$@" 2>&1 | tee -a /home/ubuntu/thrillwiki-deploy.log -exit_code=${PIPESTATUS[0]} - -if [ $exit_code -eq 0 ]; then - log "Deployment completed successfully!" -else - log "Deployment completed with errors (exit code: $exit_code)" -fi - -exit $exit_code diff --git a/scripts/unraid/example-non-interactive.sh b/scripts/unraid/example-non-interactive.sh deleted file mode 100755 index e7c2c746..00000000 --- a/scripts/unraid/example-non-interactive.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -# Example: How to use non-interactive mode for ThrillWiki setup -# -# This script shows how to set up environment variables for non-interactive mode -# and run the automation without any user prompts. - -echo "🤖 ThrillWiki Non-Interactive Setup Example" -echo "[AWS-SECRET-REMOVED]==" - -# Set required environment variables for non-interactive mode -# These replace the interactive prompts - -# Unraid password (REQUIRED) -export UNRAID_PASSWORD="your_unraid_password_here" - -# GitHub token (REQUIRED if using GitHub API) -export GITHUB_TOKEN="your_github_token_here" - -# Webhook secret (REQUIRED if webhooks enabled) -export WEBHOOK_SECRET="your_webhook_secret_here" - -echo "✅ Environment variables set" -echo "📋 Configuration summary:" -echo " - UNRAID_PASSWORD: [HIDDEN]" -echo " - GITHUB_TOKEN: [HIDDEN]" -echo " - WEBHOOK_SECRET: [HIDDEN]" -echo - -echo "🚀 Starting non-interactive setup..." -echo "This will use saved configuration and the environment variables above" -echo - -# Run the setup script in non-interactive mode -./setup-complete-automation.sh -y - -echo -echo "✨ Non-interactive setup completed!" -echo "📝 Note: This example script should be customized with your actual credentials" diff --git a/scripts/unraid/iso_builder.py b/scripts/unraid/iso_builder.py deleted file mode 100644 index cbfcb548..00000000 --- a/scripts/unraid/iso_builder.py +++ /dev/null @@ -1,531 +0,0 @@ -#!/usr/bin/env python3 -""" -Ubuntu ISO Builder for Autoinstall -Follows the Ubuntu autoinstall guide exactly: -1. Download Ubuntu ISO -2. Extract with 7zip equivalent -3. Modify GRUB configuration -4. Add server/ directory with autoinstall config -5. Rebuild ISO with xorriso equivalent -""" - -import os -import logging -import subprocess -import tempfile -import shutil -import urllib.request -from pathlib import Path -from typing import Optional - -logger = logging.getLogger(__name__) - -# Ubuntu ISO URLs with fallbacks -UBUNTU_MIRRORS = [ - "https://releases.ubuntu.com", # Official Ubuntu releases (primary) - "http://archive.ubuntu.com/ubuntu-releases", # Official archive - "http://mirror.csclub.uwaterloo.ca/ubuntu-releases", # University of Waterloo - "http://mirror.math.princeton.edu/pub/ubuntu-releases", # Princeton mirror -] -UBUNTU_24_04_ISO = "24.04/ubuntu-24.04.3-live-server-amd64.iso" -UBUNTU_22_04_ISO = "22.04/ubuntu-22.04.3-live-server-amd64.iso" - - -def get_latest_ubuntu_server_iso(version: str) -> Optional[str]: - """Dynamically find the latest point release for a given Ubuntu version.""" - try: - import re - - for mirror in UBUNTU_MIRRORS: - try: - url = f"{mirror}/{version}/" - response = urllib.request.urlopen(url, timeout=10) - content = response.read().decode("utf-8") - - # Find all server ISO files for this version - pattern = rf"ubuntu-{ - re.escape(version)}\.[0-9]+-live-server-amd64\.iso" - matches = re.findall(pattern, content) - - if matches: - # Sort by version and return the latest - matches.sort(key=lambda x: [int(n) for n in re.findall(r"\d+", x)]) - latest_iso = matches[-1] - return f"{version}/{latest_iso}" - except Exception as e: - logger.debug(f"Failed to check {mirror}/{version}/: {e}") - continue - - logger.warning(f"Could not dynamically detect latest ISO for Ubuntu {version}") - return None - - except Exception as e: - logger.error(f"Error in dynamic ISO detection: {e}") - return None - - -class UbuntuISOBuilder: - """Builds modified Ubuntu ISO with autoinstall configuration.""" - - def __init__(self, vm_name: str, work_dir: Optional[str] = None): - self.vm_name = vm_name - self.work_dir = ( - Path(work_dir) - if work_dir - else Path(tempfile.mkdtemp(prefix="ubuntu-autoinstall-")) - ) - self.source_files_dir = self.work_dir / "source-files" - self.boot_dir = self.work_dir / "BOOT" - self.server_dir = self.source_files_dir / "server" - self.grub_cfg_path = self.source_files_dir / "boot" / "grub" / "grub.cfg" - - # Ensure directories exist - self.work_dir.mkdir(exist_ok=True, parents=True) - self.source_files_dir.mkdir(exist_ok=True, parents=True) - - def check_tools(self) -> bool: - """Check if required tools are available.""" - - # Check for 7zip equivalent (p7zip on macOS/Linux) - if not shutil.which("7z") and not shutil.which("7za"): - logger.error( - "7zip not found. Install with: brew install p7zip (macOS) or apt install p7zip-full (Ubuntu)" - ) - return False - - # Check for xorriso equivalent - if ( - not shutil.which("xorriso") - and not shutil.which("mkisofs") - and not shutil.which("hdiutil") - ): - logger.error( - "No ISO creation tool found. Install xorriso, mkisofs, or use macOS hdiutil" - ) - return False - - return True - - def download_ubuntu_iso(self, version: str = "24.04") -> Path: - """Download Ubuntu ISO if not already present, trying multiple mirrors.""" - iso_filename = f"ubuntu-{version}-live-server-amd64.iso" - iso_path = self.work_dir / iso_filename - - if iso_path.exists(): - logger.info(f"Ubuntu ISO already exists: {iso_path}") - return iso_path - - if version == "24.04": - iso_subpath = UBUNTU_24_04_ISO - elif version == "22.04": - iso_subpath = UBUNTU_22_04_ISO - else: - raise ValueError(f"Unsupported Ubuntu version: {version}") - - # Try each mirror until one works - last_error = None - for mirror in UBUNTU_MIRRORS: - iso_url = f"{mirror}/{iso_subpath}" - logger.info(f"Trying to download Ubuntu {version} ISO from {iso_url}") - - try: - # Try downloading from this mirror - urllib.request.urlretrieve(iso_url, iso_path) - logger.info( - f"✅ Ubuntu ISO downloaded successfully from {mirror}: {iso_path}" - ) - return iso_path - except Exception as e: - last_error = e - logger.warning(f"Failed to download from {mirror}: {e}") - # Remove partial download if it exists - if iso_path.exists(): - iso_path.unlink() - continue - - # If we get here, all mirrors failed - logger.error( - f"Failed to download Ubuntu ISO from all mirrors. Last error: {last_error}" - ) - raise last_error - - def extract_iso(self, iso_path: Path) -> bool: - """Extract Ubuntu ISO following the guide.""" - logger.info(f"Extracting ISO: {iso_path}") - - # Use 7z to extract ISO - seven_zip_cmd = "7z" if shutil.which("7z") else "7za" - - try: - # Extract ISO: 7z -y x ubuntu.iso -osource-files - subprocess.run( - [ - seven_zip_cmd, - "-y", - "x", - str(iso_path), - f"-o{self.source_files_dir}", - ], - capture_output=True, - text=True, - check=True, - ) - - logger.info("ISO extracted successfully") - - # Move [BOOT] directory as per guide: mv '[BOOT]' ../BOOT - boot_source = self.source_files_dir / "[BOOT]" - if boot_source.exists(): - shutil.move(str(boot_source), str(self.boot_dir)) - logger.info(f"Moved [BOOT] directory to {self.boot_dir}") - else: - logger.warning("[BOOT] directory not found in extracted files") - - return True - - except subprocess.CalledProcessError as e: - logger.error(f"Failed to extract ISO: {e.stderr}") - return False - except Exception as e: - logger.error(f"Error extracting ISO: {e}") - return False - - def modify_grub_config(self) -> bool: - """Modify GRUB configuration to add autoinstall menu entry.""" - logger.info("Modifying GRUB configuration...") - - if not self.grub_cfg_path.exists(): - logger.error(f"GRUB config not found: {self.grub_cfg_path}") - return False - - try: - # Read existing GRUB config - with open(self.grub_cfg_path, "r", encoding="utf-8") as f: - grub_content = f.read() - - # Autoinstall menu entry as per guide - autoinstall_entry = """menuentry "Autoinstall Ubuntu Server" { - set gfxpayload=keep - linux /casper/vmlinuz quiet autoinstall ds=nocloud\\;s=/cdrom/server/ --- - initrd /casper/initrd -} - -""" - - # Insert autoinstall entry at the beginning of menu entries - # Find the first menuentry and insert before it - import re - - first_menu_match = re.search(r'(menuentry\s+["\'])', grub_content) - if first_menu_match: - insert_pos = first_menu_match.start() - modified_content = ( - grub_content[:insert_pos] - + autoinstall_entry - + grub_content[insert_pos:] - ) - else: - # Fallback: append at the end - modified_content = grub_content + "\n" + autoinstall_entry - - # Write modified GRUB config - with open(self.grub_cfg_path, "w", encoding="utf-8") as f: - f.write(modified_content) - - logger.info("GRUB configuration modified successfully") - return True - - except Exception as e: - logger.error(f"Failed to modify GRUB config: {e}") - return False - - def create_autoinstall_config(self, user_data: str) -> bool: - """Create autoinstall configuration in server/ directory.""" - logger.info("Creating autoinstall configuration...") - - try: - # Create server directory - self.server_dir.mkdir(exist_ok=True, parents=True) - - # Create empty meta-data file (as per guide) - meta_data_path = self.server_dir / "meta-data" - meta_data_path.touch() - logger.info(f"Created empty meta-data: {meta_data_path}") - - # Create user-data file with autoinstall configuration - user_data_path = self.server_dir / "user-data" - with open(user_data_path, "w", encoding="utf-8") as f: - f.write(user_data) - logger.info(f"Created user-data: {user_data_path}") - - return True - - except Exception as e: - logger.error(f"Failed to create autoinstall config: {e}") - return False - - def rebuild_iso(self, output_path: Path) -> bool: - """Rebuild ISO with autoinstall configuration using xorriso.""" - logger.info(f"Rebuilding ISO: {output_path}") - - try: - # Change to source-files directory for xorriso command - original_cwd = os.getcwd() - os.chdir(self.source_files_dir) - - # Remove existing output file - if output_path.exists(): - output_path.unlink() - - # Try different ISO creation methods in order of preference - success = False - - # Method 1: xorriso (most complete) - if shutil.which("xorriso") and not success: - try: - logger.info("Trying xorriso method...") - cmd = [ - "xorriso", - "-as", - "mkisofs", - "-r", - "-V", - f"Ubuntu 24.04 LTS AUTO (EFIBIOS)", - "-o", - str(output_path), - "--grub2-mbr", - f"..{os.sep}BOOT{os.sep}1-Boot-NoEmul.img", - "-partition_offset", - "16", - "--mbr-force-bootable", - "-append_partition", - "2", - "28732ac11ff8d211ba4b00a0c93ec93b", - f"..{os.sep}BOOT{os.sep}2-Boot-NoEmul.img", - "-appended_part_as_gpt", - "-iso_mbr_part_type", - "a2a0d0ebe5b9334487c068b6b72699c7", - "-c", - "/boot.catalog", - "-b", - "/boot/grub/i386-pc/eltorito.img", - "-no-emul-boot", - "-boot-load-size", - "4", - "-boot-info-table", - "--grub2-boot-info", - "-eltorito-alt-boot", - "-e", - "--interval:appended_partition_2:::", - "-no-emul-boot", - ".", - ] - subprocess.run(cmd, capture_output=True, text=True, check=True) - success = True - logger.info("✅ ISO created with xorriso") - except subprocess.CalledProcessError as e: - logger.warning(f"xorriso failed: {e.stderr}") - if output_path.exists(): - output_path.unlink() - - # Method 2: mkisofs with joliet-long - if shutil.which("mkisofs") and not success: - try: - logger.info("Trying mkisofs with joliet-long...") - cmd = [ - "mkisofs", - "-r", - "-V", - f"Ubuntu 24.04 LTS AUTO", - "-cache-inodes", - "-J", - "-joliet-long", - "-l", - "-b", - "boot/grub/i386-pc/eltorito.img", - "-c", - "boot.catalog", - "-no-emul-boot", - "-boot-load-size", - "4", - "-boot-info-table", - "-o", - str(output_path), - ".", - ] - subprocess.run(cmd, capture_output=True, text=True, check=True) - success = True - logger.info("✅ ISO created with mkisofs (joliet-long)") - except subprocess.CalledProcessError as e: - logger.warning(f"mkisofs with joliet-long failed: {e.stderr}") - if output_path.exists(): - output_path.unlink() - - # Method 3: mkisofs without Joliet (fallback) - if shutil.which("mkisofs") and not success: - try: - logger.info("Trying mkisofs without Joliet (fallback)...") - cmd = [ - "mkisofs", - "-r", - "-V", - f"Ubuntu 24.04 LTS AUTO", - "-cache-inodes", - "-l", # No -J (Joliet) to avoid filename conflicts - "-b", - "boot/grub/i386-pc/eltorito.img", - "-c", - "boot.catalog", - "-no-emul-boot", - "-boot-load-size", - "4", - "-boot-info-table", - "-o", - str(output_path), - ".", - ] - subprocess.run(cmd, capture_output=True, text=True, check=True) - success = True - logger.info("✅ ISO created with mkisofs (no Joliet)") - except subprocess.CalledProcessError as e: - logger.warning( - f"mkisofs without Joliet failed: { - e.stderr}" - ) - if output_path.exists(): - output_path.unlink() - - # Method 4: macOS hdiutil - if shutil.which("hdiutil") and not success: - try: - logger.info("Trying hdiutil (macOS)...") - cmd = [ - "hdiutil", - "makehybrid", - "-iso", - "-joliet", - "-o", - str(output_path), - ".", - ] - subprocess.run(cmd, capture_output=True, text=True, check=True) - success = True - logger.info("✅ ISO created with hdiutil") - except subprocess.CalledProcessError as e: - logger.warning(f"hdiutil failed: {e.stderr}") - if output_path.exists(): - output_path.unlink() - - if not success: - logger.error("All ISO creation methods failed") - return False - - # Verify the output file was created - if not output_path.exists(): - logger.error("ISO file was not created despite success message") - return False - - logger.info(f"ISO rebuilt successfully: {output_path}") - logger.info( - f"ISO size: {output_path.stat().st_size / (1024 * 1024):.1f} MB" - ) - return True - - except Exception as e: - logger.error(f"Error rebuilding ISO: {e}") - return False - finally: - # Return to original directory - os.chdir(original_cwd) - - def build_autoinstall_iso( - self, user_data: str, output_path: Path, ubuntu_version: str = "24.04" - ) -> bool: - """Complete ISO build process following the Ubuntu autoinstall guide.""" - logger.info( - f"🚀 Starting Ubuntu {ubuntu_version} autoinstall ISO build process" - ) - - try: - # Step 1: Check tools - if not self.check_tools(): - return False - - # Step 2: Download Ubuntu ISO - iso_path = self.download_ubuntu_iso(ubuntu_version) - - # Step 3: Extract ISO - if not self.extract_iso(iso_path): - return False - - # Step 4: Modify GRUB - if not self.modify_grub_config(): - return False - - # Step 5: Create autoinstall config - if not self.create_autoinstall_config(user_data): - return False - - # Step 6: Rebuild ISO - if not self.rebuild_iso(output_path): - return False - - logger.info(f"🎉 Successfully created autoinstall ISO: {output_path}") - logger.info(f"📁 Work directory: {self.work_dir}") - return True - - except Exception as e: - logger.error(f"Failed to build autoinstall ISO: {e}") - return False - - def cleanup(self): - """Clean up temporary work directory.""" - if self.work_dir.exists(): - shutil.rmtree(self.work_dir) - logger.info(f"Cleaned up work directory: {self.work_dir}") - - -def main(): - """Test the ISO builder.""" - import logging - - logging.basicConfig(level=logging.INFO) - - # Sample autoinstall user-data - user_data = """#cloud-config -autoinstall: - version: 1 - packages: - - ubuntu-server - identity: - realname: 'Test User' - username: testuser - password: '$6$rounds=4096$saltsalt$[AWS-SECRET-REMOVED]AzpI8g8T14F8VnhXo0sUkZV2NV6/.c77tHgVi34DgbPu.' - hostname: test-vm - locale: en_US.UTF-8 - keyboard: - layout: us - storage: - layout: - name: direct - ssh: - install-server: true - late-commands: - - curtin in-target -- apt-get autoremove -y -""" - - builder = UbuntuISOBuilder("test-vm") - output_path = Path("/tmp/ubuntu-24.04-autoinstall.iso") - - success = builder.build_autoinstall_iso(user_data, output_path) - if success: - print(f"✅ ISO created: {output_path}") - else: - print("❌ ISO creation failed") - - # Optionally clean up - # builder.cleanup() - - -if __name__ == "__main__": - main() diff --git a/scripts/unraid/main.py b/scripts/unraid/main.py deleted file mode 100644 index 80786d21..00000000 --- a/scripts/unraid/main.py +++ /dev/null @@ -1,288 +0,0 @@ -#!/usr/bin/env python3 -""" -Unraid VM Manager for ThrillWiki - Main Orchestrator -Follows the Ubuntu autoinstall guide exactly: -1. Creates modified Ubuntu ISO with autoinstall configuration -2. Manages VM lifecycle on Unraid server -3. Handles ThrillWiki deployment automation -""" - -import os -import sys -import logging -from pathlib import Path - -# Import our modular components -from iso_builder import UbuntuISOBuilder -from vm_manager import UnraidVMManager - -# Configuration -UNRAID_HOST = os.environ.get("UNRAID_HOST", "localhost") -UNRAID_USER = os.environ.get("UNRAID_USER", "root") -VM_NAME = os.environ.get("VM_NAME", "thrillwiki-vm") -VM_MEMORY = int(os.environ.get("VM_MEMORY", 4096)) # MB -VM_VCPUS = int(os.environ.get("VM_VCPUS", 2)) -VM_DISK_SIZE = int(os.environ.get("VM_DISK_SIZE", 50)) # GB -SSH_PUBLIC_KEY = os.environ.get("SSH_PUBLIC_KEY", "") - -# Network Configuration -VM_IP = os.environ.get("VM_IP", "dhcp") -VM_GATEWAY = os.environ.get("VM_GATEWAY", "192.168.20.1") -VM_NETMASK = os.environ.get("VM_NETMASK", "255.255.255.0") -VM_NETWORK = os.environ.get("VM_NETWORK", "192.168.20.0/24") - -# GitHub Configuration -REPO_URL = os.environ.get("REPO_URL", "") -GITHUB_USERNAME = os.environ.get("GITHUB_USERNAME", "") -GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", "") - -# Ubuntu version preference -UBUNTU_VERSION = os.environ.get("UBUNTU_VERSION", "24.04") - -# Setup logging -os.makedirs("logs", exist_ok=True) -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[ - logging.FileHandler("logs/unraid-vm.log"), - logging.StreamHandler(), - ], -) -logger = logging.getLogger(__name__) - - -class ThrillWikiVMOrchestrator: - """Main orchestrator for ThrillWiki VM deployment.""" - - def __init__(self): - self.vm_manager = UnraidVMManager(VM_NAME, UNRAID_HOST, UNRAID_USER) - self.iso_builder = None - - def create_autoinstall_user_data(self) -> str: - """Create autoinstall user-data configuration.""" - # Read autoinstall template - template_path = Path(__file__).parent / "autoinstall-user-data.yaml" - if not template_path.exists(): - raise FileNotFoundError(f"Autoinstall template not found: {template_path}") - - with open(template_path, "r", encoding="utf-8") as f: - template = f.read() - - # Replace placeholders using string replacement (avoiding .format() due - # to curly braces in YAML) - user_data = template.replace( - "{SSH_PUBLIC_KEY}", - SSH_PUBLIC_KEY if SSH_PUBLIC_KEY else "# No SSH key provided", - ).replace("{GITHUB_REPO}", REPO_URL if REPO_URL else "") - - # Update network configuration based on VM_IP setting - if VM_IP.lower() == "dhcp": - # Keep DHCP configuration as-is - pass - else: - # Replace with static IP configuration - network_config = f"""dhcp4: false - addresses: - - {VM_IP}/24 - gateway4: {VM_GATEWAY} - nameservers: - addresses: - - 8.8.8.8 - - 8.8.4.4""" - user_data = user_data.replace("dhcp4: true", network_config) - - return user_data - - def build_autoinstall_iso(self) -> Path: - """Build Ubuntu autoinstall ISO following the guide.""" - logger.info("🔨 Building Ubuntu autoinstall ISO...") - - # Create ISO builder - self.iso_builder = UbuntuISOBuilder(VM_NAME) - - # Create user-data configuration - user_data = self.create_autoinstall_user_data() - - # Build autoinstall ISO - iso_output_path = Path(f"/tmp/{VM_NAME}-ubuntu-autoinstall.iso") - - success = self.iso_builder.build_autoinstall_iso( - user_data=user_data, - output_path=iso_output_path, - ubuntu_version=UBUNTU_VERSION, - ) - - if not success: - raise RuntimeError("Failed to build autoinstall ISO") - - logger.info(f"✅ Autoinstall ISO built successfully: {iso_output_path}") - return iso_output_path - - def deploy_vm(self) -> bool: - """Complete VM deployment process.""" - try: - logger.info("🚀 Starting ThrillWiki VM deployment...") - - # Step 1: Check SSH connectivity - logger.info("📡 Testing Unraid connectivity...") - if not self.vm_manager.authenticate(): - logger.error("❌ Cannot connect to Unraid server") - return False - - # Step 2: Build autoinstall ISO - logger.info("🔨 Building Ubuntu autoinstall ISO...") - iso_path = self.build_autoinstall_iso() - - # Step 3: Upload ISO to Unraid - logger.info("📤 Uploading autoinstall ISO to Unraid...") - self.vm_manager.upload_iso_to_unraid(iso_path) - - # Step 4: Create/update VM configuration - logger.info("⚙️ Creating VM configuration...") - success = self.vm_manager.create_vm( - vm_memory=VM_MEMORY, - vm_vcpus=VM_VCPUS, - vm_disk_size=VM_DISK_SIZE, - vm_ip=VM_IP, - ) - - if not success: - logger.error("❌ Failed to create VM configuration") - return False - - # Step 5: Start VM - logger.info("🟢 Starting VM...") - success = self.vm_manager.start_vm() - - if not success: - logger.error("❌ Failed to start VM") - return False - - logger.info("🎉 VM deployment completed successfully!") - logger.info("") - logger.info("📋 Next Steps:") - logger.info("1. VM is now booting with Ubuntu autoinstall") - logger.info("2. Installation will take 15-30 minutes") - logger.info("3. Use 'python main.py ip' to get VM IP when ready") - logger.info("4. SSH to VM and run /home/thrillwiki/deploy-thrillwiki.sh") - logger.info("") - - return True - - except Exception as e: - logger.error(f"❌ VM deployment failed: {e}") - return False - finally: - # Cleanup ISO builder temp files - if self.iso_builder: - self.iso_builder.cleanup() - - def get_vm_info(self) -> dict: - """Get VM information.""" - return { - "name": VM_NAME, - "status": self.vm_manager.vm_status(), - "ip": self.vm_manager.get_vm_ip(), - "memory": VM_MEMORY, - "vcpus": VM_VCPUS, - "disk_size": VM_DISK_SIZE, - } - - -def main(): - """Main entry point.""" - import argparse - - parser = argparse.ArgumentParser( - description="ThrillWiki VM Manager - Ubuntu Autoinstall on Unraid", - epilog=""" -Examples: - python main.py setup # Complete VM setup with autoinstall - python main.py start # Start existing VM - python main.py ip # Get VM IP address - python main.py status # Get VM status - python main.py delete # Remove VM completely - """, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - - parser.add_argument( - "action", - choices=[ - "setup", - "create", - "start", - "stop", - "status", - "ip", - "delete", - "info", - ], - help="Action to perform", - ) - - args = parser.parse_args() - - # Create orchestrator - orchestrator = ThrillWikiVMOrchestrator() - - if args.action == "setup": - logger.info("🚀 Setting up complete ThrillWiki VM environment...") - success = orchestrator.deploy_vm() - sys.exit(0 if success else 1) - - elif args.action == "create": - logger.info("⚙️ Creating VM configuration...") - success = orchestrator.vm_manager.create_vm( - VM_MEMORY, VM_VCPUS, VM_DISK_SIZE, VM_IP - ) - sys.exit(0 if success else 1) - - elif args.action == "start": - logger.info("🟢 Starting VM...") - success = orchestrator.vm_manager.start_vm() - sys.exit(0 if success else 1) - - elif args.action == "stop": - logger.info("🛑 Stopping VM...") - success = orchestrator.vm_manager.stop_vm() - sys.exit(0 if success else 1) - - elif args.action == "status": - status = orchestrator.vm_manager.vm_status() - print(f"VM Status: {status}") - sys.exit(0) - - elif args.action == "ip": - ip = orchestrator.vm_manager.get_vm_ip() - if ip: - print(f"VM IP: {ip}") - print(f"SSH: ssh thrillwiki@{ip}") - print( - f"Deploy: ssh thrillwiki@{ip} '/home/thrillwiki/deploy-thrillwiki.sh'" - ) - sys.exit(0) - else: - print("❌ Failed to get VM IP (VM may not be ready yet)") - sys.exit(1) - - elif args.action == "info": - info = orchestrator.get_vm_info() - print("🖥️ VM Information:") - print(f" Name: {info['name']}") - print(f" Status: {info['status']}") - print(f" IP: {info['ip'] or 'Not available'}") - print(f" Memory: {info['memory']} MB") - print(f" vCPUs: {info['vcpus']}") - print(f" Disk: {info['disk_size']} GB") - sys.exit(0) - - elif args.action == "delete": - logger.info("🗑️ Deleting VM and all files...") - success = orchestrator.vm_manager.delete_vm() - sys.exit(0 if success else 1) - - -if __name__ == "__main__": - main() diff --git a/scripts/unraid/main_template.py b/scripts/unraid/main_template.py deleted file mode 100644 index 105445b6..00000000 --- a/scripts/unraid/main_template.py +++ /dev/null @@ -1,456 +0,0 @@ -#!/usr/bin/env python3 -""" -Unraid VM Manager for ThrillWiki - Template-Based Main Orchestrator -Uses pre-built template VMs for fast deployment instead of autoinstall. -""" - -import os -import sys -import logging -from pathlib import Path - -# Import our modular components -from template_manager import TemplateVMManager -from vm_manager_template import UnraidTemplateVMManager - - -class ConfigLoader: - """Dynamic configuration loader that reads environment variables when needed.""" - - def __init__(self): - # Try to load ***REMOVED***.unraid if it exists to ensure we have the - # latest config - self._load_env_file() - - def _load_env_file(self): - """Load ***REMOVED***.unraid file if it exists.""" - # Find the project directory (two levels up from this script) - script_dir = Path(__file__).parent - project_dir = script_dir.parent.parent - env_file = project_dir / "***REMOVED***.unraid" - - if env_file.exists(): - try: - with open(env_file, "r") as f: - for line in f: - line = line.strip() - if line and not line.startswith("#") and "=" in line: - key, value = line.split("=", 1) - # Remove quotes if present - value = value.strip("\"'") - # Only set if not already in environment (env vars - # take precedence) - if key not in os.environ: - os.environ[key] = value - - logging.info(f"📝 Loaded configuration from {env_file}") - except Exception as e: - logging.warning(f"⚠️ Could not load ***REMOVED***.unraid: {e}") - - @property - def UNRAID_HOST(self): - return os.environ.get("UNRAID_HOST", "localhost") - - @property - def UNRAID_USER(self): - return os.environ.get("UNRAID_USER", "root") - - @property - def VM_NAME(self): - return os.environ.get("VM_NAME", "thrillwiki-vm") - - @property - def VM_MEMORY(self): - return int(os.environ.get("VM_MEMORY", 4096)) - - @property - def VM_VCPUS(self): - return int(os.environ.get("VM_VCPUS", 2)) - - @property - def VM_DISK_SIZE(self): - return int(os.environ.get("VM_DISK_SIZE", 50)) - - @property - def SSH_PUBLIC_KEY(self): - return os.environ.get("SSH_PUBLIC_KEY", "") - - @property - def VM_IP(self): - return os.environ.get("VM_IP", "dhcp") - - @property - def VM_GATEWAY(self): - return os.environ.get("VM_GATEWAY", "192.168.20.1") - - @property - def VM_NETMASK(self): - return os.environ.get("VM_NETMASK", "255.255.255.0") - - @property - def VM_NETWORK(self): - return os.environ.get("VM_NETWORK", "192.168.20.0/24") - - @property - def REPO_URL(self): - return os.environ.get("REPO_URL", "") - - @property - def GITHUB_USERNAME(self): - return os.environ.get("GITHUB_USERNAME", "") - - @property - def GITHUB_TOKEN(self): - return os.environ.get("GITHUB_TOKEN", "") - - -# Create a global configuration instance -config = ConfigLoader() - -# Setup logging with reduced buffering -os.makedirs("logs", exist_ok=True) - -# Configure console handler with line buffering -console_handler = logging.StreamHandler(sys.stdout) -console_handler.setLevel(logging.INFO) -console_handler.setFormatter( - logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") -) -# Force flush after each log message -console_handler.flush = lambda: sys.stdout.flush() - -# Configure file handler -file_handler = logging.FileHandler("logs/unraid-vm.log") -file_handler.setLevel(logging.INFO) -file_handler.setFormatter( - logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") -) - -# Set up basic config with both handlers -logging.basicConfig( - level=logging.INFO, - handlers=[file_handler, console_handler], -) - -# Ensure stdout is line buffered for real-time output -sys.stdout.reconfigure(line_buffering=True) -logger = logging.getLogger(__name__) - - -class ThrillWikiTemplateVMOrchestrator: - """Main orchestrator for template-based ThrillWiki VM deployment.""" - - def __init__(self): - # Log current configuration for debugging - logger.info( - f"🔧 Using configuration: UNRAID_HOST={ - config.UNRAID_HOST}, UNRAID_USER={ - config.UNRAID_USER}, VM_NAME={ - config.VM_NAME}" - ) - - self.template_manager = TemplateVMManager( - config.UNRAID_HOST, config.UNRAID_USER - ) - self.vm_manager = UnraidTemplateVMManager( - config.VM_NAME, config.UNRAID_HOST, config.UNRAID_USER - ) - - def check_template_ready(self) -> bool: - """Check if template VM is ready for use.""" - logger.info("🔍 Checking template VM availability...") - - if not self.template_manager.check_template_exists(): - logger.error("❌ Template VM disk not found!") - logger.error( - "Please ensure 'thrillwiki-template-ubuntu' VM exists and is properly configured" - ) - logger.error( - "Template should be located at: /mnt/user/domains/thrillwiki-template-ubuntu/vdisk1.qcow2" - ) - return False - - # Check template status - if not self.template_manager.update_template(): - logger.warning("⚠️ Template VM may be running - this could cause issues") - logger.warning( - "Ensure the template VM is stopped before creating new instances" - ) - - info = self.template_manager.get_template_info() - if info: - logger.info(f"📋 Template Info:") - logger.info(f" Virtual Size: {info['virtual_size']}") - logger.info(f" File Size: {info['file_size']}") - logger.info(f" Last Modified: {info['last_modified']}") - - return True - - def deploy_vm_from_template(self) -> bool: - """Complete template-based VM deployment process.""" - try: - logger.info("🚀 Starting ThrillWiki template-based VM deployment...") - - # Step 1: Check SSH connectivity - logger.info("📡 Testing Unraid connectivity...") - if not self.vm_manager.authenticate(): - logger.error("❌ Cannot connect to Unraid server") - return False - - # Step 2: Check template availability - logger.info("🔍 Verifying template VM...") - if not self.check_template_ready(): - logger.error("❌ Template VM not ready") - return False - - # Step 3: Create VM from template - logger.info("⚙️ Creating VM from template...") - success = self.vm_manager.create_vm_from_template( - vm_memory=config.VM_MEMORY, - vm_vcpus=config.VM_VCPUS, - vm_disk_size=config.VM_DISK_SIZE, - vm_ip=config.VM_IP, - ) - - if not success: - logger.error("❌ Failed to create VM from template") - return False - - # Step 4: Start VM - logger.info("🟢 Starting VM...") - success = self.vm_manager.start_vm() - - if not success: - logger.error("❌ Failed to start VM") - return False - - logger.info("🎉 Template-based VM deployment completed successfully!") - logger.info("") - logger.info("📋 Next Steps:") - logger.info("1. VM is now booting from template disk") - logger.info("2. Boot time should be much faster (2-5 minutes)") - logger.info("3. Use 'python main_template.py ip' to get VM IP when ready") - logger.info("4. SSH to VM and run deployment commands") - logger.info("") - - return True - - except Exception as e: - logger.error(f"❌ Template VM deployment failed: {e}") - return False - - def deploy_and_configure_thrillwiki(self) -> bool: - """Deploy VM from template and configure ThrillWiki.""" - try: - logger.info("🚀 Starting complete ThrillWiki deployment from template...") - - # Step 1: Deploy VM from template - if not self.deploy_vm_from_template(): - return False - - # Step 2: Wait for VM to be accessible and configure ThrillWiki - if config.REPO_URL: - logger.info("🔧 Configuring ThrillWiki on VM...") - success = self.vm_manager.customize_vm_for_thrillwiki( - config.REPO_URL, config.GITHUB_TOKEN - ) - - if success: - vm_ip = self.vm_manager.get_vm_ip() - logger.info("🎉 Complete ThrillWiki deployment successful!") - logger.info(f"🌐 ThrillWiki is available at: http://{vm_ip}:8000") - else: - logger.warning( - "⚠️ VM deployed but ThrillWiki configuration may have failed" - ) - logger.info( - "You can manually configure ThrillWiki by SSH'ing to the VM" - ) - else: - logger.info( - "📝 No repository URL provided - VM deployed but ThrillWiki not configured" - ) - logger.info( - "Set REPO_URL environment variable to auto-configure ThrillWiki" - ) - - return True - - except Exception as e: - logger.error(f"❌ Complete deployment failed: {e}") - return False - - def get_vm_info(self) -> dict: - """Get VM information.""" - return { - "name": config.VM_NAME, - "status": self.vm_manager.vm_status(), - "ip": self.vm_manager.get_vm_ip(), - "memory": config.VM_MEMORY, - "vcpus": config.VM_VCPUS, - "disk_size": config.VM_DISK_SIZE, - "deployment_type": "template-based", - } - - -def main(): - """Main entry point.""" - import argparse - - parser = argparse.ArgumentParser( - description="ThrillWiki Template-Based VM Manager - Fast VM deployment using templates", - epilog=""" -Examples: - python main_template.py setup # Deploy VM from template only - python main_template.py deploy # Deploy VM and configure ThrillWiki - python main_template.py start # Start existing VM - python main_template.py ip # Get VM IP address - python main_template.py status # Get VM status - python main_template.py delete # Remove VM completely - python main_template.py template # Manage template VM - """, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - - parser.add_argument( - "action", - choices=[ - "setup", - "deploy", - "create", - "start", - "stop", - "status", - "ip", - "delete", - "info", - "template", - ], - help="Action to perform", - ) - - parser.add_argument( - "template_action", - nargs="?", - choices=["info", "check", "update", "list"], - help="Template management action (used with 'template' action)", - ) - - args = parser.parse_args() - - # Create orchestrator - orchestrator = ThrillWikiTemplateVMOrchestrator() - - if args.action == "setup": - logger.info("🚀 Setting up VM from template...") - success = orchestrator.deploy_vm_from_template() - sys.exit(0 if success else 1) - - elif args.action == "deploy": - logger.info("🚀 Complete ThrillWiki deployment from template...") - success = orchestrator.deploy_and_configure_thrillwiki() - sys.exit(0 if success else 1) - - elif args.action == "create": - logger.info("⚙️ Creating VM from template...") - success = orchestrator.vm_manager.create_vm_from_template( - config.VM_MEMORY, - config.VM_VCPUS, - config.VM_DISK_SIZE, - config.VM_IP, - ) - sys.exit(0 if success else 1) - - elif args.action == "start": - logger.info("🟢 Starting VM...") - success = orchestrator.vm_manager.start_vm() - sys.exit(0 if success else 1) - - elif args.action == "stop": - logger.info("🛑 Stopping VM...") - success = orchestrator.vm_manager.stop_vm() - sys.exit(0 if success else 1) - - elif args.action == "status": - status = orchestrator.vm_manager.vm_status() - print(f"VM Status: {status}") - sys.exit(0) - - elif args.action == "ip": - ip = orchestrator.vm_manager.get_vm_ip() - if ip: - print(f"VM IP: {ip}") - print(f"SSH: ssh thrillwiki@{ip}") - print(f"ThrillWiki: http://{ip}:8000") - sys.exit(0) - else: - print("❌ Failed to get VM IP (VM may not be ready yet)") - sys.exit(1) - - elif args.action == "info": - info = orchestrator.get_vm_info() - print("🖥️ VM Information:") - print(f" Name: {info['name']}") - print(f" Status: {info['status']}") - print(f" IP: {info['ip'] or 'Not available'}") - print(f" Memory: {info['memory']} MB") - print(f" vCPUs: {info['vcpus']}") - print(f" Disk: {info['disk_size']} GB") - print(f" Type: {info['deployment_type']}") - sys.exit(0) - - elif args.action == "delete": - logger.info("🗑️ Deleting VM and all files...") - success = orchestrator.vm_manager.delete_vm() - sys.exit(0 if success else 1) - - elif args.action == "template": - template_action = args.template_action or "info" - - if template_action == "info": - logger.info("📋 Template VM Information") - info = orchestrator.template_manager.get_template_info() - if info: - print(f"Template Path: {info['template_path']}") - print(f"Virtual Size: {info['virtual_size']}") - print(f"File Size: {info['file_size']}") - print(f"Last Modified: {info['last_modified']}") - else: - print("❌ Failed to get template information") - sys.exit(1) - - elif template_action == "check": - if orchestrator.template_manager.check_template_exists(): - logger.info("✅ Template VM disk exists and is ready to use") - sys.exit(0) - else: - logger.error("❌ Template VM disk not found") - sys.exit(1) - - elif template_action == "update": - success = orchestrator.template_manager.update_template() - sys.exit(0 if success else 1) - - elif template_action == "list": - logger.info("📋 Template-based VM Instances") - instances = orchestrator.template_manager.list_template_instances() - if instances: - for instance in instances: - status_emoji = ( - "🟢" - if instance["status"] == "running" - else "🔴" if instance["status"] == "shut off" else "🟡" - ) - print( - f"{status_emoji} { - instance['name']} ({ - instance['status']})" - ) - else: - print("No template instances found") - - sys.exit(0) - - -if __name__ == "__main__": - main() diff --git a/scripts/unraid/setup-complete-automation.sh b/scripts/unraid/setup-complete-automation.sh deleted file mode 100755 index 34095eeb..00000000 --- a/scripts/unraid/setup-complete-automation.sh +++ /dev/null @@ -1,1109 +0,0 @@ -#!/bin/bash - -# ThrillWiki Complete Unraid Automation Setup -# This script automates the entire VM creation and deployment process on Unraid -# -# Usage: -# ./setup-complete-automation.sh # Standard setup -# ./setup-complete-automation.sh --reset # Delete VM and config, start completely fresh -# ./setup-complete-automation.sh --reset-vm # Delete VM only, keep configuration -# ./setup-complete-automation.sh --reset-config # Delete config only, keep VM - -# Function to show help -show_help() { - echo "ThrillWiki CI/CD Automation Setup" - echo "" - echo "Usage:" - echo " $0 Set up or update ThrillWiki automation" - echo " $0 -y Non-interactive mode, use saved configuration" - echo " $0 --reset Delete VM and config, start completely fresh" - echo " $0 --reset-vm Delete VM only, keep configuration" - echo " $0 --reset-config Delete config only, keep VM" - echo " $0 --help Show this help message" - echo "" - echo "Options:" - echo " -y, --yes Non-interactive mode - use saved configuration" - echo " and passwords without prompting. Requires existing" - echo " configuration file with saved settings." - echo "" - echo "Reset Options:" - echo " --reset Completely removes existing VM, disks, and config" - echo " before starting fresh installation" - echo " --reset-vm Removes only the VM and disks, preserves saved" - echo " configuration to avoid re-entering settings" - echo " --reset-config Removes only the saved configuration, preserves" - echo " VM and prompts for fresh configuration input" - echo " --help Display this help and exit" - echo "" - echo "Examples:" - echo " $0 # Normal setup/update" - echo " $0 -y # Non-interactive setup with saved config" - echo " $0 --reset # Complete fresh installation" - echo " $0 --reset-vm # Fresh VM with saved settings" - echo " $0 --reset-config # Re-configure existing VM" - exit 0 -} - -# Check for help flag -if [[ "$1" == "--help" || "$1" == "-h" ]]; then - show_help -fi - -# Parse command line flags -RESET_ALL=false -RESET_VM_ONLY=false -RESET_CONFIG_ONLY=false -NON_INTERACTIVE=false - -# Process all arguments -while [[ $# -gt 0 ]]; do - case $1 in - -y|--yes) - NON_INTERACTIVE=true - echo "🤖 NON-INTERACTIVE MODE: Using saved configuration only" - shift - ;; - --reset) - RESET_ALL=true - echo "🔄 COMPLETE RESET MODE: Will delete VM and configuration" - shift - ;; - --reset-vm) - RESET_VM_ONLY=true - echo "🔄 VM RESET MODE: Will delete VM only, keep configuration" - shift - ;; - --reset-config) - RESET_CONFIG_ONLY=true - echo "🔄 CONFIG RESET MODE: Will delete configuration only, keep VM" - shift - ;; - --help|-h) - show_help - ;; - *) - echo "Unknown option: $1" - show_help - ;; - esac -done - -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -log() { - echo -e "${BLUE}[AUTOMATION]${NC} $1" -} - -log_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" -} - -log_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" -} - -log_error() { - echo -e "${RED}[ERROR]${NC} $1" -} - -# Configuration -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)" -LOG_DIR="$PROJECT_DIR/logs" - -# Default values -DEFAULT_UNRAID_HOST="" -DEFAULT_VM_NAME="thrillwiki-vm" -DEFAULT_VM_MEMORY="4096" -DEFAULT_VM_VCPUS="2" -DEFAULT_VM_DISK_SIZE="50" -DEFAULT_WEBHOOK_PORT="9000" - -# Configuration file -CONFIG_FILE="$PROJECT_DIR/.thrillwiki-config" - -# Function to save configuration -save_config() { - log "Saving configuration to $CONFIG_FILE..." - cat > "$CONFIG_FILE" << EOF -# ThrillWiki Automation Configuration -# This file stores your settings to avoid re-entering them each time - -# Unraid Server Configuration -UNRAID_HOST="$UNRAID_HOST" -UNRAID_USER="$UNRAID_USER" -VM_NAME="$VM_NAME" -VM_MEMORY="$VM_MEMORY" -VM_VCPUS="$VM_VCPUS" -VM_DISK_SIZE="$VM_DISK_SIZE" - -# Network Configuration -VM_IP="$VM_IP" -VM_GATEWAY="$VM_GATEWAY" -VM_NETMASK="$VM_NETMASK" -VM_NETWORK="$VM_NETWORK" - -# GitHub Configuration -REPO_URL="$REPO_URL" -GITHUB_USERNAME="$GITHUB_USERNAME" -GITHUB_API_ENABLED="$GITHUB_API_ENABLED" -GITHUB_AUTH_METHOD="$GITHUB_AUTH_METHOD" - -# Webhook Configuration -WEBHOOK_PORT="$WEBHOOK_PORT" -WEBHOOK_ENABLED="$WEBHOOK_ENABLED" - -# SSH Configuration (path to key, not the key content) -SSH_KEY_PATH="$HOME/.ssh/thrillwiki_vm" -EOF - - log_success "Configuration saved to $CONFIG_FILE" -} - -# Function to load configuration -load_config() { - if [ -f "$CONFIG_FILE" ]; then - log "Loading existing configuration from $CONFIG_FILE..." - source "$CONFIG_FILE" - return 0 - else - return 1 - fi -} - -# Function for non-interactive configuration loading -load_non_interactive_config() { - log "=== Non-Interactive Configuration Loading ===" - - # Load saved configuration - if ! load_config; then - log_error "No saved configuration found. Cannot run in non-interactive mode." - log_error "Please run the script without -y flag first to create initial configuration." - exit 1 - fi - - log_success "Loaded saved configuration successfully" - - # Check for required environment variables for passwords - if [ -z "${UNRAID_PASSWORD:-}" ]; then - log_error "UNRAID_PASSWORD environment variable not set." - log_error "For non-interactive mode, set: export UNRAID_PASSWORD='your_password'" - exit 1 - fi - - # Handle GitHub authentication based on saved method - if [ -n "$GITHUB_USERNAME" ] && [ "$GITHUB_API_ENABLED" = "true" ]; then - if [ "$GITHUB_AUTH_METHOD" = "oauth" ]; then - # Check if OAuth token is still valid - if python3 "$SCRIPT_DIR/../github-auth.py" validate 2>/dev/null; then - GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token) - log "Using existing OAuth token" - else - log_error "OAuth token expired and cannot refresh in non-interactive mode" - log_error "Please run without -y flag to re-authenticate with GitHub" - exit 1 - fi - else - # Personal access token method - if [ -z "${GITHUB_TOKEN:-}" ]; then - log_error "GITHUB_TOKEN environment variable not set." - log_error "For non-interactive mode, set: export GITHUB_TOKEN='your_token'" - exit 1 - fi - fi - fi - - # Handle webhook secret - if [ "$WEBHOOK_ENABLED" = "true" ]; then - if [ -z "${WEBHOOK_SECRET:-}" ]; then - log_error "WEBHOOK_SECRET environment variable not set." - log_error "For non-interactive mode, set: export WEBHOOK_SECRET='your_secret'" - exit 1 - fi - fi - - log_success "All required credentials loaded from environment variables" - log "Configuration summary:" - echo " Unraid Host: $UNRAID_HOST" - echo " VM Name: $VM_NAME" - echo " VM IP: $VM_IP" - echo " Repository: $REPO_URL" - echo " GitHub Auth: $GITHUB_AUTH_METHOD" - echo " Webhook Enabled: $WEBHOOK_ENABLED" -} - -# Function to prompt for configuration -prompt_unraid_config() { - # In non-interactive mode, use saved config only - if [ "$NON_INTERACTIVE" = "true" ]; then - load_non_interactive_config - return 0 - fi - - log "=== Unraid VM Configuration ===" - echo - - # Try to load existing config first - if load_config; then - log_success "Loaded existing configuration" - echo "Current settings:" - echo " Unraid Host: $UNRAID_HOST" - echo " VM Name: $VM_NAME" - echo " VM IP: $VM_IP" - echo " Repository: $REPO_URL" - echo - read -p "Use existing configuration? (y/n): " use_existing - if [ "$use_existing" = "y" ] || [ "$use_existing" = "Y" ]; then - # Still need to get sensitive info that we don't save - read -s -p "Enter Unraid [PASSWORD-REMOVED] - echo - - # Handle GitHub authentication based on saved method - if [ -n "$GITHUB_USERNAME" ] && [ "$GITHUB_API_ENABLED" = "true" ]; then - if [ "$GITHUB_AUTH_METHOD" = "oauth" ]; then - # Check if OAuth token is still valid - if python3 "$SCRIPT_DIR/../github-auth.py" validate 2>/dev/null; then - GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token) - log "Using existing OAuth token" - else - log "OAuth token expired, re-authenticating..." - if python3 "$SCRIPT_DIR/../github-auth.py" login; then - GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token) - log_success "OAuth token refreshed" - else - log_error "OAuth re-authentication failed" - exit 1 - fi - fi - else - # Personal access token method - read -s -p "Enter GitHub personal access token: " GITHUB_TOKEN - echo - fi - fi - - if [ "$WEBHOOK_ENABLED" = "true" ]; then - read -s -p "Enter GitHub webhook secret: " WEBHOOK_SECRET - echo - fi - return 0 - fi - fi - - # Prompt for new configuration - read -p "Enter your Unraid server IP address: " UNRAID_HOST - save_config - - read -p "Enter Unraid username (default: root): " UNRAID_USER - UNRAID_USER=${UNRAID_USER:-root} - save_config - - read -s -p "Enter Unraid [PASSWORD-REMOVED] - echo - # Note: Password not saved for security - - read -p "Enter VM name (default: $DEFAULT_VM_NAME): " VM_NAME - VM_NAME=${VM_NAME:-$DEFAULT_VM_NAME} - save_config - - read -p "Enter VM memory in MB (default: $DEFAULT_VM_MEMORY): " VM_MEMORY - VM_MEMORY=${VM_MEMORY:-$DEFAULT_VM_MEMORY} - save_config - - read -p "Enter VM vCPUs (default: $DEFAULT_VM_VCPUS): " VM_VCPUS - VM_VCPUS=${VM_VCPUS:-$DEFAULT_VM_VCPUS} - save_config - - read -p "Enter VM disk size in GB (default: $DEFAULT_VM_DISK_SIZE): " VM_DISK_SIZE - VM_DISK_SIZE=${VM_DISK_SIZE:-$DEFAULT_VM_DISK_SIZE} - save_config - - read -p "Enter GitHub repository URL: " REPO_URL - save_config - - # GitHub API Configuration - echo - log "=== GitHub API Configuration ===" - echo "Choose GitHub authentication method:" - echo "1. OAuth Device Flow (recommended - secure, supports private repos)" - echo "2. Personal Access Token (manual token entry)" - echo "3. Skip (public repositories only)" - - while true; do - read -p "Select option (1-3): " auth_choice - case $auth_choice in - 1) - log "Using GitHub OAuth Device Flow..." - if python3 "$SCRIPT_DIR/../github-auth.py" validate 2>/dev/null; then - log "Existing GitHub authentication found and valid" - GITHUB_USERNAME=$(python3 "$SCRIPT_DIR/../github-auth.py" whoami 2>/dev/null | grep "You are authenticated as:" | cut -d: -f2 | xargs) - GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token) - else - log "Starting GitHub OAuth authentication..." - if python3 "$SCRIPT_DIR/../github-auth.py" login; then - GITHUB_USERNAME=$(python3 "$SCRIPT_DIR/../github-auth.py" whoami 2>/dev/null | grep "You are authenticated as:" | cut -d: -f2 | xargs) - GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token) - log_success "GitHub OAuth authentication completed" - else - log_error "GitHub authentication failed" - continue - fi - fi - GITHUB_API_ENABLED=true - GITHUB_AUTH_METHOD="oauth" - break - ;; - 2) - read -p "Enter GitHub username: " GITHUB_USERNAME - read -s -p "Enter GitHub personal access token: " GITHUB_TOKEN - echo - if [ -n "$GITHUB_USERNAME" ] && [ -n "$GITHUB_TOKEN" ]; then - GITHUB_API_ENABLED=true - GITHUB_AUTH_METHOD="token" - log "Personal access token configured" - else - log_error "Both username and token are required" - continue - fi - break - ;; - 3) - GITHUB_USERNAME="" - GITHUB_TOKEN="" - GITHUB_API_ENABLED=false - GITHUB_AUTH_METHOD="none" - log "Skipping GitHub API - using public access only" - break - ;; - *) - echo "Invalid option. Please select 1, 2, or 3." - ;; - esac - done - - # Save GitHub configuration - save_config - log "GitHub authentication configuration saved" - - # Webhook Configuration - echo - read -s -p "Enter GitHub webhook secret (optional, press Enter to skip): " WEBHOOK_SECRET - echo - - # If no webhook secret provided, disable webhook functionality - if [ -z "$WEBHOOK_SECRET" ]; then - log "No webhook secret provided - webhook functionality will be disabled" - WEBHOOK_ENABLED=false - else - WEBHOOK_ENABLED=true - fi - - read -p "Enter webhook port (default: $DEFAULT_WEBHOOK_PORT): " WEBHOOK_PORT - WEBHOOK_PORT=${WEBHOOK_PORT:-$DEFAULT_WEBHOOK_PORT} - - # Save webhook configuration - save_config - log "Webhook configuration saved" - - # Get VM network configuration preference - echo - log "=== Network Configuration ===" - echo "Choose network configuration method:" - echo "1. DHCP (automatic IP assignment - recommended)" - echo "2. Static IP (manual IP configuration)" - - while true; do - read -p "Select option (1-2): " network_choice - case $network_choice in - 1) - log "Using DHCP network configuration..." - VM_IP="dhcp" - VM_GATEWAY="192.168.20.1" - VM_NETMASK="255.255.255.0" - VM_NETWORK="192.168.20.0/24" - NETWORK_MODE="dhcp" - break - ;; - 2) - log "Using static IP network configuration..." - # Get VM IP address with proper range validation - while true; do - read -p "Enter VM IP address (192.168.20.10-192.168.20.100): " VM_IP - if [[ "$VM_IP" =~ ^192\.168\.20\.([1-9][0-9]|100)$ ]]; then - local ip_last_octet="${BASH_REMATCH[1]}" - if [ "$ip_last_octet" -ge 10 ] && [ "$ip_last_octet" -le 100 ]; then - break - fi - fi - echo "Invalid IP address. Please enter an IP in the range 192.168.20.10-192.168.20.100" - done - VM_GATEWAY="192.168.20.1" - VM_NETMASK="255.255.255.0" - VM_NETWORK="192.168.20.0/24" - NETWORK_MODE="static" - break - ;; - *) - echo "Invalid option. Please select 1 or 2." - ;; - esac - done - - # Save final network configuration - save_config - log "Network configuration saved - setup complete!" -} - -# Generate SSH keys for VM access -setup_ssh_keys() { - log "Setting up SSH keys for VM access..." - - local ssh_key_path="$HOME/.ssh/thrillwiki_vm" - local ssh_config_path="$HOME/.ssh/config" - - if [ ! -f "$ssh_key_path" ]; then - ssh-keygen -t rsa -b 4096 -f "$ssh_key_path" -N "" -C "thrillwiki-vm-access" - log_success "SSH key generated: $ssh_key_path" - else - log "SSH key already exists: $ssh_key_path" - fi - - # Add SSH config entry - if ! grep -q "Host $VM_NAME" "$ssh_config_path" 2>/dev/null; then - cat >> "$ssh_config_path" << EOF - -# ThrillWiki VM -Host $VM_NAME - HostName %h - User ubuntu - IdentityFile $ssh_key_path - StrictHostKeyChecking no - UserKnownHostsFile /dev/null -EOF - log_success "SSH config updated" - fi - - # Store public key for VM setup - SSH_PUBLIC_KEY=$(cat "$ssh_key_path.pub") - export SSH_PUBLIC_KEY -} - -# Setup Unraid host access -setup_unraid_access() { - log "Setting up Unraid server access..." - - local unraid_key_path="$HOME/.ssh/unraid_access" - - if [ ! -f "$unraid_key_path" ]; then - ssh-keygen -t rsa -b 4096 -f "$unraid_key_path" -N "" -C "unraid-access" - - log "Please add this public key to your Unraid server:" - echo "---" - cat "$unraid_key_path.pub" - echo "---" - echo - log "Add this to /root/.ssh/***REMOVED*** on your Unraid server" - read -p "Press Enter when you've added the key..." - fi - - # Test Unraid connection - log "Testing Unraid connection..." - if ssh -i "$unraid_key_path" -o ConnectTimeout=5 -o StrictHostKeyChecking=no "$UNRAID_USER@$UNRAID_HOST" "echo 'Connected to Unraid successfully'"; then - log_success "Unraid connection test passed" - else - log_error "Unraid connection test failed" - exit 1 - fi - - # Update SSH config for Unraid - if ! grep -q "Host unraid" "$HOME/.ssh/config" 2>/dev/null; then - cat >> "$HOME/.ssh/config" << EOF - -# Unraid Server -Host unraid - HostName $UNRAID_HOST - User $UNRAID_USER - IdentityFile $unraid_key_path - StrictHostKeyChecking no -EOF - fi -} - -# Create environment files -create_environment_files() { - log "Creating environment configuration files..." - - # Get SSH public key content safely - local ssh_key_path="$HOME/.ssh/thrillwiki_vm.pub" - local ssh_public_key="" - if [ -f "$ssh_key_path" ]; then - ssh_public_key=$(cat "$ssh_key_path") - fi - - # Unraid VM environment - cat > "$PROJECT_DIR/***REMOVED***.unraid" << EOF -# Unraid VM Configuration -UNRAID_HOST=$UNRAID_HOST -UNRAID_USER=$UNRAID_USER -UNRAID_PASSWORD=$UNRAID_PASSWORD -VM_NAME=$VM_NAME -VM_MEMORY=$VM_MEMORY -VM_VCPUS=$VM_VCPUS -VM_DISK_SIZE=$VM_DISK_SIZE -SSH_PUBLIC_KEY="$ssh_public_key" - -# Network Configuration -VM_IP=$VM_IP -VM_GATEWAY=$VM_GATEWAY -VM_NETMASK=$VM_NETMASK -VM_NETWORK=$VM_NETWORK - -# GitHub Configuration -REPO_URL=$REPO_URL -GITHUB_USERNAME=$GITHUB_USERNAME -GITHUB_TOKEN=$GITHUB_TOKEN -GITHUB_API_ENABLED=$GITHUB_API_ENABLED -EOF - - # Webhook environment (updated with VM info) - cat > "$PROJECT_DIR/***REMOVED***.webhook" << EOF -# ThrillWiki Webhook Configuration -WEBHOOK_PORT=$WEBHOOK_PORT -WEBHOOK_SECRET=$WEBHOOK_SECRET -WEBHOOK_ENABLED=$WEBHOOK_ENABLED -VM_HOST=$VM_IP -VM_PORT=22 -VM_USER=ubuntu -VM_KEY_PATH=$HOME/.ssh/thrillwiki_vm -VM_PROJECT_PATH=/home/ubuntu/thrillwiki -REPO_URL=$REPO_URL -DEPLOY_BRANCH=main - -# GitHub API Configuration -GITHUB_USERNAME=$GITHUB_USERNAME -GITHUB_TOKEN=$GITHUB_TOKEN -GITHUB_API_ENABLED=$GITHUB_API_ENABLED -EOF - - log_success "Environment files created" -} - -# Install required tools -install_dependencies() { - log "Installing required dependencies..." - - # Check for required tools - local missing_tools=() - local mac_tools=() - - command -v python3 >/dev/null 2>&1 || missing_tools+=("python3") - command -v ssh >/dev/null 2>&1 || missing_tools+=("openssh-client") - command -v scp >/dev/null 2>&1 || missing_tools+=("openssh-client") - - # Check for ISO creation tools and handle platform differences - if ! command -v genisoimage >/dev/null 2>&1 && ! command -v mkisofs >/dev/null 2>&1 && ! command -v hdiutil >/dev/null 2>&1; then - if [[ "$OSTYPE" == "linux-gnu"* ]]; then - missing_tools+=("genisoimage") - elif [[ "$OSTYPE" == "darwin"* ]]; then - # On macOS, hdiutil should be available, but add cdrtools as backup - if command -v brew >/dev/null 2>&1; then - mac_tools+=("cdrtools") - fi - fi - fi - - # Install Linux packages - if [ ${#missing_tools[@]} -gt 0 ]; then - log "Installing missing tools for Linux: ${missing_tools[*]}" - - if command -v apt-get >/dev/null 2>&1; then - sudo apt-get update - sudo apt-get install -y "${missing_tools[@]}" - elif command -v yum >/dev/null 2>&1; then - sudo yum install -y "${missing_tools[@]}" - elif command -v dnf >/dev/null 2>&1; then - sudo dnf install -y "${missing_tools[@]}" - else - log_error "Linux package manager not found. Please install: ${missing_tools[*]}" - exit 1 - fi - fi - - # Install macOS packages - if [ ${#mac_tools[@]} -gt 0 ]; then - log "Installing additional tools for macOS: ${mac_tools[*]}" - if command -v brew >/dev/null 2>&1; then - brew install "${mac_tools[@]}" - else - log "Homebrew not found. Skipping optional tool installation." - log "Note: hdiutil should be available on macOS for ISO creation" - fi - fi - - # Install Python dependencies - if [ -f "$PROJECT_DIR/pyproject.toml" ]; then - log "Installing Python dependencies with UV..." - if ! command -v uv >/dev/null 2>&1; then - curl -LsSf https://astral.sh/uv/install.sh | sh - source ~/.cargo/env - fi - uv sync - fi - - log_success "Dependencies installed" -} - -# Create VM using the VM manager -create_vm() { - log "Creating VM on Unraid server..." - - # Export all environment variables from the file - set -a # automatically export all variables - source "$PROJECT_DIR/***REMOVED***.unraid" - set +a # turn off automatic export - - # Run complete VM setup (builds ISO, creates VM, starts VM) - cd "$PROJECT_DIR" - python3 scripts/unraid/main.py setup - - if [ $? -eq 0 ]; then - log_success "VM setup completed successfully" - else - log_error "VM setup failed" - exit 1 - fi -} - -# Wait for VM to be ready and get IP -wait_for_vm() { - log "Waiting for VM to be ready..." - sleep 120 - # Export all environment variables from the file - set -a # automatically export all variables - source "$PROJECT_DIR/***REMOVED***.unraid" - set +a # turn off automatic export - - local max_attempts=60 - local attempt=1 - - while [ $attempt -le $max_attempts ]; do - VM_IP=$(python3 scripts/unraid/main.py ip 2>/dev/null | grep "VM IP:" | cut -d' ' -f3) - - if [ -n "$VM_IP" ]; then - log_success "VM is ready with IP: $VM_IP" - - # Update SSH config with actual IP - sed -i.bak "s/HostName %h/HostName $VM_IP/" "$HOME/.ssh/config" - - # Update webhook environment with IP - sed -i.bak "s/VM_HOST=$VM_NAME/VM_HOST=$VM_IP/" "$PROJECT_DIR/***REMOVED***.webhook" - - return 0 - fi - - log "Waiting for VM to get IP... (attempt $attempt/$max_attempts)" - sleep 30 - ((attempt++)) - done - - log_error "VM failed to get IP address" - exit 1 -} - -# Configure VM for ThrillWiki -configure_vm() { - log "Configuring VM for ThrillWiki deployment..." - - local vm_setup_script="/tmp/vm_thrillwiki_setup.sh" - - # Create VM setup script - cat > "$vm_setup_script" << 'EOF' -#!/bin/bash -set -e - -echo "Setting up VM for ThrillWiki..." - -# Update system -sudo apt update && sudo apt upgrade -y - -# Install required packages -sudo apt install -y git curl build-essential python3-pip lsof postgresql postgresql-contrib nginx - -# Install UV -curl -LsSf https://astral.sh/uv/install.sh | sh -source ~/.cargo/env - -# Configure PostgreSQL -sudo -u postgres psql << PSQL -CREATE DATABASE thrillwiki; -CREATE USER thrillwiki_user WITH ENCRYPTED PASSWORD 'thrillwiki_pass'; -GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO thrillwiki_user; -\q -PSQL - -# Clone repository -git clone REPO_URL_PLACEHOLDER thrillwiki -cd thrillwiki - -# Install dependencies -~/.cargo/bin/uv sync - -# Create directories -mkdir -p logs backups - -# Make scripts executable -chmod +x scripts/*.sh - -# Run initial setup -~/.cargo/bin/uv run manage.py migrate -~/.cargo/bin/uv run manage.py collectstatic --noinput - -# Install systemd services -sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/ -sudo sed -i 's|/home/ubuntu|/home/ubuntu|g' /etc/systemd/system/thrillwiki.service -sudo systemctl daemon-reload -sudo systemctl enable thrillwiki.service - -echo "VM setup completed!" -EOF - - # Replace placeholder with actual repo URL - sed -i "s|REPO_URL_PLACEHOLDER|$REPO_URL|g" "$vm_setup_script" - - # Copy and execute setup script on VM - scp "$vm_setup_script" "$VM_NAME:/tmp/" - ssh "$VM_NAME" "bash /tmp/vm_thrillwiki_setup.sh" - - # Cleanup - rm "$vm_setup_script" - - log_success "VM configured for ThrillWiki" -} - -# Start services -start_services() { - log "Starting ThrillWiki services..." - - # Start VM service - ssh "$VM_NAME" "sudo systemctl start thrillwiki" - - # Verify service is running - if ssh "$VM_NAME" "systemctl is-active --quiet thrillwiki"; then - log_success "ThrillWiki service started successfully" - else - log_error "Failed to start ThrillWiki service" - exit 1 - fi - - # Get service status - log "Service status:" - ssh "$VM_NAME" "systemctl status thrillwiki --no-pager -l" -} - -# Setup webhook listener -setup_webhook_listener() { - log "Setting up webhook listener..." - - # Create webhook start script - cat > "$PROJECT_DIR/start-webhook.sh" << 'EOF' -#!/bin/bash -cd "$(dirname "$0")" -source ***REMOVED***.webhook -python3 scripts/webhook-listener.py -EOF - - chmod +x "$PROJECT_DIR/start-webhook.sh" - - log_success "Webhook listener configured" - log "You can start the webhook listener with: ./start-webhook.sh" -} - -# Perform end-to-end test -test_deployment() { - log "Performing end-to-end deployment test..." - - # Test VM connectivity - if ssh "$VM_NAME" "echo 'VM connectivity test passed'"; then - log_success "VM connectivity test passed" - else - log_error "VM connectivity test failed" - return 1 - fi - - # Test ThrillWiki service - if ssh "$VM_NAME" "curl -f http://localhost:8000 >/dev/null 2>&1"; then - log_success "ThrillWiki service test passed" - else - log_warning "ThrillWiki service test failed - checking logs..." - ssh "$VM_NAME" "journalctl -u thrillwiki --no-pager -l | tail -20" - fi - - # Test deployment script - log "Testing deployment script..." - ssh "$VM_NAME" "cd thrillwiki && ./scripts/vm-deploy.sh status" - - log_success "End-to-end test completed" -} - -# Generate final instructions -generate_instructions() { - log "Generating final setup instructions..." - - cat > "$PROJECT_DIR/UNRAID_SETUP_COMPLETE.md" << EOF -# ThrillWiki Unraid Automation - Setup Complete! 🎉 - -Your ThrillWiki CI/CD system has been fully automated and deployed! - -## VM Information -- **VM Name**: $VM_NAME -- **VM IP**: $VM_IP -- **SSH Access**: \`ssh $VM_NAME\` - -## Services Status -- **ThrillWiki Service**: Running on VM -- **Database**: PostgreSQL configured -- **Web Server**: Available at http://$VM_IP:8000 - -## Next Steps - -### 1. Start Webhook Listener -\`\`\`bash -./start-webhook.sh -\`\`\` - -### 2. Configure GitHub Webhook -- Go to your repository: $REPO_URL -- Settings → Webhooks → Add webhook -- **Payload URL**: http://YOUR_PUBLIC_IP:$WEBHOOK_PORT/webhook -- **Content type**: application/json -- **Secret**: (your webhook secret) -- **Events**: Just the push event - -### 3. Test the System -\`\`\`bash -# Test VM connection -ssh $VM_NAME - -# Test service status -ssh $VM_NAME "systemctl status thrillwiki" - -# Test manual deployment -ssh $VM_NAME "cd thrillwiki && ./scripts/vm-deploy.sh" - -# Make a test commit to trigger automatic deployment -git add . -git commit -m "Test automated deployment" -git push origin main -\`\`\` - -## Management Commands - -### VM Management -\`\`\`bash -# Check VM status -python3 scripts/unraid/vm-manager.py status - -# Start/stop VM -python3 scripts/unraid/vm-manager.py start -python3 scripts/unraid/vm-manager.py stop - -# Get VM IP -python3 scripts/unraid/vm-manager.py ip -\`\`\` - -### Service Management on VM -\`\`\`bash -# Check service status -ssh $VM_NAME "./scripts/vm-deploy.sh status" - -# Restart service -ssh $VM_NAME "./scripts/vm-deploy.sh restart" - -# View logs -ssh $VM_NAME "journalctl -u thrillwiki -f" -\`\`\` - -## Troubleshooting - -### Common Issues -1. **VM not accessible**: Check VM is running and has IP -2. **Service not starting**: Check logs with \`journalctl -u thrillwiki\` -3. **Webhook not working**: Verify port $WEBHOOK_PORT is open - -### Support Files -- Configuration: \`***REMOVED***.unraid\`, \`***REMOVED***.webhook\` -- Logs: \`logs/\` directory -- Documentation: \`docs/VM_DEPLOYMENT_SETUP.md\` - -**Your automated CI/CD system is now ready!** 🚀 - -Every push to the main branch will automatically deploy to your VM. -EOF - - log_success "Setup instructions saved to UNRAID_SETUP_COMPLETE.md" -} - -# Main automation function -main() { - log "🚀 Starting ThrillWiki Complete Unraid Automation" - echo "[AWS-SECRET-REMOVED]==========" - echo - - # Parse command line arguments - while [[ $# -gt 0 ]]; do - case $1 in - --reset) - RESET_ALL=true - shift - ;; - --reset-vm) - RESET_VM_ONLY=true - shift - ;; - --reset-config) - RESET_CONFIG_ONLY=true - shift - ;; - --help|-h) - show_help - exit 0 - ;; - *) - echo "Unknown option: $1" - show_help - exit 1 - ;; - esac - done - - # Create logs directory - mkdir -p "$LOG_DIR" - - # Handle reset modes - if [[ "$RESET_ALL" == "true" ]]; then - log "🔄 Complete reset mode - deleting VM and configuration" - echo - - # Load configuration first to get connection details for VM deletion - if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" - log_success "Loaded existing configuration for VM deletion" - else - log_warning "No configuration file found, will skip VM deletion" - fi - - # Delete existing VM if config exists - if [[ -f "$CONFIG_FILE" ]]; then - log "🗑️ Deleting existing VM..." - # Export environment variables for VM manager - set -a - source "$PROJECT_DIR/***REMOVED***.unraid" 2>/dev/null || true - set +a - - if python3 "$SCRIPT_DIR/vm-manager.py" delete; then - log_success "VM deleted successfully" - else - log "⚠️ VM deletion failed or VM didn't exist" - fi - fi - - # Remove configuration files - if [[ -f "$CONFIG_FILE" ]]; then - rm "$CONFIG_FILE" - log_success "Configuration file removed" - fi - - # Remove environment files - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" - log_success "Environment files removed" - - log_success "Complete reset finished - continuing with fresh setup" - echo - - elif [[ "$RESET_VM_ONLY" == "true" ]]; then - log "🔄 VM-only reset mode - deleting VM, preserving configuration" - echo - - # Load configuration to get connection details - if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" - log_success "Loaded existing configuration" - else - log_error "No configuration file found. Cannot reset VM without connection details." - echo " Run the script without reset flags first to create initial configuration." - exit 1 - fi - - # Delete existing VM - log "🗑️ Deleting existing VM..." - # Export environment variables for VM manager - set -a - source "$PROJECT_DIR/***REMOVED***.unraid" 2>/dev/null || true - set +a - - if python3 "$SCRIPT_DIR/vm-manager.py" delete; then - log_success "VM deleted successfully" - else - log "⚠️ VM deletion failed or VM didn't exist" - fi - - # Remove only environment files, keep main config - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" - log_success "Environment files removed, configuration preserved" - - log_success "VM reset complete - will recreate VM with saved configuration" - echo - - elif [[ "$RESET_CONFIG_ONLY" == "true" ]]; then - log "🔄 Config-only reset mode - deleting configuration, preserving VM" - echo - - # Remove configuration files - if [[ -f "$CONFIG_FILE" ]]; then - rm "$CONFIG_FILE" - log_success "Configuration file removed" - fi - - # Remove environment files - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" - log_success "Environment files removed" - - log_success "Configuration reset complete - will prompt for fresh configuration" - echo - fi - - # Collect configuration - prompt_unraid_config - - # Setup steps - setup_ssh_keys - setup_unraid_access - create_environment_files - install_dependencies - create_vm - wait_for_vm - configure_vm - start_services - setup_webhook_listener - test_deployment - generate_instructions - - echo - log_success "🎉 Complete automation setup finished!" - echo - log "Your ThrillWiki VM is running at: http://$VM_IP:8000" - log "Start the webhook listener: ./start-webhook.sh" - log "See UNRAID_SETUP_COMPLETE.md for detailed instructions" - echo - log "The system will now automatically deploy when you push to GitHub!" -} - -# Run main function and log output -main "$@" 2>&1 | tee "$LOG_DIR/unraid-automation.log" \ No newline at end of file diff --git a/scripts/unraid/setup-ssh-key.sh b/scripts/unraid/setup-ssh-key.sh deleted file mode 100755 index 6534caf4..00000000 --- a/scripts/unraid/setup-ssh-key.sh +++ /dev/null @@ -1,75 +0,0 @@ -#!/bin/bash - -# ThrillWiki Template VM SSH Key Setup Helper -# This script generates the SSH key needed for template VM access - -set -e - -# Colors for output -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -echo -e "${BLUE}ThrillWiki Template VM SSH Key Setup${NC}" -echo "[AWS-SECRET-REMOVED]" -echo - -SSH_KEY_PATH="$HOME/.ssh/thrillwiki_vm" - -# Generate SSH key if it doesn't exist -if [ ! -f "$SSH_KEY_PATH" ]; then - echo -e "${YELLOW}Generating new SSH key for ThrillWiki template VM...${NC}" - ssh-keygen -t rsa -b 4096 -f "$SSH_KEY_PATH" -N "" -C "thrillwiki-template-vm-access" - echo -e "${GREEN}✅ SSH key generated: $SSH_KEY_PATH${NC}" - echo -else - echo -e "${GREEN}✅ SSH key already exists: $SSH_KEY_PATH${NC}" - echo -fi - -# Display the public key -echo -e "${YELLOW}📋 Your SSH Public Key:${NC}" -echo "Copy this ENTIRE line and add it to your template VM:" -echo -echo -e "${GREEN}$(cat "$SSH_KEY_PATH.pub")${NC}" -echo - -# Instructions -echo -e "${BLUE}📝 Template VM Setup Instructions:${NC}" -echo "1. SSH into your template VM (thrillwiki-template-ubuntu)" -echo "2. Switch to the thrillwiki user:" -echo " sudo su - thrillwiki" -echo "3. Create .ssh directory and set permissions:" -echo " mkdir -p ~/.ssh && chmod 700 ~/.ssh" -echo "4. Add the public key above to ***REMOVED***:" -echo " echo 'YOUR_PUBLIC_KEY_HERE' >> ~/.ssh/***REMOVED***" -echo " chmod 600 ~/.ssh/***REMOVED***" -echo "5. Test SSH access:" -echo " ssh -i ~/.ssh/thrillwiki_vm thrillwiki@YOUR_TEMPLATE_VM_IP" -echo - -# SSH config helper -SSH_CONFIG="$HOME/.ssh/config" -echo -e "${BLUE}🔧 SSH Config Setup:${NC}" -if ! grep -q "thrillwiki-vm" "$SSH_CONFIG" 2>/dev/null; then - echo "Adding SSH config entry..." - cat >> "$SSH_CONFIG" << EOF - -# ThrillWiki Template VM -Host thrillwiki-vm - HostName %h - User thrillwiki - IdentityFile $SSH_KEY_PATH - StrictHostKeyChecking no - UserKnownHostsFile /dev/null -EOF - echo -e "${GREEN}✅ SSH config updated${NC}" -else - echo -e "${GREEN}✅ SSH config already contains thrillwiki-vm entry${NC}" -fi - -echo -echo -e "${GREEN}🎉 SSH key setup complete!${NC}" -echo "Next: Set up your template VM using TEMPLATE_VM_SETUP.md" -echo "Then run: ./setup-template-automation.sh" diff --git a/scripts/unraid/setup-template-automation.sh b/scripts/unraid/setup-template-automation.sh deleted file mode 100755 index df776b7e..00000000 --- a/scripts/unraid/setup-template-automation.sh +++ /dev/null @@ -1,2262 +0,0 @@ -#!/bin/bash - -# ThrillWiki Template-Based Complete Unraid Automation Setup -# This script automates the entire template-based VM creation and deployment process on Unraid -# -# Usage: -# ./setup-template-automation.sh # Standard template-based setup -# ./setup-template-automation.sh --reset # Delete VM and config, start completely fresh -# ./setup-template-automation.sh --reset-vm # Delete VM only, keep configuration -# ./setup-template-automation.sh --reset-config # Delete config only, keep VM - -# Function to show help -show_help() { - echo "ThrillWiki Template-Based CI/CD Automation Setup" - echo "" - echo "This script sets up FAST template-based VM deployment using pre-configured Ubuntu templates." - echo "Template VMs deploy in 2-5 minutes instead of 20-30 minutes with autoinstall." - echo "" - echo "Usage:" - echo " $0 Set up or update ThrillWiki template automation" - echo " $0 -y Non-interactive mode, use saved configuration" - echo " $0 --reset Delete VM and config, start completely fresh" - echo " $0 --reset-vm Delete VM only, keep configuration" - echo " $0 --reset-config Delete config only, keep VM" - echo " $0 --help Show this help message" - echo "" - echo "Template Benefits:" - echo " ⚡ Speed: 2-5 min deployment vs 20-30 min with autoinstall" - echo " 🔒 Reliability: Pre-tested template eliminates installation failures" - echo " 💾 Efficiency: Copy-on-write disk format saves space" - echo "" - echo "Options:" - echo " -y, --yes Non-interactive mode - use saved configuration" - echo " and passwords without prompting. Requires existing" - echo " configuration file with saved settings." - echo "" - echo "Reset Options:" - echo " --reset Completely removes existing VM, disks, and config" - echo " before starting fresh template-based installation" - echo " --reset-vm Removes only the VM and disks, preserves saved" - echo " configuration to avoid re-entering settings" - echo " --reset-config Removes only the saved configuration, preserves" - echo " VM and prompts for fresh configuration input" - echo " --help Display this help and exit" - echo "" - echo "Examples:" - echo " $0 # Normal template-based setup/update" - echo " $0 -y # Non-interactive setup with saved config" - echo " $0 --reset # Complete fresh template installation" - echo " $0 --reset-vm # Fresh VM with saved settings" - echo " $0 --reset-config # Re-configure existing VM" - exit 0 -} - -# Check for help flag -if [[ "$1" == "--help" || "$1" == "-h" ]]; then - show_help -fi - -# Parse command line flags -RESET_ALL=false -RESET_VM_ONLY=false -RESET_CONFIG_ONLY=false -NON_INTERACTIVE=false - -# Process all arguments -while [[ $# -gt 0 ]]; do - case $1 in - -y|--yes) - NON_INTERACTIVE=true - echo "🤖 NON-INTERACTIVE MODE: Using saved configuration only" - shift - ;; - --reset) - RESET_ALL=true - echo "🔄 COMPLETE RESET MODE: Will delete VM and configuration" - shift - ;; - --reset-vm) - RESET_VM_ONLY=true - echo "🔄 VM RESET MODE: Will delete VM only, keep configuration" - shift - ;; - --reset-config) - RESET_CONFIG_ONLY=true - echo "🔄 CONFIG RESET MODE: Will delete configuration only, keep VM" - shift - ;; - --help|-h) - show_help - ;; - *) - echo "Unknown option: $1" - show_help - ;; - esac -done - -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -CYAN='\033[0;36m' -NC='\033[0m' # No Color - -log() { - echo -e "${BLUE}[TEMPLATE-AUTOMATION]${NC} $1" -} - -log_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" -} - -log_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" -} - -log_error() { - echo -e "${RED}[ERROR]${NC} $1" -} - -log_template() { - echo -e "${CYAN}[TEMPLATE]${NC} $1" -} - -# Configuration -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)" -LOG_DIR="$PROJECT_DIR/logs" - -# Default values -DEFAULT_UNRAID_HOST="" -DEFAULT_VM_NAME="thrillwiki-vm" -DEFAULT_VM_MEMORY="4096" -DEFAULT_VM_VCPUS="2" -DEFAULT_VM_DISK_SIZE="50" -DEFAULT_WEBHOOK_PORT="9000" -TEMPLATE_VM_NAME="thrillwiki-template-ubuntu" - -# Configuration files -CONFIG_FILE="$PROJECT_DIR/.thrillwiki-template-config" -TOKEN_FILE="$PROJECT_DIR/.thrillwiki-github-token" - -# Function to save configuration -save_config() { - log "Saving template configuration to $CONFIG_FILE..." - cat > "$CONFIG_FILE" << EOF -# ThrillWiki Template-Based Automation Configuration -# This file stores your settings to avoid re-entering them each time - -# Unraid Server Configuration -UNRAID_HOST="$UNRAID_HOST" -UNRAID_USER="$UNRAID_USER" -VM_NAME="$VM_NAME" -VM_MEMORY="$VM_MEMORY" -VM_VCPUS="$VM_VCPUS" -VM_DISK_SIZE="$VM_DISK_SIZE" - -# Template Configuration -TEMPLATE_VM_NAME="$TEMPLATE_VM_NAME" -DEPLOYMENT_TYPE="template-based" - -# Network Configuration -VM_IP="$VM_IP" -VM_GATEWAY="$VM_GATEWAY" -VM_NETMASK="$VM_NETMASK" -VM_NETWORK="$VM_NETWORK" - -# GitHub Configuration -REPO_URL="$REPO_URL" -GITHUB_USERNAME="$GITHUB_USERNAME" -GITHUB_API_ENABLED="$GITHUB_API_ENABLED" -GITHUB_AUTH_METHOD="$GITHUB_AUTH_METHOD" - -# Webhook Configuration -WEBHOOK_PORT="$WEBHOOK_PORT" -WEBHOOK_ENABLED="$WEBHOOK_ENABLED" - -# SSH Configuration (path to key, not the key content) -SSH_KEY_PATH="$HOME/.ssh/thrillwiki_vm" -EOF - - log_success "Template configuration saved to $CONFIG_FILE" -} - -# Function to save GitHub token securely - OVERWRITE THE OLD ONE COMPLETELY -save_github_token() { - if [ -n "$GITHUB_TOKEN" ]; then - log "🔒 OVERWRITING GitHub token (new token will REPLACE old one)..." - - # Force remove any existing token file first - rm -f "$TOKEN_FILE" 2>/dev/null || true - - # Write new token - this COMPLETELY OVERWRITES any old token - echo "$GITHUB_TOKEN" > "$TOKEN_FILE" - chmod 600 "$TOKEN_FILE" # Restrict to owner read/write only - - log_success "✅ NEW GitHub token saved securely (OLD TOKEN COMPLETELY REPLACED)" - log "Token file: $TOKEN_FILE" - else - log_error "No GITHUB_TOKEN to save!" - fi -} - -# Function to load GitHub token -load_github_token() { - if [ -f "$TOKEN_FILE" ]; then - GITHUB_TOKEN=$(cat "$TOKEN_FILE") - if [ -n "$GITHUB_TOKEN" ]; then - log "🔓 Loaded saved GitHub token for reuse" - return 0 - fi - fi - return 1 -} - -# Function to load configuration -load_config() { - if [ -f "$CONFIG_FILE" ]; then - log "Loading existing template configuration from $CONFIG_FILE..." - source "$CONFIG_FILE" - return 0 - else - return 1 - fi -} - -# Function for non-interactive configuration loading -load_non_interactive_config() { - log "=== Non-Interactive Template Configuration Loading ===" - - # Load saved configuration - if ! load_config; then - log_error "No saved template configuration found. Cannot run in non-interactive mode." - log_error "Please run the script without -y flag first to create initial configuration." - exit 1 - fi - - log_success "Loaded saved template configuration successfully" - - # Check for required environment variables for passwords - if [ -z "${UNRAID_PASSWORD:-}" ]; then - log_error "UNRAID_PASSWORD environment variable not set." - log_error "For non-interactive mode, set: export UNRAID_PASSWORD='your_password'" - exit 1 - fi - - # Handle GitHub authentication based on saved method - if [ -n "$GITHUB_USERNAME" ] && [ "$GITHUB_API_ENABLED" = "true" ]; then - # Personal access token method - try authentication script first - log "Attempting to get PAT token from authentication script..." - if GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token 2>/dev/null) && [ -n "$GITHUB_TOKEN" ]; then - log_success "Token obtained from authentication script" - elif [ -n "${GITHUB_TOKEN:-}" ]; then - log "Using token from environment variable" - else - log_error "No GitHub PAT token available. Either:" - log_error "1. Run setup interactively to configure token" - log_error "2. Set GITHUB_TOKEN environment variable: export GITHUB_TOKEN='your_token'" - exit 1 - fi - fi - - # Handle webhook secret - if [ "$WEBHOOK_ENABLED" = "true" ]; then - if [ -z "${WEBHOOK_SECRET:-}" ]; then - log_error "WEBHOOK_SECRET environment variable not set." - log_error "For non-interactive mode, set: export WEBHOOK_SECRET='your_secret'" - exit 1 - fi - fi - - log_success "All required credentials loaded from environment variables" - log "Template configuration summary:" - echo " Unraid Host: $UNRAID_HOST" - echo " VM Name: $VM_NAME" - echo " Template VM: $TEMPLATE_VM_NAME" - echo " VM IP: $VM_IP" - echo " Repository: $REPO_URL" - echo " GitHub Auth: $GITHUB_AUTH_METHOD" - echo " Webhook Enabled: $WEBHOOK_ENABLED" - echo " Deployment Type: template-based ⚡" -} -# Function to stop and clean up existing VM before reset -stop_existing_vm_for_reset() { - local vm_name="$1" - local unraid_host="$2" - local unraid_user="$3" - - if [ -z "$vm_name" ] || [ -z "$unraid_host" ] || [ -z "$unraid_user" ]; then - log_warning "Missing VM connection details for VM shutdown" - log "VM Name: ${vm_name:-'not set'}" - log "Unraid Host: ${unraid_host:-'not set'}" - log "Unraid User: ${unraid_user:-'not set'}" - return 0 - fi - - log "🔍 Checking if VM '$vm_name' exists and needs to be stopped..." - - # Test connection first - if ! ssh -o ConnectTimeout=10 "$unraid_user@$unraid_host" "echo 'Connected'" > /dev/null 2>&1; then - log_warning "Cannot connect to Unraid server at $unraid_host - skipping VM shutdown" - return 0 - fi - - # Check VM status - local vm_status=$(ssh "$unraid_user@$unraid_host" "virsh domstate $vm_name 2>/dev/null || echo 'not defined'") - - if [ "$vm_status" = "not defined" ]; then - log "VM '$vm_name' does not exist - no need to stop" - return 0 - elif [ "$vm_status" = "shut off" ]; then - log "VM '$vm_name' is already stopped - good for reset" - return 0 - elif [ "$vm_status" = "running" ]; then - log_warning "⚠️ VM '$vm_name' is currently RUNNING!" - log_warning "VM must be stopped before reset to avoid conflicts." - echo - - if [ "$NON_INTERACTIVE" = "true" ]; then - log "Non-interactive mode: Automatically stopping VM..." - stop_choice="y" - else - echo "Options:" - echo " 1. Stop the VM gracefully before reset (recommended)" - echo " 2. Force stop the VM before reset" - echo " 3. Skip VM shutdown (may cause issues)" - echo " 4. Cancel reset" - echo - read -p "What would you like to do? (1-4): " stop_choice - fi - - case $stop_choice in - 1|y|Y) - log "Stopping VM '$vm_name' gracefully before reset..." - - # Try graceful shutdown first - log "Attempting graceful shutdown..." - if ssh "$unraid_user@$unraid_host" "virsh shutdown $vm_name"; then - log "Shutdown command sent, waiting for VM to stop..." - - # Wait up to 60 seconds for graceful shutdown - local wait_count=0 - local max_wait=12 # 60 seconds (12 * 5 seconds) - - while [ $wait_count -lt $max_wait ]; do - sleep 5 - local current_status=$(ssh "$unraid_user@$unraid_host" "virsh domstate $vm_name 2>/dev/null || echo 'not defined'") - - if [ "$current_status" != "running" ]; then - log_success "✅ VM '$vm_name' stopped gracefully (status: $current_status)" - return 0 - fi - - ((wait_count++)) - log "Waiting for graceful shutdown... ($((wait_count * 5))s)" - done - - # If graceful shutdown didn't work, ask about force stop - log_warning "Graceful shutdown took too long. VM is still running." - - if [ "$NON_INTERACTIVE" = "true" ]; then - log "Non-interactive mode: Force stopping VM..." - force_choice="y" - else - echo - read -p "Force stop the VM? (y/n): " force_choice - fi - - if [ "$force_choice" = "y" ] || [ "$force_choice" = "Y" ]; then - log "Force stopping VM '$vm_name'..." - if ssh "$unraid_user@$unraid_host" "virsh destroy $vm_name"; then - log_success "✅ VM '$vm_name' force stopped" - return 0 - else - log_error "❌ Failed to force stop VM" - return 1 - fi - else - log_error "VM is still running. Cannot proceed safely with reset." - return 1 - fi - else - log_error "❌ Failed to send shutdown command to VM" - return 1 - fi - ;; - 2) - log "Force stopping VM '$vm_name' before reset..." - if ssh "$unraid_user@$unraid_host" "virsh destroy $vm_name"; then - log_success "✅ VM '$vm_name' force stopped" - return 0 - else - log_error "❌ Failed to force stop VM" - return 1 - fi - ;; - 3) - log_warning "⚠️ Continuing with running VM (NOT RECOMMENDED)" - log_warning "This may cause conflicts during VM recreation!" - return 0 - ;; - 4|n|N|"") - log "VM reset cancelled by user" - exit 0 - ;; - *) - log_error "Invalid choice. Please select 1, 2, 3, or 4." - return 1 - ;; - esac - else - log "VM '$vm_name' status: $vm_status - continuing with reset" - return 0 - fi -} - -# Function to gracefully stop template VM if running -stop_template_vm_if_running() { - local template_status=$(ssh "$UNRAID_USER@$UNRAID_HOST" "virsh domstate $TEMPLATE_VM_NAME 2>/dev/null || echo 'not defined'") - - if [ "$template_status" = "running" ]; then - log_warning "⚠️ Template VM '$TEMPLATE_VM_NAME' is currently RUNNING!" - log_warning "Template VMs must be stopped to create new instances safely." - echo - - if [ "$NON_INTERACTIVE" = "true" ]; then - log "Non-interactive mode: Automatically stopping template VM..." - stop_choice="y" - else - echo "Options:" - echo " 1. Stop the template VM gracefully (recommended)" - echo " 2. Continue anyway (may cause issues)" - echo " 3. Cancel setup" - echo - read -p "What would you like to do? (1/2/3): " stop_choice - fi - - case $stop_choice in - 1|y|Y) - log "Stopping template VM gracefully..." - - # Try graceful shutdown first - log "Attempting graceful shutdown..." - if ssh "$UNRAID_USER@$UNRAID_HOST" "virsh shutdown $TEMPLATE_VM_NAME"; then - log "Shutdown command sent, waiting for VM to stop..." - - # Wait up to 60 seconds for graceful shutdown - local wait_count=0 - local max_wait=12 # 60 seconds (12 * 5 seconds) - - while [ $wait_count -lt $max_wait ]; do - sleep 5 - local current_status=$(ssh "$UNRAID_USER@$UNRAID_HOST" "virsh domstate $TEMPLATE_VM_NAME 2>/dev/null || echo 'not defined'") - - if [ "$current_status" != "running" ]; then - log_success "✅ Template VM stopped gracefully (status: $current_status)" - return 0 - fi - - ((wait_count++)) - log "Waiting for graceful shutdown... ($((wait_count * 5))s)" - done - - # If graceful shutdown didn't work, ask about force stop - log_warning "Graceful shutdown took too long. Template VM is still running." - - if [ "$NON_INTERACTIVE" = "true" ]; then - log "Non-interactive mode: Force stopping template VM..." - force_choice="y" - else - echo - read -p "Force stop the template VM? (y/n): " force_choice - fi - - if [ "$force_choice" = "y" ] || [ "$force_choice" = "Y" ]; then - log "Force stopping template VM..." - if ssh "$UNRAID_USER@$UNRAID_HOST" "virsh destroy $TEMPLATE_VM_NAME"; then - log_success "✅ Template VM force stopped" - return 0 - else - log_error "❌ Failed to force stop template VM" - return 1 - fi - else - log_error "Template VM is still running. Cannot proceed safely." - return 1 - fi - else - log_error "❌ Failed to send shutdown command to template VM" - return 1 - fi - ;; - 2) - log_warning "⚠️ Continuing with running template VM (NOT RECOMMENDED)" - log_warning "This may cause disk corruption or deployment issues!" - return 0 - ;; - 3|n|N|"") - log "Setup cancelled by user" - exit 0 - ;; - *) - log_error "Invalid choice. Please select 1, 2, or 3." - return 1 - ;; - esac - fi - - return 0 -} - -# Function to check template VM availability -check_template_vm() { - log_template "Checking template VM availability..." - - # Test connection first - if ! ssh -o ConnectTimeout=10 "$UNRAID_USER@$UNRAID_HOST" "echo 'Connected'" > /dev/null 2>&1; then - log_error "Cannot connect to Unraid server at $UNRAID_HOST" - log_error "Please verify:" - log_error "1. Unraid server IP address is correct" - log_error "2. SSH key authentication is set up" - log_error "3. Network connectivity" - return 1 - fi - - # Check if template VM disk exists - if ssh "$UNRAID_USER@$UNRAID_HOST" "test -f /mnt/user/domains/$TEMPLATE_VM_NAME/vdisk1.qcow2"; then - log_template "✅ Template VM disk found: /mnt/user/domains/$TEMPLATE_VM_NAME/vdisk1.qcow2" - - # Get template info - template_info=$(ssh "$UNRAID_USER@$UNRAID_HOST" "qemu-img info /mnt/user/domains/$TEMPLATE_VM_NAME/vdisk1.qcow2 | grep 'virtual size' || echo 'Size info not available'") - log_template "📋 Template info: $template_info" - - # Check and handle template VM status - template_status=$(ssh "$UNRAID_USER@$UNRAID_HOST" "virsh domstate $TEMPLATE_VM_NAME 2>/dev/null || echo 'not defined'") - - if [ "$template_status" = "running" ]; then - log_template "Template VM status: $template_status (needs to be stopped)" - - # Stop the template VM if running - if ! stop_template_vm_if_running; then - log_error "Failed to stop template VM. Cannot proceed safely." - return 1 - fi - else - log_template "✅ Template VM status: $template_status (good for template use)" - fi - - return 0 - else - log_error "❌ Template VM disk not found!" - log_error "Expected location: /mnt/user/domains/$TEMPLATE_VM_NAME/vdisk1.qcow2" - log_error "" - log_error "To create the template VM:" - log_error "1. Create a VM named '$TEMPLATE_VM_NAME' on your Unraid server" - log_error "2. Install Ubuntu 24.04 LTS with required packages" - log_error "3. Configure it with Python, PostgreSQL, Nginx, etc." - log_error "4. Shut it down to use as a template" - log_error "" - log_error "See README-template-deployment.md for detailed setup instructions" - return 1 - fi -} - -# Function to prompt for configuration -prompt_template_config() { - # In non-interactive mode, use saved config only - if [ "$NON_INTERACTIVE" = "true" ]; then - load_non_interactive_config - return 0 - fi - - log "=== ThrillWiki Template-Based VM Configuration ===" - echo - log_template "🚀 This setup uses TEMPLATE-BASED deployment for ultra-fast VM creation!" - echo - - # Try to load existing config first - if load_config; then - log_success "Loaded existing template configuration" - echo "Current settings:" - echo " Unraid Host: $UNRAID_HOST" - echo " VM Name: $VM_NAME" - echo " Template VM: $TEMPLATE_VM_NAME" - echo " VM IP: $VM_IP" - echo " Repository: $REPO_URL" - echo " Deployment: template-based ⚡" - echo - read -p "Use existing configuration? (y/n): " use_existing - if [ "$use_existing" = "y" ] || [ "$use_existing" = "Y" ]; then - # Still need to get sensitive info that we don't save - read -s -p "Enter Unraid [PASSWORD-REMOVED] - echo - - # Handle GitHub authentication based on saved method - if [ -n "$GITHUB_USERNAME" ] && [ "$GITHUB_API_ENABLED" = "true" ]; then - # Try different sources for the token in order of preference - log "Loading GitHub PAT token..." - - # 1. Try authentication script first - if GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token 2>/dev/null) && [ -n "$GITHUB_TOKEN" ]; then - log_success "Token obtained from authentication script" - log "Using existing PAT token from authentication script" - - # Validate token and repository access immediately - log "🔍 Validating GitHub token and repository access..." - if ! validate_github_access; then - log_error "❌ GitHub token validation failed. Please check your token and repository access." - log "Please try entering a new token or check your repository URL." - return 1 - fi - - # 2. Try saved token file - elif load_github_token; then - log_success "Token loaded from secure storage (reusing for VM reset)" - - # Validate token and repository access immediately - log "🔍 Validating GitHub token and repository access..." - if ! validate_github_access; then - log_error "❌ GitHub token validation failed. Please check your token and repository access." - log "Please try entering a new token or check your repository URL." - return 1 - fi - - else - log "No token found in authentication script or saved storage" - read -s -p "Enter GitHub personal access token: " GITHUB_TOKEN - echo - - # Validate the new token immediately - if [ -n "$GITHUB_TOKEN" ]; then - log "🔍 Validating new GitHub token..." - if ! validate_github_access; then - log_error "❌ GitHub token validation failed. Please check your token and repository access." - log "Please try running the setup again with a valid token." - return 1 - fi - fi - - # Save the new token for future VM resets - save_github_token - fi - fi - - if [ "$WEBHOOK_ENABLED" = "true" ]; then - read -s -p "Enter GitHub webhook secret: " WEBHOOK_SECRET - echo - fi - - # Check template VM before proceeding - if ! check_template_vm; then - log_error "Template VM check failed. Please set up your template VM first." - exit 1 - fi - - return 0 - fi - fi - - # Prompt for new configuration - read -p "Enter your Unraid server IP address: " UNRAID_HOST - - read -p "Enter Unraid username (default: root): " UNRAID_USER - UNRAID_USER=${UNRAID_USER:-root} - - read -s -p "Enter Unraid [PASSWORD-REMOVED] - echo - # Note: Password not saved for security - - # Check template VM availability early - log_template "Verifying template VM setup..." - if ! check_template_vm; then - log_error "Template VM setup is required before proceeding." - echo - read -p "Do you want to continue setup anyway? (y/n): " continue_anyway - if [ "$continue_anyway" != "y" ] && [ "$continue_anyway" != "Y" ]; then - log "Setup cancelled. Please set up your template VM first." - log "See README-template-deployment.md for instructions." - exit 1 - fi - log_warning "Continuing setup without verified template VM..." - else - log_success "Template VM verified and ready!" - fi - - read -p "Enter VM name (default: $DEFAULT_VM_NAME): " VM_NAME - VM_NAME=${VM_NAME:-$DEFAULT_VM_NAME} - - read -p "Enter VM memory in MB (default: $DEFAULT_VM_MEMORY): " VM_MEMORY - VM_MEMORY=${VM_MEMORY:-$DEFAULT_VM_MEMORY} - - read -p "Enter VM vCPUs (default: $DEFAULT_VM_VCPUS): " VM_VCPUS - VM_VCPUS=${VM_VCPUS:-$DEFAULT_VM_VCPUS} - - read -p "Enter VM disk size in GB (default: $DEFAULT_VM_DISK_SIZE): " VM_DISK_SIZE - VM_DISK_SIZE=${VM_DISK_SIZE:-$DEFAULT_VM_DISK_SIZE} - - # Template VM name (usually fixed) - read -p "Enter template VM name (default: $TEMPLATE_VM_NAME): " TEMPLATE_VM_NAME_INPUT - TEMPLATE_VM_NAME=${TEMPLATE_VM_NAME_INPUT:-$TEMPLATE_VM_NAME} - - read -p "Enter GitHub repository URL: " REPO_URL - - # GitHub API Configuration - PAT Only - echo - log "=== GitHub Personal Access Token Configuration ===" - echo "This setup requires a GitHub Personal Access Token (PAT) for repository access." - echo "Both classic tokens and fine-grained tokens are supported." - echo "" - echo "Required token permissions:" - echo " - Repository access (read/write)" - echo " - Contents (read/write)" - echo " - Metadata (read)" - echo "" - - # Try to get token from authentication script first - log "Checking for existing GitHub token..." - if GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token 2>/dev/null) && [ -n "$GITHUB_TOKEN" ]; then - # Get username from authentication script if possible - if GITHUB_USERNAME=$(python3 "$SCRIPT_DIR/../github-auth.py" whoami 2>/dev/null | grep "You are authenticated as:" | cut -d: -f2 | xargs) && [ -n "$GITHUB_USERNAME" ]; then - log_success "Found existing token and username from authentication script" - echo "Username: $GITHUB_USERNAME" - echo "Token: ${GITHUB_TOKEN:0:8}... (masked)" - echo - read -p "Use this existing token? (y/n): " use_existing_token - - if [ "$use_existing_token" != "y" ] && [ "$use_existing_token" != "Y" ]; then - GITHUB_TOKEN="" - GITHUB_USERNAME="" - fi - else - log "Found token but no username, need to get username..." - read -p "Enter GitHub username: " GITHUB_USERNAME - fi - fi - - # If no token found or user chose not to use existing, prompt for manual entry - if [ -z "$GITHUB_TOKEN" ]; then - log "Enter your GitHub credentials manually:" - read -p "Enter GitHub username: " GITHUB_USERNAME - read -s -p "Enter GitHub Personal Access Token (classic or fine-grained): " GITHUB_TOKEN - echo - fi - - # Validate that we have both username and token - if [ -n "$GITHUB_USERNAME" ] && [ -n "$GITHUB_TOKEN" ]; then - GITHUB_API_ENABLED=true - GITHUB_AUTH_METHOD="token" - log_success "Personal access token configured for user: $GITHUB_USERNAME" - - # Test the token quickly - log "Testing GitHub token access..." - if curl -sf -H "Authorization: token $GITHUB_TOKEN" https://api.github.com/user >/dev/null 2>&1; then - log_success "✅ GitHub token validated successfully" - else - log_warning "⚠️ Could not validate GitHub token (API may be rate-limited)" - log "Proceeding anyway - token will be tested during repository operations" - fi - else - log_error "Both username and token are required for GitHub access" - log_error "Repository cloning and auto-pull functionality will not work without proper authentication" - exit 1 - fi - - # Webhook Configuration - echo - read -s -p "Enter GitHub webhook secret (optional, press Enter to skip): " WEBHOOK_SECRET - echo - - # If no webhook secret provided, disable webhook functionality - if [ -z "$WEBHOOK_SECRET" ]; then - log "No webhook secret provided - webhook functionality will be disabled" - WEBHOOK_ENABLED=false - else - WEBHOOK_ENABLED=true - fi - - read -p "Enter webhook port (default: $DEFAULT_WEBHOOK_PORT): " WEBHOOK_PORT - WEBHOOK_PORT=${WEBHOOK_PORT:-$DEFAULT_WEBHOOK_PORT} - - # Get VM network configuration preference - echo - log "=== Network Configuration ===" - echo "Choose network configuration method:" - echo "1. DHCP (automatic IP assignment - recommended)" - echo "2. Static IP (manual IP configuration)" - - while true; do - read -p "Select option (1-2): " network_choice - case $network_choice in - 1) - log "Using DHCP network configuration..." - VM_IP="dhcp" - VM_GATEWAY="192.168.20.1" - VM_NETMASK="255.255.255.0" - VM_NETWORK="192.168.20.0/24" - NETWORK_MODE="dhcp" - break - ;; - 2) - log "Using static IP network configuration..." - # Get VM IP address with proper range validation - while true; do - read -p "Enter VM IP address (192.168.20.10-192.168.20.100): " VM_IP - if [[ "$VM_IP" =~ ^192\.168\.20\.([1-9][0-9]|100)$ ]]; then - local ip_last_octet="${BASH_REMATCH[1]}" - if [ "$ip_last_octet" -ge 10 ] && [ "$ip_last_octet" -le 100 ]; then - break - fi - fi - echo "Invalid IP address. Please enter an IP in the range 192.168.20.10-192.168.20.100" - done - VM_GATEWAY="192.168.20.1" - VM_NETMASK="255.255.255.0" - VM_NETWORK="192.168.20.0/24" - NETWORK_MODE="static" - break - ;; - *) - echo "Invalid option. Please select 1 or 2." - ;; - esac - done - - # Save configuration and GitHub token - save_config - save_github_token # Save token for VM resets - log_success "Template configuration saved - setup complete!" -} - -# Function to update SSH config with actual VM IP address -update_ssh_config_with_ip() { - local vm_name="$1" - local vm_ip="$2" - local ssh_config_path="$HOME/.ssh/config" - - log "Updating SSH config with actual IP: $vm_ip" - - # Check if SSH config exists and has our VM entry - if [ -f "$ssh_config_path" ] && grep -q "Host $vm_name" "$ssh_config_path"; then - # Update the HostName to use actual IP instead of %h placeholder - if grep -A 10 "Host $vm_name" "$ssh_config_path" | grep -q "HostName %h"; then - # Replace %h with actual IP - sed -i.bak "/Host $vm_name/,/^Host\|^$/s/HostName %h/HostName $vm_ip/" "$ssh_config_path" - log_success "SSH config updated: $vm_name now points to $vm_ip" - elif grep -A 10 "Host $vm_name" "$ssh_config_path" | grep -q "HostName "; then - # Update existing IP - sed -i.bak "/Host $vm_name/,/^Host\|^$/s/HostName .*/HostName $vm_ip/" "$ssh_config_path" - log_success "SSH config updated: $vm_name IP changed to $vm_ip" - else - # Add HostName line after Host line - sed -i.bak "/Host $vm_name/a\\ - HostName $vm_ip" "$ssh_config_path" - log_success "SSH config updated: Added IP $vm_ip for $vm_name" - fi - - # Show the updated config section - log "Updated SSH config for $vm_name:" - grep -A 6 "Host $vm_name" "$ssh_config_path" | head -7 - else - log_warning "SSH config entry for $vm_name not found, cannot update IP" - fi -} - -# Generate SSH keys for VM access -setup_ssh_keys() { - log "Setting up SSH keys for template VM access..." - - local ssh_key_path="$HOME/.ssh/thrillwiki_vm" - local ssh_config_path="$HOME/.ssh/config" - - if [ ! -f "$ssh_key_path" ]; then - ssh-keygen -t rsa -b 4096 -f "$ssh_key_path" -N "" -C "thrillwiki-template-vm-access" - log_success "SSH key generated: $ssh_key_path" - else - log "SSH key already exists: $ssh_key_path" - fi - - # Add SSH config entry - if ! grep -q "Host $VM_NAME" "$ssh_config_path" 2>/dev/null; then - cat >> "$ssh_config_path" << EOF - -# ThrillWiki Template VM -Host $VM_NAME - HostName %h - User thrillwiki - IdentityFile $ssh_key_path - StrictHostKeyChecking no - UserKnownHostsFile /dev/null -EOF - log_success "SSH config updated for template VM" - fi - - # Store public key for VM setup - SSH_PUBLIC_KEY=$(cat "$ssh_key_path.pub") - export SSH_PUBLIC_KEY -} - -# Setup Unraid host access -setup_unraid_access() { - log "Setting up Unraid server access..." - - local unraid_key_path="$HOME/.ssh/unraid_access" - - if [ ! -f "$unraid_key_path" ]; then - ssh-keygen -t rsa -b 4096 -f "$unraid_key_path" -N "" -C "unraid-template-access" - - log "Please add this public key to your Unraid server:" - echo "---" - cat "$unraid_key_path.pub" - echo "---" - echo - log "Add this to /root/.ssh/***REMOVED*** on your Unraid server" - read -p "Press Enter when you've added the key..." - fi - - # Test Unraid connection - log "Testing Unraid connection..." - if ssh -i "$unraid_key_path" -o ConnectTimeout=5 -o StrictHostKeyChecking=no "$UNRAID_USER@$UNRAID_HOST" "echo 'Connected to Unraid successfully'"; then - log_success "Unraid connection test passed" - else - log_error "Unraid connection test failed" - exit 1 - fi - - # Update SSH config for Unraid - if ! grep -q "Host unraid" "$HOME/.ssh/config" 2>/dev/null; then - cat >> "$HOME/.ssh/config" << EOF - -# Unraid Server -Host unraid - HostName $UNRAID_HOST - User $UNRAID_USER - IdentityFile $unraid_key_path - StrictHostKeyChecking no -EOF - fi -} - -# Create environment files for template deployment -create_environment_files() { - log "Creating template deployment environment files..." - log "🔄 NEW TOKEN WILL BE WRITTEN TO ALL ENVIRONMENT FILES (overwriting any old tokens)" - - # Force remove old environment files first - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" 2>/dev/null || true - - # Get SSH public key content safely - local ssh_key_path="$HOME/.ssh/thrillwiki_vm.pub" - local ssh_public_key="" - if [ -f "$ssh_key_path" ]; then - ssh_public_key=$(cat "$ssh_key_path") - fi - - # Template-based Unraid VM environment - COMPLETELY NEW FILE WITH NEW TOKEN - cat > "$PROJECT_DIR/***REMOVED***.unraid" << EOF -# ThrillWiki Template-Based VM Configuration -UNRAID_HOST=$UNRAID_HOST -UNRAID_USER=$UNRAID_USER -UNRAID_PASSWORD=$UNRAID_PASSWORD -VM_NAME=$VM_NAME -VM_MEMORY=$VM_MEMORY -VM_VCPUS=$VM_VCPUS -VM_DISK_SIZE=$VM_DISK_SIZE -SSH_PUBLIC_KEY="$ssh_public_key" - -# Template Configuration -TEMPLATE_VM_NAME=$TEMPLATE_VM_NAME -DEPLOYMENT_TYPE=template-based - -# Network Configuration -VM_IP=$VM_IP -VM_GATEWAY=$VM_GATEWAY -VM_NETMASK=$VM_NETMASK -VM_NETWORK=$VM_NETWORK - -# GitHub Configuration -REPO_URL=$REPO_URL -GITHUB_USERNAME=$GITHUB_USERNAME -GITHUB_TOKEN=$GITHUB_TOKEN -GITHUB_API_ENABLED=$GITHUB_API_ENABLED -EOF - - # Webhook environment (updated with VM info) - cat > "$PROJECT_DIR/***REMOVED***.webhook" << EOF -# ThrillWiki Template-Based Webhook Configuration -WEBHOOK_PORT=$WEBHOOK_PORT -WEBHOOK_SECRET=$WEBHOOK_SECRET -WEBHOOK_ENABLED=$WEBHOOK_ENABLED -VM_HOST=$VM_IP -VM_PORT=22 -VM_USER=thrillwiki -VM_KEY_PATH=$HOME/.ssh/thrillwiki_vm -VM_PROJECT_PATH=/home/thrillwiki/thrillwiki -REPO_URL=$REPO_URL -DEPLOY_BRANCH=main - -# Template Configuration -TEMPLATE_VM_NAME=$TEMPLATE_VM_NAME -DEPLOYMENT_TYPE=template-based - -# GitHub API Configuration -GITHUB_USERNAME=$GITHUB_USERNAME -GITHUB_TOKEN=$GITHUB_TOKEN -GITHUB_API_ENABLED=$GITHUB_API_ENABLED -EOF - - log_success "Template deployment environment files created" -} - -# Install required tools -install_dependencies() { - log "Installing required dependencies for template deployment..." - - # Check for required tools - local missing_tools=() - local mac_tools=() - - command -v python3 >/dev/null 2>&1 || missing_tools+=("python3") - command -v ssh >/dev/null 2>&1 || missing_tools+=("openssh-client") - command -v scp >/dev/null 2>&1 || missing_tools+=("openssh-client") - - # Install missing tools based on platform - if [ ${#missing_tools[@]} -gt 0 ]; then - log "Installing missing tools: ${missing_tools[*]}" - - if command -v apt-get >/dev/null 2>&1; then - sudo apt-get update - sudo apt-get install -y "${missing_tools[@]}" - elif command -v yum >/dev/null 2>&1; then - sudo yum install -y "${missing_tools[@]}" - elif command -v dnf >/dev/null 2>&1; then - sudo dnf install -y "${missing_tools[@]}" - elif command -v brew >/dev/null 2>&1; then - # macOS with Homebrew - for tool in "${missing_tools[@]}"; do - case $tool in - python3) brew install python3 ;; - openssh-client) log "OpenSSH should be available on macOS" ;; - esac - done - else - log_error "Package manager not found. Please install: ${missing_tools[*]}" - exit 1 - fi - fi - - # Install Python dependencies - if [ -f "$PROJECT_DIR/pyproject.toml" ]; then - log "Installing Python dependencies with UV..." - if ! command -v uv >/dev/null 2>&1; then - curl -LsSf https://astral.sh/uv/install.sh | sh - source ~/.cargo/env - fi - cd "$PROJECT_DIR" - uv sync - fi - - log_success "Dependencies installed for template deployment" -} - -# Create VM using the template-based VM manager -create_template_vm() { - log "Creating VM from template on Unraid server..." - - # Export all environment variables from the file - set -a # automatically export all variables - source "$PROJECT_DIR/***REMOVED***.unraid" - set +a # turn off automatic export - - # Run template-based VM setup - cd "$PROJECT_DIR" - python3 scripts/unraid/main_template.py setup - - if [ $? -eq 0 ]; then - log_success "Template-based VM setup completed successfully ⚡" - log_template "VM deployed in minutes instead of 30+ minutes!" - else - log_error "Template-based VM setup failed" - exit 1 - fi -} - -# Wait for template VM to be ready and get IP -wait_for_template_vm() { - log "🔍 Getting VM IP address from guest agent..." - log_template "Template VMs should get IP immediately via guest agent!" - - # Export all environment variables from the file - set -a # automatically export all variables - source "$PROJECT_DIR/***REMOVED***.unraid" - set +a # turn off automatic export - - # Check for IP immediately - template VMs should have guest agent running - local max_attempts=12 # 3 minutes max wait (much shorter) - local attempt=1 - - log "🔍 Phase 1: Checking guest agent for IP address..." - - while [ $attempt -le $max_attempts ]; do - log "🔍 Attempt $attempt/$max_attempts: Querying guest agent on VM '$VM_NAME'..." - - # Add timeout to the IP detection to prevent hanging - VM_IP_RESULT="" - VM_IP="" - - # Use timeout command to prevent hanging (30 seconds max per attempt) - if command -v timeout >/dev/null 2>&1; then - VM_IP_RESULT=$(timeout 30 python3 scripts/unraid/main_template.py ip 2>&1 || echo "TIMEOUT") - elif command -v gtimeout >/dev/null 2>&1; then - # macOS with coreutils installed - VM_IP_RESULT=$(gtimeout 30 python3 scripts/unraid/main_template.py ip 2>&1 || echo "TIMEOUT") - else - # Fallback for systems without timeout command - use background process with kill - log "⚠️ No timeout command available, using background process method..." - VM_IP_RESULT=$(python3 scripts/unraid/main_template.py ip 2>&1 & - PID=$! - ( - sleep 30 - if kill -0 $PID 2>/dev/null; then - kill $PID 2>/dev/null - echo "TIMEOUT" - fi - ) & - wait $PID 2>/dev/null || echo "TIMEOUT") - fi - - # Check if we got a timeout - if echo "$VM_IP_RESULT" | grep -q "TIMEOUT"; then - log "⚠️ IP detection timed out after 30 seconds - guest agent may not be ready" - elif [ -n "$VM_IP_RESULT" ]; then - # Show what we got from the query - log "📝 Guest agent response: $(echo "$VM_IP_RESULT" | head -1)" - - # Extract IP from successful response - VM_IP=$(echo "$VM_IP_RESULT" | grep "VM IP:" | cut -d' ' -f3) - else - log "⚠️ No response from guest agent query" - fi - - if [ -n "$VM_IP" ] && [ "$VM_IP" != "None" ] && [ "$VM_IP" != "null" ] && [ "$VM_IP" != "TIMEOUT" ]; then - log_success "✅ Template VM got IP address: $VM_IP ⚡" - - # Update SSH config with actual IP - update_ssh_config_with_ip "$VM_NAME" "$VM_IP" - - # Update webhook environment with IP - sed -i.bak "s/VM_HOST=$VM_NAME/VM_HOST=$VM_IP/" "$PROJECT_DIR/***REMOVED***.webhook" - - break - fi - - # Much shorter wait time since template VMs should be fast - if [ $attempt -le 3 ]; then - log "⏳ No IP yet, waiting 5 seconds... (VM may still be booting)" - sleep 5 # Very short wait for first few attempts - else - log "⏳ Still waiting for IP... ($(($attempt * 15))s elapsed, checking every 15s)" - - # Show VM status to help debug - also with timeout - log "🔍 Checking VM status for debugging..." - if command -v timeout >/dev/null 2>&1; then - VM_STATUS=$(timeout 15 python3 scripts/unraid/main_template.py status 2>&1 | head -1 || echo "Status check timed out") - else - VM_STATUS=$(python3 scripts/unraid/main_template.py status 2>&1 | head -1) - fi - - if [ -n "$VM_STATUS" ]; then - log "📊 VM Status: $VM_STATUS" - fi - - sleep 15 - fi - ((attempt++)) - done - - if [ -z "$VM_IP" ] || [ "$VM_IP" = "None" ] || [ "$VM_IP" = "null" ]; then - log_error "❌ Template VM failed to get IP address after $((max_attempts * 15)) seconds" - log_error "Guest agent may not be running or network configuration issue" - log_error "Check VM console on Unraid: virsh console $VM_NAME" - exit 1 - fi - - # Phase 2: Wait for SSH connectivity (should be very fast for templates) - log "🔍 Phase 2: Testing SSH connectivity to $VM_IP..." - wait_for_ssh_connectivity "$VM_IP" -} - -# Wait for SSH connectivity to be available -wait_for_ssh_connectivity() { - local vm_ip="$1" - local max_ssh_attempts=20 # 5 minutes max wait for SSH - local ssh_attempt=1 - - while [ $ssh_attempt -le $max_ssh_attempts ]; do - log "🔑 Testing SSH connection to $vm_ip... (attempt $ssh_attempt/$max_ssh_attempts)" - - # Test SSH connectivity with a simple command - if ssh -o ConnectTimeout=10 -o StrictHostKeyChecking=no -o BatchMode=yes "$VM_NAME" "echo 'SSH connection successful'" >/dev/null 2>&1; then - log_success "✅ SSH connectivity established to template VM! 🚀" - return 0 - fi - - # More detailed error for first few attempts - if [ $ssh_attempt -le 3 ]; then - log "⏳ SSH not ready yet - VM may still be booting or initializing SSH service..." - else - log "⏳ Still waiting for SSH... ($(($ssh_attempt * 15))s elapsed)" - fi - - sleep 15 - ((ssh_attempt++)) - done - - log_error "❌ SSH connection failed after $((max_ssh_attempts * 15)) seconds" - log_error "VM IP: $vm_ip" - log_error "Try manually: ssh $VM_NAME" - log_error "Check VM console on Unraid for boot issues" - exit 1 -} -# Configure VM for ThrillWiki using template-optimized deployment -configure_template_vm() { - log "🚀 Deploying ThrillWiki to template VM..." - log "This will sync the project files and set up the application" - - # First, sync the current project files to the VM - deploy_project_files - - # Then run the setup script on the VM - run_vm_setup_script - - log_success "✅ Template VM configured and application deployed! ⚡" -} - -# Configure passwordless sudo for required operations -configure_passwordless_sudo() { - log "⚙️ Configuring passwordless sudo for deployment operations..." - - # Create sudoers configuration file for thrillwiki user - local sudoers_config="/tmp/thrillwiki-sudoers" - - cat > "$sudoers_config" << 'EOF' -# ThrillWiki deployment sudo configuration -# Allow thrillwiki user to run specific commands without password - -# File system operations for deployment -thrillwiki ALL=(ALL) NOPASSWD: /bin/rm, /bin/mkdir, /bin/chown, /bin/chmod - -# Package management for updates -thrillwiki ALL=(ALL) NOPASSWD: /usr/bin/apt, /usr/bin/apt-get, /usr/bin/apt-cache - -# System service management -thrillwiki ALL=(ALL) NOPASSWD: /bin/systemctl - -# PostgreSQL management -thrillwiki ALL=(ALL) NOPASSWD: /usr/bin/sudo -u postgres * - -# Service file management -thrillwiki ALL=(ALL) NOPASSWD: /bin/cp [AWS-SECRET-REMOVED]emd/* /etc/systemd/system/ -thrillwiki ALL=(ALL) NOPASSWD: /bin/sed -i * /etc/systemd/system/thrillwiki.service -EOF - - # Copy sudoers file to VM and install it - log "📋 Copying sudoers configuration to VM..." - scp "$sudoers_config" "$VM_NAME:/tmp/" - - # Install sudoers configuration (this requires password once) - log "Installing sudo configuration (may require password this one time)..." - if ssh -t "$VM_NAME" "sudo cp /tmp/thrillwiki-sudoers /etc/sudoers.d/thrillwiki && sudo chmod 440 /etc/sudoers.d/thrillwiki && sudo visudo -c"; then - log_success "✅ Passwordless sudo configured successfully" - else - log_error "Failed to configure passwordless sudo. Setup will continue but may prompt for passwords." - # Continue anyway, as the user might have already configured this - fi - - # Cleanup - rm -f "$sudoers_config" - ssh "$VM_NAME" "rm -f /tmp/thrillwiki-sudoers" -} - -# Validate GitHub token and repository access -validate_github_access() { - log "🔍 Validating GitHub token and repository access..." - - # Extract repository path from REPO_URL - local repo_path=$(echo "$REPO_URL" | sed 's|^https://github.com/||' | sed 's|/$||') - if [ -z "$repo_path" ]; then - repo_path="pacnpal/thrillwiki_django_no_react" # fallback - log_warning "Using fallback repository path: $repo_path" - fi - - # Test GitHub API authentication - log "Testing GitHub API authentication..." - if ! curl -sf -H "Authorization: token $GITHUB_TOKEN" "https://api.github.com/user" > /dev/null; then - log_error "❌ GitHub token authentication failed!" - log_error "The token cannot authenticate with GitHub API." - - if [ "$NON_INTERACTIVE" = "true" ]; then - log_error "Non-interactive mode: Cannot prompt for new token." - log_error "Please update your GITHUB_TOKEN environment variable with a valid token." - exit 1 - fi - - echo - echo "❌ Your GitHub token is invalid or expired!" - echo "Please create a new Personal Access Token at: https://github.com/settings/tokens" - echo "Required permissions: repo (full control of private repositories)" - echo - read -s -p "Enter a new GitHub Personal Access Token: " GITHUB_TOKEN - echo - - if [ -z "$GITHUB_TOKEN" ]; then - log_error "No token provided. Cannot continue." - return 1 - fi - - # Save the new token - save_github_token - - # Test the new token - if ! curl -sf -H "Authorization: token $GITHUB_TOKEN" "https://api.github.com/user" > /dev/null; then - log_error "❌ New token is also invalid. Please check your token and try again." - return 1 - fi - - log_success "✅ New GitHub token validated successfully" - else - log_success "✅ GitHub token authentication successful" - fi - - # Test repository access - log "Testing repository access: $repo_path" - local repo_response=$(curl -sf -H "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$repo_path") - - if [ $? -ne 0 ] || [ -z "$repo_response" ]; then - log_error "❌ Cannot access repository: $repo_path" - log_error "This could be due to:" - log_error "1. Repository doesn't exist" - log_error "2. Repository is private and token lacks access" - log_error "3. Token doesn't have 'repo' permissions" - - if [ "$NON_INTERACTIVE" = "true" ]; then - log_error "Non-interactive mode: Cannot prompt for new repository." - log_error "Please update your repository URL or token permissions." - return 1 - fi - - echo - echo "❌ Cannot access repository: $REPO_URL" - echo "Current repository path: $repo_path" - echo - echo "The token has these scopes: $(curl -sf -H "Authorization: token $GITHUB_TOKEN" -I "https://api.github.com/user" | grep -i "x-oauth-scopes:" | cut -d: -f2 | xargs || echo "unknown")" - echo "Required scope: 'repo' (full control of private repositories)" - echo - echo "Options:" - echo "1. Enter a new GitHub token with 'repo' permissions" - echo "2. Enter a different repository URL" - echo "3. Exit and fix token permissions at https://github.com/settings/tokens" - echo - read -p "Select option (1-3): " repo_access_choice - - case $repo_access_choice in - 1) - echo - echo "Please create a new GitHub Personal Access Token:" - echo "1. Go to: https://github.com/settings/tokens/new" - echo "2. Give it a name like 'ThrillWiki Template Automation'" - echo "3. Check the 'repo' scope (full control of private repositories)" - echo "4. Click 'Generate token'" - echo "5. Copy the new token" - echo - read -s -p "Enter new GitHub Personal Access Token: " new_github_token - echo - - if [ -z "$new_github_token" ]; then - log_error "No token provided. Cannot continue." - return 1 - fi - - # Test the new token - log "Testing new GitHub token..." - if ! curl -sf -H "Authorization: token $new_github_token" "https://api.github.com/user" > /dev/null; then - log_error "❌ New token authentication failed. Please check your token." - return 1 - fi - - # Test repository access with new token - log "Testing repository access with new token: $repo_path" - local new_repo_response=$(curl -sf -H "Authorization: token $new_github_token" "https://api.github.com/repos/$repo_path") - - if [ $? -ne 0 ] || [ -z "$new_repo_response" ]; then - log_error "❌ New token still cannot access the repository." - log_error "Please ensure the token has 'repo' scope and try again." - return 1 - fi - - # Token works! Update it - GITHUB_TOKEN="$new_github_token" - log_success "✅ New GitHub token validated successfully" - - # Show new token scopes - local new_scopes=$(curl -sf -H "Authorization: token $GITHUB_TOKEN" -I "https://api.github.com/user" | grep -i "x-oauth-scopes:" | cut -d: -f2 | xargs || echo "unknown") - log "New token scopes: $new_scopes" - - # Save the new token - save_github_token - - # Continue with validation using the new token - repo_response="$new_repo_response" - ;; - 2) - echo - read -p "Enter new repository URL: " new_repo_url - - if [ -z "$new_repo_url" ]; then - log "Setup cancelled by user" - exit 0 - fi - - REPO_URL="$new_repo_url" - - # Extract new repo path and test again - repo_path=$(echo "$REPO_URL" | sed 's|^https://github.com/||' | sed 's|/$||') - log "Testing new repository: $repo_path" - - repo_response=$(curl -sf -H "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/$repo_path") - if [ $? -ne 0 ] || [ -z "$repo_response" ]; then - log_error "❌ New repository is also inaccessible. Please check the URL and token permissions." - return 1 - fi - - log_success "✅ New repository validated successfully" - - # Update saved configuration with new repo URL - save_config - ;; - 3|"") - log "Setup cancelled by user" - echo "Please update your token permissions at: https://github.com/settings/tokens" - return 1 - ;; - *) - log_error "Invalid choice. Please select 1, 2, or 3." - return 1 - ;; - esac - else - log_success "✅ Repository access confirmed: $repo_path" - fi - - # Show repository info - local repo_name=$(echo "$repo_response" | python3 -c "import sys, json; print(json.load(sys.stdin).get('full_name', 'Unknown'))" 2>/dev/null || echo "$repo_path") - local repo_private=$(echo "$repo_response" | python3 -c "import sys, json; print(json.load(sys.stdin).get('private', False))" 2>/dev/null || echo "Unknown") - - log "📊 Repository info:" - echo " Name: $repo_name" - echo " Private: $repo_private" - echo " URL: $REPO_URL" -} - -# Clone project from GitHub using PAT authentication -deploy_project_files() { - log "🔄 Cloning project from GitHub repository..." - - # Validate GitHub access before attempting clone - if ! validate_github_access; then - log_error "❌ GitHub token validation failed during deployment." - log_error "Cannot proceed with repository cloning without valid GitHub access." - exit 1 - fi - - # First, configure passwordless sudo for required operations - configure_passwordless_sudo - - # Remove any existing directory first - ssh "$VM_NAME" "sudo rm -rf /home/thrillwiki/thrillwiki" - - # Create parent directory - ssh "$VM_NAME" "sudo mkdir -p /home/thrillwiki && sudo chown thrillwiki:thrillwiki /home/thrillwiki" - - # Clone the repository using PAT authentication - # Extract repository path from REPO_URL (already validated) - local repo_path=$(echo "$REPO_URL" | sed 's|^https://github.com/||' | sed 's|/$||') - local auth_url="https://${GITHUB_USERNAME}:${GITHUB_TOKEN}@github.com/${repo_path}.git" - - log "Cloning repository: $REPO_URL" - if ssh "$VM_NAME" "cd /home/thrillwiki && git clone '$auth_url' thrillwiki"; then - log_success "✅ Repository cloned successfully from GitHub!" - else - log_error "❌ Failed to clone repository from GitHub" - log_error "Repository access was validated, but clone failed. This may be due to:" - log_error "1. Network connectivity issues from VM to GitHub" - log_error "2. Git not installed on VM" - log_error "3. Disk space issues on VM" - log_error "Try manually: ssh $VM_NAME 'git --version && df -h'" - exit 1 - fi - - # Set proper ownership - ssh "$VM_NAME" "sudo chown -R thrillwiki:thrillwiki /home/thrillwiki/thrillwiki" - - # Show repository info - local commit_info=$(ssh "$VM_NAME" "cd /home/thrillwiki/thrillwiki && git log -1 --oneline") - log "📊 Cloned repository at commit: $commit_info" - - # Remove the authentication URL from git config for security - ssh "$VM_NAME" "cd /home/thrillwiki/thrillwiki && git remote set-url origin $REPO_URL" - log "🔒 Cleaned up authentication URL from git configuration" -} - -# Run setup script on the VM after files are synchronized -run_vm_setup_script() { - log "⚙️ Running application setup on template VM..." - - # Create optimized VM setup script for template VMs - local vm_setup_script="/tmp/template_vm_thrillwiki_setup.sh" - - cat > "$vm_setup_script" << 'EOF' -#!/bin/bash -set -e - -echo "🚀 Setting up ThrillWiki on template VM (optimized for pre-configured templates)..." - -# Navigate to project directory -cd /home/thrillwiki/thrillwiki - -# Template VMs should already have most packages - just update security -echo "📦 Quick system update (template optimization)..." -sudo apt update >/dev/null 2>&1 -if sudo apt list --upgradable 2>/dev/null | grep -q security; then - echo "🔒 Installing security updates..." - sudo apt upgrade -y --with-new-pkgs -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" >/dev/null 2>&1 -else - echo "✅ No security updates needed" -fi - -# UV should already be installed in template -echo "🔧 Checking UV installation..." -# Check multiple possible UV locations -export PATH="/home/thrillwiki/.local/bin:/home/thrillwiki/.cargo/bin:$PATH" -if ! command -v uv > /dev/null 2>&1; then - echo "📥 Installing UV (not found in template)..." - curl -LsSf https://astral.sh/uv/install.sh | sh - - # UV installer may put it in .local/bin or .cargo/bin - if [ -f ~/.cargo/env ]; then - source ~/.cargo/env - fi - - # Add both possible paths - export PATH="/home/thrillwiki/.local/bin:/home/thrillwiki/.cargo/bin:$PATH" - - # Verify installation worked - if command -v uv > /dev/null 2>&1; then - echo "✅ UV installed successfully at: $(which uv)" - else - echo "❌ UV installation failed or not in PATH" - echo "Current PATH: $PATH" - echo "Checking possible locations:" - ls -la ~/.local/bin/ 2>/dev/null || echo "~/.local/bin/ not found" - ls -la ~/.cargo/bin/ 2>/dev/null || echo "~/.cargo/bin/ not found" - exit 1 - fi -else - echo "✅ UV already installed at: $(which uv)" -fi - -# PostgreSQL should already be configured in template -echo "🗄️ Checking PostgreSQL..." -if ! sudo systemctl is-active --quiet postgresql; then - echo "▶️ Starting PostgreSQL..." - sudo systemctl start postgresql - sudo systemctl enable postgresql -else - echo "✅ PostgreSQL already running" -fi - -# Configure database if not already done -echo "🔧 Setting up database..." -sudo -u postgres createdb thrillwiki 2>/dev/null || echo "📋 Database may already exist" -sudo -u postgres createuser thrillwiki_user 2>/dev/null || echo "👤 User may already exist" -sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO thrillwiki_user;" 2>/dev/null || echo "🔑 Privileges may already be set" - -# Install Python dependencies with UV -echo "📦 Installing Python dependencies..." -UV_CMD="$(which uv)" -if [ -n "$UV_CMD" ] && "$UV_CMD" sync; then - echo "✅ UV sync completed successfully" -else - echo "⚠️ UV sync failed, falling back to pip..." - python3 -m venv .venv - source .venv/bin/activate - pip install -e . -fi - -# Create necessary directories -echo "📁 Creating directories..." -mkdir -p logs backups static media - -# Make scripts executable -echo "⚡ Making scripts executable..." -find scripts -name "*.sh" -exec chmod +x {} \; 2>/dev/null || echo "ℹ️ No shell scripts found" - -# Run Django setup -echo "🌍 Running Django setup..." -UV_CMD="$(which uv)" -echo " 🔄 Running migrations..." -if [ -n "$UV_CMD" ] && "$UV_CMD" run python manage.py migrate; then - echo " ✅ Migrations completed" -else - echo " ⚠️ UV run failed, trying direct Python..." - python3 manage.py migrate -fi - -echo " 📦 Collecting static files..." -if [ -n "$UV_CMD" ] && "$UV_CMD" run python manage.py collectstatic --noinput; then - echo " ✅ Static files collected" -else - echo " ⚠️ UV run failed, trying direct Python..." - python3 manage.py collectstatic --noinput -fi - -# Install systemd services if available -if [ -f scripts/systemd/thrillwiki.service ]; then - echo "🔧 Installing systemd service..." - sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/ - # Fix the home directory path for thrillwiki user - sudo sed -i 's|/home/ubuntu|/home/thrillwiki|g' /etc/systemd/system/thrillwiki.service - sudo systemctl daemon-reload - sudo systemctl enable thrillwiki.service - - if sudo systemctl start thrillwiki.service; then - echo "✅ ThrillWiki service started successfully" - else - echo "⚠️ Service start failed, checking logs..." - sudo systemctl status thrillwiki.service --no-pager -l - fi -else - echo "ℹ️ No systemd service files found, ThrillWiki ready for manual start" - echo "💡 You can start it manually with: uv run python manage.py runserver 0.0.0.0:8000" -fi - -# Test the application -echo "🧪 Testing application..." -sleep 3 -if curl -f http://localhost:8000 >/dev/null 2>&1; then - echo "✅ ThrillWiki is responding on port 8000!" -else - echo "⚠️ ThrillWiki may not be responding yet (this is normal for first start)" -fi - -# Setup auto-pull functionality -echo "🔄 Setting up auto-pull functionality..." - -# Create ***REMOVED*** file with GitHub token for auto-pull authentication -if [ -n "${GITHUB_TOKEN:-}" ]; then - echo "GITHUB_TOKEN=$GITHUB_TOKEN" > ***REMOVED*** - echo "✅ GitHub token configured for auto-pull" -else - echo "⚠️ GITHUB_TOKEN not found - auto-pull will use fallback mode" - echo "# GitHub token not available during setup" > ***REMOVED*** -fi - -# Ensure scripts/vm directory exists and make auto-pull script executable -if [ -f "scripts/vm/auto-pull.sh" ]; then - chmod +x scripts/vm/auto-pull.sh - - # Create cron job for auto-pull (every 10 minutes) - echo "⏰ Installing cron job for auto-pull (every 10 minutes)..." - - # Create cron entry - CRON_ENTRY="*/10 * * * * [AWS-SECRET-REMOVED]uto-pull.sh >> /home/thrillwiki/logs/cron.log 2>&1" - - # Install cron job if not already present - if ! crontab -l 2>/dev/null | grep -q "auto-pull.sh"; then - # Add to existing crontab or create new one - (crontab -l 2>/dev/null || echo "") | { - cat - echo "# ThrillWiki Auto-Pull - Update repository every 10 minutes" - echo "$CRON_ENTRY" - } | crontab - - - echo "✅ Auto-pull cron job installed successfully" - echo "📋 Cron job: $CRON_ENTRY" - else - echo "✅ Auto-pull cron job already exists" - fi - - # Ensure cron service is running - if ! systemctl is-active --quiet cron 2>/dev/null; then - echo "▶️ Starting cron service..." - sudo systemctl start cron - sudo systemctl enable cron - else - echo "✅ Cron service is already running" - fi - - # Test auto-pull script - echo "🧪 Testing auto-pull script..." - if timeout 30 ./scripts/vm/auto-pull.sh --status; then - echo "✅ Auto-pull script test successful" - else - echo "⚠️ Auto-pull script test failed or timed out (this may be normal)" - fi - - echo "📋 Auto-pull setup completed:" - echo " - Script: [AWS-SECRET-REMOVED]uto-pull.sh" - echo " - Schedule: Every 10 minutes" - echo " - Logs: /home/thrillwiki/logs/auto-pull.log" - echo " - Status: Run './scripts/vm/auto-pull.sh --status' to check" - -else - echo "⚠️ Auto-pull script not found, skipping auto-pull setup" -fi - -echo "🎉 Template VM ThrillWiki setup completed successfully! ⚡" -echo "🌐 Application should be available at http://$(hostname -I | awk '{print $1}'):8000" -echo "🔄 Auto-pull: Repository will be updated every 10 minutes automatically" -EOF - - # Copy setup script to VM with progress - log "📋 Copying setup script to VM..." - scp "$vm_setup_script" "$VM_NAME:/tmp/" - - # Make it executable and run it - ssh "$VM_NAME" "chmod +x /tmp/template_vm_thrillwiki_setup.sh" - - log "⚡ Executing setup script on VM (this may take a few minutes)..." - if ssh "$VM_NAME" "bash /tmp/template_vm_thrillwiki_setup.sh"; then - log_success "✅ Application setup completed successfully!" - else - log_error "❌ Application setup failed" - log "Try debugging with: ssh $VM_NAME 'journalctl -u thrillwiki -f'" - exit 1 - fi - - # Cleanup - rm -f "$vm_setup_script" -} - -# Start services -start_template_services() { - log "Starting ThrillWiki services on template VM..." - - # Start VM service - ssh "$VM_NAME" "sudo systemctl start thrillwiki 2>/dev/null || echo 'Service may need manual start'" - - # Verify service is running - if ssh "$VM_NAME" "systemctl is-active --quiet thrillwiki 2>/dev/null"; then - log_success "ThrillWiki service started successfully on template VM ⚡" - else - log_warning "ThrillWiki service may need manual configuration" - log "Try: ssh $VM_NAME 'systemctl status thrillwiki'" - fi - - # Get service status - log "Template VM service status:" - ssh "$VM_NAME" "systemctl status thrillwiki --no-pager -l 2>/dev/null || echo 'Service status not available'" -} - -# Setup webhook listener -setup_template_webhook_listener() { - log "Setting up webhook listener for template deployments..." - - # Create webhook start script - cat > "$PROJECT_DIR/start-template-webhook.sh" << 'EOF' -#!/bin/bash -cd "$(dirname "$0")" -source ***REMOVED***.webhook -echo "Starting webhook listener for template-based deployments ⚡" -python3 scripts/webhook-listener.py -EOF - - chmod +x "$PROJECT_DIR/start-template-webhook.sh" - - log_success "Template webhook listener configured" - log "You can start the webhook listener with: ./start-template-webhook.sh" -} - -# Perform end-to-end test -test_template_deployment() { - log "Performing end-to-end template deployment test..." - - # Test VM connectivity - if ssh "$VM_NAME" "echo 'Template VM connectivity test passed'"; then - log_success "Template VM connectivity test passed ⚡" - else - log_error "Template VM connectivity test failed" - return 1 - fi - - # Test ThrillWiki service - if ssh "$VM_NAME" "curl -f http://localhost:8000 >/dev/null 2>&1"; then - log_success "ThrillWiki service test passed on template VM ⚡" - else - log_warning "ThrillWiki service test failed - checking logs..." - ssh "$VM_NAME" "journalctl -u thrillwiki --no-pager -l | tail -20 2>/dev/null || echo 'Service logs not available'" - fi - - # Test template deployment script - log "Testing template deployment capabilities..." - cd "$PROJECT_DIR/scripts/unraid" - ./template-utils.sh check && log_success "Template utilities working ⚡" - - log_success "End-to-end template deployment test completed ⚡" -} - -# Generate final instructions for template deployment -generate_template_instructions() { - log "Generating final template deployment instructions..." - - cat > "$PROJECT_DIR/TEMPLATE_SETUP_COMPLETE.md" << EOF -# ThrillWiki Template-Based Automation - Setup Complete! 🚀⚡ - -Your ThrillWiki template-based CI/CD system has been fully automated and deployed! - -## Template Deployment Benefits ⚡ - -- **Speed**: 2-5 minute VM deployment vs 20-30 minutes with autoinstall -- **Reliability**: Pre-configured template eliminates installation failures -- **Efficiency**: Copy-on-write disk format saves space - -## VM Information - -- **VM Name**: $VM_NAME -- **Template VM**: $TEMPLATE_VM_NAME -- **VM IP**: $VM_IP -- **SSH Access**: \`ssh $VM_NAME\` -- **Deployment Type**: Template-based ⚡ - -## Services Status - -- **ThrillWiki Service**: Running on template VM -- **Database**: PostgreSQL configured in template -- **Web Server**: Available at http://$VM_IP:8000 - -## Next Steps - -### 1. Start Template Webhook Listener -\`\`\`bash -./start-template-webhook.sh -\`\`\` - -### 2. Configure GitHub Webhook -- Go to your repository: $REPO_URL -- Settings → Webhooks → Add webhook -- **Payload URL**: http://YOUR_PUBLIC_IP:$WEBHOOK_PORT/webhook -- **Content type**: application/json -- **Secret**: (your webhook secret) -- **Events**: Just the push event - -### 3. Test the Template System -\`\`\`bash -# Test template VM connection -ssh $VM_NAME - -# Test service status -ssh $VM_NAME "systemctl status thrillwiki" - -# Test template utilities -cd scripts/unraid -./template-utils.sh check -./template-utils.sh info - -# Deploy another VM from template (fast!) -./template-utils.sh deploy test-vm-2 - -# Make a test commit to trigger automatic deployment -git add . -git commit -m "Test automated template deployment" -git push origin main -\`\`\` - -## Template Management Commands - -### Template VM Management -\`\`\`bash -# Check template status and info -./scripts/unraid/template-utils.sh status -./scripts/unraid/template-utils.sh info - -# List all template-based VMs -./scripts/unraid/template-utils.sh list - -# Deploy new VM from template (2-5 minutes!) -./scripts/unraid/template-utils.sh deploy VM_NAME - -# Copy template to new VM -./scripts/unraid/template-utils.sh copy VM_NAME -\`\`\` - -### Python Template Scripts -\`\`\`bash -# Template-based deployment -python3 scripts/unraid/main_template.py deploy - -# Template management -python3 scripts/unraid/main_template.py template info -python3 scripts/unraid/main_template.py template check -python3 scripts/unraid/main_template.py template list - -# VM operations (fast with templates!) -python3 scripts/unraid/main_template.py setup -python3 scripts/unraid/main_template.py start -python3 scripts/unraid/main_template.py ip -python3 scripts/unraid/main_template.py status -\`\`\` - -### Service Management on Template VM -\`\`\`bash -# Check service status -ssh $VM_NAME "systemctl status thrillwiki" - -# Restart service -ssh $VM_NAME "sudo systemctl restart thrillwiki" - -# View logs -ssh $VM_NAME "journalctl -u thrillwiki -f" -\`\`\` - -## Template Maintenance - -### Updating Your Template VM -\`\`\`bash -# Get update instructions -./scripts/unraid/template-utils.sh update - -# After updating template VM manually: -./scripts/unraid/template-utils.sh check -\`\`\` - -### Creating Additional Template VMs -You can create multiple template VMs for different purposes: -- Development: \`thrillwiki-template-dev\` -- Staging: \`thrillwiki-template-staging\` -- Production: \`thrillwiki-template-prod\` - -## Troubleshooting - -### Template VM Issues -1. **Template not found**: Verify template VM exists and is stopped -2. **Template VM running**: Stop template before creating instances -3. **Deployment slow**: Template should be 5-10x faster than autoinstall - -### Common Commands -\`\`\`bash -# Check if template is ready -./scripts/unraid/template-utils.sh check - -# Test template VM connectivity -ssh root@unraid-server "virsh domstate $TEMPLATE_VM_NAME" - -# Force stop template VM if needed -ssh root@unraid-server "virsh shutdown $TEMPLATE_VM_NAME" -\`\`\` - -### Support Files -- Template Configuration: \`.thrillwiki-template-config\` -- Environment: \`***REMOVED***.unraid\`, \`***REMOVED***.webhook\` -- Logs: \`logs/\` directory -- Documentation: \`scripts/unraid/README-template-deployment.md\` - -## Performance Comparison - -| Operation | Autoinstall | Template | Improvement | -|-----------|------------|----------|-------------| -| VM Creation | 20-30 min | 2-5 min | **5-6x faster** | -| Boot Time | Full install | Instant | **Instant** | -| Reliability | ISO issues | Pre-tested | **Much higher** | -| Total Deploy | 45+ min | ~10 min | **4-5x faster** | - -**Your template-based automated CI/CD system is now ready!** 🚀⚡ - -Every push to the main branch will automatically deploy to your template VM in minutes, not hours! -EOF - - log_success "Template setup instructions saved to TEMPLATE_SETUP_COMPLETE.md" -} - -# Main automation function -main() { - log "🚀⚡ Starting ThrillWiki Template-Based Complete Unraid Automation" - echo "[AWS-SECRET-REMOVED]==========================" - echo - log_template "Template deployment is 5-10x FASTER than autoinstall approach!" - echo - - # Create logs directory - mkdir -p "$LOG_DIR" - - # Handle reset modes - if [[ "$RESET_ALL" == "true" ]]; then - log "🔄 Complete reset mode - deleting VM and configuration" - echo - - # Load configuration first to get connection details for VM deletion - if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" - log_success "Loaded existing configuration for VM deletion" - else - log_warning "No configuration file found, will skip VM deletion" - fi - - # Delete existing VM if config exists - if [[ -f "$CONFIG_FILE" ]]; then - log "🗑️ Deleting existing template VM..." - - # Check if ***REMOVED***.unraid file exists - if [ -f "$PROJECT_DIR/***REMOVED***.unraid" ]; then - log "Loading environment from ***REMOVED***.unraid..." - set -a - source "$PROJECT_DIR/***REMOVED***.unraid" 2>/dev/null || true - set +a - else - log_warning "***REMOVED***.unraid file not found - VM deletion may not work properly" - log "The VM may not exist or may have been deleted manually" - fi - - # Stop existing VM if running before deletion (for complete reset) - log "🛑 Ensuring VM is stopped before deletion..." - if [ -n "${VM_NAME:-}" ] && [ -n "${UNRAID_HOST:-}" ] && [ -n "${UNRAID_USER:-}" ]; then - if ! stop_existing_vm_for_reset "$VM_NAME" "$UNRAID_HOST" "$UNRAID_USER"; then - log_warning "Failed to stop VM '$VM_NAME' - continuing anyway for complete reset" - log_warning "VM may be forcibly deleted during reset process" - fi - else - log_warning "Missing VM connection details - skipping VM shutdown check" - fi - - # Debug environment loading - log "Debug: VM_NAME=${VM_NAME:-'not set'}" - log "Debug: UNRAID_HOST=${UNRAID_HOST:-'not set'}" - - # Check if main_template.py exists - if [ ! -f "$SCRIPT_DIR/main_template.py" ]; then - log_error "main_template.py not found at: $SCRIPT_DIR/main_template.py" - log "Available files in $SCRIPT_DIR:" - ls -la "$SCRIPT_DIR" - log "Skipping VM deletion due to missing script..." - elif [ -z "${VM_NAME:-}" ] || [ -z "${UNRAID_HOST:-}" ]; then - log_warning "Missing required environment variables for VM deletion" - log "VM_NAME: ${VM_NAME:-'not set'}" - log "UNRAID_HOST: ${UNRAID_HOST:-'not set'}" - log "Skipping VM deletion - VM may not exist or was deleted manually" - else - log "Found main_template.py at: $SCRIPT_DIR/main_template.py" - - # Run delete with timeout and better error handling - log "Attempting VM deletion with timeout..." - if timeout 60 python3 "$SCRIPT_DIR/main_template.py" delete 2>&1; then - log_success "Template VM deleted successfully" - else - deletion_exit_code=$? - if [ $deletion_exit_code -eq 124 ]; then - log_error "⚠️ VM deletion timed out after 60 seconds" - else - log "⚠️ Template VM deletion failed (exit code: $deletion_exit_code) or VM didn't exist" - fi - - # Continue anyway since this might be expected - log "Continuing with script execution..." - fi - fi - fi - - # Remove configuration files - if [[ -f "$CONFIG_FILE" ]]; then - rm "$CONFIG_FILE" - log_success "Template configuration file removed" - fi - - # Remove GitHub token file - if [[ -f "$TOKEN_FILE" ]]; then - rm "$TOKEN_FILE" - log_success "GitHub token file removed" - fi - - # Remove environment files - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" - log_success "Environment files removed" - - log_success "Complete reset finished - continuing with fresh template setup" - echo - - elif [[ "$RESET_VM_ONLY" == "true" ]]; then - log "🔄 VM-only reset mode - deleting VM, preserving configuration" - echo - - # Load configuration to get connection details - if [[ -f "$CONFIG_FILE" ]]; then - source "$CONFIG_FILE" - log_success "Loaded existing configuration" - else - log_error "No configuration file found. Cannot reset VM without connection details." - echo " Run the script without reset flags first to create initial configuration." - exit 1 - fi - - # Stop existing VM if running before deletion - log "🛑 Ensuring VM is stopped before deletion..." - if ! stop_existing_vm_for_reset "$VM_NAME" "$UNRAID_HOST" "$UNRAID_USER"; then - log_error "Failed to stop VM '$VM_NAME'. Cannot proceed safely with VM deletion." - log_error "Please manually stop the VM or resolve the connection issue." - exit 1 - fi - - # Delete existing VM - log "🗑️ Deleting existing template VM..." - - # Check if ***REMOVED***.unraid file exists - if [ -f "$PROJECT_DIR/***REMOVED***.unraid" ]; then - log "Loading environment from ***REMOVED***.unraid..." - set -a - source "$PROJECT_DIR/***REMOVED***.unraid" 2>/dev/null || true - set +a - else - log_warning "***REMOVED***.unraid file not found - VM deletion may not work properly" - log "The VM may not exist or may have been deleted manually" - fi - - # Debug environment loading - log "Debug: VM_NAME=${VM_NAME:-'not set'}" - log "Debug: UNRAID_HOST=${UNRAID_HOST:-'not set'}" - - # Check if main_template.py exists - if [ ! -f "$SCRIPT_DIR/main_template.py" ]; then - log_error "main_template.py not found at: $SCRIPT_DIR/main_template.py" - log "Available files in $SCRIPT_DIR:" - ls -la "$SCRIPT_DIR" - log "Skipping VM deletion due to missing script..." - elif [ -z "${VM_NAME:-}" ] || [ -z "${UNRAID_HOST:-}" ]; then - log_warning "Missing required environment variables for VM deletion" - log "VM_NAME: ${VM_NAME:-'not set'}" - log "UNRAID_HOST: ${UNRAID_HOST:-'not set'}" - log "Skipping VM deletion - VM may not exist or was deleted manually" - else - log "Found main_template.py at: $SCRIPT_DIR/main_template.py" - - # Run delete with timeout and better error handling - log "Attempting VM deletion with timeout..." - if timeout 60 python3 "$SCRIPT_DIR/main_template.py" delete 2>&1; then - log_success "Template VM deleted successfully" - else - deletion_exit_code=$? - if [ $deletion_exit_code -eq 124 ]; then - log_error "⚠️ VM deletion timed out after 60 seconds" - else - log "⚠️ Template VM deletion failed (exit code: $deletion_exit_code) or VM didn't exist" - fi - - # Continue anyway since this might be expected - log "Continuing with script execution..." - fi - fi - - # Remove only environment files, keep main config - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" - log_success "Environment files removed, configuration preserved" - - # Check if GitHub token is available for VM recreation - if [ "$GITHUB_API_ENABLED" = "true" ] && [ -n "$GITHUB_USERNAME" ]; then - log "🔍 Checking for GitHub token availability..." - - # Try to load token from saved file - if load_github_token; then - log_success "✅ GitHub token loaded from secure storage" - elif GITHUB_TOKEN=$(python3 "$SCRIPT_DIR/../github-auth.py" token 2>/dev/null) && [ -n "$GITHUB_TOKEN" ]; then - log_success "✅ GitHub token obtained from authentication script" - - # Validate the token can access the repository immediately - log "🔍 Validating token can access repository..." - if ! validate_github_access; then - log_error "❌ GitHub token validation failed during VM reset." - log_error "Please check your token and repository access before recreating the VM." - return 1 - fi - - # Save the token for future use - save_github_token - else - log_warning "⚠️ No GitHub token found - you'll need to provide it" - echo "GitHub authentication is required for repository cloning and auto-pull." - echo - - if [ "$NON_INTERACTIVE" = "true" ]; then - if [ -n "${GITHUB_TOKEN:-}" ]; then - log "Using token from environment variable" - save_github_token - else - log_error "GITHUB_TOKEN environment variable not set for non-interactive mode" - log_error "Set: export GITHUB_TOKEN='your_token'" - exit 1 - fi - else - read -s -p "Enter GitHub Personal Access Token: " GITHUB_TOKEN - echo - - if [ -n "$GITHUB_TOKEN" ]; then - save_github_token - log_success "✅ GitHub token saved for VM recreation" - else - log_error "GitHub token is required for repository operations" - exit 1 - fi - fi - fi - fi - - log_success "VM reset complete - will recreate VM with saved configuration" - echo - - elif [[ "$RESET_CONFIG_ONLY" == "true" ]]; then - log "🔄 Config-only reset mode - deleting configuration, preserving VM" - echo - - # Remove configuration files - if [[ -f "$CONFIG_FILE" ]]; then - rm "$CONFIG_FILE" - log_success "Template configuration file removed" - fi - - # Remove environment files - rm -f "$PROJECT_DIR/***REMOVED***.unraid" "$PROJECT_DIR/***REMOVED***.webhook" - log_success "Environment files removed" - - log_success "Configuration reset complete - will prompt for fresh configuration" - echo - fi - - # Collect configuration - prompt_template_config - - # Setup steps - setup_ssh_keys - setup_unraid_access - create_environment_files - install_dependencies - create_template_vm - wait_for_template_vm - configure_template_vm - start_template_services - setup_template_webhook_listener - test_template_deployment - generate_template_instructions - - echo - log_success "🎉⚡ Template-based complete automation setup finished!" - echo - log "Your ThrillWiki template VM is running at: http://$VM_IP:8000" - log "Start the webhook listener: ./start-template-webhook.sh" - log "See TEMPLATE_SETUP_COMPLETE.md for detailed instructions" - echo - log_template "🚀 Template deployment is 5-10x FASTER than traditional autoinstall!" - log "The system will now automatically deploy in MINUTES when you push to GitHub!" -} - -# Run main function and log output -main "$@" 2>&1 | tee "$LOG_DIR/template-automation.log" diff --git a/scripts/unraid/template-utils.sh b/scripts/unraid/template-utils.sh deleted file mode 100755 index 61ed9945..00000000 --- a/scripts/unraid/template-utils.sh +++ /dev/null @@ -1,249 +0,0 @@ -#!/bin/bash -# -# ThrillWiki Template VM Management Utilities -# Quick helpers for managing template VMs on Unraid -# - -# Set strict mode -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -log() { - echo -e "${BLUE}[TEMPLATE]${NC} $1" -} - -log_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" -} - -log_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" -} - -log_error() { - echo -e "${RED}[ERROR]${NC} $1" -} - -# Configuration -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -PROJECT_DIR="$(cd "$SCRIPT_DIR/../.." && pwd)" - -# Load environment variables if available -if [[ -f "$PROJECT_DIR/***REMOVED***.unraid" ]]; then - source "$PROJECT_DIR/***REMOVED***.unraid" -else - log_error "No ***REMOVED***.unraid file found. Please run setup-complete-automation.sh first." - exit 1 -fi - -# Function to show help -show_help() { - echo "ThrillWiki Template VM Management Utilities" - echo "" - echo "Usage:" - echo " $0 check Check if template exists and is ready" - echo " $0 info Show template information" - echo " $0 list List all template-based VM instances" - echo " $0 copy VM_NAME Copy template to new VM" - echo " $0 deploy VM_NAME Deploy complete VM from template" - echo " $0 status Show template VM status" - echo " $0 update Update template VM (instructions)" - echo " $0 autopull Manage auto-pull functionality" - echo "" - echo "Auto-pull Commands:" - echo " $0 autopull status Show auto-pull status on VMs" - echo " $0 autopull enable VM Enable auto-pull on specific VM" - echo " $0 autopull disable VM Disable auto-pull on specific VM" - echo " $0 autopull logs VM Show auto-pull logs from VM" - echo " $0 autopull test VM Test auto-pull on specific VM" - echo "" - echo "Examples:" - echo " $0 check # Verify template is ready" - echo " $0 copy thrillwiki-prod # Copy template to new VM" - echo " $0 deploy thrillwiki-test # Complete deployment from template" - echo " $0 autopull status # Check auto-pull status on all VMs" - echo " $0 autopull logs $VM_NAME # View auto-pull logs" - exit 0 -} - -# Check if required environment variables are set -check_environment() { - if [[ -z "$UNRAID_HOST" ]]; then - log_error "UNRAID_HOST not set. Please configure your environment." - exit 1 - fi - - if [[ -z "$UNRAID_USER" ]]; then - UNRAID_USER="root" - log "Using default UNRAID_USER: $UNRAID_USER" - fi - - log_success "Environment configured: $UNRAID_USER@$UNRAID_HOST" -} - -# Function to run python template manager commands -run_template_manager() { - cd "$SCRIPT_DIR" - export UNRAID_HOST="$UNRAID_HOST" - export UNRAID_USER="$UNRAID_USER" - python3 template_manager.py "$@" -} - -# Function to run template-based main script -run_main_template() { - cd "$SCRIPT_DIR" - - # Export all environment variables - export UNRAID_HOST="$UNRAID_HOST" - export UNRAID_USER="$UNRAID_USER" - export VM_NAME="$1" - export VM_MEMORY="${VM_MEMORY:-4096}" - export VM_VCPUS="${VM_VCPUS:-2}" - export VM_DISK_SIZE="${VM_DISK_SIZE:-50}" - export VM_IP="${VM_IP:-dhcp}" - export REPO_URL="${REPO_URL:-}" - export GITHUB_TOKEN="${GITHUB_TOKEN:-}" - - shift # Remove VM_NAME from arguments - python3 main_template.py "$@" -} - -# Parse command line arguments -case "${1:-}" in - check) - log "🔍 Checking template VM availability..." - check_environment - run_template_manager check - ;; - - info) - log "📋 Getting template VM information..." - check_environment - run_template_manager info - ;; - - list) - log "📋 Listing template-based VM instances..." - check_environment - run_template_manager list - ;; - - copy) - if [[ -z "${2:-}" ]]; then - log_error "VM name is required for copy operation" - echo "Usage: $0 copy VM_NAME" - exit 1 - fi - - log "💾 Copying template to VM: $2" - check_environment - run_template_manager copy "$2" - ;; - - deploy) - if [[ -z "${2:-}" ]]; then - log_error "VM name is required for deploy operation" - echo "Usage: $0 deploy VM_NAME" - exit 1 - fi - - log "🚀 Deploying complete VM from template: $2" - check_environment - run_main_template "$2" deploy - ;; - - status) - log "📊 Checking template VM status..." - check_environment - - # Check template VM status directly - ssh "$UNRAID_USER@$UNRAID_HOST" "virsh domstate thrillwiki-template-ubuntu" 2>/dev/null || { - log_error "Could not check template VM status" - exit 1 - } - ;; - - update) - log "🔄 Template VM update instructions:" - echo "" - echo "To update your template VM:" - echo "1. Start the template VM on Unraid" - echo "2. SSH into the template VM" - echo "3. Update packages: sudo apt update && sudo apt upgrade -y" - echo "4. Update ThrillWiki dependencies if needed" - echo "5. Clean up temporary files: sudo apt autoremove && sudo apt autoclean" - echo "6. Clear bash history: history -c && history -w" - echo "7. Shutdown the template VM: sudo shutdown now" - echo "8. The updated disk is now ready as a template" - echo "" - log_warning "IMPORTANT: Template VM must be stopped before creating new instances" - - check_environment - run_template_manager update - ;; - - autopull) - shift # Remove 'autopull' from arguments - autopull_command="${1:-status}" - vm_name="${2:-$VM_NAME}" - - log "🔄 Managing auto-pull functionality..." - check_environment - - # Get list of all template VMs - if [[ "$autopull_command" == "status" ]] && [[ "$vm_name" == "$VM_NAME" ]]; then - all_vms=$(run_template_manager list | grep -E "(running|shut off)" | awk '{print $2}' || echo "") - else - all_vms=$vm_name - fi - - if [[ -z "$all_vms" ]]; then - log_warning "No running template VMs found to manage auto-pull on." - exit 0 - fi - - for vm in $all_vms; do - log "====== Auto-pull for VM: $vm ======" - - case "$autopull_command" in - status) - ssh "$vm" "[AWS-SECRET-REMOVED]uto-pull.sh --status" - ;; - enable) - ssh "$vm" "(crontab -l 2>/dev/null || echo \"\") | { cat; echo \"*/10 * * * * [AWS-SECRET-REMOVED]uto-pull.sh >> /home/thrillwiki/logs/cron.log 2>&1\"; } | crontab - && echo '✅ Auto-pull enabled' || echo '❌ Failed to enable'" - ;; - disable) - ssh "$vm" "crontab -l 2>/dev/null | grep -v 'auto-pull.sh' | crontab - && echo '✅ Auto-pull disabled' || echo '❌ Failed to disable'" - ;; - logs) - ssh "$vm" "[AWS-SECRET-REMOVED]uto-pull.sh --logs" - ;; - test) - ssh "$vm" "[AWS-SECRET-REMOVED]uto-pull.sh --force" - ;; - *) - log_error "Invalid auto-pull command: $autopull_command" - show_help - exit 1 - ;; - esac - echo - done - ;; - - --help|-h|help|"") - show_help - ;; - - *) - log_error "Unknown command: ${1:-}" - echo "" - show_help - ;; -esac diff --git a/scripts/unraid/template_manager.py b/scripts/unraid/template_manager.py deleted file mode 100644 index f0641367..00000000 --- a/scripts/unraid/template_manager.py +++ /dev/null @@ -1,571 +0,0 @@ -#!/usr/bin/env python3 -""" -Template VM Manager for ThrillWiki -Handles copying template VM disks and managing template-based deployments. -""" - -import os -import sys -import time -import logging -import subprocess -from typing import Dict - -logger = logging.getLogger(__name__) - - -class TemplateVMManager: - """Manages template-based VM deployment on Unraid.""" - - def __init__(self, unraid_host: str, unraid_user: str = "root"): - self.unraid_host = unraid_host - self.unraid_user = unraid_user - self.template_vm_name = "thrillwiki-template-ubuntu" - self.template_path = f"/mnt/user/domains/{self.template_vm_name}" - - def authenticate(self) -> bool: - """Test SSH connectivity to Unraid server.""" - try: - result = subprocess.run( - f"ssh -o ConnectTimeout=10 {self.unraid_user}@{self.unraid_host} 'echo Connected'", - shell=True, - capture_output=True, - text=True, - timeout=15, - ) - - if result.returncode == 0 and "Connected" in result.stdout: - logger.info("Successfully connected to Unraid via SSH") - return True - else: - logger.error(f"SSH connection failed: {result.stderr}") - return False - except Exception as e: - logger.error(f"SSH authentication error: {e}") - return False - - def check_template_exists(self) -> bool: - """Check if template VM disk exists.""" - try: - result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'test -f {self.template_path}/vdisk1.qcow2'", - shell=True, - capture_output=True, - text=True, - ) - if result.returncode == 0: - logger.info( - f"Template VM disk found at { - self.template_path}/vdisk1.qcow2" - ) - return True - else: - logger.error( - f"Template VM disk not found at { - self.template_path}/vdisk1.qcow2" - ) - return False - except Exception as e: - logger.error(f"Error checking template existence: {e}") - return False - - def get_template_info(self) -> Dict[str, str]: - """Get information about the template VM.""" - try: - # Get disk size - size_result = subprocess.run( - f"ssh { - self.unraid_user}@{ - self.unraid_host} 'qemu-img info { - self.template_path}/vdisk1.qcow2 | grep \"virtual size\"'", - shell=True, - capture_output=True, - text=True, - ) - - # Get file size - file_size_result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'ls -lh {self.template_path}/vdisk1.qcow2'", - shell=True, - capture_output=True, - text=True, - ) - - # Get last modification time - mod_time_result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'stat -c \"%y\" {self.template_path}/vdisk1.qcow2'", - shell=True, - capture_output=True, - text=True, - ) - - info = { - "template_path": f"{ - self.template_path}/vdisk1.qcow2", - "virtual_size": ( - size_result.stdout.strip() - if size_result.returncode == 0 - else "Unknown" - ), - "file_size": ( - file_size_result.stdout.split()[4] - if file_size_result.returncode == 0 - else "Unknown" - ), - "last_modified": ( - mod_time_result.stdout.strip() - if mod_time_result.returncode == 0 - else "Unknown" - ), - } - - return info - - except Exception as e: - logger.error(f"Error getting template info: {e}") - return {} - - def copy_template_disk(self, target_vm_name: str) -> bool: - """Copy template VM disk to a new VM instance.""" - try: - if not self.check_template_exists(): - logger.error("Template VM disk not found. Cannot proceed with copy.") - return False - - target_path = f"/mnt/user/domains/{target_vm_name}" - target_disk = f"{target_path}/vdisk1.qcow2" - - logger.info(f"Copying template disk to new VM: {target_vm_name}") - - # Create target directory - subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'mkdir -p {target_path}'", - shell=True, - check=True, - ) - - # Check if target disk already exists - disk_check = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'test -f {target_disk}'", - shell=True, - capture_output=True, - ) - - if disk_check.returncode == 0: - logger.warning(f"Target disk already exists: {target_disk}") - logger.info( - "Removing existing disk to replace with fresh template copy..." - ) - subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'rm -f {target_disk}'", - shell=True, - check=True, - ) - - # Copy template disk with rsync progress display - logger.info("🚀 Copying template disk with rsync progress display...") - start_time = time.time() - - # First, get the size of the template disk for progress calculation - size_result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'stat -c%s {self.template_path}/vdisk1.qcow2'", - shell=True, - capture_output=True, - text=True, - ) - - template_size = "unknown size" - if size_result.returncode == 0: - size_bytes = int(size_result.stdout.strip()) - if size_bytes > 1024 * 1024 * 1024: # GB - template_size = f"{size_bytes / - (1024 * - 1024 * - 1024):.1f}GB" - elif size_bytes > 1024 * 1024: # MB - template_size = f"{size_bytes / (1024 * 1024):.1f}MB" - else: - template_size = f"{size_bytes / 1024:.1f}KB" - - logger.info(f"📊 Template disk size: {template_size}") - - # Use rsync with progress display - logger.info("📈 Using rsync for real-time progress display...") - - # Force rsync to output progress to stderr and capture it - copy_cmd = f"ssh { - self.unraid_user}@{ - self.unraid_host} 'rsync -av --progress --stats { - self.template_path}/vdisk1.qcow2 {target_disk}'" - - # Run with real-time output, unbuffered - process = subprocess.Popen( - copy_cmd, - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - bufsize=0, # Unbuffered - universal_newlines=True, - ) - - import select - - # Read both stdout and stderr for progress with real-time display - while True: - # Check if process is still running - if process.poll() is not None: - # Process finished, read any remaining output - remaining_out = process.stdout.read() - remaining_err = process.stderr.read() - if remaining_out: - print(f"📊 {remaining_out.strip()}", flush=True) - logger.info(f"📊 {remaining_out.strip()}") - if remaining_err: - for line in remaining_err.strip().split("\n"): - if line.strip(): - print(f"⚡ {line.strip()}", flush=True) - logger.info(f"⚡ {line.strip()}") - break - - # Use select to check for available data - try: - ready, _, _ = select.select( - [process.stdout, process.stderr], [], [], 0.1 - ) - - for stream in ready: - line = stream.readline() - if line: - line = line.strip() - if line: - if stream == process.stdout: - print(f"📊 {line}", flush=True) - logger.info(f"📊 {line}") - else: # stderr - # rsync progress goes to stderr - if any( - keyword in line - for keyword in [ - "%", - "bytes/sec", - "to-check=", - "xfr#", - ] - ): - print(f"⚡ {line}", flush=True) - logger.info(f"⚡ {line}") - else: - print(f"📋 {line}", flush=True) - logger.info(f"📋 {line}") - except select.error: - # Fallback for systems without select (like some Windows - # environments) - print( - "⚠️ select() not available, using fallback method...", - flush=True, - ) - logger.info("⚠️ select() not available, using fallback method...") - - # Simple fallback - just wait and read what's available - time.sleep(0.5) - try: - # Try to read non-blocking - import fcntl - import os - - # Make stdout/stderr non-blocking - fd_out = process.stdout.fileno() - fd_err = process.stderr.fileno() - fl_out = fcntl.fcntl(fd_out, fcntl.F_GETFL) - fl_err = fcntl.fcntl(fd_err, fcntl.F_GETFL) - fcntl.fcntl(fd_out, fcntl.F_SETFL, fl_out | os.O_NONBLOCK) - fcntl.fcntl(fd_err, fcntl.F_SETFL, fl_err | os.O_NONBLOCK) - - try: - out_line = process.stdout.readline() - if out_line: - print(f"📊 {out_line.strip()}", flush=True) - logger.info(f"📊 {out_line.strip()}") - except BaseException: - pass - - try: - err_line = process.stderr.readline() - if err_line: - if any( - keyword in err_line - for keyword in [ - "%", - "bytes/sec", - "to-check=", - "xfr#", - ] - ): - print(f"⚡ {err_line.strip()}", flush=True) - logger.info(f"⚡ {err_line.strip()}") - else: - print(f"📋 {err_line.strip()}", flush=True) - logger.info(f"📋 {err_line.strip()}") - except BaseException: - pass - except ImportError: - # If fcntl not available, just continue - print( - "📊 Progress display limited - continuing copy...", - flush=True, - ) - logger.info("📊 Progress display limited - continuing copy...") - break - - copy_result_code = process.wait() - - end_time = time.time() - copy_time = end_time - start_time - - if copy_result_code == 0: - logger.info( - f"✅ Template disk copied successfully in { - copy_time:.1f} seconds" - ) - logger.info(f"🎯 New VM disk created: {target_disk}") - - # Verify the copy by checking file size - verify_result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'ls -lh {target_disk}'", - shell=True, - capture_output=True, - text=True, - ) - - if verify_result.returncode == 0: - file_info = verify_result.stdout.strip().split() - if len(file_info) >= 5: - copied_size = file_info[4] - logger.info(f"📋 Copied disk size: {copied_size}") - - return True - else: - logger.error( - f"❌ Failed to copy template disk (exit code: {copy_result_code})" - ) - logger.error("Check Unraid server disk space and permissions") - return False - - except Exception as e: - logger.error(f"Error copying template disk: {e}") - return False - - def prepare_vm_from_template( - self, target_vm_name: str, vm_memory: int, vm_vcpus: int, vm_ip: str - ) -> bool: - """Complete template-based VM preparation.""" - try: - logger.info(f"Preparing VM '{target_vm_name}' from template...") - - # Step 1: Copy template disk - if not self.copy_template_disk(target_vm_name): - return False - - logger.info(f"VM '{target_vm_name}' prepared successfully from template") - logger.info("The VM disk is ready with Ubuntu pre-installed") - logger.info("You can now create the VM configuration and start it") - - return True - - except Exception as e: - logger.error(f"Error preparing VM from template: {e}") - return False - - def update_template(self) -> bool: - """Update the template VM with latest changes.""" - try: - logger.info("Updating template VM...") - logger.info("Note: This should be done manually by:") - logger.info("1. Starting the template VM") - logger.info("2. Updating Ubuntu packages") - logger.info("3. Updating ThrillWiki dependencies") - logger.info("4. Stopping the template VM") - logger.info("5. The disk will automatically be the new template") - - # Check template VM status - template_status = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'virsh domstate {self.template_vm_name}'", - shell=True, - capture_output=True, - text=True, - ) - - if template_status.returncode == 0: - status = template_status.stdout.strip() - logger.info( - f"Template VM '{ - self.template_vm_name}' status: {status}" - ) - - if status == "running": - logger.warning("Template VM is currently running!") - logger.warning("Stop the template VM when updates are complete") - logger.warning("Running VMs should not be used as templates") - return False - elif status in ["shut off", "shutoff"]: - logger.info( - "Template VM is properly stopped and ready to use as template" - ) - return True - else: - logger.warning(f"Template VM in unexpected state: {status}") - return False - else: - logger.error("Could not check template VM status") - return False - - except Exception as e: - logger.error(f"Error updating template: {e}") - return False - - def list_template_instances(self) -> list: - """List all VMs that were created from the template.""" - try: - # Get all domains - result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'virsh list --all --name'", - shell=True, - capture_output=True, - text=True, - ) - - if result.returncode != 0: - logger.error("Failed to list VMs") - return [] - - all_vms = result.stdout.strip().split("\n") - - # Filter for thrillwiki VMs (excluding template) - template_instances = [] - for vm in all_vms: - vm = vm.strip() - if vm and "thrillwiki" in vm.lower() and vm != self.template_vm_name: - # Get VM status - status_result = subprocess.run( - f"ssh {self.unraid_user}@{self.unraid_host} 'virsh domstate {vm}'", - shell=True, - capture_output=True, - text=True, - ) - status = ( - status_result.stdout.strip() - if status_result.returncode == 0 - else "unknown" - ) - template_instances.append({"name": vm, "status": status}) - - return template_instances - - except Exception as e: - logger.error(f"Error listing template instances: {e}") - return [] - - -def main(): - """Main entry point for template manager.""" - import argparse - - parser = argparse.ArgumentParser( - description="ThrillWiki Template VM Manager", - epilog=""" -Examples: - python template_manager.py info # Show template info - python template_manager.py copy my-vm # Copy template to new VM - python template_manager.py list # List template instances - python template_manager.py update # Update template VM - """, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - - parser.add_argument( - "action", - choices=["info", "copy", "list", "update", "check"], - help="Action to perform", - ) - - parser.add_argument("vm_name", nargs="?", help="VM name (required for copy action)") - - args = parser.parse_args() - - # Get Unraid connection details from environment - unraid_host = os.environ.get("UNRAID_HOST") - unraid_user = os.environ.get("UNRAID_USER", "root") - - if not unraid_host: - logger.error("UNRAID_HOST environment variable is required") - sys.exit(1) - - # Create template manager - template_manager = TemplateVMManager(unraid_host, unraid_user) - - # Authenticate - if not template_manager.authenticate(): - logger.error("Failed to connect to Unraid server") - sys.exit(1) - - if args.action == "info": - logger.info("📋 Template VM Information") - info = template_manager.get_template_info() - if info: - print(f"Template Path: {info['template_path']}") - print(f"Virtual Size: {info['virtual_size']}") - print(f"File Size: {info['file_size']}") - print(f"Last Modified: {info['last_modified']}") - else: - print("❌ Failed to get template information") - sys.exit(1) - - elif args.action == "check": - if template_manager.check_template_exists(): - logger.info("✅ Template VM disk exists and is ready to use") - sys.exit(0) - else: - logger.error("❌ Template VM disk not found") - sys.exit(1) - - elif args.action == "copy": - if not args.vm_name: - logger.error("VM name is required for copy action") - sys.exit(1) - - success = template_manager.copy_template_disk(args.vm_name) - sys.exit(0 if success else 1) - - elif args.action == "list": - logger.info("📋 Template-based VM Instances") - instances = template_manager.list_template_instances() - if instances: - for instance in instances: - status_emoji = ( - "🟢" - if instance["status"] == "running" - else "🔴" if instance["status"] == "shut off" else "🟡" - ) - print( - f"{status_emoji} { - instance['name']} ({ - instance['status']})" - ) - else: - print("No template instances found") - - elif args.action == "update": - success = template_manager.update_template() - sys.exit(0 if success else 1) - - -if __name__ == "__main__": - # Setup logging - logging.basicConfig( - level=logging.INFO, - format="%(asctime)s - %(levelname)s - %(message)s", - handlers=[logging.StreamHandler()], - ) - - main() diff --git a/scripts/unraid/thrillwiki-vm-template-simple.xml b/scripts/unraid/thrillwiki-vm-template-simple.xml deleted file mode 100644 index 89be074c..00000000 --- a/scripts/unraid/thrillwiki-vm-template-simple.xml +++ /dev/null @@ -1,116 +0,0 @@ - - - {VM_NAME} - {VM_UUID} - - - - {VM_MEMORY_KIB} - {VM_MEMORY_KIB} - {VM_VCPUS} - - hvm - /usr/share/qemu/ovmf-x64/OVMF_CODE-pure-efi.fd - /etc/libvirt/qemu/nvram/{VM_UUID}_VARS-pure-efi.fd - - - - - - - - - - - - - - - - - - destroy - restart - restart - - - - - - /usr/local/sbin/qemu - - - - - -
- - -
- - - - - -
- - - - -
- - - - -
- - - - -
- - - - -
- - -
- - - - - -
- - - - - - - - - - - -
- - -
- - - - - - -