Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git
Synced 2025-12-20 11:51:10 -05:00

Compare commits: `pixeebot/d...` → `clean-hist` (20 commits)

| SHA1 |
|---|
| d0ee283abf |
| 31d83c8889 |
| 46c6e45eae |
| f5db23a791 |
| 78248aa892 |
| 641fc1a253 |
| ca7555c052 |
| 74b45aa143 |
| d9fc13f350 |
| f4f8ec8f9b |
| 274ba650b3 |
| cc990ee003 |
| 63b9cf1a70 |
| c26414ff74 |
| 17228e9935 |
| 32736ae660 |
| b5bae44cb8 |
| da7c7e3381 |
| f6c8e0e25c |
| 16386deee7 |
.env.example (new file, 90 lines)

@@ -0,0 +1,90 @@
# ============================================================
# ThrillWiki Environment Configuration
# ============================================================
# Copy this file to .env and fill in your actual values

# ============================================================
# Core Django Settings
# ============================================================
SECRET_KEY=your-secret-key-here-generate-a-new-one
DEBUG=True
ALLOWED_HOSTS=localhost,127.0.0.1,beta.thrillwiki.com
CSRF_TRUSTED_ORIGINS=https://beta.thrillwiki.com,http://localhost:8000

# ============================================================
# Database Configuration
# ============================================================
# PostgreSQL with PostGIS for production/development
DATABASE_URL=postgis://username:password@localhost:5432/thrillwiki

# SQLite for quick local development (uncomment to use)
# DATABASE_URL=spatialite:///path/to/your/db.sqlite3

# ============================================================
# Cache Configuration
# ============================================================
# Local memory cache for development
CACHE_URL=locmem://

# Redis for production (uncomment and configure for production)
# CACHE_URL=redis://localhost:6379/1
# REDIS_URL=redis://localhost:6379/0

CACHE_MIDDLEWARE_SECONDS=300
CACHE_MIDDLEWARE_KEY_PREFIX=thrillwiki

# ============================================================
# Email Configuration
# ============================================================
EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend
SERVER_EMAIL=django_webmaster@thrillwiki.com

# ForwardEmail configuration (uncomment to use)
# EMAIL_BACKEND=email_service.backends.ForwardEmailBackend
# FORWARD_EMAIL_BASE_URL=https://api.forwardemail.net

# SMTP configuration (uncomment to use)
# EMAIL_URL=smtp://username:password@smtp.example.com:587

# ============================================================
# Security Settings
# ============================================================
# Cloudflare Turnstile (get keys from Cloudflare dashboard)
TURNSTILE_SITE_KEY=your-turnstile-site-key
TURNSTILE_SECRET_KEY=your-turnstile-secret-key
TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify

# Security headers (set to True for production)
SECURE_SSL_REDIRECT=False
SESSION_COOKIE_SECURE=False
CSRF_COOKIE_SECURE=False
SECURE_HSTS_SECONDS=31536000
SECURE_HSTS_INCLUDE_SUBDOMAINS=True

# ============================================================
# GeoDjango Settings (macOS with Homebrew)
# ============================================================
GDAL_LIBRARY_PATH=/opt/homebrew/lib/libgdal.dylib
GEOS_LIBRARY_PATH=/opt/homebrew/lib/libgeos_c.dylib

# Linux alternatives (uncomment if on Linux)
# GDAL_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/libgdal.so
# GEOS_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/libgeos_c.so

# ============================================================
# Optional: Third-party Integrations
# ============================================================
# Sentry for error tracking (uncomment to use)
# SENTRY_DSN=https://your-sentry-dsn-here

# Google Analytics (uncomment to use)
# GOOGLE_ANALYTICS_ID=GA-XXXXXXXXX

# ============================================================
# Development/Debug Settings
# ============================================================
# Set to comma-separated list for debug toolbar
# INTERNAL_IPS=127.0.0.1,::1

# Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
LOG_LEVEL=INFO
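For orientation only: the diff does not show how the project's settings module reads these keys, but a minimal sketch, assuming a library such as django-environ, might look like the following. All names here are illustrative, not taken from the repository.

```python
# settings.py (hypothetical sketch) -- assumes django-environ is installed;
# the project's actual settings module may read these variables differently.
import environ

env = environ.Env(
    DEBUG=(bool, False),                 # cast DEBUG to a boolean, default False
    SECURE_SSL_REDIRECT=(bool, False),
    CACHE_MIDDLEWARE_SECONDS=(int, 300),
)
environ.Env.read_env(".env")             # load the copied .env file

SECRET_KEY = env("SECRET_KEY")
DEBUG = env("DEBUG")
ALLOWED_HOSTS = env.list("ALLOWED_HOSTS")          # splits the comma-separated value
DATABASES = {"default": env.db("DATABASE_URL")}    # understands postgis:// URLs
CACHES = {"default": env.cache("CACHE_URL")}       # understands locmem:// and redis://
```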
.gitignore (vendored, 24 lines changed)

@@ -347,13 +347,19 @@ cython_debug/
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/

+# Pixi package manager
+.pixi/
+
+# Django Tailwind CLI
+.django_tailwind_cli/
+
 # General
 .DS_Store
 .AppleDouble
 .LSOverride

 # Icon must end with two \r
 Icon

 # Thumbnails
 ._*
@@ -373,3 +379,19 @@ Icon
 Network Trash Folder
 Temporary Items
 .apdisk
+
+
+# ThrillWiki CI/CD Configuration
+.thrillwiki-config
+.env.unraid
+.env.webhook
+.github-token
+logs/
+profiles
+.thrillwiki-github-token
+.thrillwiki-template-config
+
+# Environment files with potential secrets
+scripts/systemd/thrillwiki-automation.env
+scripts/systemd/thrillwiki-deployment.env
+scripts/systemd/****REMOVED***backups/
CI_README.md (new file, 277 lines)

@@ -0,0 +1,277 @@
# ThrillWiki CI/CD System

This repository includes a **complete automated CI/CD system** that creates a Linux VM on Unraid and automatically deploys ThrillWiki when commits are pushed to GitHub.

## 🚀 Complete Automation (Unraid)

For **full automation**, including VM creation on Unraid:

```bash
./scripts/unraid/setup-complete-automation.sh
```

This single command will:

- ✅ Create and configure a VM on Unraid
- ✅ Install Ubuntu Server with all dependencies
- ✅ Deploy the ThrillWiki application
- ✅ Set up the automated CI/CD pipeline
- ✅ Configure the webhook listener
- ✅ Test the entire system

## Manual Setup (Any Linux VM)

For manual setup on existing Linux VMs:

```bash
./scripts/setup-vm-ci.sh
```

## System Components

### 📁 Files Created

```
scripts/
├── ci-start.sh                      # Local development server startup
├── webhook-listener.py              # GitHub webhook listener
├── vm-deploy.sh                     # VM deployment script
├── setup-vm-ci.sh                   # Manual VM setup script
├── unraid/
│   ├── vm-manager.py                # Unraid VM management
│   └── setup-complete-automation.sh # Complete automation
└── systemd/
    ├── thrillwiki.service           # Django app service
    └── thrillwiki-webhook.service   # Webhook listener service

docs/
├── VM_DEPLOYMENT_SETUP.md           # Manual setup documentation
└── UNRAID_COMPLETE_AUTOMATION.md    # Complete automation guide
```

### 🔄 Deployment Flow

**Complete Automation:**
```
GitHub Push → Webhook → Local Listener → SSH → Unraid VM → Deploy & Restart
```

**Manual Setup:**
```
GitHub Push → Webhook → Local Listener → SSH to VM → Deploy Script → Server Restart
```

## Features

- **Complete VM Automation**: Automatically creates VMs on Unraid
- **Automatic Deployment**: Deploys on every push to the main branch
- **Health Checks**: Verifies deployment success
- **Rollback Support**: Automatic rollback on deployment failure
- **Service Management**: systemd integration for reliable service management
- **Database Setup**: Automated PostgreSQL configuration
- **Logging**: Comprehensive logging for debugging
- **Security**: SSH key authentication and webhook secrets
- **One-Command Setup**: Full automation with a single script

## Usage

### Complete Automation (Recommended)

For Unraid users, run the complete automation:

```bash
./scripts/unraid/setup-complete-automation.sh
```

After setup, start the webhook listener:

```bash
./start-webhook.sh
```

### Local Development

Start the local development server:

```bash
./scripts/ci-start.sh
```

### VM Management (Unraid)

```bash
# Check VM status
python3 scripts/unraid/vm-manager.py status

# Start/stop VM
python3 scripts/unraid/vm-manager.py start
python3 scripts/unraid/vm-manager.py stop

# Get VM IP
python3 scripts/unraid/vm-manager.py ip
```

### Service Management

On the VM:

```bash
# Check status
ssh thrillwiki-vm "./scripts/vm-deploy.sh status"

# Restart service
ssh thrillwiki-vm "./scripts/vm-deploy.sh restart"

# View logs
ssh thrillwiki-vm "journalctl -u thrillwiki -f"
```

### Manual VM Deployment

Deploy to the VM manually:

```bash
ssh thrillwiki-vm "cd thrillwiki && ./scripts/vm-deploy.sh"
```

## Configuration

### Automated Configuration

The complete automation script creates all necessary configuration files:

- `.env.unraid` - Unraid VM configuration
- `.env.webhook` - Webhook listener configuration
- SSH keys and configuration
- Service configurations

### Manual Environment Variables

For manual setup, create a `.env.webhook` file:

```bash
WEBHOOK_PORT=9000
WEBHOOK_SECRET=your_secret_here
VM_HOST=your_vm_ip
VM_USER=ubuntu
VM_KEY_PATH=/path/to/ssh/key
VM_PROJECT_PATH=/home/ubuntu/thrillwiki
REPO_URL=https://github.com/username/repo.git
DEPLOY_BRANCH=main
```

### GitHub Webhook

Configure the webhook in your GitHub repository:

- **URL**: `http://YOUR_PUBLIC_IP:9000/webhook`
- **Content Type**: `application/json`
- **Secret**: Your webhook secret
- **Events**: Push events
|
||||
|
||||
### For Complete Automation
|
||||
- **Local Machine**: Python 3.8+, SSH client
|
||||
- **Unraid Server**: 6.8+ with VM support
|
||||
- **Resources**: 4GB RAM, 50GB disk minimum
|
||||
- **Ubuntu ISO**: Ubuntu Server 22.04 in `/mnt/user/isos/`
|
||||
|
||||
### For Manual Setup
|
||||
- **Local Machine**: Python 3.8+, SSH access to VM, Public IP
|
||||
- **Linux VM**: Ubuntu 20.04+, Python 3.8+, UV package manager, Git, SSH server
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Complete Automation Issues
|
||||
|
||||
1. **VM Creation Fails**
|
||||
```bash
|
||||
# Check Unraid VM support
|
||||
ssh unraid "virsh list --all"
|
||||
|
||||
# Verify Ubuntu ISO exists
|
||||
ssh unraid "ls -la /mnt/user/isos/ubuntu-*.iso"
|
||||
```
|
||||
|
||||
2. **VM Won't Start**
|
||||
```bash
|
||||
# Check VM status
|
||||
python3 scripts/unraid/vm-manager.py status
|
||||
|
||||
# Check Unraid logs
|
||||
ssh unraid "tail -f /var/log/libvirt/qemu/thrillwiki-vm.log"
|
||||
```
|
||||
|
||||
### General Issues
|
||||
|
||||
1. **SSH Connection Failed**
|
||||
```bash
|
||||
# Check SSH key permissions
|
||||
chmod 600 ~/.ssh/thrillwiki_vm
|
||||
|
||||
# Test connection
|
||||
ssh thrillwiki-vm
|
||||
```
|
||||
|
||||
2. **Webhook Not Receiving Events**
|
||||
```bash
|
||||
# Check if port is open
|
||||
sudo ufw allow 9000
|
||||
|
||||
# Verify webhook URL in GitHub
|
||||
curl -X GET http://localhost:9000/health
|
||||
```
|
||||
|
||||
3. **Service Won't Start**
|
||||
```bash
|
||||
# Check service logs
|
||||
ssh thrillwiki-vm "journalctl -u thrillwiki --no-pager"
|
||||
|
||||
# Manual start
|
||||
ssh thrillwiki-vm "cd thrillwiki && ./scripts/ci-start.sh"
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
- **Setup logs**: `logs/unraid-automation.log`
|
||||
- **Local webhook**: `logs/webhook.log`
|
||||
- **VM deployment**: `logs/deploy.log` (on VM)
|
||||
- **Django server**: `logs/django.log` (on VM)
|
||||
- **System logs**: `journalctl -u thrillwiki -f` (on VM)
|
||||
|
||||
## Security Notes
|
||||
|
||||
- Automated SSH key generation and management
|
||||
- Dedicated keys for each connection (VM access, Unraid access)
|
||||
- No password authentication
|
||||
- Systemd security features enabled
|
||||
- Firewall configuration support
|
||||
- Secret management in environment files
|
||||
|
||||
## Documentation
|
||||
|
||||
- **Complete Automation**: [`docs/UNRAID_COMPLETE_AUTOMATION.md`](docs/UNRAID_COMPLETE_AUTOMATION.md)
|
||||
- **Manual Setup**: [`docs/VM_DEPLOYMENT_SETUP.md`](docs/VM_DEPLOYMENT_SETUP.md)
|
||||
|
||||
---
|
||||
|
||||
## Quick Start Summary
|
||||
|
||||
### For Unraid Users (Complete Automation)
|
||||
```bash
|
||||
# One command to set up everything
|
||||
./scripts/unraid/setup-complete-automation.sh
|
||||
|
||||
# Start webhook listener
|
||||
./start-webhook.sh
|
||||
|
||||
# Push commits to auto-deploy!
|
||||
```
|
||||
|
||||
### For Existing VM Users
|
||||
```bash
|
||||
# Manual setup
|
||||
./scripts/setup-vm-ci.sh
|
||||
|
||||
# Configure webhook and push to deploy
|
||||
```
|
||||
|
||||
**The system will automatically deploy your Django application whenever you push commits to the main branch!** 🚀
|
||||
README.md (23 lines changed)

@@ -183,12 +183,33 @@ uv run manage.py collectstatic

### CSS Development

-The project uses Tailwind CSS with a custom dark theme. CSS files are located in:
+The project uses **Tailwind CSS v4** with a custom dark theme. CSS files are located in:
- Source: [`static/css/src/input.css`](static/css/src/input.css)
- Compiled: [`static/css/`](static/css/) (auto-generated)

Tailwind automatically compiles when using the `tailwind runserver` command.

#### Tailwind CSS v4 Migration

This project has been migrated from Tailwind CSS v3 to v4. For complete migration details:

- **📖 Full Migration Documentation**: [`TAILWIND_V4_MIGRATION.md`](TAILWIND_V4_MIGRATION.md)
- **⚡ Quick Reference Guide**: [`TAILWIND_V4_QUICK_REFERENCE.md`](TAILWIND_V4_QUICK_REFERENCE.md)

**Key v4 Changes**:
- New CSS-first approach with `@theme` blocks
- Updated utility class names (e.g., `outline-none` → `outline-hidden`)
- New opacity syntax (e.g., `bg-blue-500/50` instead of `bg-blue-500 bg-opacity-50`)
- Enhanced performance and smaller bundle sizes

**Custom Theme Variables** (available in CSS):
```css
var(--color-primary)    /* #4f46e5 - Indigo-600 */
var(--color-secondary)  /* #e11d48 - Rose-600 */
var(--color-accent)     /* #8b5cf6 - Violet-500 */
var(--font-family-sans) /* Poppins, sans-serif */
```

## 🏗️ Project Structure
TAILWIND_V4_MIGRATION.md (new file, 326 lines)

@@ -0,0 +1,326 @@
# Tailwind CSS v3 to v4 Migration Documentation

## Overview

This document details the complete migration process from Tailwind CSS v3 to v4 for the Django ThrillWiki project. The migration was performed on August 15, 2025, and includes all changes, configurations, and verification steps.

## Migration Summary

- **From**: Tailwind CSS v3.x
- **To**: Tailwind CSS v4.1.12
- **Project**: Django ThrillWiki (Django + Tailwind CSS integration)
- **Status**: ✅ Complete and Verified
- **Breaking Changes**: None (all styling preserved)

## Key Changes in Tailwind CSS v4

### 1. CSS Import Syntax
- **v3**: Used `@tailwind` directives
- **v4**: Uses a single `@import "tailwindcss"` statement

### 2. Theme Configuration
- **v3**: Configuration in `tailwind.config.js`
- **v4**: CSS-first approach with `@theme` blocks

### 3. Deprecated Utilities
Multiple utility classes were renamed or deprecated in v4.

## Migration Steps Performed

### Step 1: Update Main CSS File

**File**: `static/css/src/input.css`

**Before (v3)**:
```css
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Custom styles... */
```

**After (v4)**:
```css
@import "tailwindcss";

@theme {
  --color-primary: #4f46e5;
  --color-secondary: #e11d48;
  --color-accent: #8b5cf6;
  --font-family-sans: Poppins, sans-serif;
}

/* Custom styles... */
```

### Step 2: Theme Variable Migration

Migrated custom colors and fonts from `tailwind.config.js` to CSS variables in the `@theme` block:

| Variable | Value | Description |
|----------|-------|-------------|
| `--color-primary` | `#4f46e5` | Indigo-600 (primary brand color) |
| `--color-secondary` | `#e11d48` | Rose-600 (secondary brand color) |
| `--color-accent` | `#8b5cf6` | Violet-500 (accent color) |
| `--font-family-sans` | `Poppins, sans-serif` | Primary font family |

### Step 3: Deprecated Utility Updates

#### Outline Utilities
- **Changed**: `outline-none` → `outline-hidden`
- **Files affected**: All template files, component CSS

#### Ring Utilities
- **Changed**: `ring` → `ring-3`
- **Reason**: The default ring width now requires explicit specification

#### Shadow Utilities
- **Changed**:
  - `shadow-sm` → `shadow-xs`
  - `shadow` → `shadow-sm`
- **Files affected**: Button components, card components

#### Opacity Utilities
- **Changed**: `bg-opacity-*` format → `color/opacity` format
- **Example**: `bg-blue-500 bg-opacity-50` → `bg-blue-500/50`

#### Flex Utilities
- **Changed**: `flex-shrink-0` → `shrink-0`

#### Important Modifier
- **Changed**: `!important` → `!` (shorter syntax)
- **Example**: `!outline-none` → `!outline-hidden`

### Step 4: Template File Updates

Updated the following template files with the new utility classes:

#### Core Templates
- `templates/base.html`
- `templates/components/navbar.html`
- `templates/components/footer.html`

#### Page Templates
- `templates/parks/park_list.html`
- `templates/parks/park_detail.html`
- `templates/rides/ride_list.html`
- `templates/rides/ride_detail.html`
- `templates/companies/company_list.html`
- `templates/companies/company_detail.html`

#### Form Templates
- `templates/parks/park_form.html`
- `templates/rides/ride_form.html`
- `templates/companies/company_form.html`

#### Component Templates
- `templates/components/search_results.html`
- `templates/components/pagination.html`

### Step 5: Component CSS Updates

Updated custom component classes in `static/css/src/input.css`:

**Button Components**:
```css
.btn-primary {
  @apply inline-flex items-center px-6 py-2.5 border border-transparent rounded-full shadow-md text-sm font-medium text-white bg-gradient-to-r from-primary to-secondary hover:from-primary/90 hover:to-secondary/90 focus:outline-hidden focus:ring-3 focus:ring-offset-2 focus:ring-primary/50 transform hover:scale-105 transition-all;
}

.btn-secondary {
  @apply inline-flex items-center px-6 py-2.5 border border-gray-200 dark:border-gray-700 rounded-full shadow-md text-sm font-medium text-gray-700 dark:text-gray-200 bg-white dark:bg-gray-800 hover:bg-gray-50 dark:hover:bg-gray-700 focus:outline-hidden focus:ring-3 focus:ring-offset-2 focus:ring-primary/50 transform hover:scale-105 transition-all;
}
```

## Configuration Files

### Tailwind Config (Preserved for Reference)

**File**: `tailwind.config.js`

The original v3 configuration was preserved for reference but is no longer the primary configuration method:

```javascript
module.exports = {
  content: [
    './templates/**/*.html',
    './static/js/**/*.js',
    './*/templates/**/*.html',
  ],
  darkMode: 'class',
  theme: {
    extend: {
      colors: {
        primary: '#4f46e5',
        secondary: '#e11d48',
        accent: '#8b5cf6',
      },
      fontFamily: {
        sans: ['Poppins', 'sans-serif'],
      },
    },
  },
  plugins: [
    require('@tailwindcss/forms'),
    require('@tailwindcss/typography'),
  ],
}
```

### Package.json Updates

No changes were required to `package.json`, as the Django-Tailwind package handles version management.

## Verification Steps

### 1. Build Process Verification
```bash
# Clean and rebuild CSS
lsof -ti :8000 | xargs kill -9
find . -type d -name "__pycache__" -exec rm -r {} +
uv run manage.py tailwind runserver
```

**Result**: ✅ Build successful, no errors

### 2. CSS Compilation Check
```bash
# Check compiled CSS size and content
ls -la static/css/tailwind.css
head -50 static/css/tailwind.css | grep -E "(primary|secondary|accent)"
```

**Result**: ✅ CSS properly compiled with theme variables

### 3. Server Response Check
```bash
curl -s -o /dev/null -w "%{http_code}" http://localhost:8000/
```

**Result**: ✅ HTTP 200 - Server responding correctly

### 4. Visual Verification
- ✅ Primary colors (indigo) displaying correctly
- ✅ Secondary colors (rose) displaying correctly
- ✅ Accent colors (violet) displaying correctly
- ✅ Poppins font family loading correctly
- ✅ Button styling and interactions working
- ✅ Dark mode functionality preserved
- ✅ Responsive design intact
- ✅ All animations and transitions working

## Files Modified

### CSS Files
- `static/css/src/input.css` - ✅ Major updates (import syntax, theme variables, component classes)

### Template Files (Updated utility classes)
- `templates/base.html`
- `templates/components/navbar.html`
- `templates/components/footer.html`
- `templates/parks/park_list.html`
- `templates/parks/park_detail.html`
- `templates/parks/park_form.html`
- `templates/rides/ride_list.html`
- `templates/rides/ride_detail.html`
- `templates/rides/ride_form.html`
- `templates/companies/company_list.html`
- `templates/companies/company_detail.html`
- `templates/companies/company_form.html`
- `templates/components/search_results.html`
- `templates/components/pagination.html`

### Configuration Files (Preserved)
- `tailwind.config.js` - ✅ Preserved for reference

## Benefits of v4 Migration

### Performance Improvements
- Smaller CSS bundle size
- Faster compilation times
- Improved CSS-in-JS performance

### Developer Experience
- CSS-first configuration approach
- Better IDE support for theme variables
- Simplified import syntax

### Future Compatibility
- Modern CSS feature support
- Better container query support
- Enhanced dark mode capabilities

## Troubleshooting Guide

### Common Issues and Solutions

#### Issue: "Cannot apply unknown utility class"
**Solution**: Check whether the utility was renamed in v4 (see the migration tables above).

#### Issue: Custom colors not working
**Solution**: Ensure the `@theme` block is properly defined with CSS variables.

#### Issue: Build errors
**Solution**: Run a clean build:
```bash
lsof -ti :8000 | xargs kill -9
find . -type d -name "__pycache__" -exec rm -r {} +
uv run manage.py tailwind runserver
```

## Rollback Plan

If a rollback is needed:

1. **Restore the CSS import syntax**:
   ```css
   @tailwind base;
   @tailwind components;
   @tailwind utilities;
   ```

2. **Remove the @theme block**: Delete the `@theme` section from `input.css`.

3. **Revert utility classes**: Use search/replace to revert the utility class changes.

4. **Downgrade Tailwind**: Pin the package back to a v3.x version.

## Post-Migration Checklist

- [x] CSS compilation working
- [x] Development server running
- [x] All pages loading correctly
- [x] Colors displaying properly
- [x] Fonts loading correctly
- [x] Interactive elements working
- [x] Dark mode functioning
- [x] Responsive design intact
- [x] No console errors
- [x] Performance acceptable

## Future Considerations

### New v4 Features to Explore
- Enhanced container queries
- Improved dark mode utilities
- New color-mix() support
- Advanced CSS nesting

### Maintenance Notes
- Monitor for v4 updates and new features
- Consider migrating more configuration to CSS variables
- Evaluate new utility classes as they're released

## Contact and Support

For questions about this migration:
- Review this documentation
- Check the official Tailwind CSS v4 documentation
- Consult the preserved `tailwind.config.js` for the original settings

---

**Migration Completed**: August 15, 2025
**Tailwind Version**: v4.1.12
**Status**: Production Ready ✅
TAILWIND_V4_QUICK_REFERENCE.md (new file, 80 lines)

@@ -0,0 +1,80 @@
# Tailwind CSS v4 Quick Reference Guide

## Common v3 → v4 Utility Migrations

| v3 Utility | v4 Utility | Notes |
|------------|------------|-------|
| `outline-none` | `outline-hidden` | Accessibility improvement |
| `ring` | `ring-3` | Must specify ring width |
| `shadow-sm` | `shadow-xs` | Renamed for consistency |
| `shadow` | `shadow-sm` | Renamed for consistency |
| `flex-shrink-0` | `shrink-0` | Shortened syntax |
| `bg-blue-500 bg-opacity-50` | `bg-blue-500/50` | New opacity syntax |
| `text-gray-700 text-opacity-75` | `text-gray-700/75` | New opacity syntax |
| `!outline-none` | `!outline-hidden` | Updated important syntax |

## Theme Variables (Available in CSS)

```css
/* Colors */
var(--color-primary)    /* #4f46e5 - Indigo-600 */
var(--color-secondary)  /* #e11d48 - Rose-600 */
var(--color-accent)     /* #8b5cf6 - Violet-500 */

/* Fonts */
var(--font-family-sans) /* Poppins, sans-serif */
```

## Usage in Templates

### Before (v3)
```html
<button class="outline-none ring hover:ring-2 shadow-sm bg-blue-500 bg-opacity-75">
  Click me
</button>
```

### After (v4)
```html
<button class="outline-hidden ring-3 hover:ring-2 shadow-xs bg-blue-500/75">
  Click me
</button>
```

## Development Commands

### Start Development Server
```bash
lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver
```

### Force CSS Rebuild
```bash
uv run manage.py tailwind build
```

## New v4 Features

- **CSS-first configuration** via `@theme` blocks
- **Improved opacity syntax** with the `/` operator
- **Better color-mix() support**
- **Enhanced dark mode utilities**
- **Faster compilation**

## Troubleshooting

### Unknown utility class error
1. Check whether the utility was renamed (see the table above)
2. Verify that custom theme variables are defined
3. Run a clean build

### Colors not working
1. Ensure the `@theme` block exists in `static/css/src/input.css`
2. Check that CSS variable names match their usage
3. Verify that CSS compilation completed

## Resources

- [Full Migration Documentation](./TAILWIND_V4_MIGRATION.md)
- [Tailwind CSS v4 Official Docs](https://tailwindcss.com/docs)
- [Django-Tailwind Package](https://django-tailwind.readthedocs.io/)
@@ -1,8 +1,7 @@
 from django.core.management.base import BaseCommand
 from django.contrib.auth import get_user_model
 from django.contrib.auth.models import Group
-from reviews.models import Review
-from parks.models import Park
+from parks.models import Park, ParkReview as Review
 from rides.models import Ride
 from media.models import Photo

@@ -14,19 +13,22 @@ class Command(BaseCommand):

     def handle(self, *args, **kwargs):
         # Delete test users
-        test_users = User.objects.filter(username__in=["testuser", "moderator"])
+        test_users = User.objects.filter(
+            username__in=["testuser", "moderator"])
         count = test_users.count()
         test_users.delete()
         self.stdout.write(self.style.SUCCESS(f"Deleted {count} test users"))

         # Delete test reviews
-        reviews = Review.objects.filter(user__username__in=["testuser", "moderator"])
+        reviews = Review.objects.filter(
+            user__username__in=["testuser", "moderator"])
         count = reviews.count()
         reviews.delete()
         self.stdout.write(self.style.SUCCESS(f"Deleted {count} test reviews"))

         # Delete test photos
-        photos = Photo.objects.filter(uploader__username__in=["testuser", "moderator"])
+        photos = Photo.objects.filter(uploader__username__in=[
+            "testuser", "moderator"])
         count = photos.count()
         photos.delete()
         self.stdout.write(self.style.SUCCESS(f"Deleted {count} test photos"))

@@ -62,6 +64,7 @@ class Command(BaseCommand):
                 os.remove(f)
                 self.stdout.write(self.style.SUCCESS(f"Deleted {f}"))
             except OSError as e:
-                self.stdout.write(self.style.WARNING(f"Error deleting {f}: {e}"))
+                self.stdout.write(self.style.WARNING(
+                    f"Error deleting {f}: {e}"))

         self.stdout.write(self.style.SUCCESS("Test data cleanup complete"))

@@ -1,4 +1,4 @@
-# Generated by Django 5.1.4 on 2025-02-10 01:10
+# Generated by Django 5.1.4 on 2025-08-13 21:35

 import django.contrib.auth.models
 import django.contrib.auth.validators
@@ -232,7 +232,15 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name="TopList",
             fields=[
-                ("id", models.BigAutoField(primary_key=True, serialize=False)),
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
                 ("title", models.CharField(max_length=100)),
                 (
                     "category",
@@ -324,7 +332,17 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name="TopListItem",
             fields=[
-                ("id", models.BigAutoField(primary_key=True, serialize=False)),
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
+                ("created_at", models.DateTimeField(auto_now_add=True)),
+                ("updated_at", models.DateTimeField(auto_now=True)),
                 ("object_id", models.PositiveIntegerField()),
                 ("rank", models.PositiveIntegerField()),
                 ("notes", models.TextField(blank=True)),
@@ -355,6 +373,8 @@ class Migration(migrations.Migration):
                 ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                 ("pgh_label", models.TextField(help_text="The event label.")),
                 ("id", models.BigIntegerField()),
+                ("created_at", models.DateTimeField(auto_now_add=True)),
+                ("updated_at", models.DateTimeField(auto_now=True)),
                 ("object_id", models.PositiveIntegerField()),
                 ("rank", models.PositiveIntegerField()),
                 ("notes", models.TextField(blank=True)),
@@ -490,7 +510,7 @@ class Migration(migrations.Migration):
             trigger=pgtrigger.compiler.Trigger(
                 name="insert_insert",
                 sql=pgtrigger.compiler.UpsertTriggerSql(
-                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id") VALUES (NEW."content_type_id", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id"); RETURN NULL;',
+                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
                     hash="[AWS-SECRET-REMOVED]",
                     operation="INSERT",
                     pgid="pgtrigger_insert_insert_56dfc",
@@ -505,7 +525,7 @@ class Migration(migrations.Migration):
                 name="update_update",
                 sql=pgtrigger.compiler.UpsertTriggerSql(
                     condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
-                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id") VALUES (NEW."content_type_id", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id"); RETURN NULL;',
+                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
                     hash="[AWS-SECRET-REMOVED]",
                     operation="UPDATE",
                     pgid="pgtrigger_update_update_2b6e3",

@@ -1,93 +0,0 @@
# Generated by Django 5.1.4 on 2025-02-21 17:55

import django.utils.timezone
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("accounts", "0001_initial"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="toplistitem",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="toplistitem",
            name="update_update",
        ),
        migrations.AddField(
            model_name="toplistitem",
            name="created_at",
            field=models.DateTimeField(
                auto_now_add=True, default=django.utils.timezone.now
            ),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="toplistitem",
            name="updated_at",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AddField(
            model_name="toplistitemevent",
            name="created_at",
            field=models.DateTimeField(
                auto_now_add=True, default=django.utils.timezone.now
            ),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="toplistitemevent",
            name="updated_at",
            field=models.DateTimeField(auto_now=True),
        ),
        migrations.AlterField(
            model_name="toplist",
            name="id",
            field=models.BigAutoField(
                auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
            ),
        ),
        migrations.AlterField(
            model_name="toplistitem",
            name="id",
            field=models.BigAutoField(
                auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="toplistitem",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_56dfc",
                    table="accounts_toplistitem",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="toplistitem",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_2b6e3",
                    table="accounts_toplistitem",
                    when="AFTER",
                ),
            ),
        ),
    ]

@@ -7,8 +7,8 @@ from io import BytesIO
 import base64
 import os
 import secrets
-from history_tracking.models import TrackedModel
-import pghistory
+from core.history import TrackedModel
+# import pghistory

 def generate_random_id(model_class, id_field):
     """Generate a random ID starting at 4 digits, expanding to 5 if needed"""
@@ -115,7 +115,7 @@ class UserProfile(models.Model):
         """Return the avatar URL or serve a pre-generated avatar based on the first letter of the username"""
         if self.avatar:
             return self.avatar.url
-        first_letter = self.user.username[0].upper()
+        first_letter = self.user.username.upper()
         avatar_path = f"avatars/letters/{first_letter}_avatar.png"
         if os.path.exists(avatar_path):
             return f"/{avatar_path}"
@@ -160,7 +160,7 @@ class PasswordReset(models.Model):
         verbose_name = "Password Reset"
         verbose_name_plural = "Password Resets"

-@pghistory.track()
+# @pghistory.track()
 class TopList(TrackedModel):
     class Categories(models.TextChoices):
         ROLLER_COASTER = 'RC', _('Roller Coaster')
@@ -189,7 +189,7 @@ class TopList(TrackedModel):
     def __str__(self):
         return f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"

-@pghistory.track()
+# @pghistory.track()
 class TopListItem(TrackedModel):
     top_list = models.ForeignKey(
         TopList,
@@ -209,4 +209,4 @@ class TopListItem(TrackedModel):
         unique_together = [['top_list', 'rank']]

     def __str__(self):
-        return f"#{self.rank} in {self.top_list.title}"
+        return f"#{self.rank} in {self.top_list.title}"

accounts/models_temp.py (new file, 212 lines)

@@ -0,0 +1,212 @@
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from PIL import Image, ImageDraw, ImageFont
from io import BytesIO
import base64
import os
import secrets
from core.history import TrackedModel
import pghistory


def generate_random_id(model_class, id_field):
    """Generate a random ID starting at 4 digits, expanding to 5 if needed"""
    while True:
        # Try to get a 4-digit number first
        new_id = str(secrets.SystemRandom().randint(1000, 9999))
        if not model_class.objects.filter(**{id_field: new_id}).exists():
            return new_id

        # If all 4-digit numbers are taken, try 5 digits
        new_id = str(secrets.SystemRandom().randint(10000, 99999))
        if not model_class.objects.filter(**{id_field: new_id}).exists():
            return new_id


class User(AbstractUser):
    class Roles(models.TextChoices):
        USER = 'USER', _('User')
        MODERATOR = 'MODERATOR', _('Moderator')
        ADMIN = 'ADMIN', _('Admin')
        SUPERUSER = 'SUPERUSER', _('Superuser')

    class ThemePreference(models.TextChoices):
        LIGHT = 'light', _('Light')
        DARK = 'dark', _('Dark')

    # Read-only ID
    user_id = models.CharField(
        max_length=10,
        unique=True,
        editable=False,
        help_text='Unique identifier for this user that remains constant even if the username changes'
    )

    role = models.CharField(
        max_length=10,
        choices=Roles.choices,
        default=Roles.USER,
    )
    is_banned = models.BooleanField(default=False)
    ban_reason = models.TextField(blank=True)
    ban_date = models.DateTimeField(null=True, blank=True)
    pending_email = models.EmailField(blank=True, null=True)
    theme_preference = models.CharField(
        max_length=5,
        choices=ThemePreference.choices,
        default=ThemePreference.LIGHT,
    )

    def __str__(self):
        return self.get_display_name()

    def get_absolute_url(self):
        return reverse('profile', kwargs={'username': self.username})

    def get_display_name(self):
        """Get the user's display name, falling back to username if not set"""
        profile = getattr(self, 'profile', None)
        if profile and profile.display_name:
            return profile.display_name
        return self.username

    def save(self, *args, **kwargs):
        if not self.user_id:
            self.user_id = generate_random_id(User, 'user_id')
        super().save(*args, **kwargs)


class UserProfile(models.Model):
    # Read-only ID
    profile_id = models.CharField(
        max_length=10,
        unique=True,
        editable=False,
        help_text='Unique identifier for this profile that remains constant'
    )

    user = models.OneToOneField(
        User,
        on_delete=models.CASCADE,
        related_name='profile'
    )
    display_name = models.CharField(
        max_length=50,
        unique=True,
        help_text="This is the name that will be displayed on the site"
    )
    avatar = models.ImageField(upload_to='avatars/', blank=True)
    pronouns = models.CharField(max_length=50, blank=True)

    bio = models.TextField(max_length=500, blank=True)

    # Social media links
    twitter = models.URLField(blank=True)
    instagram = models.URLField(blank=True)
    youtube = models.URLField(blank=True)
    discord = models.CharField(max_length=100, blank=True)

    # Ride statistics
    coaster_credits = models.IntegerField(default=0)
    dark_ride_credits = models.IntegerField(default=0)
    flat_ride_credits = models.IntegerField(default=0)
    water_ride_credits = models.IntegerField(default=0)

    def get_avatar(self):
        """Return the avatar URL or serve a pre-generated avatar based on the first letter of the username"""
        if self.avatar:
            return self.avatar.url
        first_letter = self.user.username[0].upper()
        avatar_path = f"avatars/letters/{first_letter}_avatar.png"
        if os.path.exists(avatar_path):
            return f"/{avatar_path}"
        return "/static/images/default-avatar.png"

    def save(self, *args, **kwargs):
        # If no display name is set, use the username
        if not self.display_name:
            self.display_name = self.user.username

        if not self.profile_id:
            self.profile_id = generate_random_id(UserProfile, 'profile_id')
        super().save(*args, **kwargs)

    def __str__(self):
        return self.display_name


class EmailVerification(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    token = models.CharField(max_length=64, unique=True)
    created_at = models.DateTimeField(auto_now_add=True)
    last_sent = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"Email verification for {self.user.username}"

    class Meta:
        verbose_name = "Email Verification"
        verbose_name_plural = "Email Verifications"


class PasswordReset(models.Model):
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    token = models.CharField(max_length=64)
    created_at = models.DateTimeField(auto_now_add=True)
    expires_at = models.DateTimeField()
    used = models.BooleanField(default=False)

    def __str__(self):
        return f"Password reset for {self.user.username}"

    class Meta:
        verbose_name = "Password Reset"
        verbose_name_plural = "Password Resets"


@pghistory.track()
class TopList(TrackedModel):
    class Categories(models.TextChoices):
        ROLLER_COASTER = 'RC', _('Roller Coaster')
        DARK_RIDE = 'DR', _('Dark Ride')
        FLAT_RIDE = 'FR', _('Flat Ride')
        WATER_RIDE = 'WR', _('Water Ride')
        PARK = 'PK', _('Park')

    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name='top_lists'  # Added related_name for User model access
    )
    title = models.CharField(max_length=100)
    category = models.CharField(
        max_length=2,
        choices=Categories.choices
    )
    description = models.TextField(blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ['-updated_at']

    def __str__(self):
        return f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"


@pghistory.track()
class TopListItem(TrackedModel):
    top_list = models.ForeignKey(
        TopList,
        on_delete=models.CASCADE,
        related_name='items'
    )
    content_type = models.ForeignKey(
        'contenttypes.ContentType',
        on_delete=models.CASCADE
    )
    object_id = models.PositiveIntegerField()
    rank = models.PositiveIntegerField()
    notes = models.TextField(blank=True)

    class Meta:
        ordering = ['rank']
        unique_together = [['top_list', 'rank']]

    def __str__(self):
        return f"#{self.rank} in {self.top_list.title}"
accounts/selectors.py (new file, 226 lines)

@@ -0,0 +1,226 @@
"""
Selectors for user and account-related data retrieval.
Following Django styleguide pattern for separating data access from business logic.
"""

from typing import Optional, Dict, Any, List
from django.db.models import QuerySet, Q, F, Count, Avg, Prefetch
from django.contrib.auth import get_user_model
from django.utils import timezone
from datetime import timedelta

User = get_user_model()


def user_profile_optimized(*, user_id: int) -> Any:
    """
    Get a user with optimized queries for profile display.

    Args:
        user_id: User ID

    Returns:
        User instance with prefetched related data

    Raises:
        User.DoesNotExist: If user doesn't exist
    """
    return User.objects.prefetch_related(
        'park_reviews',
        'ride_reviews',
        'socialaccount_set'
    ).annotate(
        park_review_count=Count('park_reviews', filter=Q(park_reviews__is_published=True)),
        ride_review_count=Count('ride_reviews', filter=Q(ride_reviews__is_published=True)),
        total_review_count=F('park_review_count') + F('ride_review_count')
    ).get(id=user_id)


def active_users_with_stats() -> QuerySet:
    """
    Get active users with review statistics.

    Returns:
        QuerySet of active users with review counts
    """
    return User.objects.filter(
        is_active=True
    ).annotate(
        park_review_count=Count('park_reviews', filter=Q(park_reviews__is_published=True)),
        ride_review_count=Count('ride_reviews', filter=Q(ride_reviews__is_published=True)),
        total_review_count=F('park_review_count') + F('ride_review_count')
    ).order_by('-total_review_count')


def users_with_recent_activity(*, days: int = 30) -> QuerySet:
    """
    Get users who have been active in the last N days.

    Args:
        days: Number of days to look back for activity

    Returns:
        QuerySet of recently active users
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    return User.objects.filter(
        Q(last_login__gte=cutoff_date) |
        Q(park_reviews__created_at__gte=cutoff_date) |
        Q(ride_reviews__created_at__gte=cutoff_date)
    ).annotate(
        recent_park_reviews=Count('park_reviews', filter=Q(park_reviews__created_at__gte=cutoff_date)),
        recent_ride_reviews=Count('ride_reviews', filter=Q(ride_reviews__created_at__gte=cutoff_date)),
        recent_total_reviews=F('recent_park_reviews') + F('recent_ride_reviews')
    ).order_by('-last_login').distinct()


def top_reviewers(*, limit: int = 10) -> QuerySet:
    """
    Get top users by review count.

    Args:
        limit: Maximum number of users to return

    Returns:
        QuerySet of top reviewers
    """
    return User.objects.filter(
        is_active=True
    ).annotate(
        park_review_count=Count('park_reviews', filter=Q(park_reviews__is_published=True)),
        ride_review_count=Count('ride_reviews', filter=Q(ride_reviews__is_published=True)),
        total_review_count=F('park_review_count') + F('ride_review_count')
    ).filter(
        total_review_count__gt=0
    ).order_by('-total_review_count')[:limit]


def moderator_users() -> QuerySet:
    """
    Get users with moderation permissions.

    Returns:
        QuerySet of users who can moderate content
    """
    return User.objects.filter(
        Q(is_staff=True) |
        Q(groups__name='Moderators') |
        Q(user_permissions__codename__in=['change_parkreview', 'change_ridereview'])
    ).distinct().order_by('username')


def users_by_registration_date(*, start_date, end_date) -> QuerySet:
    """
    Get users who registered within a date range.

    Args:
        start_date: Start of date range
        end_date: End of date range

    Returns:
        QuerySet of users registered in the date range
    """
    return User.objects.filter(
        date_joined__date__gte=start_date,
        date_joined__date__lte=end_date
    ).order_by('-date_joined')


def user_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet:
    """
    Get users matching a search query for autocomplete functionality.

    Args:
        query: Search string
        limit: Maximum number of results

    Returns:
        QuerySet of matching users for autocomplete
    """
    return User.objects.filter(
        Q(username__icontains=query) |
        Q(first_name__icontains=query) |
        Q(last_name__icontains=query),
        is_active=True
    ).order_by('username')[:limit]


def users_with_social_accounts() -> QuerySet:
    """
    Get users who have connected social accounts.

    Returns:
        QuerySet of users with social account connections
    """
    return User.objects.filter(
        socialaccount__isnull=False
    ).prefetch_related(
        'socialaccount_set'
    ).distinct().order_by('username')


def user_statistics_summary() -> Dict[str, Any]:
    """
    Get overall user statistics for dashboard/analytics.

    Returns:
        Dictionary containing user statistics
    """
    total_users = User.objects.count()
    active_users = User.objects.filter(is_active=True).count()
    staff_users = User.objects.filter(is_staff=True).count()

    # Users with reviews
    users_with_reviews = User.objects.filter(
        Q(park_reviews__isnull=False) |
        Q(ride_reviews__isnull=False)
    ).distinct().count()

    # Recent registrations (last 30 days)
    cutoff_date = timezone.now() - timedelta(days=30)
    recent_registrations = User.objects.filter(
        date_joined__gte=cutoff_date
    ).count()

    return {
        'total_users': total_users,
        'active_users': active_users,
        'inactive_users': total_users - active_users,
        'staff_users': staff_users,
        'users_with_reviews': users_with_reviews,
        'recent_registrations': recent_registrations,
        'review_participation_rate': (users_with_reviews / total_users * 100) if total_users > 0 else 0
    }


def users_needing_email_verification() -> QuerySet:
    """
    Get users who haven't verified their email addresses.

    Returns:
        QuerySet of users with unverified emails
    """
    return User.objects.filter(
        is_active=True,
        emailaddress__verified=False
    ).distinct().order_by('date_joined')


def users_by_review_activity(*, min_reviews: int = 1) -> QuerySet:
    """
    Get users who have written at least a minimum number of reviews.

    Args:
        min_reviews: Minimum number of reviews required

    Returns:
        QuerySet of users with sufficient review activity
    """
    return User.objects.annotate(
        park_review_count=Count('park_reviews', filter=Q(park_reviews__is_published=True)),
        ride_review_count=Count('ride_reviews', filter=Q(ride_reviews__is_published=True)),
        total_review_count=F('park_review_count') + F('ride_review_count')
    ).filter(
        total_review_count__gte=min_reviews
    ).order_by('-total_review_count')
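To illustrate the styleguide pattern named in the module docstring, a view would call these selectors rather than embedding ORM logic itself. The following is a hypothetical usage sketch; the view name and template path are illustrative, not taken from the repository:

```python
# Hypothetical usage sketch -- the repository's actual views may differ.
from django.shortcuts import render

from accounts.selectors import top_reviewers, user_statistics_summary


def community_dashboard(request):
    """Render a dashboard purely from selector data; no queries built here."""
    context = {
        "top_reviewers": top_reviewers(limit=5),  # keyword-only by design
        "stats": user_statistics_summary(),
    }
    return render(request, "accounts/dashboard.html", context)
```

The bare `*` in each selector signature makes the arguments keyword-only, so call sites like `top_reviewers(limit=5)` stay explicit.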
@@ -1,3 +1,91 @@
from django.test import TestCase
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from unittest.mock import patch, MagicMock
from .models import User, UserProfile
from .signals import create_default_groups


# Create your tests here.
class SignalsTestCase(TestCase):
    def setUp(self):
        self.user = User.objects.create_user(
            username='testuser',
            email='testuser@example.com',
            password='password'
        )

    def test_create_user_profile(self):
        self.assertTrue(hasattr(self.user, 'profile'))
        self.assertIsInstance(self.user.profile, UserProfile)

    @patch('accounts.signals.requests.get')
    def test_create_user_profile_with_social_avatar(self, mock_get):
        # Mock the response from requests.get
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.content = b'fake-image-content'
        mock_get.return_value = mock_response

        # Create a social account for the user
        social_account = self.user.socialaccount_set.create(
            provider='google',
            extra_data={'picture': 'http://example.com/avatar.png'}
        )

        # The signal should have been triggered when the user was created,
        # but we can trigger it again to test the avatar download
        from .signals import create_user_profile
        create_user_profile(sender=User, instance=self.user, created=True)

        self.user.profile.refresh_from_db()
        self.assertTrue(self.user.profile.avatar.name.startswith('avatars/avatar_testuser'))

    def test_save_user_profile(self):
        self.user.profile.delete()
        self.assertFalse(hasattr(self.user, 'profile'))
        self.user.save()
        self.assertTrue(hasattr(self.user, 'profile'))
        self.assertIsInstance(self.user.profile, UserProfile)

    def test_sync_user_role_with_groups(self):
        self.user.role = User.Roles.MODERATOR
        self.user.save()
        self.assertTrue(self.user.groups.filter(name=User.Roles.MODERATOR).exists())
        self.assertTrue(self.user.is_staff)

        self.user.role = User.Roles.ADMIN
        self.user.save()
        self.assertFalse(self.user.groups.filter(name=User.Roles.MODERATOR).exists())
        self.assertTrue(self.user.groups.filter(name=User.Roles.ADMIN).exists())
        self.assertTrue(self.user.is_staff)

        self.user.role = User.Roles.SUPERUSER
        self.user.save()
        self.assertFalse(self.user.groups.filter(name=User.Roles.ADMIN).exists())
        self.assertTrue(self.user.groups.filter(name=User.Roles.SUPERUSER).exists())
        self.assertTrue(self.user.is_superuser)
        self.assertTrue(self.user.is_staff)

        self.user.role = User.Roles.USER
        self.user.save()
        self.assertFalse(self.user.groups.exists())
        self.assertFalse(self.user.is_superuser)
        self.assertFalse(self.user.is_staff)

    def test_create_default_groups(self):
        # Create some permissions for testing
        content_type = ContentType.objects.get_for_model(User)
        Permission.objects.create(codename='change_review', name='Can change review', content_type=content_type)
        Permission.objects.create(codename='delete_review', name='Can delete review', content_type=content_type)
        Permission.objects.create(codename='change_user', name='Can change user', content_type=content_type)

        create_default_groups()

        moderator_group = Group.objects.get(name=User.Roles.MODERATOR)
        self.assertIsNotNone(moderator_group)
        self.assertTrue(moderator_group.permissions.filter(codename='change_review').exists())
        self.assertFalse(moderator_group.permissions.filter(codename='change_user').exists())

        admin_group = Group.objects.get(name=User.Roles.ADMIN)
        self.assertIsNotNone(admin_group)
        self.assertTrue(admin_group.permissions.filter(codename='change_review').exists())
        self.assertTrue(admin_group.permissions.filter(codename='change_user').exists())
@@ -21,8 +21,9 @@ from django.urls import reverse
 from django.contrib.auth import login
 from django.core.files.uploadedfile import UploadedFile
 from accounts.models import User, PasswordReset, TopList, EmailVerification, UserProfile
-from reviews.models import Review
 from email_service.services import EmailService
+from parks.models import ParkReview
+from rides.models import RideReview
 from allauth.account.views import LoginView, SignupView
 from .mixins import TurnstileMixin
 from typing import Dict, Any, Optional, Union, cast, TYPE_CHECKING
@@ -137,21 +138,30 @@ class ProfileView(DetailView):
         context = super().get_context_data(**kwargs)
         user = cast(User, self.get_object())

-        context['recent_reviews'] = self._get_user_reviews(user)
+        context['park_reviews'] = self._get_user_park_reviews(user)
+        context['ride_reviews'] = self._get_user_ride_reviews(user)
         context['top_lists'] = self._get_user_top_lists(user)

         return context

-    def _get_user_reviews(self, user: User) -> QuerySet[Review]:
-        return Review.objects.filter(
+    def _get_user_park_reviews(self, user: User) -> QuerySet[ParkReview]:
+        return ParkReview.objects.filter(
             user=user,
             is_published=True
         ).select_related(
             'user',
             'user__profile',
-            'content_type'
-        ).prefetch_related(
-            'content_object'
+            'park'
         ).order_by('-created_at')[:5]

+    def _get_user_ride_reviews(self, user: User) -> QuerySet[RideReview]:
+        return RideReview.objects.filter(
+            user=user,
+            is_published=True
+        ).select_related(
+            'user',
+            'user__profile',
+            'ride'
+        ).order_by('-created_at')[:5]
+
     def _get_user_top_lists(self, user: User) -> QuerySet[TopList]:
@@ -1 +0,0 @@
default_app_config = 'analytics.apps.AnalyticsConfig'
@@ -1,3 +0,0 @@
from django.contrib import admin

# Register your models here.
@@ -1,5 +0,0 @@
from django.apps import AppConfig

class AnalyticsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'analytics'
@@ -1,53 +0,0 @@
# Generated by Django 5.1.4 on 2025-02-10 01:10

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
    ]

    operations = [
        migrations.CreateModel(
            name="PageView",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                ("timestamp", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("ip_address", models.GenericIPAddressField()),
                ("user_agent", models.CharField(blank=True, max_length=512)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="page_views",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["timestamp"], name="analytics_p_timesta_835321_idx"
                    ),
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="analytics_p_content_73920a_idx",
                    ),
                ],
            },
        ),
    ]
@@ -1,3 +0,0 @@
from django.test import TestCase

# Create your tests here.
@@ -1,3 +0,0 @@
from django.shortcuts import render

# Create your views here.
2  config/__init__.py  Normal file
@@ -0,0 +1,2 @@
# Configuration package for thrillwiki project

2  config/django/__init__.py  Normal file
@@ -0,0 +1,2 @@
# Django settings package
370  config/django/base.py  Normal file
@@ -0,0 +1,370 @@
"""
Base Django settings for thrillwiki project.
Common settings shared across all environments.
"""

import os
import environ
from pathlib import Path

# Initialize environment variables
env = environ.Env(
    DEBUG=(bool, False),
    SECRET_KEY=(str, ''),
    ALLOWED_HOSTS=(list, []),
    DATABASE_URL=(str, ''),
    CACHE_URL=(str, 'locmem://'),
    EMAIL_URL=(str, ''),
    REDIS_URL=(str, ''),
)

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent

# Read environment file if it exists
environ.Env.read_env(BASE_DIR / '.env')

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env('SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env('DEBUG')

# Allowed hosts
ALLOWED_HOSTS = env('ALLOWED_HOSTS')

# CSRF trusted origins
CSRF_TRUSTED_ORIGINS = env('CSRF_TRUSTED_ORIGINS', default=[])

# Application definition
DJANGO_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.sites",
    "django.contrib.gis",  # GeoDjango
]

THIRD_PARTY_APPS = [
    "rest_framework",  # Django REST Framework
    "drf_spectacular",  # OpenAPI 3.0 documentation
    "corsheaders",  # CORS headers for API
    "pghistory",  # django-pghistory
    "pgtrigger",  # Required by django-pghistory
    "allauth",
    "allauth.account",
    "allauth.socialaccount",
    "allauth.socialaccount.providers.google",
    "allauth.socialaccount.providers.discord",
    "django_cleanup",
    "django_filters",
    "django_htmx",
    "whitenoise",
    "django_tailwind_cli",
    "autocomplete",  # Django HTMX Autocomplete
    "health_check",  # Health checks
    "health_check.db",
    "health_check.cache",
    "health_check.storage",
    "health_check.contrib.migrations",
    "health_check.contrib.redis",
]

LOCAL_APPS = [
    "core",
    "accounts",
    "parks",
    "rides",
    "email_service",
    "media.apps.MediaConfig",
    "moderation",
    "location",
]

INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS

MIDDLEWARE = [
    "django.middleware.cache.UpdateCacheMiddleware",
    "corsheaders.middleware.CorsMiddleware",  # CORS middleware for API
    "django.middleware.security.SecurityMiddleware",
    "whitenoise.middleware.WhiteNoiseMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
    "core.middleware.PgHistoryContextMiddleware",  # Add history context tracking
    "allauth.account.middleware.AccountMiddleware",
    "django.middleware.cache.FetchFromCacheMiddleware",
    "django_htmx.middleware.HtmxMiddleware",
    "core.middleware.PageViewMiddleware",  # Add our page view tracking
]

ROOT_URLCONF = "thrillwiki.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [BASE_DIR / "templates"],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
                "moderation.context_processors.moderation_access",
            ]
        }
    }
]

WSGI_APPLICATION = "thrillwiki.wsgi.application"

# Password validation
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]

# Internationalization
LANGUAGE_CODE = "en-us"
TIME_ZONE = "America/New_York"
USE_I18N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
STATIC_URL = "static/"
STATICFILES_DIRS = [BASE_DIR / "static"]
STATIC_ROOT = BASE_DIR / "staticfiles"

# Media files
MEDIA_URL = "/media/"
MEDIA_ROOT = BASE_DIR / "media"

# Default primary key field type
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

# Authentication settings
AUTHENTICATION_BACKENDS = [
    "django.contrib.auth.backends.ModelBackend",
    "allauth.account.auth_backends.AuthenticationBackend",
]

# django-allauth settings
SITE_ID = 1
ACCOUNT_SIGNUP_FIELDS = ['email*', 'username*', 'password1*', 'password2*']
ACCOUNT_LOGIN_METHODS = {'email', 'username'}
ACCOUNT_EMAIL_VERIFICATION = "optional"
LOGIN_REDIRECT_URL = "/"
ACCOUNT_LOGOUT_REDIRECT_URL = "/"

# Custom adapters
ACCOUNT_ADAPTER = "accounts.adapters.CustomAccountAdapter"
SOCIALACCOUNT_ADAPTER = "accounts.adapters.CustomSocialAccountAdapter"

# Social account settings
SOCIALACCOUNT_PROVIDERS = {
    "google": {
        "SCOPE": [
            "profile",
            "email",
        ],
        "AUTH_PARAMS": {"access_type": "online"},
    },
    "discord": {
        "SCOPE": ["identify", "email"],
        "OAUTH_PKCE_ENABLED": True,
    }
}

# Additional social account settings
SOCIALACCOUNT_LOGIN_ON_GET = True
SOCIALACCOUNT_AUTO_SIGNUP = False
SOCIALACCOUNT_STORE_TOKENS = True

# Custom User Model
AUTH_USER_MODEL = "accounts.User"

# Autocomplete configuration
AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False

# Tailwind configuration
TAILWIND_CLI_CONFIG_FILE = BASE_DIR / "tailwind.config.js"
TAILWIND_CLI_SRC_CSS = BASE_DIR / "static/css/src/input.css"
TAILWIND_CLI_DIST_CSS = BASE_DIR / "static/css/tailwind.css"

# Test runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"

# Road Trip Service Settings
ROADTRIP_CACHE_TIMEOUT = 3600 * 24  # 24 hours for geocoding
ROADTRIP_ROUTE_CACHE_TIMEOUT = 3600 * 6  # 6 hours for routes
ROADTRIP_MAX_REQUESTS_PER_SECOND = 1  # Respect OSM rate limits
ROADTRIP_USER_AGENT = "ThrillWiki Road Trip Planner (https://thrillwiki.com)"
ROADTRIP_REQUEST_TIMEOUT = 10  # seconds
ROADTRIP_MAX_RETRIES = 3
ROADTRIP_BACKOFF_FACTOR = 2

# Django REST Framework Settings
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.SessionAuthentication',
        'rest_framework.authentication.TokenAuthentication',
    ],
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.IsAuthenticated',
    ],
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
    'PAGE_SIZE': 20,
    'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.AcceptHeaderVersioning',
    'DEFAULT_VERSION': 'v1',
    'ALLOWED_VERSIONS': ['v1'],
    'DEFAULT_RENDERER_CLASSES': [
        'rest_framework.renderers.JSONRenderer',
        'rest_framework.renderers.BrowsableAPIRenderer',
    ],
    'DEFAULT_PARSER_CLASSES': [
        'rest_framework.parsers.JSONParser',
        'rest_framework.parsers.FormParser',
        'rest_framework.parsers.MultiPartParser',
    ],
    'EXCEPTION_HANDLER': 'core.api.exceptions.custom_exception_handler',
    'DEFAULT_FILTER_BACKENDS': [
        'django_filters.rest_framework.DjangoFilterBackend',
        'rest_framework.filters.SearchFilter',
        'rest_framework.filters.OrderingFilter',
    ],
    'TEST_REQUEST_DEFAULT_FORMAT': 'json',
    'NON_FIELD_ERRORS_KEY': 'non_field_errors',
    'DEFAULT_SCHEMA_CLASS': 'drf_spectacular.openapi.AutoSchema',
}

# CORS Settings for API
CORS_ALLOWED_ORIGINS = env('CORS_ALLOWED_ORIGINS', default=[])
CORS_ALLOW_CREDENTIALS = True
CORS_ALLOW_ALL_ORIGINS = env('CORS_ALLOW_ALL_ORIGINS', default=False)

# API-specific settings
API_RATE_LIMIT_PER_MINUTE = env.int('API_RATE_LIMIT_PER_MINUTE', default=60)
API_RATE_LIMIT_PER_HOUR = env.int('API_RATE_LIMIT_PER_HOUR', default=1000)

# drf-spectacular settings
SPECTACULAR_SETTINGS = {
    'TITLE': 'ThrillWiki API',
    'DESCRIPTION': 'Comprehensive theme park and ride information API',
    'VERSION': '1.0.0',
    'SERVE_INCLUDE_SCHEMA': False,
    'COMPONENT_SPLIT_REQUEST': True,
    'TAGS': [
        {'name': 'parks', 'description': 'Theme park operations'},
        {'name': 'rides', 'description': 'Ride information and management'},
        {'name': 'locations', 'description': 'Geographic location services'},
        {'name': 'accounts', 'description': 'User account management'},
        {'name': 'media', 'description': 'Media and image management'},
        {'name': 'moderation', 'description': 'Content moderation'},
    ],
    'SCHEMA_PATH_PREFIX': '/api/',
    'DEFAULT_GENERATOR_CLASS': 'drf_spectacular.generators.SchemaGenerator',
    'SERVE_PERMISSIONS': ['rest_framework.permissions.AllowAny'],
    'SWAGGER_UI_SETTINGS': {
        'deepLinking': True,
        'persistAuthorization': True,
        'displayOperationId': False,
        'displayRequestDuration': True,
    },
    'REDOC_UI_SETTINGS': {
        'hideDownloadButton': False,
        'hideHostname': False,
        'hideLoading': False,
        'hideSchemaPattern': True,
        'scrollYOffset': 0,
        'theme': {
            'colors': {
                'primary': {
                    'main': '#1976d2'
                }
            }
        }
    }
}

# Health Check Configuration
HEALTH_CHECK = {
    'DISK_USAGE_MAX': 90,  # Fail if disk usage is over 90%
    'MEMORY_MIN': 100,  # Fail if less than 100MB available memory
}

# Custom health check backends
HEALTH_CHECK_BACKENDS = [
    'health_check.db',
    'health_check.cache',
    'health_check.storage',
    'core.health_checks.custom_checks.CacheHealthCheck',
    'core.health_checks.custom_checks.DatabasePerformanceCheck',
    'core.health_checks.custom_checks.ApplicationHealthCheck',
    'core.health_checks.custom_checks.ExternalServiceHealthCheck',
    'core.health_checks.custom_checks.DiskSpaceHealthCheck',
]

# Enhanced Cache Configuration
DJANGO_REDIS_CACHE_BACKEND = 'django_redis.cache.RedisCache'
DJANGO_REDIS_CLIENT_CLASS = 'django_redis.client.DefaultClient'

CACHES = {
    'default': {
        'BACKEND': DJANGO_REDIS_CACHE_BACKEND,
        'LOCATION': env('REDIS_URL', default='redis://127.0.0.1:6379/1'),
        'OPTIONS': {
            'CLIENT_CLASS': DJANGO_REDIS_CLIENT_CLASS,
            'PARSER_CLASS': 'redis.connection.HiredisParser',
            'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
            'CONNECTION_POOL_CLASS_KWARGS': {
                'max_connections': 50,
                'timeout': 20,
            },
            'COMPRESSOR': 'django_redis.compressors.zlib.ZlibCompressor',
            'IGNORE_EXCEPTIONS': True,
        },
        'KEY_PREFIX': 'thrillwiki',
        'VERSION': 1,
    },
    'sessions': {
        'BACKEND': DJANGO_REDIS_CACHE_BACKEND,
        'LOCATION': env('REDIS_URL', default='redis://127.0.0.1:6379/2'),
        'OPTIONS': {
            'CLIENT_CLASS': DJANGO_REDIS_CLIENT_CLASS,
        }
    },
    'api': {
        'BACKEND': DJANGO_REDIS_CACHE_BACKEND,
        'LOCATION': env('REDIS_URL', default='redis://127.0.0.1:6379/3'),
        'OPTIONS': {
            'CLIENT_CLASS': DJANGO_REDIS_CLIENT_CLASS,
        }
    }
}

# Use Redis for sessions
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = 'sessions'
SESSION_COOKIE_AGE = 86400  # 24 hours

# Cache middleware settings
CACHE_MIDDLEWARE_SECONDS = 300  # 5 minutes
CACHE_MIDDLEWARE_KEY_PREFIX = 'thrillwiki'
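With settings split across config/django/*, each entry point picks one module via DJANGO_SETTINGS_MODULE; a minimal manage.py-style sketch (the actual manage.py is not part of this diff):

# Sketch only: boots Django against the local settings module unless
# DJANGO_SETTINGS_MODULE is already set in the environment.
import os
import sys


def main():
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.django.local')
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()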
178  config/django/local.py  Normal file
@@ -0,0 +1,178 @@
"""
Local development settings for thrillwiki project.
"""

from .base import *
from ..settings import database
from ..settings import email  # Import the module and use its members, e.g., email.EMAIL_HOST
from ..settings import security  # Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
from .base import env  # Import env for environment variable access

# Import database configuration
DATABASES = database.DATABASES

# Development-specific settings
DEBUG = True

# For local development, allow all hosts
ALLOWED_HOSTS = ['*']

# CSRF trusted origins for local development
CSRF_TRUSTED_ORIGINS = [
    "http://localhost:8000",
    "http://127.0.0.1:8000",
    "https://beta.thrillwiki.com",
]

# GeoDjango Settings for macOS development
GDAL_LIBRARY_PATH = env('GDAL_LIBRARY_PATH', default="/opt/homebrew/lib/libgdal.dylib")
GEOS_LIBRARY_PATH = env('GEOS_LIBRARY_PATH', default="/opt/homebrew/lib/libgeos_c.dylib")

# Local cache configuration
LOC_MEM_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"

CACHES = {
    "default": {
        "BACKEND": LOC_MEM_CACHE_BACKEND,
        "LOCATION": "unique-snowflake",
        "TIMEOUT": 300,  # 5 minutes
        "OPTIONS": {"MAX_ENTRIES": 1000},
    },
    "sessions": {
        "BACKEND": LOC_MEM_CACHE_BACKEND,
        "LOCATION": "sessions-cache",
        "TIMEOUT": 86400,  # 24 hours (same as SESSION_COOKIE_AGE)
        "OPTIONS": {"MAX_ENTRIES": 5000},
    },
    "api": {
        "BACKEND": LOC_MEM_CACHE_BACKEND,
        "LOCATION": "api-cache",
        "TIMEOUT": 300,  # 5 minutes
        "OPTIONS": {"MAX_ENTRIES": 2000},
    }
}

# Development-friendly cache settings
CACHE_MIDDLEWARE_SECONDS = 1  # Very short cache for development
CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki_dev"

# Development email backend
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"

# Security settings for development
SECURE_SSL_REDIRECT = False
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False

# Development monitoring tools
DEVELOPMENT_APPS = [
    'silk',
    'debug_toolbar',
    'nplusone.ext.django',
]

# Add development apps if available
for app in DEVELOPMENT_APPS:
    if app not in INSTALLED_APPS:
        INSTALLED_APPS.append(app)

# Development middleware
DEVELOPMENT_MIDDLEWARE = [
    'silk.middleware.SilkyMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
    'nplusone.ext.django.NPlusOneMiddleware',
    'core.middleware.performance_middleware.PerformanceMiddleware',
    'core.middleware.performance_middleware.QueryCountMiddleware',
]

# Add development middleware near the top of the stack; insert(1, ...)
# places each entry right after UpdateCacheMiddleware
for middleware in DEVELOPMENT_MIDDLEWARE:
    if middleware not in MIDDLEWARE:
        MIDDLEWARE.insert(1, middleware)

# Debug toolbar configuration
INTERNAL_IPS = ['127.0.0.1', '::1']

# Silk configuration for development
SILKY_PYTHON_PROFILER = True
SILKY_PYTHON_PROFILER_BINARY = True
SILKY_PYTHON_PROFILER_RESULT_PATH = BASE_DIR / 'profiles'
SILKY_AUTHENTICATION = True
SILKY_AUTHORISATION = True

# NPlusOne configuration
import logging
NPLUSONE_LOGGER = logging.getLogger('nplusone')
NPLUSONE_LOG_LEVEL = logging.WARN

# Enhanced development logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
            'style': '{',
        },
        'json': {
            '()': 'pythonjsonlogger.jsonlogger.JsonFormatter',
            'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'verbose',
        },
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': BASE_DIR / 'logs' / 'thrillwiki.log',
            'maxBytes': 1024 * 1024 * 10,  # 10MB
            'backupCount': 5,
            'formatter': 'json',
        },
        'performance': {
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': BASE_DIR / 'logs' / 'performance.log',
            'maxBytes': 1024 * 1024 * 10,  # 10MB
            'backupCount': 5,
            'formatter': 'json',
        },
    },
    'root': {
        'level': 'INFO',
        'handlers': ['console'],
    },
    'loggers': {
        'django': {
            'handlers': ['file'],
            'level': 'INFO',
            'propagate': False,
        },
        'django.db.backends': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'thrillwiki': {
            'handlers': ['console', 'file'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'performance': {
            'handlers': ['performance'],
            'level': 'INFO',
            'propagate': False,
        },
        'query_optimization': {
            'handlers': ['console', 'file'],
            'level': 'WARNING',
            'propagate': False,
        },
        'nplusone': {
            'handlers': ['console'],
            'level': 'WARNING',
            'propagate': False,
        },
    },
}
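One subtlety in the middleware loop above: insert(1, ...) pushes every entry to the same index, so the development middleware ends up near the top of the stack in reverse list order. A tiny illustration (not project code):

# Demonstrates the effect of the insert-at-1 loop above.
stack = ['update_cache', 'cors', 'security']
for mw in ['silk', 'debug_toolbar', 'nplusone']:
    stack.insert(1, mw)
assert stack == ['update_cache', 'nplusone', 'debug_toolbar', 'silk', 'cors', 'security']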
97  config/django/production.py  Normal file
@@ -0,0 +1,97 @@
"""
Production settings for thrillwiki project.
"""

from . import base  # Import the module and use its members, e.g., base.BASE_DIR, base.env
from ..settings import database  # Import the module and use its members, e.g., database.DATABASES
from ..settings import email  # Import the module and use its members, e.g., email.EMAIL_HOST
from ..settings import security  # Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS

# Production settings
DEBUG = False

# Allowed hosts must be explicitly set in production
ALLOWED_HOSTS = base.env('ALLOWED_HOSTS')

# CSRF trusted origins for production
CSRF_TRUSTED_ORIGINS = base.env('CSRF_TRUSTED_ORIGINS', default=[])

# Security settings for production
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
SECURE_HSTS_SECONDS = 31536000  # 1 year
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_HSTS_PRELOAD = True

# Production logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
            'style': '{',
        },
        'simple': {
            'format': '{levelname} {message}',
            'style': '{',
        },
    },
    'handlers': {
        'file': {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': base.BASE_DIR / 'logs' / 'django.log',
            'maxBytes': 1024 * 1024 * 15,  # 15MB
            'backupCount': 10,
            'formatter': 'verbose',
        },
        'error_file': {
            'level': 'ERROR',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': base.BASE_DIR / 'logs' / 'django_error.log',
            'maxBytes': 1024 * 1024 * 15,  # 15MB
            'backupCount': 10,
            'formatter': 'verbose',
        },
    },
    'root': {
        'handlers': ['file'],
        'level': 'INFO',
    },
    'loggers': {
        'django': {
            'handlers': ['file', 'error_file'],
            'level': 'INFO',
            'propagate': False,
        },
        'thrillwiki': {
            'handlers': ['file', 'error_file'],
            'level': 'INFO',
            'propagate': False,
        },
    },
}

# Static files collection for production
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# Cache settings for production (Redis recommended)
if base.env('REDIS_URL', default=None):
    CACHES = {
        'default': {
            'BACKEND': 'django_redis.cache.RedisCache',
            'LOCATION': base.env('REDIS_URL'),
            'OPTIONS': {
                'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            }
        }
    }

    # Use Redis for sessions in production
    SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
    SESSION_CACHE_ALIAS = 'default'
65  config/django/test.py  Normal file
@@ -0,0 +1,65 @@
"""
Test settings for thrillwiki project.
"""

from .base import *

# Test-specific settings
DEBUG = False

# Use in-memory database for faster tests
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.spatialite',
        'NAME': ':memory:',
    }
}

# Use in-memory cache for tests
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'test-cache',
    }
}

# Disable migrations for faster tests


class DisableMigrations:
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return None


MIGRATION_MODULES = DisableMigrations()

# Email backend for tests
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'

# Password hashers for faster tests
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.MD5PasswordHasher',
]

# Disable logging during tests
LOGGING_CONFIG = None

# Media files for tests
MEDIA_ROOT = BASE_DIR / 'test_media'

# Static files for tests
STATIC_ROOT = BASE_DIR / 'test_static'

# Disable Turnstile for tests
TURNSTILE_SITE_KEY = 'test-key'
TURNSTILE_SECRET_KEY = 'test-secret'

# Test-specific middleware (remove caching middleware)
MIDDLEWARE = [m for m in MIDDLEWARE if 'cache' not in m.lower()]

# Celery settings for tests (if Celery is used)
CELERY_TASK_ALWAYS_EAGER = True
CELERY_TASK_EAGER_PROPAGATES = True
46  config/django/test_accounts.py  Normal file
@@ -0,0 +1,46 @@
"""
Test Django settings for thrillwiki accounts app.
"""

from .base import *

# Use a dedicated PostGIS database for tests
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'test_db',
    }
}

# Use a faster password hasher for tests
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.MD5PasswordHasher',
]

# Whitenoise settings for tests (autorefresh static files)
WHITENOISE_AUTOREFRESH = True
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.sites",
    "allauth",
    "allauth.account",
    "allauth.socialaccount",
    "accounts",
    "core",
    "pghistory",
    "pgtrigger",
    "email_service",
    "parks",
    "rides",
    "media.apps.MediaConfig",
]

GDAL_LIBRARY_PATH = '/opt/homebrew/lib/libgdal.dylib'
GEOS_LIBRARY_PATH = '/opt/homebrew/lib/libgeos_c.dylib'
2  config/settings/__init__.py  Normal file
@@ -0,0 +1,2 @@
# Settings modules package
30  config/settings/database.py  Normal file
@@ -0,0 +1,30 @@
"""
Database configuration for thrillwiki project.
"""

import environ

env = environ.Env()

# Database configuration
db_config = env.db()
# Force PostGIS backend for spatial data support
db_config['ENGINE'] = 'django.contrib.gis.db.backends.postgis'

DATABASES = {
    'default': db_config,
}

# GeoDjango Settings - Environment specific
GDAL_LIBRARY_PATH = env('GDAL_LIBRARY_PATH', default=None)
GEOS_LIBRARY_PATH = env('GEOS_LIBRARY_PATH', default=None)

# Cache settings
CACHES = {
    'default': env.cache('CACHE_URL', default='locmemcache://')
}

CACHE_MIDDLEWARE_SECONDS = env.int(
    'CACHE_MIDDLEWARE_SECONDS', default=300)  # 5 minutes
CACHE_MIDDLEWARE_KEY_PREFIX = env(
    'CACHE_MIDDLEWARE_KEY_PREFIX', default='thrillwiki')
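env.db() parses the DATABASE_URL variable into a Django database dict before the PostGIS override is applied; roughly, for the example URL from .env.example (exact keys and value types depend on the django-environ version, so treat this as an illustration):

# Illustrative only: approximate result of env.db() for
# DATABASE_URL=postgis://username:password@localhost:5432/thrillwiki,
# after the ENGINE override above.
expected_db_config = {
    'ENGINE': 'django.contrib.gis.db.backends.postgis',
    'NAME': 'thrillwiki',
    'USER': 'username',
    'PASSWORD': 'password',
    'HOST': 'localhost',
    'PORT': 5432,  # may be a string depending on the parser version
}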
19  config/settings/email.py  Normal file
@@ -0,0 +1,19 @@
"""
Email configuration for thrillwiki project.
"""

import environ

env = environ.Env()

# Email settings
EMAIL_BACKEND = env('EMAIL_BACKEND', default='email_service.backends.ForwardEmailBackend')
FORWARD_EMAIL_BASE_URL = env('FORWARD_EMAIL_BASE_URL', default='https://api.forwardemail.net')
SERVER_EMAIL = env('SERVER_EMAIL', default='django_webmaster@thrillwiki.com')

# Email URLs can be configured using EMAIL_URL environment variable
# Example: EMAIL_URL=smtp://user:pass@localhost:587
if env('EMAIL_URL', default=None):
    email_config = env.email_url()
    vars().update(email_config)
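env.email_url() expands EMAIL_URL into the standard Django EMAIL_* settings that vars().update() then injects into this module; roughly, for the example URL in the comment above (exact keys depend on the django-environ version, so treat this as an illustration):

# Illustrative only: approximate result of env.email_url() for
# EMAIL_URL=smtp://user:pass@localhost:587.
expected_email_config = {
    'EMAIL_BACKEND': 'django.core.mail.backends.smtp.EmailBackend',
    'EMAIL_HOST': 'localhost',
    'EMAIL_PORT': 587,
    'EMAIL_HOST_USER': 'user',
    'EMAIL_HOST_PASSWORD': 'pass',
}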
32  config/settings/security.py  Normal file
@@ -0,0 +1,32 @@
"""
Security configuration for thrillwiki project.
"""

import environ

env = environ.Env()

# Cloudflare Turnstile settings
TURNSTILE_SITE_KEY = env('TURNSTILE_SITE_KEY', default='')
TURNSTILE_SECRET_KEY = env('TURNSTILE_SECRET_KEY', default='')
TURNSTILE_VERIFY_URL = env('TURNSTILE_VERIFY_URL', default='https://challenges.cloudflare.com/turnstile/v0/siteverify')

# Security headers and settings (for production)
SECURE_BROWSER_XSS_FILTER = env.bool('SECURE_BROWSER_XSS_FILTER', default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool('SECURE_CONTENT_TYPE_NOSNIFF', default=True)
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool('SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True)
SECURE_HSTS_SECONDS = env.int('SECURE_HSTS_SECONDS', default=31536000)  # 1 year
SECURE_REDIRECT_EXEMPT = env.list('SECURE_REDIRECT_EXEMPT', default=[])
SECURE_SSL_REDIRECT = env.bool('SECURE_SSL_REDIRECT', default=False)
SECURE_PROXY_SSL_HEADER = env.tuple('SECURE_PROXY_SSL_HEADER', default=None)

# Session security
SESSION_COOKIE_SECURE = env.bool('SESSION_COOKIE_SECURE', default=False)
SESSION_COOKIE_HTTPONLY = env.bool('SESSION_COOKIE_HTTPONLY', default=True)
SESSION_COOKIE_SAMESITE = env('SESSION_COOKIE_SAMESITE', default='Lax')

# CSRF security
CSRF_COOKIE_SECURE = env.bool('CSRF_COOKIE_SECURE', default=False)
CSRF_COOKIE_HTTPONLY = env.bool('CSRF_COOKIE_HTTPONLY', default=True)
CSRF_COOKIE_SAMESITE = env('CSRF_COOKIE_SAMESITE', default='Lax')
1  core/api/__init__.py  Normal file
@@ -0,0 +1 @@
# Core API infrastructure for ThrillWiki
172  core/api/exceptions.py  Normal file
@@ -0,0 +1,172 @@
"""
Custom exception handling for ThrillWiki API.
Provides standardized error responses following Django styleguide patterns.
"""

import logging
from typing import Any, Dict, Optional

from django.http import Http404
from django.core.exceptions import PermissionDenied, ValidationError as DjangoValidationError
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler
from rest_framework.exceptions import ValidationError as DRFValidationError, NotFound, PermissionDenied as DRFPermissionDenied

from ..exceptions import ThrillWikiException
from ..logging import get_logger, log_exception

logger = get_logger(__name__)


def custom_exception_handler(exc: Exception, context: Dict[str, Any]) -> Optional[Response]:
    """
    Custom exception handler for DRF that provides standardized error responses.

    Returns:
        Response with standardized error format, or None to fall back to the default handler
    """
    # Call REST framework's default exception handler first
    response = exception_handler(exc, context)

    if response is not None:
        # Standardize the error response format
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': _get_error_code(exc),
                'message': _get_error_message(exc, response.data),
                'details': _get_error_details(exc, response.data),
            },
            'data': None,
        }

        # Add request context for debugging
        if hasattr(context.get('request'), 'user'):
            custom_response_data['error']['request_user'] = str(context['request'].user)

        # Log the error for monitoring
        log_exception(logger, exc, context={'response_status': response.status_code}, request=context.get('request'))

        response.data = custom_response_data

    # Handle ThrillWiki custom exceptions
    elif isinstance(exc, ThrillWikiException):
        custom_response_data = {
            'status': 'error',
            'error': exc.to_dict(),
            'data': None,
        }

        log_exception(logger, exc, context={'response_status': exc.status_code}, request=context.get('request'))
        response = Response(custom_response_data, status=exc.status_code)

    # Handle specific Django exceptions that DRF doesn't catch
    elif isinstance(exc, DjangoValidationError):
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': 'VALIDATION_ERROR',
                'message': 'Validation failed',
                'details': _format_django_validation_errors(exc),
            },
            'data': None,
        }

        log_exception(logger, exc, context={'response_status': status.HTTP_400_BAD_REQUEST}, request=context.get('request'))
        response = Response(custom_response_data, status=status.HTTP_400_BAD_REQUEST)

    elif isinstance(exc, Http404):
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': 'NOT_FOUND',
                'message': 'Resource not found',
                'details': str(exc) if str(exc) else None,
            },
            'data': None,
        }

        log_exception(logger, exc, context={'response_status': status.HTTP_404_NOT_FOUND}, request=context.get('request'))
        response = Response(custom_response_data, status=status.HTTP_404_NOT_FOUND)

    elif isinstance(exc, PermissionDenied):
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': 'PERMISSION_DENIED',
                'message': 'Permission denied',
                'details': str(exc) if str(exc) else None,
            },
            'data': None,
        }

        log_exception(logger, exc, context={'response_status': status.HTTP_403_FORBIDDEN}, request=context.get('request'))
        response = Response(custom_response_data, status=status.HTTP_403_FORBIDDEN)

    return response


def _get_error_code(exc: Exception) -> str:
    """Extract or determine error code from exception."""
    if hasattr(exc, 'default_code'):
        return exc.default_code.upper()

    if isinstance(exc, DRFValidationError):
        return 'VALIDATION_ERROR'
    elif isinstance(exc, NotFound):
        return 'NOT_FOUND'
    elif isinstance(exc, DRFPermissionDenied):
        return 'PERMISSION_DENIED'

    return exc.__class__.__name__.upper()


def _get_error_message(exc: Exception, response_data: Any) -> str:
    """Extract user-friendly error message."""
    if isinstance(response_data, dict):
        # Handle DRF validation errors
        if 'detail' in response_data:
            return str(response_data['detail'])
        elif 'non_field_errors' in response_data:
            errors = response_data['non_field_errors']
            return errors[0] if isinstance(errors, list) and errors else str(errors)
        elif len(response_data) == 1:
            key, value = next(iter(response_data.items()))
            if isinstance(value, list) and value:
                return f"{key}: {value[0]}"
            return f"{key}: {value}"

    # Fallback to exception message
    return str(exc) if str(exc) else 'An error occurred'


def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str, Any]]:
    """Extract detailed error information for debugging."""
    if isinstance(response_data, dict) and len(response_data) > 1:
        return response_data

    if hasattr(exc, 'detail') and isinstance(exc.detail, dict):
        return exc.detail

    return None


def _format_django_validation_errors(exc: DjangoValidationError) -> Dict[str, Any]:
    """Format Django ValidationError for API response."""
    if hasattr(exc, 'error_dict'):
        # Field-specific errors
        return {
            field: [str(error) for error in errors]
            for field, errors in exc.error_dict.items()
        }
    elif hasattr(exc, 'error_list'):
        # Non-field errors
        return {
            'non_field_errors': [str(error) for error in exc.error_list]
        }

    return {'non_field_errors': [str(exc)]}


# Removed _log_api_error - using centralized logging instead
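Whatever the branch, clients see one envelope; for example, an Http404 handled above serializes to a body of this shape (values illustrative):

# Example envelope produced by the Http404 branch of the handler.
example_not_found_body = {
    'status': 'error',
    'error': {
        'code': 'NOT_FOUND',
        'message': 'Resource not found',
        'details': None,
    },
    'data': None,
}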
252  core/api/mixins.py  Normal file
@@ -0,0 +1,252 @@
"""
Common mixins for API views following Django styleguide patterns.
"""

from typing import Dict, Any, Optional
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework import status


class ApiMixin:
    """
    Base mixin for API views providing standardized response formatting.
    """

    def create_response(
        self,
        *,
        data: Any = None,
        message: Optional[str] = None,
        status_code: int = status.HTTP_200_OK,
        pagination: Optional[Dict[str, Any]] = None,
        metadata: Optional[Dict[str, Any]] = None
    ) -> Response:
        """
        Create standardized API response.

        Args:
            data: Response data
            message: Optional success message
            status_code: HTTP status code
            pagination: Pagination information
            metadata: Additional metadata

        Returns:
            Standardized Response object
        """
        response_data = {
            'status': 'success' if status_code < 400 else 'error',
            'data': data,
        }

        if message:
            response_data['message'] = message

        if pagination:
            response_data['pagination'] = pagination

        if metadata:
            response_data['metadata'] = metadata

        return Response(response_data, status=status_code)

    def create_error_response(
        self,
        *,
        message: str,
        status_code: int = status.HTTP_400_BAD_REQUEST,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None
    ) -> Response:
        """
        Create standardized error response.

        Args:
            message: Error message
            status_code: HTTP status code
            error_code: Optional error code
            details: Additional error details

        Returns:
            Standardized error Response object
        """
        error_data = {
            'code': error_code or 'GENERIC_ERROR',
            'message': message,
        }

        if details:
            error_data['details'] = details

        response_data = {
            'status': 'error',
            'error': error_data,
            'data': None,
        }

        return Response(response_data, status=status_code)


class CreateApiMixin(ApiMixin):
    """
    Mixin for create API endpoints with standardized input/output handling.
    """

    def create(self, request: Request, *args, **kwargs) -> Response:
        """Handle POST requests for creating resources."""
        serializer = self.get_input_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        # Create the object using the service layer
        obj = self.perform_create(**serializer.validated_data)

        # Serialize the output
        output_serializer = self.get_output_serializer(obj)

        return self.create_response(
            data=output_serializer.data,
            status_code=status.HTTP_201_CREATED,
            message="Resource created successfully"
        )

    def perform_create(self, **validated_data):
        """
        Override this method to implement object creation logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_create")

    def get_input_serializer(self, *args, **kwargs):
        """Get the input serializer for validation."""
        return self.InputSerializer(*args, **kwargs)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class UpdateApiMixin(ApiMixin):
    """
    Mixin for update API endpoints with standardized input/output handling.
    """

    def update(self, request: Request, *args, **kwargs) -> Response:
        """Handle PUT/PATCH requests for updating resources."""
        instance = self.get_object()
        serializer = self.get_input_serializer(data=request.data, partial=kwargs.get('partial', False))
        serializer.is_valid(raise_exception=True)

        # Update the object using the service layer
        updated_obj = self.perform_update(instance, **serializer.validated_data)

        # Serialize the output
        output_serializer = self.get_output_serializer(updated_obj)

        return self.create_response(
            data=output_serializer.data,
            message="Resource updated successfully"
        )

    def perform_update(self, instance, **validated_data):
        """
        Override this method to implement object update logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_update")

    def get_input_serializer(self, *args, **kwargs):
        """Get the input serializer for validation."""
        return self.InputSerializer(*args, **kwargs)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class ListApiMixin(ApiMixin):
    """
    Mixin for list API endpoints with pagination and filtering.
    """

    def list(self, request: Request, *args, **kwargs) -> Response:
        """Handle GET requests for listing resources."""
        # Use selector to get filtered queryset
        queryset = self.get_queryset()

        # Apply pagination
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_output_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)

        # No pagination
        serializer = self.get_output_serializer(queryset, many=True)
        return self.create_response(data=serializer.data)

    def get_queryset(self):
        """
        Override this method to use selector patterns.
        Should call selector functions, not access model managers directly.
        """
        raise NotImplementedError("Subclasses must implement get_queryset using selectors")

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class RetrieveApiMixin(ApiMixin):
    """
    Mixin for retrieve API endpoints.
    """

    def retrieve(self, request: Request, *args, **kwargs) -> Response:
        """Handle GET requests for retrieving a single resource."""
        instance = self.get_object()
        serializer = self.get_output_serializer(instance)

        return self.create_response(data=serializer.data)

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError("Subclasses must implement get_object using selectors")

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class DestroyApiMixin(ApiMixin):
    """
    Mixin for delete API endpoints.
    """

    def destroy(self, request: Request, *args, **kwargs) -> Response:
        """Handle DELETE requests for destroying resources."""
        instance = self.get_object()

        # Delete using service layer
        self.perform_destroy(instance)

        return self.create_response(
            status_code=status.HTTP_204_NO_CONTENT,
            message="Resource deleted successfully"
        )

    def perform_destroy(self, instance):
        """
        Override this method to implement object deletion logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_destroy")

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError("Subclasses must implement get_object using selectors")
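A concrete endpoint combines one of these mixins with DRF's APIView, nested serializers, and a service call; a minimal sketch (the view name, serializer fields, and accounts.services.user_create are hypothetical, not part of this diff):

from rest_framework import serializers
from rest_framework.views import APIView


class UserCreateApi(CreateApiMixin, APIView):  # hypothetical endpoint
    class InputSerializer(serializers.Serializer):
        username = serializers.CharField()
        email = serializers.EmailField()

    class OutputSerializer(serializers.Serializer):
        id = serializers.IntegerField()
        username = serializers.CharField()

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def perform_create(self, **validated_data):
        from accounts.services import user_create  # hypothetical service
        return user_create(**validated_data)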
1  core/decorators/__init__.py  Normal file
@@ -0,0 +1 @@
# Decorators module
343  core/decorators/cache_decorators.py  Normal file
@@ -0,0 +1,343 @@
"""
Advanced caching decorators for API views and functions.
"""

import hashlib
import json
import time
from functools import wraps
from typing import Optional, List, Callable, Any
from django.core.cache import cache
from django.http import JsonResponse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control, never_cache
from django.views.decorators.vary import vary_on_headers
from rest_framework.response import Response
from core.services.enhanced_cache_service import EnhancedCacheService
import logging

logger = logging.getLogger(__name__)


def cache_api_response(timeout=1800, vary_on=None, key_prefix='api', cache_backend='api'):
    """
    Advanced decorator for caching API responses with flexible configuration.

    Args:
        timeout: Cache timeout in seconds
        vary_on: List of request attributes to vary cache on
        key_prefix: Prefix for cache keys
        cache_backend: Cache backend to use
    """
    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            # Only cache GET requests
            if request.method != 'GET':
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix,
                view_func.__name__,
                str(request.user.id) if request.user.is_authenticated else 'anonymous',
                str(hash(frozenset(request.GET.items()))),
            ]

            # Add URL parameters to cache key
            if args:
                cache_key_parts.append(str(hash(args)))
            if kwargs:
                cache_key_parts.append(str(hash(frozenset(kwargs.items()))))

            # Add custom vary_on fields
            if vary_on:
                for field in vary_on:
                    value = getattr(request, field, '')
                    cache_key_parts.append(str(value))

            cache_key = ':'.join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = getattr(cache_service, cache_backend + '_cache').get(cache_key)

            if cached_response:
                logger.debug(f"Cache hit for API view {view_func.__name__}", extra={
                    'cache_key': cache_key,
                    'view': view_func.__name__,
                    'cache_hit': True
                })
                return cached_response

            # Execute view and cache result
            start_time = time.time()
            response = view_func(self, request, *args, **kwargs)
            execution_time = time.time() - start_time

            # Only cache successful responses
            if hasattr(response, 'status_code') and response.status_code == 200:
                getattr(cache_service, cache_backend + '_cache').set(cache_key, response, timeout)
                logger.debug(f"Cached API response for view {view_func.__name__}", extra={
                    'cache_key': cache_key,
                    'view': view_func.__name__,
                    'execution_time': execution_time,
                    'cache_timeout': timeout,
                    'cache_miss': True
                })
            else:
                logger.debug(f"Not caching response for view {view_func.__name__} (status: {getattr(response, 'status_code', 'unknown')})")

            return response
        return wrapper
    return decorator
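# Usage sketch (hypothetical view, not shown in this diff):
#
#     class ParkListApi(APIView):
#         @cache_api_response(timeout=600, key_prefix='parks', cache_backend='api')
#         def get(self, request):
#             parks = park_list()  # hypothetical selector
#             return Response(ParkSerializer(parks, many=True).data)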
def cache_queryset_result(cache_key_template: str, timeout: int = 3600, cache_backend='default'):
    """
    Decorator for caching expensive queryset operations.

    Args:
        cache_key_template: Template for cache key (can use format placeholders)
        timeout: Cache timeout in seconds
        cache_backend: Cache backend to use
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key from template and arguments
            try:
                cache_key = cache_key_template.format(*args, **kwargs)
            except (KeyError, IndexError):
                # Fallback to simpler key generation
                cache_key = f"{cache_key_template}:{hash(str(args) + str(kwargs))}"

            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + '_cache').get(cache_key)

            if cached_result is not None:
                logger.debug(f"Cache hit for queryset operation: {func.__name__}")
                return cached_result

            # Execute function and cache result
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            getattr(cache_service, cache_backend + '_cache').set(cache_key, result, timeout)
            logger.debug(f"Cached queryset result for {func.__name__}", extra={
                'cache_key': cache_key,
                'function': func.__name__,
                'execution_time': execution_time,
                'cache_timeout': timeout
            })

            return result
        return wrapper
    return decorator


def invalidate_cache_on_save(model_name: str, cache_patterns: Optional[List[str]] = None):
    """
    Decorator to invalidate cache when model instances are saved.

    Args:
        model_name: Name of the model
        cache_patterns: List of cache key patterns to invalidate
    """
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)

            # Invalidate related cache entries
            cache_service = EnhancedCacheService()

            # Standard model cache invalidation
            instance_id = getattr(self, 'id', None)
            cache_service.invalidate_model_cache(model_name, instance_id)

            # Custom pattern invalidation
            if cache_patterns:
                for pattern in cache_patterns:
                    if instance_id:
                        pattern = pattern.format(model=model_name, id=instance_id)
                    cache_service.invalidate_pattern(pattern)

            logger.info(f"Invalidated cache for {model_name} after save", extra={
                'model': model_name,
                'instance_id': instance_id,
                'patterns': cache_patterns
            })

            return result
        return wrapper
    return decorator


class CachedAPIViewMixin:
    """Mixin to add caching capabilities to API views"""

    cache_timeout = 1800  # 30 minutes default
    cache_vary_on = ['version']
    cache_key_prefix = 'api'
    cache_backend = 'api'

    @method_decorator(vary_on_headers('User-Agent', 'Accept-Language'))
    def dispatch(self, request, *args, **kwargs):
        """Add caching to the dispatch method"""
        if request.method == 'GET' and getattr(self, 'enable_caching', True):
            return self._cached_dispatch(request, *args, **kwargs)
        return super().dispatch(request, *args, **kwargs)

    def _cached_dispatch(self, request, *args, **kwargs):
        """Handle cached dispatch for GET requests"""
        cache_key = self._generate_cache_key(request, *args, **kwargs)

        cache_service = EnhancedCacheService()
        cached_response = getattr(cache_service, self.cache_backend + '_cache').get(cache_key)

        if cached_response:
            logger.debug(f"Cache hit for view {self.__class__.__name__}")
            return cached_response

        # Execute view
        response = super().dispatch(request, *args, **kwargs)

        # Cache successful responses
        if hasattr(response, 'status_code') and response.status_code == 200:
            getattr(cache_service, self.cache_backend + '_cache').set(
                cache_key, response, self.cache_timeout
            )
            logger.debug(f"Cached response for view {self.__class__.__name__}")

        return response

    def _generate_cache_key(self, request, *args, **kwargs):
        """Generate cache key for the request"""
        key_parts = [
            self.cache_key_prefix,
            self.__class__.__name__,
            request.method,
            str(request.user.id) if request.user.is_authenticated else 'anonymous',
            str(hash(frozenset(request.GET.items()))),
        ]

        if args:
            key_parts.append(str(hash(args)))
        if kwargs:
            key_parts.append(str(hash(frozenset(kwargs.items()))))

        # Add vary_on fields
        for field in self.cache_vary_on:
            value = getattr(request, field, '')
|
||||
key_parts.append(str(value))
|
||||
|
||||
return ':'.join(key_parts)
|
||||
|
||||
|
||||
def smart_cache(
|
||||
timeout: int = 3600,
|
||||
key_func: Optional[Callable] = None,
|
||||
invalidate_on: Optional[List[str]] = None,
|
||||
cache_backend: str = 'default'
|
||||
):
|
||||
"""
|
||||
Smart caching decorator that adapts to function arguments
|
||||
|
||||
Args:
|
||||
timeout: Cache timeout in seconds
|
||||
key_func: Custom function to generate cache key
|
||||
invalidate_on: List of signals to invalidate cache on
|
||||
cache_backend: Cache backend to use
|
||||
"""
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
# Generate cache key
|
||||
if key_func:
|
||||
cache_key = key_func(*args, **kwargs)
|
||||
else:
|
||||
# Default key generation
|
||||
key_data = {
|
||||
'func': f"{func.__module__}.{func.__name__}",
|
||||
'args': str(args),
|
||||
'kwargs': json.dumps(kwargs, sort_keys=True, default=str)
|
||||
}
|
||||
key_string = json.dumps(key_data, sort_keys=True)
|
||||
cache_key = f"smart_cache:{hashlib.md5(key_string.encode()).hexdigest()}"
|
||||
|
||||
# Try to get from cache
|
||||
cache_service = EnhancedCacheService()
|
||||
cached_result = getattr(cache_service, cache_backend + '_cache').get(cache_key)
|
||||
|
||||
if cached_result is not None:
|
||||
logger.debug(f"Smart cache hit for {func.__name__}")
|
||||
return cached_result
|
||||
|
||||
# Execute function
|
||||
start_time = time.time()
|
||||
result = func(*args, **kwargs)
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
# Cache result
|
||||
getattr(cache_service, cache_backend + '_cache').set(cache_key, result, timeout)
|
||||
|
||||
logger.debug(f"Smart cached result for {func.__name__}", extra={
|
||||
'cache_key': cache_key,
|
||||
'execution_time': execution_time,
|
||||
'function': func.__name__
|
||||
})
|
||||
|
||||
return result
|
||||
|
||||
# Add cache invalidation if specified
|
||||
if invalidate_on:
|
||||
wrapper._cache_invalidate_on = invalidate_on
|
||||
wrapper._cache_backend = cache_backend
|
||||
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
def conditional_cache(condition_func: Callable, **cache_kwargs):
|
||||
"""
|
||||
Cache decorator that only caches when condition is met
|
||||
|
||||
Args:
|
||||
condition_func: Function that returns True if caching should be applied
|
||||
**cache_kwargs: Arguments passed to smart_cache
|
||||
"""
|
||||
def decorator(func):
|
||||
cached_func = smart_cache(**cache_kwargs)(func)
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
if condition_func(*args, **kwargs):
|
||||
return cached_func(*args, **kwargs)
|
||||
else:
|
||||
return func(*args, **kwargs)
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
# Utility functions for cache key generation
|
||||
def generate_user_cache_key(user, suffix: str = ''):
|
||||
"""Generate cache key based on user"""
|
||||
user_id = user.id if user.is_authenticated else 'anonymous'
|
||||
return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}"
|
||||
|
||||
|
||||
def generate_model_cache_key(model_instance, suffix: str = ''):
|
||||
"""Generate cache key based on model instance"""
|
||||
model_name = model_instance._meta.model_name
|
||||
instance_id = model_instance.id
|
||||
return f"{model_name}:{instance_id}:{suffix}" if suffix else f"{model_name}:{instance_id}"
|
||||
|
||||
|
||||
def generate_queryset_cache_key(queryset, params: dict = None):
|
||||
"""Generate cache key for queryset with parameters"""
|
||||
model_name = queryset.model._meta.model_name
|
||||
params_str = json.dumps(params or {}, sort_keys=True, default=str)
|
||||
params_hash = hashlib.md5(params_str.encode()).hexdigest()
|
||||
return f"queryset:{model_name}:{params_hash}"
|
||||
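A quick usage sketch for the decorators above. The function names and the Park queryset are illustrative assumptions, not part of this diff; only the decorators and key helpers come from this file, and they presuppose an EnhancedCacheService exposing a `default_cache` backend as used above.

# Hypothetical usage sketch of cache_queryset_result and smart_cache:
@cache_queryset_result("trending_parks:{0}", timeout=900)
def get_trending_parks(limit: int):
    # Expensive aggregation, cached for 15 minutes under "trending_parks:<limit>"
    return list(Park.objects.order_by('-view_count')[:limit])  # Park/view_count are assumed

@smart_cache(timeout=600, key_func=lambda user: generate_user_cache_key(user, 'dashboard'))
def build_user_dashboard(user):
    # Cached per user via the key helper defined above
    return {'trending': get_trending_parks(10)}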
213
core/exceptions.py
Normal file
@@ -0,0 +1,213 @@
"""
Custom exception classes for ThrillWiki.
Provides domain-specific exceptions with proper error codes and messages.
"""

from typing import Optional, Dict, Any


class ThrillWikiException(Exception):
    """Base exception for all ThrillWiki-specific errors."""

    default_message = "An error occurred"
    error_code = "THRILLWIKI_ERROR"
    status_code = 500

    def __init__(
        self,
        message: Optional[str] = None,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None
    ):
        self.message = message or self.default_message
        self.error_code = error_code or self.error_code
        self.details = details or {}
        super().__init__(self.message)

    def to_dict(self) -> Dict[str, Any]:
        """Convert exception to dictionary for API responses."""
        return {
            'error_code': self.error_code,
            'message': self.message,
            'details': self.details
        }


class ValidationException(ThrillWikiException):
    """Raised when data validation fails."""

    default_message = "Validation failed"
    error_code = "VALIDATION_ERROR"
    status_code = 400


class NotFoundError(ThrillWikiException):
    """Raised when a requested resource is not found."""

    default_message = "Resource not found"
    error_code = "NOT_FOUND"
    status_code = 404


class PermissionDeniedError(ThrillWikiException):
    """Raised when user lacks permission for an operation."""

    default_message = "Permission denied"
    error_code = "PERMISSION_DENIED"
    status_code = 403


class BusinessLogicError(ThrillWikiException):
    """Raised when business logic constraints are violated."""

    default_message = "Business logic violation"
    error_code = "BUSINESS_LOGIC_ERROR"
    status_code = 400


class ExternalServiceError(ThrillWikiException):
    """Raised when external service calls fail."""

    default_message = "External service error"
    error_code = "EXTERNAL_SERVICE_ERROR"
    status_code = 502


# Domain-specific exceptions

class ParkError(ThrillWikiException):
    """Base exception for park-related errors."""
    error_code = "PARK_ERROR"


class ParkNotFoundError(NotFoundError):
    """Raised when a park is not found."""

    default_message = "Park not found"
    error_code = "PARK_NOT_FOUND"

    def __init__(self, park_slug: Optional[str] = None, **kwargs):
        if park_slug:
            kwargs['details'] = {'park_slug': park_slug}
            kwargs['message'] = f"Park with slug '{park_slug}' not found"
        super().__init__(**kwargs)


class ParkOperationError(BusinessLogicError):
    """Raised when park operation constraints are violated."""

    default_message = "Invalid park operation"
    error_code = "PARK_OPERATION_ERROR"


class RideError(ThrillWikiException):
    """Base exception for ride-related errors."""
    error_code = "RIDE_ERROR"


class RideNotFoundError(NotFoundError):
    """Raised when a ride is not found."""

    default_message = "Ride not found"
    error_code = "RIDE_NOT_FOUND"

    def __init__(self, ride_slug: Optional[str] = None, **kwargs):
        if ride_slug:
            kwargs['details'] = {'ride_slug': ride_slug}
            kwargs['message'] = f"Ride with slug '{ride_slug}' not found"
        super().__init__(**kwargs)


class RideOperationError(BusinessLogicError):
    """Raised when ride operation constraints are violated."""

    default_message = "Invalid ride operation"
    error_code = "RIDE_OPERATION_ERROR"


class LocationError(ThrillWikiException):
    """Base exception for location-related errors."""
    error_code = "LOCATION_ERROR"


class InvalidCoordinatesError(ValidationException):
    """Raised when geographic coordinates are invalid."""

    default_message = "Invalid geographic coordinates"
    error_code = "INVALID_COORDINATES"

    def __init__(self, latitude: Optional[float] = None, longitude: Optional[float] = None, **kwargs):
        if latitude is not None or longitude is not None:
            kwargs['details'] = {'latitude': latitude, 'longitude': longitude}
        super().__init__(**kwargs)


class GeolocationError(ExternalServiceError):
    """Raised when geolocation services fail."""

    default_message = "Geolocation service unavailable"
    error_code = "GEOLOCATION_ERROR"


class ReviewError(ThrillWikiException):
    """Base exception for review-related errors."""
    error_code = "REVIEW_ERROR"


class ReviewModerationError(BusinessLogicError):
    """Raised when review moderation constraints are violated."""

    default_message = "Review moderation error"
    error_code = "REVIEW_MODERATION_ERROR"


class DuplicateReviewError(BusinessLogicError):
    """Raised when user tries to create duplicate reviews."""

    default_message = "User has already reviewed this item"
    error_code = "DUPLICATE_REVIEW"


class AccountError(ThrillWikiException):
    """Base exception for account-related errors."""
    error_code = "ACCOUNT_ERROR"


class InsufficientPermissionsError(PermissionDeniedError):
    """Raised when user lacks required permissions."""

    default_message = "Insufficient permissions"
    error_code = "INSUFFICIENT_PERMISSIONS"

    def __init__(self, required_permission: Optional[str] = None, **kwargs):
        if required_permission:
            kwargs['details'] = {'required_permission': required_permission}
            kwargs['message'] = f"Permission '{required_permission}' required"
        super().__init__(**kwargs)


class EmailError(ExternalServiceError):
    """Raised when email operations fail."""

    default_message = "Email service error"
    error_code = "EMAIL_ERROR"


class CacheError(ThrillWikiException):
    """Raised when cache operations fail."""

    default_message = "Cache operation failed"
    error_code = "CACHE_ERROR"
    status_code = 500


class RoadTripError(ExternalServiceError):
    """Raised when road trip planning fails."""

    default_message = "Road trip planning error"
    error_code = "ROADTRIP_ERROR"

    def __init__(self, service_name: Optional[str] = None, **kwargs):
        if service_name:
            kwargs['details'] = {'service': service_name}
        super().__init__(**kwargs)
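Because each exception carries a `status_code` and a `to_dict()` payload, one handler can turn any of them into a JSON error response. A minimal sketch, assuming plain Django and a middleware wired into settings; the class itself is illustrative and not part of this diff:

# Hypothetical exception-handling middleware sketch:
from django.http import JsonResponse
from core.exceptions import ThrillWikiException

class ThrillWikiExceptionMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        return self.get_response(request)

    def process_exception(self, request, exception):
        # Translate domain exceptions into structured API errors; let Django handle the rest.
        if isinstance(exception, ThrillWikiException):
            return JsonResponse(exception.to_dict(), status=exception.status_code)
        return None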
1
core/forms/__init__.py
Normal file
@@ -0,0 +1 @@
from .search import LocationSearchForm
105
core/forms/search.py
Normal file
@@ -0,0 +1,105 @@
from django import forms
from django.utils.translation import gettext_lazy as _

class LocationSearchForm(forms.Form):
    """
    A comprehensive search form that includes text search, location-based
    search, and content type filtering for a unified search experience.
    """

    # Text search query
    q = forms.CharField(
        required=False,
        label=_("Search Query"),
        widget=forms.TextInput(attrs={
            'placeholder': _("Search parks, rides, companies..."),
            'class': 'w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
    )

    # Location-based search
    location = forms.CharField(
        required=False,
        label=_("Near Location"),
        widget=forms.TextInput(attrs={
            'placeholder': _("City, address, or coordinates..."),
            'id': 'location-input',
            'class': 'w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
    )

    # Hidden fields for coordinates
    lat = forms.FloatField(required=False, widget=forms.HiddenInput(attrs={'id': 'lat-input'}))
    lng = forms.FloatField(required=False, widget=forms.HiddenInput(attrs={'id': 'lng-input'}))

    # Search radius
    radius_km = forms.ChoiceField(
        required=False,
        label=_("Search Radius"),
        choices=[
            ('', _("Any distance")),
            ('5', _("5 km")),
            ('10', _("10 km")),
            ('25', _("25 km")),
            ('50', _("50 km")),
            ('100', _("100 km")),
            ('200', _("200 km")),
        ],
        widget=forms.Select(attrs={
            'class': 'w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
    )

    # Content type filters
    search_parks = forms.BooleanField(
        required=False,
        initial=True,
        label=_("Search Parks"),
        widget=forms.CheckboxInput(attrs={'class': 'rounded border-gray-300 text-blue-600 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700'})
    )
    search_rides = forms.BooleanField(
        required=False,
        label=_("Search Rides"),
        widget=forms.CheckboxInput(attrs={'class': 'rounded border-gray-300 text-blue-600 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700'})
    )
    search_companies = forms.BooleanField(
        required=False,
        label=_("Search Companies"),
        widget=forms.CheckboxInput(attrs={'class': 'rounded border-gray-300 text-blue-600 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700'})
    )

    # Geographic filters
    country = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("Country"),
            'class': 'w-full px-3 py-2 text-sm border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
    )
    state = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("State/Region"),
            'class': 'w-full px-3 py-2 text-sm border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
    )
    city = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("City"),
            'class': 'w-full px-3 py-2 text-sm border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
    )

    def clean(self):
        cleaned_data = super().clean()

        # If lat/lng are provided, ensure the location field is populated for display.
        # Compare against None so 0.0 (equator/prime meridian) is still accepted.
        lat = cleaned_data.get('lat')
        lng = cleaned_data.get('lng')
        location = cleaned_data.get('location')

        if lat is not None and lng is not None and not location:
            cleaned_data['location'] = f"{lat}, {lng}"

        return cleaned_data
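A sketch of how the cleaned data might drive a search view. The view and the `near_point` manager method are assumptions (the latter appears in core/managers.py below); only LocationSearchForm itself comes from this file:

# Hypothetical view sketch using LocationSearchForm:
from django.contrib.gis.geos import Point
from parks.models import Park  # assumed to use LocationManager from core.managers

def unified_search(request):
    form = LocationSearchForm(request.GET or None)
    results = {}
    if form.is_valid():
        data = form.cleaned_data
        if data['search_parks'] and data.get('lat') is not None and data.get('lng') is not None:
            point = Point(data['lng'], data['lat'], srid=4326)  # Point takes (x=lng, y=lat)
            radius = float(data['radius_km'] or 50)
            results['parks'] = Park.objects.near_point(point=point, distance_km=radius)
    return results  # template rendering omitted in this sketch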
1
core/health_checks/__init__.py
Normal file
@@ -0,0 +1 @@
# Health checks module
275
core/health_checks/custom_checks.py
Normal file
@@ -0,0 +1,275 @@
"""
Custom health checks for ThrillWiki application.
"""

import time
import logging
from pathlib import Path
from django.core.cache import cache
from django.db import connection
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import ServiceUnavailable, ServiceReturnedUnexpectedResult

logger = logging.getLogger(__name__)


class CacheHealthCheck(BaseHealthCheckBackend):
    """Check Redis cache connectivity and performance"""

    critical_service = True

    def check_status(self):
        try:
            # Test cache write/read performance
            test_key = 'health_check_test'
            test_value = 'test_value_' + str(int(time.time()))

            start_time = time.time()
            cache.set(test_key, test_value, timeout=30)
            cached_value = cache.get(test_key)
            cache_time = time.time() - start_time

            if cached_value != test_value:
                self.add_error("Cache read/write test failed - values don't match")
                return

            # Check cache performance
            if cache_time > 0.1:  # Warn if cache operations take more than 100ms
                self.add_error(f"Cache performance degraded: {cache_time:.3f}s for read/write operation")
                return

            # Clean up test key
            cache.delete(test_key)

            # Additional Redis-specific checks if using django-redis
            try:
                from django_redis import get_redis_connection
                redis_client = get_redis_connection("default")
                info = redis_client.info()

                # Check memory usage
                used_memory = info.get('used_memory', 0)
                max_memory = info.get('maxmemory', 0)

                if max_memory > 0:
                    memory_usage_percent = (used_memory / max_memory) * 100
                    if memory_usage_percent > 90:
                        self.add_error(f"Redis memory usage critical: {memory_usage_percent:.1f}%")
                    elif memory_usage_percent > 80:
                        logger.warning(f"Redis memory usage high: {memory_usage_percent:.1f}%")

            except ImportError:
                # django-redis not available, skip additional checks
                pass
            except Exception as e:
                logger.warning(f"Could not get Redis info: {e}")

        except Exception as e:
            self.add_error(f"Cache service unavailable: {e}")


class DatabasePerformanceCheck(BaseHealthCheckBackend):
    """Check database performance and connectivity"""

    critical_service = False

    def check_status(self):
        try:
            start_time = time.time()

            # Test basic connectivity
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                result = cursor.fetchone()

            if result[0] != 1:
                self.add_error("Database connectivity test failed")
                return

            basic_query_time = time.time() - start_time

            # Test a more complex query (if it takes too long, there might be performance issues)
            start_time = time.time()
            with connection.cursor() as cursor:
                cursor.execute("SELECT COUNT(*) FROM django_content_type")
                cursor.fetchone()

            complex_query_time = time.time() - start_time

            # Performance thresholds
            if basic_query_time > 1.0:
                self.add_error(f"Database responding slowly: basic query took {basic_query_time:.2f}s")
            elif basic_query_time > 0.5:
                logger.warning(f"Database performance degraded: basic query took {basic_query_time:.2f}s")

            if complex_query_time > 2.0:
                self.add_error(f"Database performance critical: complex query took {complex_query_time:.2f}s")
            elif complex_query_time > 1.0:
                logger.warning(f"Database performance slow: complex query took {complex_query_time:.2f}s")

            # Check database version and settings if possible
            try:
                with connection.cursor() as cursor:
                    cursor.execute("SELECT version()")
                    version = cursor.fetchone()[0]
                    logger.debug(f"Database version: {version}")
            except Exception as e:
                logger.debug(f"Could not get database version: {e}")

        except Exception as e:
            self.add_error(f"Database performance check failed: {e}")


class ApplicationHealthCheck(BaseHealthCheckBackend):
    """Check application-specific health indicators"""

    critical_service = False

    def check_status(self):
        try:
            # Check if we can import critical modules
            critical_modules = [
                'parks.models',
                'rides.models',
                'accounts.models',
                'core.services',
            ]

            for module_name in critical_modules:
                try:
                    __import__(module_name)
                except ImportError as e:
                    self.add_error(f"Critical module import failed: {module_name} - {e}")

            # Check if we can access critical models
            try:
                from parks.models import Park
                from rides.models import Ride
                from django.contrib.auth import get_user_model

                User = get_user_model()

                # Test that we can query these models (just count, don't load data)
                park_count = Park.objects.count()
                ride_count = Ride.objects.count()
                user_count = User.objects.count()

                logger.debug(f"Model counts - Parks: {park_count}, Rides: {ride_count}, Users: {user_count}")

            except Exception as e:
                self.add_error(f"Model access check failed: {e}")

            # Check media and static file configuration
            from django.conf import settings
            import os

            if not os.path.exists(settings.MEDIA_ROOT):
                self.add_error(f"Media directory does not exist: {settings.MEDIA_ROOT}")

            if not os.path.exists(settings.STATIC_ROOT) and not settings.DEBUG:
                self.add_error(f"Static directory does not exist: {settings.STATIC_ROOT}")

        except Exception as e:
            self.add_error(f"Application health check failed: {e}")


class ExternalServiceHealthCheck(BaseHealthCheckBackend):
    """Check external services and dependencies"""

    critical_service = False

    def check_status(self):
        # Check email service if configured
        try:
            from django.core.mail import get_connection
            from django.conf import settings

            if hasattr(settings, 'EMAIL_BACKEND') and 'console' not in settings.EMAIL_BACKEND:
                # Only check if not using console backend
                connection = get_connection()
                if hasattr(connection, 'open'):
                    try:
                        connection.open()
                        connection.close()
                    except Exception as e:
                        logger.warning(f"Email service check failed: {e}")
                        # Don't fail the health check for email issues in development

        except Exception as e:
            logger.debug(f"Email service check error: {e}")

        # Check if Sentry is configured and working
        try:
            import sentry_sdk

            if sentry_sdk.Hub.current.client:
                # Sentry is configured
                try:
                    # Test that we can capture a test message (this won't actually send to Sentry)
                    with sentry_sdk.push_scope() as scope:
                        scope.set_tag("health_check", True)
                        # Don't actually send a message, just verify the SDK is working
                        logger.debug("Sentry SDK is operational")
                except Exception as e:
                    logger.warning(f"Sentry SDK check failed: {e}")

        except ImportError:
            logger.debug("Sentry SDK not installed")
        except Exception as e:
            logger.debug(f"Sentry check error: {e}")

        # Check Redis connection if configured
        try:
            from django.core.cache import caches
            from django.conf import settings

            cache_config = settings.CACHES.get('default', {})
            if 'redis' in cache_config.get('BACKEND', '').lower():
                # Redis is configured, test basic connectivity
                redis_cache = caches['default']
                redis_cache.set('health_check_redis', 'test', 10)
                value = redis_cache.get('health_check_redis')
                if value != 'test':
                    self.add_error("Redis cache connectivity test failed")
                else:
                    redis_cache.delete('health_check_redis')

        except Exception as e:
            logger.warning(f"Redis connectivity check failed: {e}")


class DiskSpaceHealthCheck(BaseHealthCheckBackend):
    """Check available disk space"""

    critical_service = False

    def check_status(self):
        try:
            import shutil
            from django.conf import settings

            # Check disk space for media directory
            media_usage = shutil.disk_usage(settings.MEDIA_ROOT)
            media_free_percent = (media_usage.free / media_usage.total) * 100

            # Check disk space for logs directory if it exists.
            # Wrap BASE_DIR in Path so the '/' join works even when it is a plain string.
            logs_dir = Path(getattr(settings, 'BASE_DIR', '/tmp')) / 'logs'
            if logs_dir.exists():
                logs_usage = shutil.disk_usage(logs_dir)
                logs_free_percent = (logs_usage.free / logs_usage.total) * 100
            else:
                logs_free_percent = media_free_percent  # Use same as media

            # Alert thresholds
            if media_free_percent < 10:
                self.add_error(f"Critical disk space: {media_free_percent:.1f}% free in media directory")
            elif media_free_percent < 20:
                logger.warning(f"Low disk space: {media_free_percent:.1f}% free in media directory")

            if logs_free_percent < 10:
                self.add_error(f"Critical disk space: {logs_free_percent:.1f}% free in logs directory")
            elif logs_free_percent < 20:
                logger.warning(f"Low disk space: {logs_free_percent:.1f}% free in logs directory")

        except Exception as e:
            logger.warning(f"Disk space check failed: {e}")
            # Don't fail health check for disk space issues in development
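These backends still need to be registered with django-health-check's plugin registry, typically in an AppConfig.ready(). A minimal sketch, assuming `core` is the app doing the registration; the AppConfig shown is illustrative and not part of this diff:

# Hypothetical core/apps.py sketch:
from django.apps import AppConfig

class CoreConfig(AppConfig):
    name = 'core'

    def ready(self):
        from health_check.plugins import plugin_dir
        from core.health_checks.custom_checks import (
            CacheHealthCheck,
            DatabasePerformanceCheck,
            ApplicationHealthCheck,
            ExternalServiceHealthCheck,
            DiskSpaceHealthCheck,
        )
        # Register each custom backend so it appears on the /health/ endpoint
        for backend in (CacheHealthCheck, DatabasePerformanceCheck, ApplicationHealthCheck,
                        ExternalServiceHealthCheck, DiskSpaceHealthCheck):
            plugin_dir.register(backend)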
233
core/logging.py
Normal file
@@ -0,0 +1,233 @@
"""
Centralized logging configuration for ThrillWiki.
Provides structured logging with proper formatting and context.
"""

import logging
import sys
from typing import Dict, Any, Optional
from django.conf import settings
from django.utils import timezone


class ThrillWikiFormatter(logging.Formatter):
    """Custom formatter for ThrillWiki logs with structured output."""

    def format(self, record):
        # Add timestamp if not present
        if not hasattr(record, 'timestamp'):
            record.timestamp = timezone.now().isoformat()

        # Add request context if available
        if hasattr(record, 'request'):
            record.request_id = getattr(record.request, 'id', 'unknown')
            record.user_id = getattr(record.request.user, 'id', 'anonymous') if hasattr(record.request, 'user') else 'unknown'
            record.path = getattr(record.request, 'path', 'unknown')
            record.method = getattr(record.request, 'method', 'unknown')

        # Structure the log message
        if hasattr(record, 'extra_data'):
            record.structured_data = record.extra_data

        return super().format(record)


def get_logger(name: str) -> logging.Logger:
    """
    Get a configured logger for ThrillWiki components.

    Args:
        name: Logger name (usually __name__)

    Returns:
        Configured logger instance
    """
    logger = logging.getLogger(name)

    # Only configure if not already configured
    if not logger.handlers:
        handler = logging.StreamHandler(sys.stdout)
        formatter = ThrillWikiFormatter(
            fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.INFO if settings.DEBUG else logging.WARNING)

    return logger


def log_exception(
    logger: logging.Logger,
    exception: Exception,
    *,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.ERROR
) -> None:
    """
    Log an exception with structured context.

    Args:
        logger: Logger instance
        exception: Exception to log
        context: Additional context data
        request: Django request object
        level: Log level
    """
    log_data = {
        'exception_type': exception.__class__.__name__,
        'exception_message': str(exception),
        'context': context or {}
    }

    if request:
        log_data.update({
            'request_path': getattr(request, 'path', 'unknown'),
            'request_method': getattr(request, 'method', 'unknown'),
            'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown'
        })

    logger.log(level, f"Exception occurred: {exception}", extra={'extra_data': log_data}, exc_info=True)


def log_business_event(
    logger: logging.Logger,
    event_type: str,
    *,
    message: str,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.INFO
) -> None:
    """
    Log a business event with structured context.

    Args:
        logger: Logger instance
        event_type: Type of business event
        message: Event message
        context: Additional context data
        request: Django request object
        level: Log level
    """
    log_data = {
        'event_type': event_type,
        'context': context or {}
    }

    if request:
        log_data.update({
            'request_path': getattr(request, 'path', 'unknown'),
            'request_method': getattr(request, 'method', 'unknown'),
            'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown'
        })

    logger.log(level, message, extra={'extra_data': log_data})


def log_performance_metric(
    logger: logging.Logger,
    operation: str,
    *,
    duration_ms: float,
    context: Optional[Dict[str, Any]] = None,
    level: int = logging.INFO
) -> None:
    """
    Log a performance metric.

    Args:
        logger: Logger instance
        operation: Operation name
        duration_ms: Duration in milliseconds
        context: Additional context data
        level: Log level
    """
    log_data = {
        'metric_type': 'performance',
        'operation': operation,
        'duration_ms': duration_ms,
        'context': context or {}
    }

    message = f"Performance: {operation} took {duration_ms:.2f}ms"
    logger.log(level, message, extra={'extra_data': log_data})


def log_api_request(
    logger: logging.Logger,
    request,
    *,
    response_status: Optional[int] = None,
    duration_ms: Optional[float] = None,
    level: int = logging.INFO
) -> None:
    """
    Log an API request with context.

    Args:
        logger: Logger instance
        request: Django request object
        response_status: HTTP response status code
        duration_ms: Request duration in milliseconds
        level: Log level
    """
    log_data = {
        'request_type': 'api',
        'path': getattr(request, 'path', 'unknown'),
        'method': getattr(request, 'method', 'unknown'),
        'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown',
        'response_status': response_status,
        'duration_ms': duration_ms
    }

    message = f"API Request: {request.method} {request.path}"
    if response_status:
        message += f" -> {response_status}"
    if duration_ms:
        message += f" ({duration_ms:.2f}ms)"

    logger.log(level, message, extra={'extra_data': log_data})


def log_security_event(
    logger: logging.Logger,
    event_type: str,
    *,
    message: str,
    severity: str = 'medium',
    context: Optional[Dict[str, Any]] = None,
    request=None
) -> None:
    """
    Log a security-related event.

    Args:
        logger: Logger instance
        event_type: Type of security event
        message: Event message
        severity: Event severity (low, medium, high, critical)
        context: Additional context data
        request: Django request object
    """
    log_data = {
        'security_event': True,
        'event_type': event_type,
        'severity': severity,
        'context': context or {}
    }

    if request:
        log_data.update({
            'request_path': getattr(request, 'path', 'unknown'),
            'request_method': getattr(request, 'method', 'unknown'),
            'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown',
            'remote_addr': request.META.get('REMOTE_ADDR', 'unknown'),
            'user_agent': request.META.get('HTTP_USER_AGENT', 'unknown')
        })

    # Use WARNING for medium/high, ERROR for critical
    level = logging.ERROR if severity in ['high', 'critical'] else logging.WARNING

    logger.log(level, f"SECURITY: {message}", extra={'extra_data': log_data})
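A short usage sketch for the helpers above; the operation being timed is illustrative:

# Hypothetical usage of get_logger and log_performance_metric:
import time
from core.logging import get_logger, log_performance_metric

logger = get_logger(__name__)

def rebuild_search_index():  # assumed operation, for illustration only
    start = time.time()
    ...  # the actual work
    log_performance_metric(
        logger,
        'rebuild_search_index',
        duration_ms=(time.time() - start) * 1000,
        context={'trigger': 'manual'},
    )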
@@ -2,7 +2,7 @@ from django.core.management.base import BaseCommand
from django.core.cache import cache
from parks.models import Park
from rides.models import Ride
from analytics.models import PageView
from core.analytics import PageView

class Command(BaseCommand):
    help = 'Updates trending parks and rides cache based on views in the last 24 hours'
263
core/managers.py
Normal file
@@ -0,0 +1,263 @@
"""
Custom managers and QuerySets for optimized database patterns.
Following Django styleguide best practices for database access.
"""

from typing import Optional, List, Dict, Any, Union
from django.db import models
from django.db.models import Q, F, Count, Avg, Max, Min, Sum, Prefetch
from django.contrib.gis.geos import Point, Polygon
from django.contrib.gis.measure import Distance
from django.contrib.gis.db.models.functions import Distance as DistanceFunc
from django.utils import timezone
from datetime import timedelta


class BaseQuerySet(models.QuerySet):
    """Base QuerySet with common optimizations and patterns."""

    def active(self):
        """Filter for active/enabled records."""
        if hasattr(self.model, 'is_active'):
            return self.filter(is_active=True)
        return self

    def published(self):
        """Filter for published records."""
        if hasattr(self.model, 'is_published'):
            return self.filter(is_published=True)
        return self

    def recent(self, *, days: int = 30):
        """Filter for recently created records."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(created_at__gte=cutoff_date)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        """
        Full-text search across specified fields.

        Args:
            query: Search query string
            fields: List of field names to search (defaults to name, description)
        """
        if not query:
            return self

        if fields is None:
            fields = ['name', 'description'] if hasattr(self.model, 'name') else []

        q_objects = Q()
        for field in fields:
            if hasattr(self.model, field):
                q_objects |= Q(**{f"{field}__icontains": query})

        return self.filter(q_objects) if q_objects else self

    def with_stats(self):
        """Add basic statistics annotations."""
        return self

    def optimized_for_list(self):
        """Optimize queryset for list display."""
        return self.select_related().prefetch_related()

    def optimized_for_detail(self):
        """Optimize queryset for detail display."""
        return self.select_related().prefetch_related()


class BaseManager(models.Manager):
    """Base manager with common patterns."""

    def get_queryset(self):
        return BaseQuerySet(self.model, using=self._db)

    def active(self):
        return self.get_queryset().active()

    def published(self):
        return self.get_queryset().published()

    def recent(self, *, days: int = 30):
        return self.get_queryset().recent(days=days)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        return self.get_queryset().search(query=query, fields=fields)


class LocationQuerySet(BaseQuerySet):
    """QuerySet for location-based models with geographic functionality."""

    def near_point(self, *, point: Point, distance_km: float = 50):
        """Filter locations near a geographic point."""
        if hasattr(self.model, 'point'):
            # Annotate with distance and order by it; the old QuerySet.distance()
            # method was removed in Django 2.0.
            return self.filter(
                point__distance_lte=(point, Distance(km=distance_km))
            ).annotate(distance=DistanceFunc('point', point)).order_by('distance')
        return self

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        """Filter locations within geographic bounds."""
        if hasattr(self.model, 'point'):
            # PointField has no latitude/longitude lookups; use a bounding-box polygon.
            bbox = Polygon.from_bbox((west, south, east, north))
            return self.filter(point__within=bbox)
        return self

    def by_country(self, *, country: str):
        """Filter by country."""
        if hasattr(self.model, 'country'):
            return self.filter(country__iexact=country)
        return self

    def by_region(self, *, state: str):
        """Filter by state/region."""
        if hasattr(self.model, 'state'):
            return self.filter(state__iexact=state)
        return self

    def by_city(self, *, city: str):
        """Filter by city."""
        if hasattr(self.model, 'city'):
            return self.filter(city__iexact=city)
        return self


class LocationManager(BaseManager):
    """Manager for location-based models."""

    def get_queryset(self):
        return LocationQuerySet(self.model, using=self._db)

    def near_point(self, *, point: Point, distance_km: float = 50):
        return self.get_queryset().near_point(point=point, distance_km=distance_km)

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        return self.get_queryset().within_bounds(north=north, south=south, east=east, west=west)


class ReviewableQuerySet(BaseQuerySet):
    """QuerySet for models that can be reviewed."""

    def with_review_stats(self):
        """Add review statistics annotations."""
        return self.annotate(
            review_count=Count('reviews', filter=Q(reviews__is_published=True)),
            average_rating=Avg('reviews__rating', filter=Q(reviews__is_published=True)),
            latest_review_date=Max('reviews__created_at', filter=Q(reviews__is_published=True))
        )

    def highly_rated(self, *, min_rating: float = 8.0):
        """Filter for highly rated items."""
        return self.with_review_stats().filter(average_rating__gte=min_rating)

    def recently_reviewed(self, *, days: int = 30):
        """Filter for items with recent reviews."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(reviews__created_at__gte=cutoff_date, reviews__is_published=True).distinct()


class ReviewableManager(BaseManager):
    """Manager for reviewable models."""

    def get_queryset(self):
        return ReviewableQuerySet(self.model, using=self._db)

    def with_review_stats(self):
        return self.get_queryset().with_review_stats()

    def highly_rated(self, *, min_rating: float = 8.0):
        return self.get_queryset().highly_rated(min_rating=min_rating)


class HierarchicalQuerySet(BaseQuerySet):
    """QuerySet for hierarchical models (with parent/child relationships)."""

    def root_level(self):
        """Filter for root-level items (no parent)."""
        if hasattr(self.model, 'parent'):
            return self.filter(parent__isnull=True)
        return self

    def children_of(self, *, parent_id: int):
        """Get children of a specific parent."""
        if hasattr(self.model, 'parent'):
            return self.filter(parent_id=parent_id)
        return self

    def with_children_count(self):
        """Add count of children."""
        if hasattr(self.model, 'children'):
            return self.annotate(children_count=Count('children'))
        return self


class HierarchicalManager(BaseManager):
    """Manager for hierarchical models."""

    def get_queryset(self):
        return HierarchicalQuerySet(self.model, using=self._db)

    def root_level(self):
        return self.get_queryset().root_level()


class TimestampedQuerySet(BaseQuerySet):
    """QuerySet for models with created_at/updated_at timestamps."""

    def created_between(self, *, start_date, end_date):
        """Filter by creation date range."""
        return self.filter(created_at__date__range=[start_date, end_date])

    def updated_since(self, *, since_date):
        """Filter for records updated since a date."""
        return self.filter(updated_at__gte=since_date)

    def by_creation_date(self, *, descending: bool = True):
        """Order by creation date."""
        order = '-created_at' if descending else 'created_at'
        return self.order_by(order)


class TimestampedManager(BaseManager):
    """Manager for timestamped models."""

    def get_queryset(self):
        return TimestampedQuerySet(self.model, using=self._db)

    def created_between(self, *, start_date, end_date):
        return self.get_queryset().created_between(start_date=start_date, end_date=end_date)


class StatusQuerySet(BaseQuerySet):
    """QuerySet for models with status fields."""

    def with_status(self, *, status: Union[str, List[str]]):
        """Filter by status."""
        if isinstance(status, list):
            return self.filter(status__in=status)
        return self.filter(status=status)

    def operating(self):
        """Filter for operating/active status."""
        return self.filter(status='OPERATING')

    def closed(self):
        """Filter for closed status."""
        return self.filter(status__in=['CLOSED_TEMP', 'CLOSED_PERM'])


class StatusManager(BaseManager):
    """Manager for status-based models."""

    def get_queryset(self):
        return StatusQuerySet(self.model, using=self._db)

    def operating(self):
        return self.get_queryset().operating()

    def closed(self):
        return self.get_queryset().closed()
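A sketch of how a model would adopt these managers and chain the QuerySet methods. The Park model and its fields are assumptions for illustration; the real model lives elsewhere in the project:

# Hypothetical model wiring for the managers above:
from django.contrib.gis.db import models as gis_models
from core.managers import LocationManager

class Park(gis_models.Model):  # illustrative only
    name = gis_models.CharField(max_length=255)
    state = gis_models.CharField(max_length=100)
    point = gis_models.PointField(null=True, blank=True)
    created_at = gis_models.DateTimeField(auto_now_add=True)

    objects = LocationManager()

# Chained usage: the manager returns a LocationQuerySet, so the Base and
# Location methods compose freely (user_point is an assumed geos Point):
nearby = Park.objects.near_point(point=user_point, distance_km=25)
recent_ny = Park.objects.active().by_region(state='New York').recent(days=7)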
@@ -1,27 +0,0 @@
import pghistory
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest

class RequestContextProvider(pghistory.context):
    """Custom context provider for pghistory that extracts information from the request."""
    def __call__(self, request: WSGIRequest) -> dict:
        return {
            'user': str(request.user) if request.user and not isinstance(request.user, AnonymousUser) else None,
            'ip': request.META.get('REMOTE_ADDR'),
            'user_agent': request.META.get('HTTP_USER_AGENT'),
            'session_key': request.session.session_key if hasattr(request, 'session') else None
        }

# Initialize the context provider
request_context = RequestContextProvider()

class PgHistoryContextMiddleware:
    """
    Middleware that ensures request object is available to pghistory context.
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        return response
22
core/middleware/__init__.py
Normal file
@@ -0,0 +1,22 @@
# Core middleware modules

# Import middleware classes from the analytics module
from .analytics import PageViewMiddleware, PgHistoryContextMiddleware

# Import middleware classes from the performance_middleware.py module
from .performance_middleware import (
    PerformanceMiddleware,
    QueryCountMiddleware,
    DatabaseConnectionMiddleware,
    CachePerformanceMiddleware
)

# Make all middleware classes available at the package level
__all__ = [
    'PageViewMiddleware',
    'PgHistoryContextMiddleware',
    'PerformanceMiddleware',
    'QueryCountMiddleware',
    'DatabaseConnectionMiddleware',
    'CachePerformanceMiddleware'
]
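These classes are meant to be wired into settings.MIDDLEWARE; one plausible ordering sketch follows. The surrounding entries are stock Django middleware, and the exact ordering is an assumption, not taken from this diff:

# Hypothetical settings.py excerpt:
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'core.middleware.PerformanceMiddleware',       # early, so it times the layers below it
    'core.middleware.QueryCountMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'core.middleware.PgHistoryContextMiddleware',  # after auth, so request.user is populated
    'core.middleware.PageViewMiddleware',
    # ... remaining project middleware
]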
@@ -1,9 +1,46 @@
"""
Analytics and tracking middleware for Django application.
"""

import pghistory
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest
from django.utils.deprecation import MiddlewareMixin
from django.contrib.contenttypes.models import ContentType
from django.views.generic.detail import DetailView
from .models import PageView
from core.analytics import PageView


class RequestContextProvider(pghistory.context):
    """Custom context provider for pghistory that extracts information from the request."""
    def __call__(self, request: WSGIRequest) -> dict:
        return {
            'user': str(request.user) if request.user and not isinstance(request.user, AnonymousUser) else None,
            'ip': request.META.get('REMOTE_ADDR'),
            'user_agent': request.META.get('HTTP_USER_AGENT'),
            'session_key': request.session.session_key if hasattr(request, 'session') else None
        }


# Initialize the context provider
request_context = RequestContextProvider()


class PgHistoryContextMiddleware:
    """
    Middleware that ensures request object is available to pghistory context.
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        return response


class PageViewMiddleware(MiddlewareMixin):
    """Middleware to track page views for DetailView-based pages."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        # Only track GET requests
        if request.method != 'GET':
268
core/middleware/performance_middleware.py
Normal file
@@ -0,0 +1,268 @@
"""
Performance monitoring middleware for tracking request metrics.
"""

import time
import logging
from django.db import connection
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings

performance_logger = logging.getLogger('performance')
logger = logging.getLogger(__name__)


class PerformanceMiddleware(MiddlewareMixin):
    """Middleware to collect performance metrics for each request"""

    def process_request(self, request):
        """Initialize performance tracking for the request"""
        request._performance_start_time = time.time()
        request._performance_initial_queries = len(connection.queries) if hasattr(connection, 'queries') else 0
        return None

    def process_response(self, request, response):
        """Log performance metrics after response is ready"""
        # Skip performance tracking for certain paths
        skip_paths = ['/health/', '/admin/jsi18n/', '/static/', '/media/', '/__debug__/']
        if any(request.path.startswith(path) for path in skip_paths):
            return response

        # Calculate metrics
        end_time = time.time()
        start_time = getattr(request, '_performance_start_time', end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, '_performance_initial_queries', 0)
        total_queries = len(connection.queries) - initial_queries if hasattr(connection, 'queries') else 0

        # Get content length
        content_length = 0
        if hasattr(response, 'content'):
            content_length = len(response.content)
        elif hasattr(response, 'streaming_content'):
            # For streaming responses, we can't easily measure content length
            content_length = -1

        # Build performance data
        performance_data = {
            'path': request.path,
            'method': request.method,
            'status_code': response.status_code,
            'duration_ms': round(duration * 1000, 2),
            'duration_seconds': round(duration, 3),
            'query_count': total_queries,
            'content_length_bytes': content_length,
            'user_id': getattr(request.user, 'id', None) if hasattr(request, 'user') and request.user.is_authenticated else None,
            'user_agent': request.META.get('HTTP_USER_AGENT', '')[:100],  # Truncate user agent
            'remote_addr': self._get_client_ip(request),
        }

        # Add query details in debug mode
        if settings.DEBUG and hasattr(connection, 'queries') and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            performance_data['queries'] = [
                {
                    'sql': query['sql'][:200] + '...' if len(query['sql']) > 200 else query['sql'],
                    'time': float(query['time'])
                }
                for query in recent_queries[-10:]  # Last 10 queries only
            ]

            # Identify slow queries
            slow_queries = [q for q in recent_queries if float(q['time']) > 0.1]
            if slow_queries:
                performance_data['slow_query_count'] = len(slow_queries)
                performance_data['slowest_query_time'] = max(float(q['time']) for q in slow_queries)

        # Determine log level based on performance
        log_level = self._get_log_level(duration, total_queries, response.status_code)

        # Log the performance data
        performance_logger.log(
            log_level,
            f"Request performance: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {response.status_code}",
            extra=performance_data
        )

        # Add performance headers for debugging (only in debug mode)
        if settings.DEBUG:
            response['X-Response-Time'] = f"{duration * 1000:.2f}ms"
            response['X-Query-Count'] = str(total_queries)
            if total_queries > 0 and hasattr(connection, 'queries'):
                total_query_time = sum(float(q['time']) for q in connection.queries[-total_queries:])
                response['X-Query-Time'] = f"{total_query_time * 1000:.2f}ms"

        return response

    def process_exception(self, request, exception):
        """Log performance data even when an exception occurs"""
        end_time = time.time()
        start_time = getattr(request, '_performance_start_time', end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, '_performance_initial_queries', 0)
        total_queries = len(connection.queries) - initial_queries if hasattr(connection, 'queries') else 0

        performance_data = {
            'path': request.path,
            'method': request.method,
            'status_code': 500,  # Exception occurred
            'duration_ms': round(duration * 1000, 2),
            'query_count': total_queries,
            'exception': str(exception),
            'exception_type': type(exception).__name__,
            'user_id': getattr(request.user, 'id', None) if hasattr(request, 'user') and request.user.is_authenticated else None,
        }

        performance_logger.error(
            f"Request exception: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {type(exception).__name__}: {exception}",
            extra=performance_data
        )

        return None  # Don't handle the exception, just log it

    def _get_client_ip(self, request):
        """Extract client IP address from request"""
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0].strip()
        else:
            ip = request.META.get('REMOTE_ADDR', '')
        return ip

    def _get_log_level(self, duration, query_count, status_code):
        """Determine appropriate log level based on performance metrics"""
        # Error responses
        if status_code >= 500:
            return logging.ERROR
        elif status_code >= 400:
            return logging.WARNING

        # Performance-based log levels
        if duration > 5.0:  # Very slow requests
            return logging.ERROR
        elif duration > 2.0 or query_count > 20:  # Slow requests or high query count
            return logging.WARNING
        elif duration > 1.0 or query_count > 10:  # Moderately slow
            return logging.INFO
        else:
            return logging.DEBUG


class QueryCountMiddleware(MiddlewareMixin):
    """Middleware to track and limit query counts per request"""

    def __init__(self, get_response):
        self.get_response = get_response
        self.query_limit = getattr(settings, 'MAX_QUERIES_PER_REQUEST', 50)
        super().__init__(get_response)

    def process_request(self, request):
        """Initialize query tracking"""
        request._query_count_start = len(connection.queries) if hasattr(connection, 'queries') else 0
        return None

    def process_response(self, request, response):
        """Check query count and warn if excessive"""
        if not hasattr(connection, 'queries'):
            return response

        start_count = getattr(request, '_query_count_start', 0)
        current_count = len(connection.queries)
        request_query_count = current_count - start_count

        if request_query_count > self.query_limit:
            logger.warning(
                f"Excessive query count: {request.path} executed {request_query_count} queries "
                f"(limit: {self.query_limit})",
                extra={
                    'path': request.path,
                    'method': request.method,
                    'query_count': request_query_count,
                    'query_limit': self.query_limit,
                    'excessive_queries': True
                }
            )

        return response


class DatabaseConnectionMiddleware(MiddlewareMixin):
    """Middleware to monitor database connection health"""

    def process_request(self, request):
        """Check database connection at start of request"""
        try:
            # Simple connection test
            from django.db import connection
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
        except Exception as e:
            logger.error(
                f"Database connection failed at request start: {e}",
                extra={
                    'path': request.path,
                    'method': request.method,
                    'database_error': str(e)
                }
            )
            # Don't block the request, let Django handle the database error

        return None

    def process_response(self, request, response):
        """Close database connections properly"""
        try:
            from django.db import connection
            connection.close()
        except Exception as e:
            logger.warning(f"Error closing database connection: {e}")

        return response


class CachePerformanceMiddleware(MiddlewareMixin):
    """Middleware to monitor cache performance"""

    def process_request(self, request):
        """Initialize cache performance tracking"""
        request._cache_hits = 0
        request._cache_misses = 0
        request._cache_start_time = time.time()
        return None

    def process_response(self, request, response):
        """Log cache performance metrics"""
        cache_duration = time.time() - getattr(request, '_cache_start_time', time.time())
        cache_hits = getattr(request, '_cache_hits', 0)
        cache_misses = getattr(request, '_cache_misses', 0)

        if cache_hits + cache_misses > 0:
            hit_rate = (cache_hits / (cache_hits + cache_misses)) * 100

            cache_data = {
                'path': request.path,
                'cache_hits': cache_hits,
                'cache_misses': cache_misses,
                'cache_hit_rate': round(hit_rate, 2),
                'cache_operations': cache_hits + cache_misses,
                'cache_duration': round(cache_duration * 1000, 2)  # milliseconds
            }

            # Log cache performance
            if hit_rate < 50 and cache_hits + cache_misses > 5:
                logger.warning(
                    f"Low cache hit rate for {request.path}: {hit_rate:.1f}%",
|
||||
extra=cache_data
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
f"Cache performance for {request.path}: {hit_rate:.1f}% hit rate",
|
||||
extra=cache_data
|
||||
)
|
||||
|
||||
return response
|
||||
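How these middleware classes might be enabled; a minimal settings sketch assuming they live in core/middleware.py (the module path is an assumption, not confirmed by this diff):

# settings.py (sketch; core.middleware is an assumed path)
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    # ... the rest of the stock Django middleware ...
    "core.middleware.PerformanceMonitoringMiddleware",
    "core.middleware.QueryCountMiddleware",
    "core.middleware.CachePerformanceMiddleware",
]
MAX_QUERIES_PER_REQUEST = 50  # threshold read by QueryCountMiddleware via getattr(settings, ...)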
@@ -1,4 +1,4 @@
-# Generated by Django 5.1.4 on 2025-02-10 01:10
+# Generated by Django 5.1.4 on 2025-08-13 21:35

 import django.db.models.deletion
 from django.db import migrations, models
core/migrations/0002_historicalslug_pageview.py (new file, 98 lines)
@@ -0,0 +1,98 @@
# Generated by Django 5.1.4 on 2025-08-14 14:50

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("core", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="HistoricalSlug",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                ("slug", models.SlugField(max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="historical_slugs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="core_histor_content_b4c470_idx",
                    ),
                    models.Index(fields=["slug"], name="core_histor_slug_8fd7b3_idx"),
                ],
                "unique_together": {("content_type", "slug")},
            },
        ),
        migrations.CreateModel(
            name="PageView",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                ("timestamp", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("ip_address", models.GenericIPAddressField()),
                ("user_agent", models.CharField(blank=True, max_length=512)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="page_views",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["timestamp"], name="core_pagevi_timesta_757ebb_idx"
                    ),
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="core_pagevi_content_eda7ad_idx",
                    ),
                ],
            },
        ),
    ]
core/mixins/__init__.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from django.views.generic.list import MultipleObjectMixin


class HTMXFilterableMixin(MultipleObjectMixin):
    """
    A mixin that provides filtering capabilities for HTMX requests.
    """
    filter_class = None

    def get_queryset(self):
        queryset = super().get_queryset()
        self.filterset = self.filter_class(self.request.GET, queryset=queryset)
        return self.filterset.qs

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['filter'] = self.filterset
        return context
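A minimal usage sketch, assuming filter_class is a django-filter FilterSet; ParkFilter and ParkListView below are hypothetical names, not part of this diff:

# views.py (sketch)
from django.views.generic import ListView
from core.mixins import HTMXFilterableMixin
from parks.models import Park          # assumption: model exists at this path
from parks.filters import ParkFilter   # hypothetical django_filters.FilterSet


class ParkListView(HTMXFilterableMixin, ListView):
    model = Park
    filter_class = ParkFilter
    template_name = "parks/park_list.html"
    # The template can render {{ filter.form }} and iterate the filtered object_list.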
@@ -2,7 +2,7 @@ from django.db import models
 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
 from django.utils.text import slugify
-from history_tracking.models import TrackedModel
+from core.history import TrackedModel

 class SlugHistory(models.Model):
     """
core/selectors.py (new file, 299 lines)
@@ -0,0 +1,299 @@
"""
Selectors for core functionality including map services and analytics.
Following Django styleguide pattern for separating data access from business logic.
"""

from typing import Optional, Dict, Any, List, Union
from django.db.models import QuerySet, Q, F, Count, Avg
from django.contrib.gis.geos import Point, Polygon
from django.contrib.gis.measure import Distance
from django.utils import timezone
from datetime import timedelta

from .analytics import PageView
from parks.models import Park
from rides.models import Ride


def unified_locations_for_map(
    *,
    bounds: Optional[Polygon] = None,
    location_types: Optional[List[str]] = None,
    filters: Optional[Dict[str, Any]] = None
) -> Dict[str, QuerySet]:
    """
    Get unified location data for map display across all location types.

    Args:
        bounds: Geographic boundary polygon
        location_types: List of location types to include ('park', 'ride')
        filters: Additional filter parameters

    Returns:
        Dictionary containing querysets for each location type
    """
    results = {}

    # Default to all location types if none specified
    if not location_types:
        location_types = ['park', 'ride']

    # Parks
    if 'park' in location_types:
        park_queryset = Park.objects.select_related(
            'operator'
        ).prefetch_related(
            'location'
        ).annotate(
            ride_count_calculated=Count('rides')
        )

        if bounds:
            park_queryset = park_queryset.filter(
                location__coordinates__within=bounds
            )

        if filters:
            if 'status' in filters:
                park_queryset = park_queryset.filter(status=filters['status'])
            if 'operator' in filters:
                park_queryset = park_queryset.filter(operator=filters['operator'])

        results['parks'] = park_queryset.order_by('name')

    # Rides
    if 'ride' in location_types:
        ride_queryset = Ride.objects.select_related(
            'park',
            'manufacturer'
        ).prefetch_related(
            'park__location',
            'location'
        )

        if bounds:
            ride_queryset = ride_queryset.filter(
                Q(location__coordinates__within=bounds) |
                Q(park__location__coordinates__within=bounds)
            )

        if filters:
            if 'category' in filters:
                ride_queryset = ride_queryset.filter(category=filters['category'])
            if 'manufacturer' in filters:
                ride_queryset = ride_queryset.filter(manufacturer=filters['manufacturer'])
            if 'park' in filters:
                ride_queryset = ride_queryset.filter(park=filters['park'])

        results['rides'] = ride_queryset.order_by('park__name', 'name')

    return results


def locations_near_point(
    *,
    point: Point,
    distance_km: float = 50,
    location_types: Optional[List[str]] = None,
    limit: int = 20
) -> Dict[str, QuerySet]:
    """
    Get locations near a specific geographic point across all types.

    Args:
        point: Geographic point (longitude, latitude)
        distance_km: Maximum distance in kilometers
        location_types: List of location types to include
        limit: Maximum number of results per type

    Returns:
        Dictionary containing nearby locations by type
    """
    # QuerySet.distance() was removed in Django 2.0; annotate with the GIS
    # Distance function and order by the annotation instead.
    from django.contrib.gis.db.models.functions import Distance as DistanceFunc

    results = {}

    if not location_types:
        location_types = ['park', 'ride']

    # Parks near point
    if 'park' in location_types:
        results['parks'] = Park.objects.filter(
            location__coordinates__distance_lte=(point, Distance(km=distance_km))
        ).select_related(
            'operator'
        ).prefetch_related(
            'location'
        ).annotate(
            distance=DistanceFunc('location__coordinates', point)
        ).order_by('distance')[:limit]

    # Rides near point
    if 'ride' in location_types:
        # Rides without their own coordinates fall back to the park's location
        # for distance ordering.
        results['rides'] = Ride.objects.filter(
            Q(location__coordinates__distance_lte=(point, Distance(km=distance_km))) |
            Q(park__location__coordinates__distance_lte=(point, Distance(km=distance_km)))
        ).select_related(
            'park',
            'manufacturer'
        ).prefetch_related(
            'park__location'
        ).annotate(
            distance=DistanceFunc('park__location__coordinates', point)
        ).order_by('distance')[:limit]

    return results


def search_all_locations(*, query: str, limit: int = 20) -> Dict[str, QuerySet]:
    """
    Search across all location types for a query string.

    Args:
        query: Search string
        limit: Maximum results per type

    Returns:
        Dictionary containing search results by type
    """
    results = {}

    # Search parks
    results['parks'] = Park.objects.filter(
        Q(name__icontains=query) |
        Q(description__icontains=query) |
        Q(location__city__icontains=query) |
        Q(location__region__icontains=query)
    ).select_related(
        'operator'
    ).prefetch_related(
        'location'
    ).order_by('name')[:limit]

    # Search rides
    results['rides'] = Ride.objects.filter(
        Q(name__icontains=query) |
        Q(description__icontains=query) |
        Q(park__name__icontains=query) |
        Q(manufacturer__name__icontains=query)
    ).select_related(
        'park',
        'manufacturer'
    ).prefetch_related(
        'park__location'
    ).order_by('park__name', 'name')[:limit]

    return results


def page_views_for_analytics(
    *,
    start_date: Optional[timezone.datetime] = None,
    end_date: Optional[timezone.datetime] = None,
    path_pattern: Optional[str] = None
) -> QuerySet[PageView]:
    """
    Get page views for analytics with optional filtering.

    Args:
        start_date: Start date for filtering
        end_date: End date for filtering
        path_pattern: URL path pattern to filter by

    Returns:
        QuerySet of page views
    """
    queryset = PageView.objects.all()

    if start_date:
        queryset = queryset.filter(timestamp__gte=start_date)

    if end_date:
        queryset = queryset.filter(timestamp__lte=end_date)

    if path_pattern:
        queryset = queryset.filter(path__icontains=path_pattern)

    return queryset.order_by('-timestamp')


def popular_pages_summary(*, days: int = 30) -> Dict[str, Any]:
    """
    Get summary of most popular pages in the last N days.

    Args:
        days: Number of days to analyze

    Returns:
        Dictionary containing popular pages statistics
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    # Most viewed pages
    popular_pages = PageView.objects.filter(
        timestamp__gte=cutoff_date
    ).values('path').annotate(
        view_count=Count('id')
    ).order_by('-view_count')[:10]

    # Total page views
    total_views = PageView.objects.filter(
        timestamp__gte=cutoff_date
    ).count()

    # Unique visitors (based on IP)
    unique_visitors = PageView.objects.filter(
        timestamp__gte=cutoff_date
    ).values('ip_address').distinct().count()

    return {
        'popular_pages': list(popular_pages),
        'total_views': total_views,
        'unique_visitors': unique_visitors,
        'period_days': days
    }


def geographic_distribution_summary() -> Dict[str, Any]:
    """
    Get geographic distribution statistics for all locations.

    Returns:
        Dictionary containing geographic statistics
    """
    # Parks by country
    parks_by_country = Park.objects.filter(
        location__country__isnull=False
    ).values('location__country').annotate(
        count=Count('id')
    ).order_by('-count')

    # Rides by country (through park location)
    rides_by_country = Ride.objects.filter(
        park__location__country__isnull=False
    ).values('park__location__country').annotate(
        count=Count('id')
    ).order_by('-count')

    return {
        'parks_by_country': list(parks_by_country),
        'rides_by_country': list(rides_by_country)
    }


def system_health_metrics() -> Dict[str, Any]:
    """
    Get system health and activity metrics.

    Returns:
        Dictionary containing system health statistics
    """
    now = timezone.now()
    last_24h = now - timedelta(hours=24)
    last_7d = now - timedelta(days=7)

    return {
        'total_parks': Park.objects.count(),
        'operating_parks': Park.objects.filter(status='OPERATING').count(),
        'total_rides': Ride.objects.count(),
        'page_views_24h': PageView.objects.filter(timestamp__gte=last_24h).count(),
        'page_views_7d': PageView.objects.filter(timestamp__gte=last_7d).count(),
        'data_freshness': {
            'latest_park_update': Park.objects.order_by('-updated_at').first().updated_at if Park.objects.exists() else None,
            'latest_ride_update': Ride.objects.order_by('-updated_at').first().updated_at if Ride.objects.exists() else None,
        }
    }
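A short sketch of calling these selectors from a shell or view (coordinates are illustrative; GEOS Point takes lng, lat):

from django.contrib.gis.geos import Point
from core import selectors

nearby = selectors.locations_near_point(point=Point(-75.16, 39.95), distance_km=100)
top_pages = selectors.popular_pages_summary(days=7)
health = selectors.system_health_metrics()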
core/services/__init__.py (new file, 27 lines)
@@ -0,0 +1,27 @@
"""
Core services for ThrillWiki unified map functionality.
"""

from .map_service import UnifiedMapService
from .clustering_service import ClusteringService
from .map_cache_service import MapCacheService
from .data_structures import (
    UnifiedLocation,
    LocationType,
    GeoBounds,
    MapFilters,
    MapResponse,
    ClusterData
)

__all__ = [
    'UnifiedMapService',
    'ClusteringService',
    'MapCacheService',
    'UnifiedLocation',
    'LocationType',
    'GeoBounds',
    'MapFilters',
    'MapResponse',
    'ClusterData'
]
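These re-exports mean callers can import from the package root; a one-line sketch:

from core.services import UnifiedMapService, MapFilters, LocationType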
core/services/clustering_service.py (new file, 342 lines)
@@ -0,0 +1,342 @@
"""
Clustering service for map locations to improve performance and user experience.
"""

import math
from typing import List, Tuple, Dict, Any, Optional, Set
from dataclasses import dataclass
from collections import defaultdict

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    LocationType
)


@dataclass
class ClusterPoint:
    """Internal representation of a point for clustering."""
    location: UnifiedLocation
    x: float  # Projected x coordinate
    y: float  # Projected y coordinate


class ClusteringService:
    """
    Handles location clustering for map display using a simple grid-based approach
    with zoom-level dependent clustering radius.
    """

    # Clustering configuration
    DEFAULT_RADIUS = 40  # pixels
    MIN_POINTS_TO_CLUSTER = 2
    MAX_ZOOM_FOR_CLUSTERING = 15
    MIN_ZOOM_FOR_CLUSTERING = 3

    # Zoom level configurations
    ZOOM_CONFIGS = {
        3: {'radius': 80, 'min_points': 5},   # World level
        4: {'radius': 70, 'min_points': 4},   # Continent level
        5: {'radius': 60, 'min_points': 3},   # Country level
        6: {'radius': 50, 'min_points': 3},   # Large region level
        7: {'radius': 45, 'min_points': 2},   # Region level
        8: {'radius': 40, 'min_points': 2},   # State level
        9: {'radius': 35, 'min_points': 2},   # Metro area level
        10: {'radius': 30, 'min_points': 2},  # City level
        11: {'radius': 25, 'min_points': 2},  # District level
        12: {'radius': 20, 'min_points': 2},  # Neighborhood level
        13: {'radius': 15, 'min_points': 2},  # Block level
        14: {'radius': 10, 'min_points': 2},  # Street level
        15: {'radius': 5, 'min_points': 2},   # Building level
    }

    def __init__(self):
        self.cluster_id_counter = 0

    def should_cluster(self, zoom_level: int, point_count: int) -> bool:
        """Determine if clustering should be applied based on zoom level and point count."""
        if zoom_level > self.MAX_ZOOM_FOR_CLUSTERING:
            return False
        if zoom_level < self.MIN_ZOOM_FOR_CLUSTERING:
            return True

        config = self.ZOOM_CONFIGS.get(zoom_level, {'min_points': self.MIN_POINTS_TO_CLUSTER})
        return point_count >= config['min_points']

    def cluster_locations(
        self,
        locations: List[UnifiedLocation],
        zoom_level: int,
        bounds: Optional[GeoBounds] = None
    ) -> Tuple[List[UnifiedLocation], List[ClusterData]]:
        """
        Cluster locations based on zoom level and density.
        Returns (unclustered_locations, clusters).
        """
        if not locations or not self.should_cluster(zoom_level, len(locations)):
            return locations, []

        # Convert locations to projected coordinates for clustering
        cluster_points = self._project_locations(locations, bounds)

        # Get clustering configuration for zoom level
        config = self.ZOOM_CONFIGS.get(zoom_level, {
            'radius': self.DEFAULT_RADIUS,
            'min_points': self.MIN_POINTS_TO_CLUSTER
        })

        # Perform clustering
        clustered_groups = self._cluster_points(cluster_points, config['radius'], config['min_points'])

        # Separate individual locations from clusters
        unclustered_locations = []
        clusters = []

        for group in clustered_groups:
            if len(group) < config['min_points']:
                # Add individual locations
                unclustered_locations.extend([cp.location for cp in group])
            else:
                # Create cluster
                cluster = self._create_cluster(group)
                clusters.append(cluster)

        return unclustered_locations, clusters

    def _project_locations(
        self,
        locations: List[UnifiedLocation],
        bounds: Optional[GeoBounds] = None
    ) -> List[ClusterPoint]:
        """Convert lat/lng coordinates to projected x/y for clustering calculations."""
        cluster_points = []

        # Use bounds or calculate from locations
        if not bounds:
            lats = [loc.latitude for loc in locations]
            lngs = [loc.longitude for loc in locations]
            bounds = GeoBounds(
                north=max(lats),
                south=min(lats),
                east=max(lngs),
                west=min(lngs)
            )

        # Simple equirectangular projection (good enough for clustering)
        center_lat = (bounds.north + bounds.south) / 2
        lat_scale = 111320  # meters per degree latitude
        lng_scale = 111320 * math.cos(math.radians(center_lat))  # meters per degree longitude

        for location in locations:
            # Convert to meters relative to bounds center
            x = (location.longitude - (bounds.west + bounds.east) / 2) * lng_scale
            y = (location.latitude - (bounds.north + bounds.south) / 2) * lat_scale

            cluster_points.append(ClusterPoint(
                location=location,
                x=x,
                y=y
            ))

        return cluster_points

    def _cluster_points(
        self,
        points: List[ClusterPoint],
        radius_pixels: int,
        min_points: int
    ) -> List[List[ClusterPoint]]:
        """
        Cluster points using a simple distance-based approach.
        Radius is in pixels, converted to meters based on zoom level.
        """
        # Convert pixel radius to meters (rough approximation)
        # At zoom level 10, 1 pixel ≈ 150 meters
        radius_meters = radius_pixels * 150

        clustered = [False] * len(points)
        clusters = []

        for i, point in enumerate(points):
            if clustered[i]:
                continue

            # Find all points within radius
            cluster_group = [point]
            clustered[i] = True

            for j, other_point in enumerate(points):
                if i == j or clustered[j]:
                    continue

                distance = self._calculate_distance(point, other_point)
                if distance <= radius_meters:
                    cluster_group.append(other_point)
                    clustered[j] = True

            clusters.append(cluster_group)

        return clusters

    def _calculate_distance(self, point1: ClusterPoint, point2: ClusterPoint) -> float:
        """Calculate Euclidean distance between two projected points in meters."""
        dx = point1.x - point2.x
        dy = point1.y - point2.y
        return math.sqrt(dx * dx + dy * dy)

    def _create_cluster(self, cluster_points: List[ClusterPoint]) -> ClusterData:
        """Create a ClusterData object from a group of points."""
        locations = [cp.location for cp in cluster_points]

        # Calculate cluster center (average position)
        avg_lat = sum(loc.latitude for loc in locations) / len(locations)
        avg_lng = sum(loc.longitude for loc in locations) / len(locations)

        # Calculate cluster bounds
        lats = [loc.latitude for loc in locations]
        lngs = [loc.longitude for loc in locations]
        cluster_bounds = GeoBounds(
            north=max(lats),
            south=min(lats),
            east=max(lngs),
            west=min(lngs)
        )

        # Collect location types in cluster
        types = set(loc.type for loc in locations)

        # Select representative location (highest weight)
        representative = self._select_representative_location(locations)

        # Generate cluster ID
        self.cluster_id_counter += 1
        cluster_id = f"cluster_{self.cluster_id_counter}"

        return ClusterData(
            id=cluster_id,
            coordinates=(avg_lat, avg_lng),
            count=len(locations),
            types=types,
            bounds=cluster_bounds,
            representative_location=representative
        )

    def _select_representative_location(self, locations: List[UnifiedLocation]) -> Optional[UnifiedLocation]:
        """Select the most representative location for a cluster."""
        if not locations:
            return None

        # Prioritize by: 1) Parks over rides/companies, 2) Higher weight, 3) Better rating
        parks = [loc for loc in locations if loc.type == LocationType.PARK]
        if parks:
            return max(parks, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0
            ))

        rides = [loc for loc in locations if loc.type == LocationType.RIDE]
        if rides:
            return max(rides, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0
            ))

        companies = [loc for loc in locations if loc.type == LocationType.COMPANY]
        if companies:
            return max(companies, key=lambda x: x.cluster_weight)

        # Fall back to highest weight location
        return max(locations, key=lambda x: x.cluster_weight)

    def get_cluster_breakdown(self, clusters: List[ClusterData]) -> Dict[str, Any]:
        """Get statistics about clustering results."""
        if not clusters:
            return {
                'total_clusters': 0,
                'total_points_clustered': 0,
                'average_cluster_size': 0,
                'type_distribution': {},
                'category_distribution': {}
            }

        total_points = sum(cluster.count for cluster in clusters)
        type_counts = defaultdict(int)
        category_counts = defaultdict(int)

        for cluster in clusters:
            for location_type in cluster.types:
                type_counts[location_type.value] += cluster.count

            if cluster.representative_location:
                category_counts[cluster.representative_location.cluster_category] += 1

        return {
            'total_clusters': len(clusters),
            'total_points_clustered': total_points,
            'average_cluster_size': total_points / len(clusters),
            'largest_cluster_size': max(cluster.count for cluster in clusters),
            'smallest_cluster_size': min(cluster.count for cluster in clusters),
            'type_distribution': dict(type_counts),
            'category_distribution': dict(category_counts)
        }

    def expand_cluster(self, cluster: ClusterData, zoom_level: int) -> List[UnifiedLocation]:
        """
        Expand a cluster to show individual locations (for drill-down functionality).
        This would typically require re-querying the database with the cluster bounds.
        """
        # This is a placeholder - in practice, this would re-query the database
        # with the cluster bounds and higher detail level
        return []


class SmartClusteringRules:
    """
    Advanced clustering rules that consider location types and importance.
    """

    @staticmethod
    def should_cluster_together(loc1: UnifiedLocation, loc2: UnifiedLocation) -> bool:
        """Determine if two locations should be clustered together."""

        # Same park rides should cluster together more readily
        if loc1.type == LocationType.RIDE and loc2.type == LocationType.RIDE:
            park1_id = loc1.metadata.get('park_id')
            park2_id = loc2.metadata.get('park_id')
            if park1_id and park2_id and park1_id == park2_id:
                return True

        # Major parks should resist clustering unless very close
        if (loc1.cluster_category == "major_park" or loc2.cluster_category == "major_park"):
            return False

        # Similar types cluster more readily
        if loc1.type == loc2.type:
            return True

        # Different types can cluster but with higher threshold
        return False

    @staticmethod
    def calculate_cluster_priority(locations: List[UnifiedLocation]) -> UnifiedLocation:
        """Select the representative location for a cluster based on priority rules."""
        # Prioritize by: 1) Parks over rides, 2) Higher weight, 3) Better rating
        parks = [loc for loc in locations if loc.type == LocationType.PARK]
        if parks:
            return max(parks, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0,
                x.metadata.get('ride_count', 0) or 0
            ))

        rides = [loc for loc in locations if loc.type == LocationType.RIDE]
        if rides:
            return max(rides, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0
            ))

        # Fall back to highest weight
        return max(locations, key=lambda x: x.cluster_weight)
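A quick sketch of driving the service by hand, assuming a list of UnifiedLocation objects named locations has already been built (zoom level 8 picks the "state level" config above):

service = ClusteringService()
singles, clusters = service.cluster_locations(locations, zoom_level=8)
stats = service.get_cluster_breakdown(clusters)
print(stats['total_clusters'], stats['type_distribution'])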
core/services/data_structures.py (new file, 240 lines)
@@ -0,0 +1,240 @@
"""
Data structures for the unified map service.
"""

from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, List, Optional, Set, Tuple, Any
from django.contrib.gis.geos import Polygon, Point


class LocationType(Enum):
    """Types of locations supported by the map service."""
    PARK = "park"
    RIDE = "ride"
    COMPANY = "company"
    GENERIC = "generic"


@dataclass
class GeoBounds:
    """Geographic boundary box for spatial queries."""
    north: float
    south: float
    east: float
    west: float

    def __post_init__(self):
        """Validate bounds after initialization."""
        if self.north < self.south:
            raise ValueError("North bound must be greater than south bound")
        if self.east < self.west:
            raise ValueError("East bound must be greater than west bound")
        if not (-90 <= self.south <= 90 and -90 <= self.north <= 90):
            raise ValueError("Latitude bounds must be between -90 and 90")
        if not (-180 <= self.west <= 180 and -180 <= self.east <= 180):
            raise ValueError("Longitude bounds must be between -180 and 180")

    def to_polygon(self) -> Polygon:
        """Convert bounds to PostGIS Polygon for database queries."""
        return Polygon.from_bbox((self.west, self.south, self.east, self.north))

    def expand(self, factor: float = 1.1) -> 'GeoBounds':
        """Expand bounds by factor for buffer queries."""
        center_lat = (self.north + self.south) / 2
        center_lng = (self.east + self.west) / 2

        lat_range = (self.north - self.south) * factor / 2
        lng_range = (self.east - self.west) * factor / 2

        return GeoBounds(
            north=min(90, center_lat + lat_range),
            south=max(-90, center_lat - lat_range),
            east=min(180, center_lng + lng_range),
            west=max(-180, center_lng - lng_range)
        )

    def contains_point(self, lat: float, lng: float) -> bool:
        """Check if a point is within these bounds."""
        return (self.south <= lat <= self.north and
                self.west <= lng <= self.east)

    def to_dict(self) -> Dict[str, float]:
        """Convert to dictionary for JSON serialization."""
        return {
            'north': self.north,
            'south': self.south,
            'east': self.east,
            'west': self.west
        }


@dataclass
class MapFilters:
    """Filtering options for map queries."""
    location_types: Optional[Set[LocationType]] = None
    park_status: Optional[Set[str]] = None  # OPERATING, CLOSED_TEMP, etc.
    ride_types: Optional[Set[str]] = None
    company_roles: Optional[Set[str]] = None  # OPERATOR, MANUFACTURER, etc.
    search_query: Optional[str] = None
    min_rating: Optional[float] = None
    has_coordinates: bool = True
    country: Optional[str] = None
    state: Optional[str] = None
    city: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for caching and serialization."""
        return {
            'location_types': [t.value for t in self.location_types] if self.location_types else None,
            'park_status': list(self.park_status) if self.park_status else None,
            'ride_types': list(self.ride_types) if self.ride_types else None,
            'company_roles': list(self.company_roles) if self.company_roles else None,
            'search_query': self.search_query,
            'min_rating': self.min_rating,
            'has_coordinates': self.has_coordinates,
            'country': self.country,
            'state': self.state,
            'city': self.city,
        }


@dataclass
class UnifiedLocation:
    """Unified location interface for all location types."""
    id: str  # Composite: f"{type}_{id}"
    type: LocationType
    name: str
    coordinates: Tuple[float, float]  # (lat, lng)
    address: Optional[str] = None
    metadata: Dict[str, Any] = field(default_factory=dict)
    type_data: Dict[str, Any] = field(default_factory=dict)
    cluster_weight: int = 1
    cluster_category: str = "default"

    @property
    def latitude(self) -> float:
        """Get latitude from coordinates."""
        return self.coordinates[0]

    @property
    def longitude(self) -> float:
        """Get longitude from coordinates."""
        return self.coordinates[1]

    def to_geojson_feature(self) -> Dict[str, Any]:
        """Convert to GeoJSON feature for mapping libraries."""
        return {
            'type': 'Feature',
            'properties': {
                'id': self.id,
                'type': self.type.value,
                'name': self.name,
                'address': self.address,
                'metadata': self.metadata,
                'type_data': self.type_data,
                'cluster_weight': self.cluster_weight,
                'cluster_category': self.cluster_category
            },
            'geometry': {
                'type': 'Point',
                'coordinates': [self.longitude, self.latitude]  # GeoJSON uses lng, lat
            }
        }

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            'id': self.id,
            'type': self.type.value,
            'name': self.name,
            'coordinates': list(self.coordinates),
            'address': self.address,
            'metadata': self.metadata,
            'type_data': self.type_data,
            'cluster_weight': self.cluster_weight,
            'cluster_category': self.cluster_category
        }


@dataclass
class ClusterData:
    """Represents a cluster of locations for map display."""
    id: str
    coordinates: Tuple[float, float]  # (lat, lng)
    count: int
    types: Set[LocationType]
    bounds: GeoBounds
    representative_location: Optional[UnifiedLocation] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            'id': self.id,
            'coordinates': list(self.coordinates),
            'count': self.count,
            'types': [t.value for t in self.types],
            'bounds': self.bounds.to_dict(),
            'representative': self.representative_location.to_dict() if self.representative_location else None
        }


@dataclass
class MapResponse:
    """Response structure for map API calls."""
    locations: List[UnifiedLocation] = field(default_factory=list)
    clusters: List[ClusterData] = field(default_factory=list)
    bounds: Optional[GeoBounds] = None
    total_count: int = 0
    filtered_count: int = 0
    zoom_level: Optional[int] = None
    clustered: bool = False
    cache_hit: bool = False
    query_time_ms: Optional[int] = None
    filters_applied: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            'status': 'success',
            'data': {
                'locations': [loc.to_dict() for loc in self.locations],
                'clusters': [cluster.to_dict() for cluster in self.clusters],
                'bounds': self.bounds.to_dict() if self.bounds else None,
                'total_count': self.total_count,
                'filtered_count': self.filtered_count,
                'zoom_level': self.zoom_level,
                'clustered': self.clustered
            },
            'meta': {
                'cache_hit': self.cache_hit,
                'query_time_ms': self.query_time_ms,
                'filters_applied': self.filters_applied,
                'pagination': {
                    'has_more': False,  # TODO: Implement pagination
                    'total_pages': 1
                }
            }
        }


@dataclass
class QueryPerformanceMetrics:
    """Performance metrics for query optimization."""
    query_time_ms: int
    db_query_count: int
    cache_hit: bool
    result_count: int
    bounds_used: bool
    clustering_used: bool

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for logging."""
        return {
            'query_time_ms': self.query_time_ms,
            'db_query_count': self.db_query_count,
            'cache_hit': self.cache_hit,
            'result_count': self.result_count,
            'bounds_used': self.bounds_used,
            'clustering_used': self.clustering_used
        }
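A sketch of constructing and serializing these dataclasses (values are illustrative):

bounds = GeoBounds(north=41.0, south=39.0, east=-74.0, west=-76.0)
loc = UnifiedLocation(
    id="park_1",
    type=LocationType.PARK,
    name="Example Park",
    coordinates=(40.0, -75.0),  # (lat, lng)
)
assert bounds.contains_point(*loc.coordinates)
feature = loc.to_geojson_feature()  # note: GeoJSON orders coordinates [lng, lat]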
core/services/enhanced_cache_service.py (new file, 254 lines)
@@ -0,0 +1,254 @@
"""
Enhanced caching service with multiple cache backends and strategies.
"""

from typing import Optional, Any, Dict, List, Callable
from django.core.cache import caches
from django.core.cache.utils import make_template_fragment_key
from django.conf import settings
import hashlib
import json
import logging
import time
from functools import wraps

logger = logging.getLogger(__name__)


# Define GeoBounds for type hinting
class GeoBounds:
    def __init__(self, min_lat: float, min_lng: float, max_lat: float, max_lng: float):
        self.min_lat = min_lat
        self.min_lng = min_lng
        self.max_lat = max_lat
        self.max_lng = max_lng


class EnhancedCacheService:
    """Comprehensive caching service with multiple cache backends"""

    def __init__(self):
        self.default_cache = caches['default']
        try:
            self.api_cache = caches['api']
        except Exception:
            # Fall back to default cache if api cache not configured
            self.api_cache = self.default_cache

    # L1: Query-level caching
    def cache_queryset(self, cache_key: str, queryset_func: Callable, timeout: int = 3600, **kwargs) -> Any:
        """Cache expensive querysets"""
        cached_result = self.default_cache.get(cache_key)
        if cached_result is None:
            start_time = time.time()
            result = queryset_func(**kwargs)
            duration = time.time() - start_time

            # Log cache miss and function execution time
            logger.info(
                f"Cache miss for key '{cache_key}', executed in {duration:.3f}s",
                extra={'cache_key': cache_key, 'execution_time': duration}
            )

            self.default_cache.set(cache_key, result, timeout)
            return result

        logger.debug(f"Cache hit for key '{cache_key}'")
        return cached_result

    # L2: API response caching
    def cache_api_response(self, view_name: str, params: Dict, response_data: Any, timeout: int = 1800):
        """Cache API responses based on view and parameters"""
        cache_key = self._generate_api_cache_key(view_name, params)
        self.api_cache.set(cache_key, response_data, timeout)
        logger.debug(f"Cached API response for view '{view_name}'")

    def get_cached_api_response(self, view_name: str, params: Dict) -> Optional[Any]:
        """Retrieve cached API response"""
        cache_key = self._generate_api_cache_key(view_name, params)
        result = self.api_cache.get(cache_key)

        if result:
            logger.debug(f"Cache hit for API view '{view_name}'")
        else:
            logger.debug(f"Cache miss for API view '{view_name}'")

        return result

    # L3: Geographic caching (building on existing MapCacheService)
    def cache_geographic_data(self, bounds: 'GeoBounds', data: Any, zoom_level: int, timeout: int = 1800):
        """Cache geographic data with spatial keys"""
        # Generate spatial cache key based on bounds and zoom level
        cache_key = f"geo:{bounds.min_lat}:{bounds.min_lng}:{bounds.max_lat}:{bounds.max_lng}:z{zoom_level}"
        self.default_cache.set(cache_key, data, timeout)
        logger.debug(f"Cached geographic data for bounds {bounds}")

    def get_cached_geographic_data(self, bounds: 'GeoBounds', zoom_level: int) -> Optional[Any]:
        """Retrieve cached geographic data"""
        cache_key = f"geo:{bounds.min_lat}:{bounds.min_lng}:{bounds.max_lat}:{bounds.max_lng}:z{zoom_level}"
        return self.default_cache.get(cache_key)

    # Cache invalidation utilities
    def invalidate_pattern(self, pattern: str):
        """Invalidate cache keys matching a pattern (if backend supports it)"""
        try:
            # For Redis cache backends
            if hasattr(self.default_cache, 'delete_pattern'):
                deleted_count = self.default_cache.delete_pattern(pattern)
                logger.info(f"Invalidated {deleted_count} cache keys matching pattern '{pattern}'")
                return deleted_count
            else:
                logger.warning(f"Cache backend does not support pattern deletion for pattern '{pattern}'")
        except Exception as e:
            logger.error(f"Error invalidating cache pattern '{pattern}': {e}")

    def invalidate_model_cache(self, model_name: str, instance_id: Optional[int] = None):
        """Invalidate cache keys related to a specific model"""
        if instance_id:
            pattern = f"*{model_name}:{instance_id}*"
        else:
            pattern = f"*{model_name}*"

        self.invalidate_pattern(pattern)

    # Cache warming utilities
    def warm_cache(self, cache_key: str, warm_func: Callable, timeout: int = 3600, **kwargs):
        """Proactively warm cache with data"""
        try:
            data = warm_func(**kwargs)
            self.default_cache.set(cache_key, data, timeout)
            logger.info(f"Warmed cache for key '{cache_key}'")
        except Exception as e:
            logger.error(f"Error warming cache for key '{cache_key}': {e}")

    def _generate_api_cache_key(self, view_name: str, params: Dict) -> str:
        """Generate consistent cache keys for API responses"""
        # Sort params to ensure consistent key generation
        params_str = json.dumps(params, sort_keys=True, default=str)
        params_hash = hashlib.md5(params_str.encode()).hexdigest()
        return f"api:{view_name}:{params_hash}"


# Cache decorators
def cache_api_response(timeout=1800, vary_on=None, key_prefix=''):
    """Decorator for caching API responses"""
    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            if request.method != 'GET':
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix or view_func.__name__,
                str(request.user.id) if request.user.is_authenticated else 'anonymous',
                str(hash(frozenset(request.GET.items())))
            ]

            if vary_on:
                for field in vary_on:
                    cache_key_parts.append(str(getattr(request, field, '')))

            cache_key = ':'.join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = cache_service.api_cache.get(cache_key)
            if cached_response:
                logger.debug(f"Cache hit for API view {view_func.__name__}")
                return cached_response

            # Execute view and cache result
            response = view_func(self, request, *args, **kwargs)
            if hasattr(response, 'status_code') and response.status_code == 200:
                cache_service.api_cache.set(cache_key, response, timeout)
                logger.debug(f"Cached API response for view {view_func.__name__}")

            return response
        return wrapper
    return decorator


def cache_queryset_result(cache_key_template: str, timeout: int = 3600):
    """Decorator for caching queryset results"""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key from template and arguments
            cache_key = cache_key_template.format(*args, **kwargs)

            cache_service = EnhancedCacheService()
            # cache_queryset only forwards keyword arguments to the callable,
            # so close over the original call instead of passing *args through.
            return cache_service.cache_queryset(cache_key, lambda: func(*args, **kwargs), timeout)
        return wrapper
    return decorator


# Context manager for cache warming
class CacheWarmer:
    """Context manager for batch cache warming operations"""

    def __init__(self):
        self.cache_service = EnhancedCacheService()
        self.warm_operations = []

    def add(self, cache_key: str, warm_func: Callable, timeout: int = 3600, **kwargs):
        """Add a cache warming operation to the batch"""
        self.warm_operations.append({
            'cache_key': cache_key,
            'warm_func': warm_func,
            'timeout': timeout,
            'kwargs': kwargs
        })

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Execute all cache warming operations"""
        logger.info(f"Warming {len(self.warm_operations)} cache entries")

        for operation in self.warm_operations:
            try:
                # Unpack explicitly: warm_cache(**operation) would pass the
                # nested 'kwargs' dict as a single keyword argument.
                self.cache_service.warm_cache(
                    operation['cache_key'],
                    operation['warm_func'],
                    operation['timeout'],
                    **operation['kwargs']
                )
            except Exception as e:
                logger.error(f"Error warming cache for {operation['cache_key']}: {e}")


# Cache statistics and monitoring
class CacheMonitor:
    """Monitor cache performance and statistics"""

    def __init__(self):
        self.cache_service = EnhancedCacheService()

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics if available"""
        stats = {}

        try:
            # Redis cache stats
            if hasattr(self.cache_service.default_cache, '_cache'):
                redis_client = self.cache_service.default_cache._cache.get_client()
                info = redis_client.info()
                stats['redis'] = {
                    'used_memory': info.get('used_memory_human'),
                    'connected_clients': info.get('connected_clients'),
                    'total_commands_processed': info.get('total_commands_processed'),
                    'keyspace_hits': info.get('keyspace_hits'),
                    'keyspace_misses': info.get('keyspace_misses'),
                }

                # Calculate hit rate
                hits = info.get('keyspace_hits', 0)
                misses = info.get('keyspace_misses', 0)
                if hits + misses > 0:
                    stats['redis']['hit_rate'] = hits / (hits + misses) * 100
        except Exception as e:
            logger.error(f"Error getting cache stats: {e}")

        return stats

    def log_cache_performance(self):
        """Log cache performance metrics"""
        stats = self.get_cache_stats()
        if stats:
            logger.info("Cache performance statistics", extra=stats)
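A sketch of the decorator plus batch warming in use (the Park import is an assumption; key names are illustrative):

from parks.models import Park  # assumed model path

@cache_queryset_result("parks:operating", timeout=600)
def operating_parks():
    return list(Park.objects.filter(status="OPERATING"))

with CacheWarmer() as warmer:
    warmer.add("parks:operating", operating_parks, timeout=600)
# Every queued entry is written to the cache when the context exits.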
core/services/location_adapters.py (new file, 380 lines)
@@ -0,0 +1,380 @@
|
||||
"""
|
||||
Location adapters for converting between domain-specific models and UnifiedLocation.
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Dict, Any
|
||||
from django.db.models import QuerySet
|
||||
from django.urls import reverse
|
||||
|
||||
from .data_structures import UnifiedLocation, LocationType, GeoBounds, MapFilters
|
||||
from parks.models.location import ParkLocation
|
||||
from rides.models.location import RideLocation
|
||||
from parks.models.companies import CompanyHeadquarters
|
||||
from location.models import Location
|
||||
|
||||
|
||||
class BaseLocationAdapter:
|
||||
"""Base adapter class for location conversions."""
|
||||
|
||||
def to_unified_location(self, location_obj) -> Optional[UnifiedLocation]:
|
||||
"""Convert model instance to UnifiedLocation."""
|
||||
raise NotImplementedError
|
||||
|
||||
def get_queryset(self, bounds: Optional[GeoBounds] = None,
|
||||
filters: Optional[MapFilters] = None) -> QuerySet:
|
||||
"""Get optimized queryset for this location type."""
|
||||
raise NotImplementedError
|
||||
|
||||
def bulk_convert(self, queryset: QuerySet) -> List[UnifiedLocation]:
|
||||
"""Convert multiple location objects efficiently."""
|
||||
unified_locations = []
|
||||
for obj in queryset:
|
||||
unified_loc = self.to_unified_location(obj)
|
||||
if unified_loc:
|
||||
unified_locations.append(unified_loc)
|
||||
return unified_locations
|
||||
|
||||
|
||||
class ParkLocationAdapter(BaseLocationAdapter):
|
||||
"""Converts Park/ParkLocation to UnifiedLocation."""
|
||||
|
||||
def to_unified_location(self, park_location: ParkLocation) -> Optional[UnifiedLocation]:
|
||||
"""Convert ParkLocation to UnifiedLocation."""
|
||||
if not park_location.point:
|
||||
return None
|
||||
|
||||
park = park_location.park
|
||||
|
||||
return UnifiedLocation(
|
||||
id=f"park_{park.id}",
|
||||
type=LocationType.PARK,
|
||||
name=park.name,
|
||||
coordinates=(park_location.latitude, park_location.longitude),
|
||||
address=park_location.formatted_address,
|
||||
metadata={
|
||||
'status': getattr(park, 'status', 'UNKNOWN'),
|
||||
'rating': float(park.average_rating) if hasattr(park, 'average_rating') and park.average_rating else None,
|
||||
'ride_count': getattr(park, 'ride_count', 0),
|
||||
'coaster_count': getattr(park, 'coaster_count', 0),
|
||||
'operator': park.operator.name if hasattr(park, 'operator') and park.operator else None,
|
||||
'city': park_location.city,
|
||||
'state': park_location.state,
|
||||
'country': park_location.country,
|
||||
},
|
||||
type_data={
|
||||
'slug': park.slug,
|
||||
'opening_date': park.opening_date.isoformat() if hasattr(park, 'opening_date') and park.opening_date else None,
|
||||
'website': getattr(park, 'website', ''),
|
||||
'operating_season': getattr(park, 'operating_season', ''),
|
||||
'highway_exit': park_location.highway_exit,
|
||||
'parking_notes': park_location.parking_notes,
|
||||
'best_arrival_time': park_location.best_arrival_time.strftime('%H:%M') if park_location.best_arrival_time else None,
|
||||
'seasonal_notes': park_location.seasonal_notes,
|
||||
'url': self._get_park_url(park),
|
||||
},
|
||||
cluster_weight=self._calculate_park_weight(park),
|
||||
cluster_category=self._get_park_category(park)
|
||||
)
|
||||
|
||||
def get_queryset(self, bounds: Optional[GeoBounds] = None,
|
||||
filters: Optional[MapFilters] = None) -> QuerySet:
|
||||
"""Get optimized queryset for park locations."""
|
||||
queryset = ParkLocation.objects.select_related(
|
||||
'park', 'park__operator'
|
||||
).filter(point__isnull=False)
|
||||
|
||||
# Spatial filtering
|
||||
if bounds:
|
||||
queryset = queryset.filter(point__within=bounds.to_polygon())
|
||||
|
||||
# Park-specific filters
|
||||
if filters:
|
||||
if filters.park_status:
|
||||
queryset = queryset.filter(park__status__in=filters.park_status)
|
||||
if filters.search_query:
|
||||
queryset = queryset.filter(park__name__icontains=filters.search_query)
|
||||
if filters.country:
|
||||
queryset = queryset.filter(country=filters.country)
|
||||
if filters.state:
|
||||
queryset = queryset.filter(state=filters.state)
|
||||
if filters.city:
|
||||
queryset = queryset.filter(city=filters.city)
|
||||
|
||||
return queryset.order_by('park__name')
|
||||
|
||||
def _calculate_park_weight(self, park) -> int:
|
||||
"""Calculate clustering weight based on park importance."""
|
||||
weight = 1
|
||||
if hasattr(park, 'ride_count') and park.ride_count and park.ride_count > 20:
|
||||
weight += 2
|
||||
if hasattr(park, 'coaster_count') and park.coaster_count and park.coaster_count > 5:
|
||||
weight += 1
|
||||
if hasattr(park, 'average_rating') and park.average_rating and park.average_rating > 4.0:
|
||||
weight += 1
|
||||
return min(weight, 5) # Cap at 5
|
||||
|
||||
def _get_park_category(self, park) -> str:
|
||||
"""Determine park category for clustering."""
|
||||
coaster_count = getattr(park, 'coaster_count', 0) or 0
|
||||
ride_count = getattr(park, 'ride_count', 0) or 0
|
||||
|
||||
if coaster_count >= 10:
|
||||
return "major_park"
|
||||
elif ride_count >= 15:
|
||||
return "theme_park"
|
||||
else:
|
||||
return "small_park"
|
||||
|
||||
def _get_park_url(self, park) -> str:
|
||||
"""Get URL for park detail page."""
|
||||
try:
|
||||
return reverse('parks:detail', kwargs={'slug': park.slug})
|
||||
except:
|
||||
return f"/parks/{park.slug}/"
|
||||
|
||||
|
||||
class RideLocationAdapter(BaseLocationAdapter):
    """Converts Ride/RideLocation to UnifiedLocation."""

    def to_unified_location(self, ride_location: RideLocation) -> Optional[UnifiedLocation]:
        """Convert RideLocation to UnifiedLocation."""
        if not ride_location.point:
            return None

        ride = ride_location.ride

        return UnifiedLocation(
            id=f"ride_{ride.id}",
            type=LocationType.RIDE,
            name=ride.name,
            coordinates=(ride_location.latitude, ride_location.longitude),
            address=f"{ride_location.park_area}, {ride.park.name}" if ride_location.park_area else ride.park.name,
            metadata={
                'park_id': ride.park.id,
                'park_name': ride.park.name,
                'park_area': ride_location.park_area,
                'ride_type': getattr(ride, 'ride_type', 'Unknown'),
                'status': getattr(ride, 'status', 'UNKNOWN'),
                'rating': float(ride.average_rating) if hasattr(ride, 'average_rating') and ride.average_rating else None,
                'manufacturer': getattr(ride, 'manufacturer', {}).get('name') if hasattr(ride, 'manufacturer') else None,
            },
            type_data={
                'slug': ride.slug,
                'opening_date': ride.opening_date.isoformat() if hasattr(ride, 'opening_date') and ride.opening_date else None,
                'height_requirement': getattr(ride, 'height_requirement', ''),
                'duration_minutes': getattr(ride, 'duration_minutes', None),
                'max_speed_mph': getattr(ride, 'max_speed_mph', None),
                'entrance_notes': ride_location.entrance_notes,
                'accessibility_notes': ride_location.accessibility_notes,
                'url': self._get_ride_url(ride),
            },
            cluster_weight=self._calculate_ride_weight(ride),
            cluster_category=self._get_ride_category(ride)
        )

    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:
        """Get optimized queryset for ride locations."""
        queryset = RideLocation.objects.select_related(
            'ride', 'ride__park', 'ride__park__operator'
        ).filter(point__isnull=False)

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(point__within=bounds.to_polygon())

        # Ride-specific filters
        if filters:
            if filters.ride_types:
                queryset = queryset.filter(ride__ride_type__in=filters.ride_types)
            if filters.search_query:
                queryset = queryset.filter(ride__name__icontains=filters.search_query)

        return queryset.order_by('ride__name')

    def _calculate_ride_weight(self, ride) -> int:
        """Calculate clustering weight based on ride importance."""
        weight = 1
        ride_type = getattr(ride, 'ride_type', '').lower()
        if 'coaster' in ride_type or 'roller' in ride_type:
            weight += 1
        if hasattr(ride, 'average_rating') and ride.average_rating and ride.average_rating > 4.0:
            weight += 1
        return min(weight, 3)  # Cap at 3 for rides

    def _get_ride_category(self, ride) -> str:
        """Determine ride category for clustering."""
        ride_type = getattr(ride, 'ride_type', '').lower()
        if 'coaster' in ride_type or 'roller' in ride_type:
            return "coaster"
        elif 'water' in ride_type or 'splash' in ride_type:
            return "water_ride"
        else:
            return "other_ride"

    def _get_ride_url(self, ride) -> str:
        """Get URL for ride detail page."""
        try:
            return reverse('rides:detail', kwargs={'slug': ride.slug})
        except Exception:
            # Fall back to a conventional URL if the route is not registered
            return f"/rides/{ride.slug}/"


class CompanyLocationAdapter(BaseLocationAdapter):
    """Converts Company/CompanyHeadquarters to UnifiedLocation."""

    def to_unified_location(self, company_headquarters: CompanyHeadquarters) -> Optional[UnifiedLocation]:
        """Convert CompanyHeadquarters to UnifiedLocation."""
        # Note: CompanyHeadquarters doesn't have coordinates, so we need to geocode.
        # For now, we'll skip companies without coordinates.
        # TODO: Implement geocoding service integration
        return None

    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:
        """Get optimized queryset for company locations."""
        queryset = CompanyHeadquarters.objects.select_related('company')

        # Company-specific filters
        if filters:
            if filters.company_roles:
                queryset = queryset.filter(company__roles__overlap=filters.company_roles)
            if filters.search_query:
                queryset = queryset.filter(company__name__icontains=filters.search_query)
            if filters.country:
                queryset = queryset.filter(country=filters.country)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by('company__name')


class GenericLocationAdapter(BaseLocationAdapter):
    """Converts generic Location model to UnifiedLocation."""

    def to_unified_location(self, location: Location) -> Optional[UnifiedLocation]:
        """Convert generic Location to UnifiedLocation."""
        if not location.point and not (location.latitude and location.longitude):
            return None

        # Use point coordinates if available, fall back to lat/lng fields
        if location.point:
            coordinates = (location.point.y, location.point.x)
        else:
            coordinates = (float(location.latitude), float(location.longitude))

        return UnifiedLocation(
            id=f"generic_{location.id}",
            type=LocationType.GENERIC,
            name=location.name,
            coordinates=coordinates,
            address=location.get_formatted_address(),
            metadata={
                'location_type': location.location_type,
                'content_type': location.content_type.model if location.content_type else None,
                'object_id': location.object_id,
                'city': location.city,
                'state': location.state,
                'country': location.country,
            },
            type_data={
                'created_at': location.created_at.isoformat() if location.created_at else None,
                'updated_at': location.updated_at.isoformat() if location.updated_at else None,
            },
            cluster_weight=1,
            cluster_category="generic"
        )

    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:
        """Get optimized queryset for generic locations."""
        queryset = Location.objects.select_related('content_type').filter(
            models.Q(point__isnull=False) |
            models.Q(latitude__isnull=False, longitude__isnull=False)
        )

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(
                models.Q(point__within=bounds.to_polygon()) |
                models.Q(
                    latitude__gte=bounds.south,
                    latitude__lte=bounds.north,
                    longitude__gte=bounds.west,
                    longitude__lte=bounds.east
                )
            )

        # Generic filters
        if filters:
            if filters.search_query:
                queryset = queryset.filter(name__icontains=filters.search_query)
            if filters.country:
                queryset = queryset.filter(country=filters.country)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by('name')


class LocationAbstractionLayer:
    """
    Abstraction layer handling different location model types.
    Implements the adapter pattern to provide unified access to all location types.
    """

    def __init__(self):
        self.adapters = {
            LocationType.PARK: ParkLocationAdapter(),
            LocationType.RIDE: RideLocationAdapter(),
            LocationType.COMPANY: CompanyLocationAdapter(),
            LocationType.GENERIC: GenericLocationAdapter()
        }

    def get_all_locations(self, bounds: Optional[GeoBounds] = None,
                          filters: Optional[MapFilters] = None) -> List[UnifiedLocation]:
        """Get locations from all sources within bounds."""
        all_locations = []

        # Determine which location types to include
        location_types = filters.location_types if filters and filters.location_types else set(LocationType)

        for location_type in location_types:
            adapter = self.adapters[location_type]
            queryset = adapter.get_queryset(bounds, filters)
            locations = adapter.bulk_convert(queryset)
            all_locations.extend(locations)

        return all_locations

    def get_locations_by_type(self, location_type: LocationType,
                              bounds: Optional[GeoBounds] = None,
                              filters: Optional[MapFilters] = None) -> List[UnifiedLocation]:
        """Get locations of specific type."""
        adapter = self.adapters[location_type]
        queryset = adapter.get_queryset(bounds, filters)
        return adapter.bulk_convert(queryset)

    def get_location_by_id(self, location_type: LocationType, location_id: int) -> Optional[UnifiedLocation]:
        """Get single location with full details."""
        adapter = self.adapters[location_type]

        try:
            if location_type == LocationType.PARK:
                obj = ParkLocation.objects.select_related('park', 'park__operator').get(park_id=location_id)
            elif location_type == LocationType.RIDE:
                obj = RideLocation.objects.select_related('ride', 'ride__park').get(ride_id=location_id)
            elif location_type == LocationType.COMPANY:
                obj = CompanyHeadquarters.objects.select_related('company').get(company_id=location_id)
            elif location_type == LocationType.GENERIC:
                obj = Location.objects.select_related('content_type').get(id=location_id)
            else:
                return None

            return adapter.to_unified_location(obj)
        except Exception:
            return None


# Import models after defining adapters to avoid circular imports
from django.db import models
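
A minimal usage sketch of the adapter layer above (an editor's illustration, not part of this commit); the `GeoBounds` values are examples and the call site is assumed:

# Illustrative only - querying park locations inside a bounding box:
from core.services.location_adapters import LocationAbstractionLayer
from core.services.data_structures import GeoBounds, LocationType

layer = LocationAbstractionLayer()
box = GeoBounds(north=41.0, south=40.0, east=-73.0, west=-75.0)  # example coordinates
for loc in layer.get_locations_by_type(LocationType.PARK, bounds=box):
    print(loc.id, loc.name, loc.coordinates)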
393
core/services/location_search.py
Normal file
@@ -0,0 +1,393 @@
"""
Location-aware search service for ThrillWiki.

Integrates PostGIS location data with existing search functionality
to provide proximity-based search, location filtering, and geographic
search capabilities.
"""

from django.contrib.gis.geos import Point
# `Distance` (the annotation function) computes distances in querysets;
# `D` (the measure) expresses radii such as D(km=50).
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.measure import D
from django.db.models import Q
from typing import Optional, List, Dict, Any, Set
from dataclasses import dataclass

from parks.models import Park
from rides.models import Ride
from parks.models.companies import Company
from parks.models.location import ParkLocation
from rides.models.location import RideLocation
from parks.models.companies import CompanyHeadquarters


@dataclass
class LocationSearchFilters:
    """Filters for location-aware search queries."""

    # Text search
    search_query: Optional[str] = None

    # Location-based filters
    location_point: Optional[Point] = None
    radius_km: Optional[float] = None
    location_types: Optional[Set[str]] = None  # 'park', 'ride', 'company'

    # Geographic filters
    country: Optional[str] = None
    state: Optional[str] = None
    city: Optional[str] = None

    # Content-specific filters
    park_status: Optional[List[str]] = None
    ride_types: Optional[List[str]] = None
    company_roles: Optional[List[str]] = None

    # Result options
    include_distance: bool = True
    max_results: int = 100


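For orientation, a proximity query against this dataclass might be built as below (an illustration, not part of the commit; note that GeoDjango's `Point` takes longitude before latitude):

# Illustrative only - parks and rides within 50 km of a point:
example_filters = LocationSearchFilters(
    search_query="coaster",
    location_point=Point(-84.5, 39.1, srid=4326),  # (lng, lat)
    radius_km=50.0,
    location_types={'park', 'ride'},
    max_results=25,
)
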
@dataclass
class LocationSearchResult:
    """Single search result with location data."""

    # Core data
    content_type: str  # 'park', 'ride', 'company'
    object_id: int
    name: str
    description: Optional[str] = None
    url: Optional[str] = None

    # Location data
    latitude: Optional[float] = None
    longitude: Optional[float] = None
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    country: Optional[str] = None

    # Distance data (if proximity search)
    distance_km: Optional[float] = None

    # Additional metadata
    status: Optional[str] = None
    tags: Optional[List[str]] = None
    rating: Optional[float] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            'content_type': self.content_type,
            'object_id': self.object_id,
            'name': self.name,
            'description': self.description,
            'url': self.url,
            'location': {
                'latitude': self.latitude,
                'longitude': self.longitude,
                'address': self.address,
                'city': self.city,
                'state': self.state,
                'country': self.country,
            },
            'distance_km': self.distance_km,
            'status': self.status,
            'tags': self.tags or [],
            'rating': self.rating,
        }


class LocationSearchService:
    """Service for performing location-aware searches across ThrillWiki content."""

    def search(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        """
        Perform a comprehensive location-aware search.

        Args:
            filters: Search filters and options

        Returns:
            List of search results with location data
        """
        results = []

        # Search each content type based on filters
        if not filters.location_types or 'park' in filters.location_types:
            results.extend(self._search_parks(filters))

        if not filters.location_types or 'ride' in filters.location_types:
            results.extend(self._search_rides(filters))

        if not filters.location_types or 'company' in filters.location_types:
            results.extend(self._search_companies(filters))

        # Sort by distance if proximity search, otherwise by relevance
        if filters.location_point and filters.include_distance:
            results.sort(key=lambda x: x.distance_km or float('inf'))
        else:
            results.sort(key=lambda x: x.name.lower())

        # Apply max results limit
        return results[:filters.max_results]

    def _search_parks(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        """Search parks with location data."""
        queryset = Park.objects.select_related('location', 'operator').all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, 'location__point')

        # Apply text search
        if filters.search_query:
            query = Q(name__icontains=filters.search_query) | \
                Q(description__icontains=filters.search_query) | \
                Q(location__city__icontains=filters.search_query) | \
                Q(location__state__icontains=filters.search_query) | \
                Q(location__country__icontains=filters.search_query)
            queryset = queryset.filter(query)

        # Apply park-specific filters
        if filters.park_status:
            queryset = queryset.filter(status__in=filters.park_status)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance('location__point', filters.location_point)
            ).order_by('distance')

        # Convert to search results
        results = []
        for park in queryset:
            result = LocationSearchResult(
                content_type='park',
                object_id=park.id,
                name=park.name,
                description=park.description,
                url=park.get_absolute_url() if hasattr(park, 'get_absolute_url') else None,
                status=park.get_status_display(),
                rating=float(park.average_rating) if park.average_rating else None,
                tags=['park', park.status.lower()]
            )

            # Add location data
            if hasattr(park, 'location') and park.location:
                location = park.location
                result.latitude = location.latitude
                result.longitude = location.longitude
                result.address = location.formatted_address
                result.city = location.city
                result.state = location.state
                result.country = location.country

            # Add distance if proximity search
            if filters.location_point and filters.include_distance and hasattr(park, 'distance'):
                result.distance_km = float(park.distance.km)

            results.append(result)

        return results

    def _search_rides(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        """Search rides with location data."""
        queryset = Ride.objects.select_related('park', 'location').all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, 'location__point')

        # Apply text search
        if filters.search_query:
            query = Q(name__icontains=filters.search_query) | \
                Q(description__icontains=filters.search_query) | \
                Q(park__name__icontains=filters.search_query) | \
                Q(location__park_area__icontains=filters.search_query)
            queryset = queryset.filter(query)

        # Apply ride-specific filters
        if filters.ride_types:
            queryset = queryset.filter(ride_type__in=filters.ride_types)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance('location__point', filters.location_point)
            ).order_by('distance')

        # Convert to search results
        results = []
        for ride in queryset:
            result = LocationSearchResult(
                content_type='ride',
                object_id=ride.id,
                name=ride.name,
                description=ride.description,
                url=ride.get_absolute_url() if hasattr(ride, 'get_absolute_url') else None,
                status=ride.status,
                tags=['ride', ride.ride_type.lower() if ride.ride_type else 'attraction']
            )

            # Add location data from ride location or park location
            location = None
            if hasattr(ride, 'location') and ride.location:
                location = ride.location
                result.latitude = location.latitude
                result.longitude = location.longitude
                result.address = f"{ride.park.name} - {location.park_area}" if location.park_area else ride.park.name

                # Add distance if proximity search
                if filters.location_point and filters.include_distance and hasattr(ride, 'distance'):
                    result.distance_km = float(ride.distance.km)

            # Fall back to park location if no specific ride location
            elif ride.park and hasattr(ride.park, 'location') and ride.park.location:
                park_location = ride.park.location
                result.latitude = park_location.latitude
                result.longitude = park_location.longitude
                result.address = park_location.formatted_address
                result.city = park_location.city
                result.state = park_location.state
                result.country = park_location.country

            results.append(result)

        return results

    def _search_companies(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        """Search companies with headquarters location data."""
        queryset = Company.objects.select_related('headquarters').all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, 'headquarters__point')

        # Apply text search
        if filters.search_query:
            query = Q(name__icontains=filters.search_query) | \
                Q(description__icontains=filters.search_query) | \
                Q(headquarters__city__icontains=filters.search_query) | \
                Q(headquarters__state_province__icontains=filters.search_query) | \
                Q(headquarters__country__icontains=filters.search_query)
            queryset = queryset.filter(query)

        # Apply company-specific filters
        if filters.company_roles:
            queryset = queryset.filter(roles__overlap=filters.company_roles)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance('headquarters__point', filters.location_point)
            ).order_by('distance')

        # Convert to search results
        results = []
        for company in queryset:
            result = LocationSearchResult(
                content_type='company',
                object_id=company.id,
                name=company.name,
                description=company.description,
                url=company.get_absolute_url() if hasattr(company, 'get_absolute_url') else None,
                tags=['company'] + (company.roles or [])
            )

            # Add location data
            if hasattr(company, 'headquarters') and company.headquarters:
                hq = company.headquarters
                result.latitude = hq.latitude
                result.longitude = hq.longitude
                result.address = hq.formatted_address
                result.city = hq.city
                result.state = hq.state_province
                result.country = hq.country

            # Add distance if proximity search
            if filters.location_point and filters.include_distance and hasattr(company, 'distance'):
                result.distance_km = float(company.distance.km)

            results.append(result)

        return results

    def _apply_location_filters(self, queryset, filters: LocationSearchFilters, point_field: str):
        """Apply common location filters to a queryset."""

        # Proximity filter
        if filters.location_point and filters.radius_km:
            distance = D(km=filters.radius_km)
            queryset = queryset.filter(**{
                f'{point_field}__distance_lte': (filters.location_point, distance)
            })

        # Geographic filters - adjust field names based on model
        if filters.country:
            if 'headquarters' in point_field:
                queryset = queryset.filter(headquarters__country__icontains=filters.country)
            else:
                location_field = point_field.split('__')[0]
                queryset = queryset.filter(**{f'{location_field}__country__icontains': filters.country})

        if filters.state:
            if 'headquarters' in point_field:
                queryset = queryset.filter(headquarters__state_province__icontains=filters.state)
            else:
                location_field = point_field.split('__')[0]
                queryset = queryset.filter(**{f'{location_field}__state__icontains': filters.state})

        if filters.city:
            location_field = point_field.split('__')[0]
            queryset = queryset.filter(**{f'{location_field}__city__icontains': filters.city})

        return queryset

    def suggest_locations(self, query: str, limit: int = 10) -> List[Dict[str, Any]]:
        """
        Get location suggestions for autocomplete.

        Args:
            query: Search query string
            limit: Maximum number of suggestions

        Returns:
            List of location suggestions
        """
        suggestions = []

        if len(query) < 2:
            return suggestions

        # Get park location suggestions
        park_locations = ParkLocation.objects.filter(
            Q(park__name__icontains=query) |
            Q(city__icontains=query) |
            Q(state__icontains=query)
        ).select_related('park')[:limit // 3]

        for location in park_locations:
            suggestions.append({
                'type': 'park',
                'name': location.park.name,
                'address': location.formatted_address,
                'coordinates': location.coordinates,
                'url': location.park.get_absolute_url() if hasattr(location.park, 'get_absolute_url') else None
            })

        # Get city suggestions
        cities = ParkLocation.objects.filter(
            city__icontains=query
        ).values('city', 'state', 'country').distinct()[:limit // 3]

        for city_data in cities:
            suggestions.append({
                'type': 'city',
                'name': f"{city_data['city']}, {city_data['state']}",
                'address': f"{city_data['city']}, {city_data['state']}, {city_data['country']}",
                'coordinates': None
            })

        return suggestions[:limit]


# Global instance
location_search_service = LocationSearchService()
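
A sketch of a typical call site for the module-level instance (assumed, not shown in this commit; the park name is an arbitrary example):

# Illustrative only - e.g. inside a search view:
filters = LocationSearchFilters(search_query="Kings Island")
results = location_search_service.search(filters)
payload = [r.to_dict() for r in results]  # JSON-serializable for the API layer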
401
core/services/map_cache_service.py
Normal file
@@ -0,0 +1,401 @@
"""
Caching service for map data to improve performance and reduce database load.
"""

import hashlib
import json
import time
from typing import Dict, List, Optional, Any, Union
from dataclasses import asdict

from django.core.cache import cache
from django.conf import settings
from django.utils import timezone

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    MapFilters,
    MapResponse,
    QueryPerformanceMetrics
)


class MapCacheService:
    """
    Handles caching of map data with geographic partitioning and intelligent invalidation.
    """

    # Cache configuration
    DEFAULT_TTL = 3600  # 1 hour
    CLUSTER_TTL = 7200  # 2 hours (clusters change less frequently)
    LOCATION_DETAIL_TTL = 1800  # 30 minutes
    BOUNDS_CACHE_TTL = 1800  # 30 minutes

    # Cache key prefixes
    CACHE_PREFIX = "thrillwiki_map"
    LOCATIONS_PREFIX = f"{CACHE_PREFIX}:locations"
    CLUSTERS_PREFIX = f"{CACHE_PREFIX}:clusters"
    BOUNDS_PREFIX = f"{CACHE_PREFIX}:bounds"
    DETAIL_PREFIX = f"{CACHE_PREFIX}:detail"
    STATS_PREFIX = f"{CACHE_PREFIX}:stats"

    # Geographic partitioning settings
    GEOHASH_PRECISION = 6  # ~1.2km precision for cache partitioning

    def __init__(self):
        self.cache_stats = {
            'hits': 0,
            'misses': 0,
            'invalidations': 0,
            'geohash_partitions': 0
        }

    def get_locations_cache_key(self, bounds: Optional[GeoBounds],
                                filters: Optional[MapFilters],
                                zoom_level: Optional[int] = None) -> str:
        """Generate cache key for location queries."""
        key_parts = [self.LOCATIONS_PREFIX]

        if bounds:
            # Use geohash for spatial locality
            geohash = self._bounds_to_geohash(bounds)
            key_parts.append(f"geo:{geohash}")

        if filters:
            # Create deterministic hash of filters
            filter_hash = self._hash_filters(filters)
            key_parts.append(f"filters:{filter_hash}")

        if zoom_level is not None:
            key_parts.append(f"zoom:{zoom_level}")

        return ":".join(key_parts)

    def get_clusters_cache_key(self, bounds: Optional[GeoBounds],
                               filters: Optional[MapFilters],
                               zoom_level: int) -> str:
        """Generate cache key for cluster queries."""
        key_parts = [self.CLUSTERS_PREFIX, f"zoom:{zoom_level}"]

        if bounds:
            geohash = self._bounds_to_geohash(bounds)
            key_parts.append(f"geo:{geohash}")

        if filters:
            filter_hash = self._hash_filters(filters)
            key_parts.append(f"filters:{filter_hash}")

        return ":".join(key_parts)

    def get_location_detail_cache_key(self, location_type: str, location_id: int) -> str:
        """Generate cache key for individual location details."""
        return f"{self.DETAIL_PREFIX}:{location_type}:{location_id}"

    def cache_locations(self, cache_key: str, locations: List[UnifiedLocation],
                        ttl: Optional[int] = None) -> None:
        """Cache location data."""
        try:
            # Convert locations to serializable format
            cache_data = {
                'locations': [loc.to_dict() for loc in locations],
                'cached_at': timezone.now().isoformat(),
                'count': len(locations)
            }

            cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL)
        except Exception as e:
            # Log error but don't fail the request
            print(f"Cache write error for key {cache_key}: {e}")

    def cache_clusters(self, cache_key: str, clusters: List[ClusterData],
                       ttl: Optional[int] = None) -> None:
        """Cache cluster data."""
        try:
            cache_data = {
                'clusters': [cluster.to_dict() for cluster in clusters],
                'cached_at': timezone.now().isoformat(),
                'count': len(clusters)
            }

            cache.set(cache_key, cache_data, ttl or self.CLUSTER_TTL)
        except Exception as e:
            print(f"Cache write error for clusters {cache_key}: {e}")

    def cache_map_response(self, cache_key: str, response: MapResponse,
                           ttl: Optional[int] = None) -> None:
        """Cache complete map response."""
        try:
            cache_data = response.to_dict()
            cache_data['cached_at'] = timezone.now().isoformat()

            cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL)
        except Exception as e:
            print(f"Cache write error for response {cache_key}: {e}")

    def get_cached_locations(self, cache_key: str) -> Optional[List[UnifiedLocation]]:
        """Retrieve cached location data."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats['misses'] += 1
                return None

            self.cache_stats['hits'] += 1

            # Convert back to UnifiedLocation objects
            locations = []
            for loc_data in cache_data['locations']:
                # Reconstruct UnifiedLocation from dictionary
                locations.append(self._dict_to_unified_location(loc_data))

            return locations

        except Exception as e:
            print(f"Cache read error for key {cache_key}: {e}")
            self.cache_stats['misses'] += 1
            return None

    def get_cached_clusters(self, cache_key: str) -> Optional[List[ClusterData]]:
        """Retrieve cached cluster data."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats['misses'] += 1
                return None

            self.cache_stats['hits'] += 1

            # Convert back to ClusterData objects
            clusters = []
            for cluster_data in cache_data['clusters']:
                clusters.append(self._dict_to_cluster_data(cluster_data))

            return clusters

        except Exception as e:
            print(f"Cache read error for clusters {cache_key}: {e}")
            self.cache_stats['misses'] += 1
            return None

    def get_cached_map_response(self, cache_key: str) -> Optional[MapResponse]:
        """Retrieve cached map response."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats['misses'] += 1
                return None

            self.cache_stats['hits'] += 1

            # Convert back to MapResponse object
            return self._dict_to_map_response(cache_data['data'])

        except Exception as e:
            print(f"Cache read error for response {cache_key}: {e}")
            self.cache_stats['misses'] += 1
            return None

    def invalidate_location_cache(self, location_type: str, location_id: Optional[int] = None) -> None:
        """Invalidate cache for specific location or all locations of a type."""
        try:
            if location_id:
                # Invalidate specific location detail
                detail_key = self.get_location_detail_cache_key(location_type, location_id)
                cache.delete(detail_key)

            # Invalidate related location and cluster caches.
            # Note: delete_many() expects exact keys; the "*" patterns below are
            # only honored by backends with pattern support. A production system
            # would use more sophisticated cache tagging.
            cache.delete_many([
                f"{self.LOCATIONS_PREFIX}:*",
                f"{self.CLUSTERS_PREFIX}:*"
            ])

            self.cache_stats['invalidations'] += 1

        except Exception as e:
            print(f"Cache invalidation error: {e}")

    def invalidate_bounds_cache(self, bounds: GeoBounds) -> None:
        """Invalidate cache for specific geographic bounds."""
        try:
            geohash = self._bounds_to_geohash(bounds)
            pattern = f"{self.LOCATIONS_PREFIX}:geo:{geohash}*"

            # In production, use cache tagging or Redis SCAN to expand the pattern;
            # for now we invalidate the broader pattern key as a best effort.
            cache.delete_many([pattern])

            self.cache_stats['invalidations'] += 1

        except Exception as e:
            print(f"Bounds cache invalidation error: {e}")

    def clear_all_map_cache(self) -> None:
        """Clear all map-related cache data."""
        try:
            cache.delete_many([
                f"{self.LOCATIONS_PREFIX}:*",
                f"{self.CLUSTERS_PREFIX}:*",
                f"{self.BOUNDS_PREFIX}:*",
                f"{self.DETAIL_PREFIX}:*"
            ])

            self.cache_stats['invalidations'] += 1

        except Exception as e:
            print(f"Cache clear error: {e}")

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache performance statistics."""
        total_requests = self.cache_stats['hits'] + self.cache_stats['misses']
        hit_rate = (self.cache_stats['hits'] / total_requests * 100) if total_requests > 0 else 0

        return {
            'hits': self.cache_stats['hits'],
            'misses': self.cache_stats['misses'],
            'hit_rate_percent': round(hit_rate, 2),
            'invalidations': self.cache_stats['invalidations'],
            'geohash_partitions': self.cache_stats['geohash_partitions']
        }

    def record_performance_metrics(self, metrics: QueryPerformanceMetrics) -> None:
        """Record query performance metrics for analysis."""
        try:
            stats_key = f"{self.STATS_PREFIX}:performance:{int(time.time() // 300)}"  # 5-minute buckets

            current_stats = cache.get(stats_key, {
                'query_count': 0,
                'total_time_ms': 0,
                'cache_hits': 0,
                'db_queries': 0
            })

            current_stats['query_count'] += 1
            current_stats['total_time_ms'] += metrics.query_time_ms
            current_stats['cache_hits'] += 1 if metrics.cache_hit else 0
            current_stats['db_queries'] += metrics.db_query_count

            cache.set(stats_key, current_stats, 3600)  # Keep for 1 hour

        except Exception as e:
            print(f"Performance metrics recording error: {e}")

    def _bounds_to_geohash(self, bounds: GeoBounds) -> str:
        """Convert geographic bounds to geohash for cache partitioning."""
        # Use center point of bounds for geohash
        center_lat = (bounds.north + bounds.south) / 2
        center_lng = (bounds.east + bounds.west) / 2

        # Simple geohash implementation (in production, use a library)
        return self._encode_geohash(center_lat, center_lng, self.GEOHASH_PRECISION)

    def _encode_geohash(self, lat: float, lng: float, precision: int) -> str:
        """Simple geohash encoding implementation."""
        # This is a simplified implementation
        # In production, use the `geohash` library
        lat_range = [-90.0, 90.0]
        lng_range = [-180.0, 180.0]

        geohash = ""
        bits = 0
        bit_count = 0
        even_bit = True

        while len(geohash) < precision:
            if even_bit:
                # longitude
                mid = (lng_range[0] + lng_range[1]) / 2
                if lng >= mid:
                    bits = (bits << 1) + 1
                    lng_range[0] = mid
                else:
                    bits = bits << 1
                    lng_range[1] = mid
            else:
                # latitude
                mid = (lat_range[0] + lat_range[1]) / 2
                if lat >= mid:
                    bits = (bits << 1) + 1
                    lat_range[0] = mid
                else:
                    bits = bits << 1
                    lat_range[1] = mid

            even_bit = not even_bit
            bit_count += 1

            if bit_count == 5:
                # Convert 5 bits to base32 character
                geohash += "0123456789bcdefghjkmnpqrstuvwxyz"[bits]
                bits = 0
                bit_count = 0

        return geohash

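As a sanity check on the encoder above: it follows the standard geohash bit-interleaving (longitude bit first, base32 alphabet), so it should reproduce the classic test vector; worth verifying against a library such as python-geohash:

# Expected behaviour, assuming the standard algorithm:
svc = MapCacheService()
assert svc._encode_geohash(57.64911, 10.40744, 6) == "u4pruy"
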
    def _hash_filters(self, filters: MapFilters) -> str:
        """Create deterministic hash of filters for cache keys."""
        filter_dict = filters.to_dict()
        # Sort to ensure consistent ordering
        filter_str = json.dumps(filter_dict, sort_keys=True)
        return hashlib.md5(filter_str.encode()).hexdigest()[:8]

    def _dict_to_unified_location(self, data: Dict[str, Any]) -> UnifiedLocation:
        """Convert dictionary back to UnifiedLocation object."""
        from .data_structures import LocationType

        return UnifiedLocation(
            id=data['id'],
            type=LocationType(data['type']),
            name=data['name'],
            coordinates=tuple(data['coordinates']),
            address=data.get('address'),
            metadata=data.get('metadata', {}),
            type_data=data.get('type_data', {}),
            cluster_weight=data.get('cluster_weight', 1),
            cluster_category=data.get('cluster_category', 'default')
        )

    def _dict_to_cluster_data(self, data: Dict[str, Any]) -> ClusterData:
        """Convert dictionary back to ClusterData object."""
        from .data_structures import LocationType

        bounds = GeoBounds(**data['bounds'])
        types = {LocationType(t) for t in data['types']}

        representative = None
        if data.get('representative'):
            representative = self._dict_to_unified_location(data['representative'])

        return ClusterData(
            id=data['id'],
            coordinates=tuple(data['coordinates']),
            count=data['count'],
            types=types,
            bounds=bounds,
            representative_location=representative
        )

    def _dict_to_map_response(self, data: Dict[str, Any]) -> MapResponse:
        """Convert dictionary back to MapResponse object."""
        locations = [self._dict_to_unified_location(loc) for loc in data.get('locations', [])]
        clusters = [self._dict_to_cluster_data(cluster) for cluster in data.get('clusters', [])]

        bounds = None
        if data.get('bounds'):
            bounds = GeoBounds(**data['bounds'])

        return MapResponse(
            locations=locations,
            clusters=clusters,
            bounds=bounds,
            total_count=data.get('total_count', 0),
            filtered_count=data.get('filtered_count', 0),
            zoom_level=data.get('zoom_level'),
            clustered=data.get('clustered', False)
        )


# Global cache service instance
map_cache = MapCacheService()
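
A sketch of what the generated keys look like (an illustration; the geohash segment varies with the box, and the imports are assumed to resolve as written):

# Illustrative only:
from core.services.map_cache_service import map_cache
from core.services.data_structures import GeoBounds

box = GeoBounds(north=41.0, south=40.0, east=-73.0, west=-75.0)
key = map_cache.get_locations_cache_key(box, None, zoom_level=12)
# key == "thrillwiki_map:locations:geo:<6-char geohash>:zoom:12"
print(key, map_cache.get_cache_stats())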
428
core/services/map_service.py
Normal file
@@ -0,0 +1,428 @@
"""
Unified Map Service - Main orchestrating service for all map functionality.
"""

import time
from typing import List, Optional, Dict, Any, Set
from django.db import connection
from django.utils import timezone

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    MapFilters,
    MapResponse,
    LocationType,
    QueryPerformanceMetrics
)
from .location_adapters import LocationAbstractionLayer
from .clustering_service import ClusteringService
from .map_cache_service import MapCacheService


class UnifiedMapService:
    """
    Main service orchestrating map data retrieval, filtering, clustering, and caching.
    Provides a unified interface for all location types with performance optimization.
    """

    # Performance thresholds
    MAX_UNCLUSTERED_POINTS = 500
    MAX_CLUSTERED_POINTS = 2000
    DEFAULT_ZOOM_LEVEL = 10

    def __init__(self):
        self.location_layer = LocationAbstractionLayer()
        self.clustering_service = ClusteringService()
        self.cache_service = MapCacheService()

    def get_map_data(
        self,
        *,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
        zoom_level: int = DEFAULT_ZOOM_LEVEL,
        cluster: bool = True,
        use_cache: bool = True
    ) -> MapResponse:
        """
        Primary method for retrieving unified map data.

        Args:
            bounds: Geographic bounds to query within
            filters: Filtering criteria for locations
            zoom_level: Map zoom level for clustering decisions
            cluster: Whether to apply clustering
            use_cache: Whether to use cached data

        Returns:
            MapResponse with locations, clusters, and metadata
        """
        start_time = time.time()
        initial_query_count = len(connection.queries)
        cache_hit = False

        try:
            # Generate cache key
            cache_key = None
            if use_cache:
                cache_key = self._generate_cache_key(bounds, filters, zoom_level, cluster)

                # Try to get from cache first
                cached_response = self.cache_service.get_cached_map_response(cache_key)
                if cached_response:
                    cached_response.cache_hit = True
                    cached_response.query_time_ms = int((time.time() - start_time) * 1000)
                    return cached_response

            # Get locations from database
            locations = self._get_locations_from_db(bounds, filters)

            # Apply smart limiting based on zoom level and density
            locations = self._apply_smart_limiting(locations, bounds, zoom_level)

            # Determine if clustering should be applied
            should_cluster = cluster and self.clustering_service.should_cluster(zoom_level, len(locations))

            # Apply clustering if needed
            clusters = []
            if should_cluster:
                locations, clusters = self.clustering_service.cluster_locations(
                    locations, zoom_level, bounds
                )

            # Calculate response bounds
            response_bounds = self._calculate_response_bounds(locations, clusters, bounds)

            # Create response
            response = MapResponse(
                locations=locations,
                clusters=clusters,
                bounds=response_bounds,
                total_count=len(locations) + sum(cluster.count for cluster in clusters),
                filtered_count=len(locations),
                zoom_level=zoom_level,
                clustered=should_cluster,
                cache_hit=cache_hit,
                query_time_ms=int((time.time() - start_time) * 1000),
                filters_applied=self._get_applied_filters_list(filters)
            )

            # Cache the response
            if use_cache and cache_key:
                self.cache_service.cache_map_response(cache_key, response)

            # Record performance metrics
            self._record_performance_metrics(
                start_time, initial_query_count, cache_hit, len(locations) + len(clusters),
                bounds is not None, should_cluster
            )

            return response

        except Exception:
            # Return error response
            return MapResponse(
                locations=[],
                clusters=[],
                total_count=0,
                filtered_count=0,
                query_time_ms=int((time.time() - start_time) * 1000),
                cache_hit=False
            )

    def get_location_details(self, location_type: str, location_id: int) -> Optional[UnifiedLocation]:
        """
        Get detailed information for a specific location.

        Args:
            location_type: Type of location (park, ride, company, generic)
            location_id: ID of the location

        Returns:
            UnifiedLocation with full details or None if not found
        """
        try:
            # Check cache first
            cache_key = self.cache_service.get_location_detail_cache_key(location_type, location_id)
            cached_locations = self.cache_service.get_cached_locations(cache_key)
            if cached_locations:
                return cached_locations[0] if cached_locations else None

            # Get from database
            location_type_enum = LocationType(location_type.lower())
            location = self.location_layer.get_location_by_id(location_type_enum, location_id)

            # Cache the result
            if location:
                self.cache_service.cache_locations(cache_key, [location],
                                                   self.cache_service.LOCATION_DETAIL_TTL)

            return location

        except Exception as e:
            print(f"Error getting location details: {e}")
            return None

    def search_locations(
        self,
        query: str,
        bounds: Optional[GeoBounds] = None,
        location_types: Optional[Set[LocationType]] = None,
        limit: int = 50
    ) -> List[UnifiedLocation]:
        """
        Search locations with text query.

        Args:
            query: Search query string
            bounds: Optional geographic bounds to search within
            location_types: Optional set of location types to search
            limit: Maximum number of results

        Returns:
            List of matching UnifiedLocation objects
        """
        try:
            # Create search filters
            filters = MapFilters(
                search_query=query,
                location_types=location_types or {LocationType.PARK, LocationType.RIDE},
                has_coordinates=True
            )

            # Get locations
            locations = self.location_layer.get_all_locations(bounds, filters)

            # Apply limit
            return locations[:limit]

        except Exception as e:
            print(f"Error searching locations: {e}")
            return []

    def get_locations_by_bounds(
        self,
        north: float,
        south: float,
        east: float,
        west: float,
        location_types: Optional[Set[LocationType]] = None,
        zoom_level: int = DEFAULT_ZOOM_LEVEL
    ) -> MapResponse:
        """
        Get locations within specific geographic bounds.

        Args:
            north, south, east, west: Bounding box coordinates
            location_types: Optional filter for location types
            zoom_level: Map zoom level for optimization

        Returns:
            MapResponse with locations in bounds
        """
        try:
            bounds = GeoBounds(north=north, south=south, east=east, west=west)
            filters = MapFilters(location_types=location_types) if location_types else None

            return self.get_map_data(bounds=bounds, filters=filters, zoom_level=zoom_level)

        except ValueError:
            # Invalid bounds
            return MapResponse(
                locations=[],
                clusters=[],
                total_count=0,
                filtered_count=0
            )

    def get_clustered_locations(
        self,
        zoom_level: int,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None
    ) -> MapResponse:
        """
        Get clustered location data for map display.

        Args:
            zoom_level: Map zoom level for clustering configuration
            bounds: Optional geographic bounds
            filters: Optional filtering criteria

        Returns:
            MapResponse with clustered data
        """
        return self.get_map_data(
            bounds=bounds,
            filters=filters,
            zoom_level=zoom_level,
            cluster=True
        )

    def get_locations_by_type(
        self,
        location_type: LocationType,
        bounds: Optional[GeoBounds] = None,
        limit: Optional[int] = None
    ) -> List[UnifiedLocation]:
        """
        Get locations of a specific type.

        Args:
            location_type: Type of locations to retrieve
            bounds: Optional geographic bounds
            limit: Optional limit on results

        Returns:
            List of UnifiedLocation objects
        """
        try:
            filters = MapFilters(location_types={location_type})
            locations = self.location_layer.get_locations_by_type(location_type, bounds, filters)

            if limit:
                locations = locations[:limit]

            return locations

        except Exception as e:
            print(f"Error getting locations by type: {e}")
            return []

    def invalidate_cache(self, location_type: Optional[str] = None,
                         location_id: Optional[int] = None,
                         bounds: Optional[GeoBounds] = None) -> None:
        """
        Invalidate cached map data.

        Args:
            location_type: Optional specific location type to invalidate
            location_id: Optional specific location ID to invalidate
            bounds: Optional specific bounds to invalidate
        """
        if location_type and location_id:
            self.cache_service.invalidate_location_cache(location_type, location_id)
        elif bounds:
            self.cache_service.invalidate_bounds_cache(bounds)
        else:
            self.cache_service.clear_all_map_cache()

    def get_service_stats(self) -> Dict[str, Any]:
        """Get service performance and usage statistics."""
        cache_stats = self.cache_service.get_cache_stats()

        return {
            'cache_performance': cache_stats,
            'clustering_available': True,
            'supported_location_types': [t.value for t in LocationType],
            'max_unclustered_points': self.MAX_UNCLUSTERED_POINTS,
            'max_clustered_points': self.MAX_CLUSTERED_POINTS,
            'service_version': '1.0.0'
        }

    def _get_locations_from_db(self, bounds: Optional[GeoBounds],
                               filters: Optional[MapFilters]) -> List[UnifiedLocation]:
        """Get locations from database using the abstraction layer."""
        return self.location_layer.get_all_locations(bounds, filters)

    def _apply_smart_limiting(self, locations: List[UnifiedLocation],
                              bounds: Optional[GeoBounds], zoom_level: int) -> List[UnifiedLocation]:
        """Apply intelligent limiting based on zoom level and density."""
        if zoom_level < 6:  # Very zoomed out - show only major parks
            major_parks = [
                loc for loc in locations
                if (loc.type == LocationType.PARK and
                    loc.cluster_category in ['major_park', 'theme_park'])
            ]
            return major_parks[:200]
        elif zoom_level < 10:  # Regional level
            return locations[:1000]
        else:  # City level and closer
            return locations[:self.MAX_CLUSTERED_POINTS]

    def _calculate_response_bounds(self, locations: List[UnifiedLocation],
                                   clusters: List[ClusterData],
                                   request_bounds: Optional[GeoBounds]) -> Optional[GeoBounds]:
        """Calculate the actual bounds of the response data."""
        if request_bounds:
            return request_bounds

        all_coords = []

        # Add location coordinates
        for loc in locations:
            all_coords.append((loc.latitude, loc.longitude))

        # Add cluster coordinates
        for cluster in clusters:
            all_coords.append(cluster.coordinates)

        if not all_coords:
            return None

        lats, lngs = zip(*all_coords)
        return GeoBounds(
            north=max(lats),
            south=min(lats),
            east=max(lngs),
            west=min(lngs)
        )

    def _get_applied_filters_list(self, filters: Optional[MapFilters]) -> List[str]:
        """Get list of applied filter types for metadata."""
        if not filters:
            return []

        applied = []
        if filters.location_types:
            applied.append('location_types')
        if filters.search_query:
            applied.append('search_query')
        if filters.park_status:
            applied.append('park_status')
        if filters.ride_types:
            applied.append('ride_types')
        if filters.company_roles:
            applied.append('company_roles')
        if filters.min_rating:
            applied.append('min_rating')
        if filters.country:
            applied.append('country')
        if filters.state:
            applied.append('state')
        if filters.city:
            applied.append('city')

        return applied

    def _generate_cache_key(self, bounds: Optional[GeoBounds], filters: Optional[MapFilters],
                            zoom_level: int, cluster: bool) -> str:
        """Generate cache key for the request."""
        if cluster:
            return self.cache_service.get_clusters_cache_key(bounds, filters, zoom_level)
        else:
            return self.cache_service.get_locations_cache_key(bounds, filters, zoom_level)

    def _record_performance_metrics(self, start_time: float, initial_query_count: int,
                                    cache_hit: bool, result_count: int, bounds_used: bool,
                                    clustering_used: bool) -> None:
        """Record performance metrics for monitoring."""
        query_time_ms = int((time.time() - start_time) * 1000)
        db_query_count = len(connection.queries) - initial_query_count

        metrics = QueryPerformanceMetrics(
            query_time_ms=query_time_ms,
            db_query_count=db_query_count,
            cache_hit=cache_hit,
            result_count=result_count,
            bounds_used=bounds_used,
            clustering_used=clustering_used
        )

        self.cache_service.record_performance_metrics(metrics)


# Global service instance
unified_map_service = UnifiedMapService()
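
A usage sketch for the orchestrating service (an illustration, not part of the commit; the bounds are example values, and note that get_map_data takes keyword-only arguments):

# Illustrative only - e.g. from an API view:
from core.services.map_service import unified_map_service
from core.services.data_structures import GeoBounds

resp = unified_map_service.get_map_data(
    bounds=GeoBounds(north=29.0, south=28.0, east=-81.0, west=-82.0),
    zoom_level=9,
    cluster=True,
)
print(resp.total_count, resp.clustered, resp.query_time_ms)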
370
core/services/performance_monitoring.py
Normal file
@@ -0,0 +1,370 @@
"""
Performance monitoring utilities and context managers.
"""

import time
import logging
from contextlib import contextmanager
from functools import wraps
from typing import Optional, Dict, Any, List
from django.db import connection
from django.conf import settings
from django.utils import timezone

logger = logging.getLogger('performance')


@contextmanager
def monitor_performance(operation_name: str, **tags):
    """Context manager for monitoring operation performance"""
    start_time = time.time()
    initial_queries = len(connection.queries)

    # Create performance context
    performance_context = {
        'operation': operation_name,
        'start_time': start_time,
        'timestamp': timezone.now().isoformat(),
        **tags
    }

    try:
        yield performance_context
    except Exception as e:
        performance_context['error'] = str(e)
        performance_context['status'] = 'error'
        raise
    else:
        performance_context['status'] = 'success'
    finally:
        end_time = time.time()
        duration = end_time - start_time
        total_queries = len(connection.queries) - initial_queries

        # Update performance context with final metrics
        performance_context.update({
            'duration_seconds': duration,
            'duration_ms': round(duration * 1000, 2),
            'query_count': total_queries,
            'end_time': end_time,
        })

        # Log performance data
        log_level = logging.WARNING if duration > 2.0 or total_queries > 10 else logging.INFO
        logger.log(
            log_level,
            f"Performance: {operation_name} completed in {duration:.3f}s with {total_queries} queries",
            extra=performance_context
        )

        # Log slow operations with additional detail
        if duration > 2.0:
            logger.warning(
                f"Slow operation detected: {operation_name} took {duration:.3f}s",
                extra={
                    'slow_operation': True,
                    'threshold_exceeded': 'duration',
                    **performance_context
                }
            )

        if total_queries > 10:
            logger.warning(
                f"High query count: {operation_name} executed {total_queries} queries",
                extra={
                    'high_query_count': True,
                    'threshold_exceeded': 'query_count',
                    **performance_context
                }
            )


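A usage sketch for the context manager above (the wrapped call is a placeholder; any block of work fits, and extra keyword tags flow into the log record):

# Illustrative only:
with monitor_performance("map_data_fetch", view="unified_map") as ctx:
    data = expensive_map_query()  # placeholder for the real work
    ctx['result_count'] = len(data)  # extra keys end up in the log record
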
@contextmanager
def track_queries(operation_name: str, warn_threshold: int = 10):
    """Context manager to track database queries for specific operations"""
    if not settings.DEBUG:
        yield
        return

    initial_queries = len(connection.queries)
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        total_queries = len(connection.queries) - initial_queries
        execution_time = end_time - start_time

        query_details = []
        if hasattr(connection, 'queries') and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            query_details = [
                {
                    'sql': query['sql'][:200] + '...' if len(query['sql']) > 200 else query['sql'],
                    'time': float(query['time'])
                }
                for query in recent_queries
            ]

        performance_data = {
            'operation': operation_name,
            'query_count': total_queries,
            'execution_time': execution_time,
            'queries': query_details if settings.DEBUG else []
        }

        if total_queries > warn_threshold or execution_time > 1.0:
            logger.warning(
                f"Performance concern in {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
            )
        else:
            logger.debug(
                f"Query tracking for {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
            )


class PerformanceProfiler:
    """Advanced performance profiling with detailed metrics"""

    def __init__(self, name: str):
        self.name = name
        self.start_time = None
        self.end_time = None
        self.checkpoints = []
        self.initial_queries = 0
        self.memory_usage = {}

    def start(self):
        """Start profiling"""
        self.start_time = time.time()
        self.initial_queries = len(connection.queries)

        # Track memory usage if psutil is available
        try:
            import psutil
            process = psutil.Process()
            self.memory_usage['start'] = process.memory_info().rss
        except ImportError:
            pass

        logger.debug(f"Started profiling: {self.name}")

    def checkpoint(self, name: str):
        """Add a checkpoint"""
        if self.start_time is None:
            logger.warning(f"Checkpoint '{name}' called before profiling started")
            return

        current_time = time.time()
        elapsed = current_time - self.start_time
        queries_since_start = len(connection.queries) - self.initial_queries

        checkpoint = {
            'name': name,
            'timestamp': current_time,
            'elapsed_seconds': elapsed,
            'queries_since_start': queries_since_start,
        }

        # Memory usage if available
        try:
            import psutil
            process = psutil.Process()
            checkpoint['memory_rss'] = process.memory_info().rss
        except ImportError:
            pass

        self.checkpoints.append(checkpoint)
        logger.debug(f"Checkpoint '{name}' at {elapsed:.3f}s")

    def stop(self):
        """Stop profiling and log results"""
        if self.start_time is None:
            logger.warning("Profiling stopped before it was started")
            return

        self.end_time = time.time()
        total_duration = self.end_time - self.start_time
        total_queries = len(connection.queries) - self.initial_queries

        # Final memory usage
        try:
            import psutil
            process = psutil.Process()
            self.memory_usage['end'] = process.memory_info().rss
        except ImportError:
            pass

        # Create detailed profiling report
        report = {
            'profiler_name': self.name,
            'total_duration': total_duration,
            'total_queries': total_queries,
            'checkpoints': self.checkpoints,
            'memory_usage': self.memory_usage,
            'queries_per_second': total_queries / total_duration if total_duration > 0 else 0,
        }

        # Calculate checkpoint intervals
        if len(self.checkpoints) > 1:
            intervals = []
            for i in range(1, len(self.checkpoints)):
                prev = self.checkpoints[i-1]
                curr = self.checkpoints[i]
                intervals.append({
                    'from': prev['name'],
                    'to': curr['name'],
                    'duration': curr['elapsed_seconds'] - prev['elapsed_seconds'],
                    'queries': curr['queries_since_start'] - prev['queries_since_start'],
                })
            report['checkpoint_intervals'] = intervals

        # Log the complete report
        log_level = logging.WARNING if total_duration > 1.0 else logging.INFO
        logger.log(
            log_level,
            f"Profiling complete: {self.name} took {total_duration:.3f}s with {total_queries} queries",
            extra=report
        )

        return report


@contextmanager
def profile_operation(name: str):
    """Context manager for detailed operation profiling"""
    profiler = PerformanceProfiler(name)
    profiler.start()

    try:
        yield profiler
    finally:
        profiler.stop()


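# Usage sketch (editorial addition): checkpoints mark phases of an operation so the
# final report includes per-interval durations and query counts. The model name is
# an illustrative assumption.
def _example_profile_operation():
    from parks.models import Park  # hypothetical model import
    with profile_operation("park_detail_build") as profiler:
        parks = list(Park.objects.all()[:20])
        profiler.checkpoint("parks_loaded")
        names = [p.name for p in parks]
        profiler.checkpoint("names_extracted")
    return names

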
class DatabaseQueryAnalyzer:
    """Analyze database query patterns and performance"""

    @staticmethod
    def analyze_queries(queries: List[Dict]) -> Dict[str, Any]:
        """Analyze a list of queries for patterns and issues"""
        if not queries:
            return {}

        total_time = sum(float(q.get('time', 0)) for q in queries)
        query_count = len(queries)

        # Group queries by type
        query_types = {}
        for query in queries:
            sql = query.get('sql', '').strip().upper()
            query_type = sql.split()[0] if sql else 'UNKNOWN'
            query_types[query_type] = query_types.get(query_type, 0) + 1

        # Find slow queries (top 10% by time)
        sorted_queries = sorted(queries, key=lambda q: float(q.get('time', 0)), reverse=True)
        slow_query_count = max(1, query_count // 10)
        slow_queries = sorted_queries[:slow_query_count]

        # Detect duplicate queries
        query_signatures = {}
        for query in queries:
            # Simplified signature: normalize whitespace only. Literals are kept,
            # so only byte-identical repeats are counted as duplicates.
            sql = query.get('sql', '')
            signature = ' '.join(sql.split())
            query_signatures[signature] = query_signatures.get(signature, 0) + 1

        duplicates = {sig: count for sig, count in query_signatures.items() if count > 1}

        analysis = {
            'total_queries': query_count,
            'total_time': total_time,
            'average_time': total_time / query_count if query_count > 0 else 0,
            'query_types': query_types,
            'slow_queries': [
                {
                    'sql': q.get('sql', '')[:200] + '...' if len(q.get('sql', '')) > 200 else q.get('sql', ''),
                    'time': float(q.get('time', 0))
                }
                for q in slow_queries
            ],
            'duplicate_query_count': len(duplicates),
            'duplicate_queries': duplicates if len(duplicates) <= 10 else dict(list(duplicates.items())[:10]),
        }

        return analysis

    @classmethod
    def analyze_current_queries(cls) -> Dict[str, Any]:
        """Analyze the current request's queries"""
        if hasattr(connection, 'queries'):
            return cls.analyze_queries(connection.queries)
        return {}


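# Usage sketch (editorial addition): with DEBUG=True, connection.queries is populated,
# so the analyzer can run at the end of a request cycle to spot duplicate queries.
def _example_analyze_queries():
    analysis = DatabaseQueryAnalyzer.analyze_current_queries()
    if analysis.get('duplicate_query_count'):
        logger.warning("Duplicate queries detected", extra=analysis)
    return analysis

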
# Performance monitoring decorators
def monitor_function_performance(operation_name: Optional[str] = None):
    """Decorator to monitor function performance"""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            name = operation_name or f"{func.__module__}.{func.__name__}"
            with monitor_performance(name, function=func.__name__, module=func.__module__):
                return func(*args, **kwargs)
        return wrapper
    return decorator


def track_database_queries(warn_threshold: int = 10):
    """Decorator to track database queries for a function"""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            operation_name = f"{func.__module__}.{func.__name__}"
            with track_queries(operation_name, warn_threshold):
                return func(*args, **kwargs)
        return wrapper
    return decorator


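# Usage sketch (editorial addition): the decorators compose, so one function can get
# both wall-clock monitoring and query tracking. The service function is illustrative.
@monitor_function_performance("services.build_homepage")
@track_database_queries(warn_threshold=8)
def _example_build_homepage():
    return {"sections": []}

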
# Performance metrics collection
class PerformanceMetrics:
    """Collect and aggregate performance metrics"""

    def __init__(self):
        self.metrics = []

    def record_metric(self, name: str, value: float, tags: Optional[Dict] = None):
        """Record a performance metric"""
        metric = {
            'name': name,
            'value': value,
            'timestamp': timezone.now().isoformat(),
            'tags': tags or {}
        }
        self.metrics.append(metric)

        # Log the metric
        logger.info(
            f"Performance metric: {name} = {value}",
            extra=metric
        )

    def get_metrics(self, name: Optional[str] = None) -> List[Dict]:
        """Get recorded metrics, optionally filtered by name"""
        if name:
            return [m for m in self.metrics if m['name'] == name]
        return self.metrics.copy()

    def clear_metrics(self):
        """Clear all recorded metrics"""
        self.metrics.clear()


# Global performance metrics instance
performance_metrics = PerformanceMetrics()
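
# Usage sketch (editorial addition): record a timing sample against the shared
# collector; tags keep samples filterable when aggregating later.
def _example_record_metric():
    performance_metrics.record_metric(
        'map_render_ms', 42.0, tags={'view': 'universal_map'}
    )
    return performance_metrics.get_metrics('map_render_ms')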
37	core/urls/map_urls.py	Normal file
@@ -0,0 +1,37 @@
"""
URL patterns for the unified map service API.
"""

from django.urls import path
from ..views.map_views import (
    MapLocationsView,
    MapLocationDetailView,
    MapSearchView,
    MapBoundsView,
    MapStatsView,
    MapCacheView
)

app_name = 'map_api'

urlpatterns = [
    # Main map data endpoint
    path('locations/', MapLocationsView.as_view(), name='locations'),

    # Location detail endpoint
    path('locations/<str:location_type>/<int:location_id>/',
         MapLocationDetailView.as_view(), name='location_detail'),

    # Search endpoint
    path('search/', MapSearchView.as_view(), name='search'),

    # Bounds-based query endpoint
    path('bounds/', MapBoundsView.as_view(), name='bounds'),

    # Service statistics endpoint
    path('stats/', MapStatsView.as_view(), name='stats'),

    # Cache management endpoints
    path('cache/', MapCacheView.as_view(), name='cache'),
    path('cache/invalidate/', MapCacheView.as_view(), name='cache_invalidate'),
]
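
# Editorial note: a minimal sketch of reversing these routes, assuming this module
# is included under the "map_api" namespace, e.g.
# path('api/map/', include('core.urls.map_urls')) in the project URL conf.
#
# from django.urls import reverse
# reverse('map_api:locations')                           # -> '/api/map/locations/'
# reverse('map_api:location_detail', args=['park', 42])  # -> '/api/map/locations/park/42/'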
33	core/urls/maps.py	Normal file
@@ -0,0 +1,33 @@
"""
URL patterns for map views.
Includes both HTML views and HTMX endpoints.
"""

from django.urls import path
from ..views.maps import (
    UniversalMapView,
    ParkMapView,
    NearbyLocationsView,
    LocationFilterView,
    LocationSearchView,
    MapBoundsUpdateView,
    LocationDetailModalView,
    LocationListView,
)

app_name = 'maps'

urlpatterns = [
    # Main map views
    path('', UniversalMapView.as_view(), name='universal_map'),
    path('parks/', ParkMapView.as_view(), name='park_map'),
    path('nearby/', NearbyLocationsView.as_view(), name='nearby_locations'),
    path('list/', LocationListView.as_view(), name='location_list'),

    # HTMX endpoints for dynamic updates
    path('htmx/filter/', LocationFilterView.as_view(), name='htmx_filter'),
    path('htmx/search/', LocationSearchView.as_view(), name='htmx_search'),
    path('htmx/bounds/', MapBoundsUpdateView.as_view(), name='htmx_bounds_update'),
    path('htmx/location/<str:location_type>/<int:location_id>/',
         LocationDetailModalView.as_view(), name='htmx_location_detail'),
]
21	core/urls/search.py	Normal file
@@ -0,0 +1,21 @@
from django.urls import path
from core.views.search import (
    AdaptiveSearchView,
    FilterFormView,
    LocationSearchView,
    LocationSuggestionsView
)
from rides.views import RideSearchView

app_name = 'search'

urlpatterns = [
    path('parks/', AdaptiveSearchView.as_view(), name='search'),
    path('parks/filters/', FilterFormView.as_view(), name='filter_form'),
    path('rides/', RideSearchView.as_view(), name='ride_search'),
    path('rides/results/', RideSearchView.as_view(), name='ride_search_results'),

    # Location-aware search
    path('location/', LocationSearchView.as_view(), name='location_search'),
    path('location/suggestions/', LocationSuggestionsView.as_view(), name='location_suggestions'),
]
1	core/utils/__init__.py	Normal file
@@ -0,0 +1 @@
# Core utilities
385	core/utils/query_optimization.py	Normal file
@@ -0,0 +1,385 @@
"""
Database query optimization utilities and helpers.
"""

import time
import logging
from contextlib import contextmanager
from functools import wraps
from typing import Optional, Dict, Any, List, Type
from django.core.exceptions import FieldDoesNotExist
from django.db import connection, models
from django.db.models import QuerySet, Prefetch, Count, Avg, Max, Min
from django.db.models.fields.reverse_related import ManyToManyRel
from django.conf import settings
from django.core.cache import cache

logger = logging.getLogger('query_optimization')


@contextmanager
def track_queries(operation_name: str, warn_threshold: int = 10, time_threshold: float = 1.0):
    """
    Context manager to track database queries for specific operations

    Args:
        operation_name: Name of the operation being tracked
        warn_threshold: Number of queries that triggers a warning
        time_threshold: Execution time in seconds that triggers a warning
    """
    if not settings.DEBUG:
        yield
        return

    initial_queries = len(connection.queries)
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        total_queries = len(connection.queries) - initial_queries
        execution_time = end_time - start_time

        # Collect query details
        query_details = []
        if hasattr(connection, 'queries') and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            query_details = [
                {
                    'sql': query['sql'][:500] + '...' if len(query['sql']) > 500 else query['sql'],
                    'time': float(query['time']),
                    'duplicate_count': sum(1 for q in recent_queries if q['sql'] == query['sql'])
                }
                for query in recent_queries
            ]

        performance_data = {
            'operation': operation_name,
            'query_count': total_queries,
            'execution_time': execution_time,
            'queries': query_details if settings.DEBUG else [],
            'slow_queries': [q for q in query_details if q['time'] > 0.1],  # Queries slower than 100ms
        }

        # Log warnings for performance issues
        if total_queries > warn_threshold or execution_time > time_threshold:
            logger.warning(
                f"Performance concern in {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
            )
        else:
            logger.debug(
                f"Query tracking for {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
            )


class QueryOptimizer:
    """Utility class for common query optimization patterns"""

    @staticmethod
    def optimize_park_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize Park queryset with proper select_related and prefetch_related
        """
        return queryset.select_related(
            'location',
            'operator',
            'created_by'
        ).prefetch_related(
            'areas',
            'rides__manufacturer',
            'reviews__user'
        ).annotate(
            ride_count=Count('rides'),
            average_rating=Avg('reviews__rating'),
            latest_review_date=Max('reviews__created_at')
        )

    @staticmethod
    def optimize_ride_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize Ride queryset with proper relationships
        """
        return queryset.select_related(
            'park',
            'park__location',
            'manufacturer',
            'created_by'
        ).prefetch_related(
            'reviews__user',
            'media_items'
        ).annotate(
            review_count=Count('reviews'),
            average_rating=Avg('reviews__rating'),
            latest_review_date=Max('reviews__created_at')
        )

    @staticmethod
    def optimize_user_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize User queryset for profile views
        """
        return queryset.prefetch_related(
            Prefetch('park_reviews', to_attr='cached_park_reviews'),
            Prefetch('ride_reviews', to_attr='cached_ride_reviews'),
            'authored_parks',
            'authored_rides'
        ).annotate(
            total_reviews=Count('park_reviews') + Count('ride_reviews'),
            parks_authored=Count('authored_parks'),
            rides_authored=Count('authored_rides')
        )

    @staticmethod
    def create_bulk_queryset(model: Type[models.Model], ids: List[int]) -> QuerySet:
        """
        Create an optimized queryset for bulk operations
        """
        queryset = model.objects.filter(id__in=ids)

        # Apply model-specific optimizations
        if hasattr(model, '_meta') and model._meta.model_name == 'park':
            return QueryOptimizer.optimize_park_queryset(queryset)
        elif hasattr(model, '_meta') and model._meta.model_name == 'ride':
            return QueryOptimizer.optimize_ride_queryset(queryset)
        elif hasattr(model, '_meta') and model._meta.model_name == 'user':
            return QueryOptimizer.optimize_user_queryset(queryset)

        return queryset


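# Usage sketch (editorial addition): apply the park optimizer before slicing. Note
# that combining Count() and Avg() across several joins can inflate counts in Django;
# if numbers look multiplied, Count('rides', distinct=True) is the usual fix.
def _example_optimized_parks():
    from parks.models import Park  # hypothetical model import
    qs = QueryOptimizer.optimize_park_queryset(Park.objects.all())
    return list(qs[:25])

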
class QueryCache:
    """Caching utilities for expensive queries"""

    @staticmethod
    def cache_queryset_result(cache_key: str, queryset_func, timeout: int = 3600, **kwargs):
        """
        Cache the result of an expensive queryset operation

        Args:
            cache_key: Unique key for caching
            queryset_func: Function that returns the queryset result
            timeout: Cache timeout in seconds
            **kwargs: Arguments to pass to queryset_func
        """
        # Try to get from cache first
        cached_result = cache.get(cache_key)
        if cached_result is not None:
            logger.debug(f"Cache hit for queryset: {cache_key}")
            return cached_result

        # Execute the expensive operation
        with track_queries(f"cache_miss_{cache_key}"):
            result = queryset_func(**kwargs)

        # Cache the result
        cache.set(cache_key, result, timeout)
        logger.debug(f"Cached queryset result: {cache_key}")

        return result

    @staticmethod
    def invalidate_model_cache(model_name: str, instance_id: Optional[int] = None):
        """
        Invalidate cache keys related to a specific model

        Args:
            model_name: Name of the model (e.g., 'park', 'ride')
            instance_id: Specific instance ID, if applicable
        """
        # Pattern-based cache invalidation (works with Redis)
        if instance_id:
            pattern = f"*{model_name}_{instance_id}*"
        else:
            pattern = f"*{model_name}*"

        try:
            # For Redis cache backends that support pattern deletion
            if hasattr(cache, 'delete_pattern'):
                deleted_count = cache.delete_pattern(pattern)
                logger.info(f"Invalidated {deleted_count} cache keys for pattern: {pattern}")
            else:
                logger.warning(f"Cache backend does not support pattern deletion: {pattern}")
        except Exception as e:
            logger.error(f"Error invalidating cache pattern {pattern}: {e}")


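# Usage sketch (editorial addition): the callable should return a fully evaluated
# result (e.g. a list), since caching a lazy QuerySet would re-run the query on
# every access. Model name is an illustrative assumption.
def _example_cached_top_parks():
    from parks.models import Park  # hypothetical model import
    return QueryCache.cache_queryset_result(
        cache_key='top_parks_v1',
        queryset_func=lambda: list(Park.objects.order_by('-id')[:10]),
        timeout=600,
    )

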
class IndexAnalyzer:
    """Analyze and suggest database indexes"""

    @staticmethod
    def analyze_slow_queries(min_time: float = 0.1) -> List[Dict[str, Any]]:
        """
        Analyze slow queries from the current request

        Args:
            min_time: Minimum query time in seconds to consider "slow"
        """
        if not hasattr(connection, 'queries'):
            return []

        slow_queries = []
        for query in connection.queries:
            query_time = float(query.get('time', 0))
            if query_time >= min_time:
                slow_queries.append({
                    'sql': query['sql'],
                    'time': query_time,
                    'analysis': IndexAnalyzer._analyze_query_sql(query['sql'])
                })

        return slow_queries

    @staticmethod
    def _analyze_query_sql(sql: str) -> Dict[str, Any]:
        """
        Analyze SQL to suggest potential optimizations
        """
        sql_upper = sql.upper()
        analysis = {
            'has_where_clause': 'WHERE' in sql_upper,
            'has_join': any(join in sql_upper for join in ['JOIN', 'INNER JOIN', 'LEFT JOIN', 'RIGHT JOIN']),
            'has_order_by': 'ORDER BY' in sql_upper,
            'has_group_by': 'GROUP BY' in sql_upper,
            'has_like': 'LIKE' in sql_upper,
            'table_scans': [],
            'suggestions': []
        }

        # Detect potential table scans
        if 'WHERE' not in sql_upper and 'SELECT COUNT(*) FROM' not in sql_upper:
            analysis['table_scans'].append("Query may be doing a full table scan")

        # Suggest indexes based on patterns
        if analysis['has_where_clause'] and not analysis['has_join']:
            analysis['suggestions'].append("Consider adding indexes on WHERE clause columns")

        if analysis['has_order_by']:
            analysis['suggestions'].append("Consider adding indexes on ORDER BY columns")

        # Flag leading-wildcard LIKE patterns (e.g. LIKE '%term'), which defeat B-tree indexes
        if analysis['has_like']:
            like_idx = sql_upper.find('LIKE')
            if "'%" in sql[like_idx:like_idx + 12]:
                analysis['suggestions'].append("LIKE queries with leading wildcards cannot use indexes efficiently")

        return analysis

    @staticmethod
    def suggest_model_indexes(model: Type[models.Model]) -> List[str]:
        """
        Suggest database indexes for a Django model based on its fields
        """
        suggestions = []
        opts = model._meta

        # Foreign key fields should have indexes (Django adds these automatically)
        for field in opts.fields:
            if isinstance(field, models.ForeignKey):
                suggestions.append(f"Index on {field.name} (automatically created by Django)")

        # Suggest composite indexes for common query patterns
        date_fields = [f.name for f in opts.fields if isinstance(f, (models.DateField, models.DateTimeField))]
        status_fields = [f.name for f in opts.fields if f.name in ['status', 'is_active', 'is_published']]

        if date_fields and status_fields:
            for date_field in date_fields:
                for status_field in status_fields:
                    suggestions.append(f"Composite index on ({status_field}, {date_field}) for filtered date queries")

        # Suggest indexes for fields commonly used in WHERE clauses
        common_filter_fields = ['slug', 'name', 'created_at', 'updated_at']
        for field in opts.fields:
            if field.name in common_filter_fields and not field.db_index:
                suggestions.append(f"Consider adding db_index=True to {field.name}")

        return suggestions


def log_query_performance():
    """Decorator to log query performance for a function"""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            operation_name = f"{func.__module__}.{func.__name__}"
            with track_queries(operation_name):
                return func(*args, **kwargs)
        return wrapper
    return decorator


def optimize_queryset_for_serialization(queryset: QuerySet, fields: List[str]) -> QuerySet:
    """
    Optimize a queryset for API serialization by only selecting needed fields

    Args:
        queryset: The queryset to optimize
        fields: List of field names that will be serialized
    """
    # Extract foreign key fields that need select_related
    model = queryset.model
    opts = model._meta

    select_related_fields = []
    prefetch_related_fields = []

    for field_name in fields:
        try:
            field = opts.get_field(field_name)
            if isinstance(field, models.ForeignKey):
                select_related_fields.append(field_name)
            elif isinstance(field, (models.ManyToManyField, ManyToManyRel)):
                prefetch_related_fields.append(field_name)
        except FieldDoesNotExist:
            # Field might be a property or method, skip optimization
            continue

    # Apply optimizations
    if select_related_fields:
        queryset = queryset.select_related(*select_related_fields)

    if prefetch_related_fields:
        queryset = queryset.prefetch_related(*prefetch_related_fields)

    return queryset


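# Usage sketch (editorial addition): pass the serializer's field list so FK fields
# get select_related and M2M fields get prefetch_related automatically. Field names
# are illustrative assumptions.
def _example_serialization_queryset():
    from parks.models import Park  # hypothetical model import
    qs = optimize_queryset_for_serialization(
        Park.objects.all(), fields=['location', 'operator', 'name']
    )
    return list(qs.values('id', 'name')[:10])

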
# Query performance monitoring context manager
@contextmanager
def monitor_db_performance(operation_name: str):
    """
    Context manager that monitors database performance for an operation
    """
    initial_queries = len(connection.queries) if hasattr(connection, 'queries') else 0
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        duration = end_time - start_time

        if hasattr(connection, 'queries'):
            total_queries = len(connection.queries) - initial_queries

            # Analyze queries for performance issues
            slow_queries = IndexAnalyzer.analyze_slow_queries(0.05)  # 50ms threshold

            performance_data = {
                'operation': operation_name,
                'duration': duration,
                'query_count': total_queries,
                'slow_query_count': len(slow_queries),
                'slow_queries': slow_queries[:5]  # Limit to top 5 slow queries
            }

            # Log performance data
            if duration > 1.0 or total_queries > 15 or slow_queries:
                logger.warning(
                    f"Performance issue in {operation_name}: "
                    f"{duration:.3f}s, {total_queries} queries, {len(slow_queries)} slow",
                    extra=performance_data
                )
            else:
                logger.debug(
                    f"DB performance for {operation_name}: "
                    f"{duration:.3f}s, {total_queries} queries",
                    extra=performance_data
                )
1	core/views/__init__.py	Normal file
@@ -0,0 +1 @@
# Core views
256	core/views/health_views.py	Normal file
@@ -0,0 +1,256 @@
"""
Enhanced health check views for API monitoring.
"""

import time
from django.http import JsonResponse
from django.utils import timezone
from django.views import View
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from health_check.views import MainView
from core.services.enhanced_cache_service import CacheMonitor
from core.utils.query_optimization import IndexAnalyzer


class HealthCheckAPIView(APIView):
    """
    Enhanced API endpoint for health checks with detailed JSON response
    """

    permission_classes = [AllowAny]  # Public endpoint

    def get(self, request):
        """Return comprehensive health check information"""
        start_time = time.time()

        # Get basic health check results
        main_view = MainView()
        main_view.request = request

        plugins = main_view.plugins
        errors = main_view.errors

        # Collect additional performance metrics
        cache_monitor = CacheMonitor()
        cache_stats = cache_monitor.get_cache_stats()

        # Build comprehensive health data
        health_data = {
            'status': 'healthy' if not errors else 'unhealthy',
            'timestamp': timezone.now().isoformat(),
            'version': getattr(settings, 'VERSION', '1.0.0'),
            'environment': getattr(settings, 'ENVIRONMENT', 'development'),
            'response_time_ms': 0,  # Will be calculated at the end
            'checks': {},
            'metrics': {
                'cache': cache_stats,
                'database': self._get_database_metrics(),
                'system': self._get_system_metrics(),
            }
        }

        # Process individual health checks
        for plugin in plugins:
            plugin_name = plugin.identifier()
            plugin_errors = errors.get(plugin.__class__.__name__, [])

            health_data['checks'][plugin_name] = {
                'status': 'healthy' if not plugin_errors else 'unhealthy',
                'critical': getattr(plugin, 'critical_service', False),
                'errors': [str(error) for error in plugin_errors],
                'response_time_ms': getattr(plugin, '_response_time', None)
            }

        # Calculate total response time
        health_data['response_time_ms'] = round((time.time() - start_time) * 1000, 2)

        # Determine HTTP status code
        status_code = 200
        if errors:
            # Check if any critical services are failing
            critical_errors = any(
                getattr(plugin, 'critical_service', False)
                for plugin in plugins
                if errors.get(plugin.__class__.__name__)
            )
            status_code = 503 if critical_errors else 200

        return Response(health_data, status=status_code)

    def _get_database_metrics(self):
        """Get database performance metrics"""
        try:
            from django.db import connection

            # Get basic connection info
            metrics = {
                'vendor': connection.vendor,
                'connection_status': 'connected',
            }

            # Test query performance
            start_time = time.time()
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
            query_time = (time.time() - start_time) * 1000

            metrics['test_query_time_ms'] = round(query_time, 2)

            # PostgreSQL specific metrics
            if connection.vendor == 'postgresql':
                try:
                    with connection.cursor() as cursor:
                        cursor.execute("""
                            SELECT
                                numbackends as active_connections,
                                xact_commit as transactions_committed,
                                xact_rollback as transactions_rolled_back,
                                blks_read as blocks_read,
                                blks_hit as blocks_hit
                            FROM pg_stat_database
                            WHERE datname = current_database()
                        """)
                        row = cursor.fetchone()
                        if row:
                            metrics.update({
                                'active_connections': row[0],
                                'transactions_committed': row[1],
                                'transactions_rolled_back': row[2],
                                'cache_hit_ratio': round((row[4] / (row[3] + row[4])) * 100, 2) if (row[3] + row[4]) > 0 else 0
                            })
                except Exception:
                    pass  # Skip advanced metrics if not available

            return metrics

        except Exception as e:
            return {
                'connection_status': 'error',
                'error': str(e)
            }

    def _get_system_metrics(self):
        """Get system performance metrics"""
        metrics = {
            'debug_mode': settings.DEBUG,
            'allowed_hosts': settings.ALLOWED_HOSTS if settings.DEBUG else ['hidden'],
        }

        try:
            import psutil

            # Memory metrics
            memory = psutil.virtual_memory()
            metrics['memory'] = {
                'total_mb': round(memory.total / 1024 / 1024, 2),
                'available_mb': round(memory.available / 1024 / 1024, 2),
                'percent_used': memory.percent,
            }

            # CPU metrics
            metrics['cpu'] = {
                'percent_used': psutil.cpu_percent(interval=0.1),
                'core_count': psutil.cpu_count(),
            }

            # Disk metrics
            disk = psutil.disk_usage('/')
            metrics['disk'] = {
                'total_gb': round(disk.total / 1024 / 1024 / 1024, 2),
                'free_gb': round(disk.free / 1024 / 1024 / 1024, 2),
                'percent_used': round((disk.used / disk.total) * 100, 2),
            }

        except ImportError:
            metrics['system_monitoring'] = 'psutil not available'
        except Exception as e:
            metrics['system_error'] = str(e)

        return metrics


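# Usage sketch (editorial addition): exercising the endpoint with Django's test
# client, assuming it is routed at /api/health/ (the URL conf is not shown in this diff).
def _example_health_check():
    from django.test import Client
    resp = Client().get('/api/health/')
    return resp.status_code, resp.json().get('status')

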
class PerformanceMetricsView(APIView):
    """
    API view for performance metrics and database analysis
    """

    # NOTE: an empty permission_classes list applies no permission checks in DRF,
    # so the effective guard is the DEBUG check in get() below.
    permission_classes = [AllowAny] if settings.DEBUG else []

    def get(self, request):
        """Return performance metrics and analysis"""
        if not settings.DEBUG:
            return Response({'error': 'Only available in debug mode'}, status=403)

        metrics = {
            'timestamp': timezone.now().isoformat(),
            'database_analysis': self._get_database_analysis(),
            'cache_performance': self._get_cache_performance(),
            'recent_slow_queries': self._get_slow_queries(),
        }

        return Response(metrics)

    def _get_database_analysis(self):
        """Analyze database performance"""
        try:
            from django.db import connection

            analysis = {
                'total_queries': len(connection.queries),
                'query_analysis': IndexAnalyzer.analyze_slow_queries(0.05),
            }

            if connection.queries:
                query_times = [float(q.get('time', 0)) for q in connection.queries]
                analysis.update({
                    'total_query_time': sum(query_times),
                    'average_query_time': sum(query_times) / len(query_times),
                    'slowest_query_time': max(query_times),
                    'fastest_query_time': min(query_times),
                })

            return analysis

        except Exception as e:
            return {'error': str(e)}

    def _get_cache_performance(self):
        """Get cache performance metrics"""
        try:
            cache_monitor = CacheMonitor()
            return cache_monitor.get_cache_stats()
        except Exception as e:
            return {'error': str(e)}

    def _get_slow_queries(self):
        """Get recent slow queries"""
        try:
            return IndexAnalyzer.analyze_slow_queries(0.1)  # 100ms threshold
        except Exception as e:
            return {'error': str(e)}


class SimpleHealthView(View):
    """
    Simple health check endpoint for load balancers
    """

    def get(self, request):
        """Return simple OK status"""
        try:
            # Basic database connectivity test
            from django.db import connection
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()

            return JsonResponse({'status': 'ok', 'timestamp': timezone.now().isoformat()})
        except Exception as e:
            return JsonResponse(
                {'status': 'error', 'error': str(e), 'timestamp': timezone.now().isoformat()},
                status=503
            )
629	core/views/map_views.py	Normal file
@@ -0,0 +1,629 @@
"""
API views for the unified map service.
Enhanced with proper error handling, pagination, and performance optimizations.
"""

import json
import logging
import time
from typing import Dict, Any, Optional, Set
from django.http import JsonResponse, HttpRequest, Http404
from django.views.decorators.http import require_http_methods
from django.views.decorators.cache import cache_page
from django.views.decorators.gzip import gzip_page
from django.utils.decorators import method_decorator
from django.views import View
from django.core.exceptions import ValidationError
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.conf import settings

from ..services.map_service import unified_map_service
from ..services.data_structures import GeoBounds, MapFilters, LocationType

logger = logging.getLogger(__name__)


class MapAPIView(View):
    """Base view for map API endpoints with common functionality."""

    # Pagination settings
    DEFAULT_PAGE_SIZE = 50
    MAX_PAGE_SIZE = 200

    def dispatch(self, request, *args, **kwargs):
        """Add CORS headers, response timing, and handle preflight requests."""
        start_time = time.time()

        try:
            response = super().dispatch(request, *args, **kwargs)

            # Add CORS headers for API access
            response['Access-Control-Allow-Origin'] = '*'
            response['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS'
            response['Access-Control-Allow-Headers'] = 'Content-Type, Authorization'

            # Add performance headers
            response['X-Response-Time'] = f"{(time.time() - start_time) * 1000:.2f}ms"

            # Compression is applied per-endpoint via the gzip_page decorator;
            # setting a Content-Encoding header here without actually compressing
            # would corrupt responses, so it is deliberately not done.

            return response

        except Exception as e:
            logger.error(f"API error in {request.path}: {str(e)}", exc_info=True)
            return self._error_response(
                "An internal server error occurred",
                status=500
            )

    def options(self, request, *args, **kwargs):
        """Handle preflight CORS requests."""
        return JsonResponse({}, status=200)

    def _parse_bounds(self, request: HttpRequest) -> Optional[GeoBounds]:
        """Parse geographic bounds from request parameters."""
        try:
            north = request.GET.get('north')
            south = request.GET.get('south')
            east = request.GET.get('east')
            west = request.GET.get('west')

            if all(param is not None for param in [north, south, east, west]):
                bounds = GeoBounds(
                    north=float(north),
                    south=float(south),
                    east=float(east),
                    west=float(west)
                )

                # Validate bounds
                if not (-90 <= bounds.south <= bounds.north <= 90):
                    raise ValidationError("Invalid latitude bounds")
                if not (-180 <= bounds.west <= bounds.east <= 180):
                    raise ValidationError("Invalid longitude bounds")

                return bounds
            return None
        except (ValueError, TypeError) as e:
            raise ValidationError(f"Invalid bounds parameters: {e}")

    def _parse_pagination(self, request: HttpRequest) -> Dict[str, int]:
        """Parse pagination parameters from request."""
        try:
            page = max(1, int(request.GET.get('page', 1)))
            page_size = min(
                self.MAX_PAGE_SIZE,
                max(1, int(request.GET.get('page_size', self.DEFAULT_PAGE_SIZE)))
            )
            offset = (page - 1) * page_size

            return {
                'page': page,
                'page_size': page_size,
                'offset': offset,
                'limit': page_size
            }
        except (ValueError, TypeError):
            return {
                'page': 1,
                'page_size': self.DEFAULT_PAGE_SIZE,
                'offset': 0,
                'limit': self.DEFAULT_PAGE_SIZE
            }

    def _parse_filters(self, request: HttpRequest) -> Optional[MapFilters]:
        """Parse filtering parameters from request."""
        try:
            filters = MapFilters()

            # Location types
            location_types_param = request.GET.get('types')
            if location_types_param:
                type_strings = location_types_param.split(',')
                valid_types = {lt.value for lt in LocationType}
                filters.location_types = {
                    LocationType(t.strip()) for t in type_strings
                    if t.strip() in valid_types
                }

            # Park status
            park_status_param = request.GET.get('park_status')
            if park_status_param:
                filters.park_status = set(park_status_param.split(','))

            # Ride types
            ride_types_param = request.GET.get('ride_types')
            if ride_types_param:
                filters.ride_types = set(ride_types_param.split(','))

            # Company roles
            company_roles_param = request.GET.get('company_roles')
            if company_roles_param:
                filters.company_roles = set(company_roles_param.split(','))

            # Search query with length validation
            search_query = request.GET.get('q') or request.GET.get('search')
            if search_query and len(search_query.strip()) >= 2:
                filters.search_query = search_query.strip()

            # Rating filter with validation
            min_rating_param = request.GET.get('min_rating')
            if min_rating_param:
                min_rating = float(min_rating_param)
                if 0 <= min_rating <= 10:
                    filters.min_rating = min_rating

            # Geographic filters with validation
            country = request.GET.get('country', '').strip()
            if country and len(country) >= 2:
                filters.country = country

            state = request.GET.get('state', '').strip()
            if state and len(state) >= 2:
                filters.state = state

            city = request.GET.get('city', '').strip()
            if city and len(city) >= 2:
                filters.city = city

            # Coordinates requirement
            has_coordinates_param = request.GET.get('has_coordinates')
            if has_coordinates_param is not None:
                filters.has_coordinates = has_coordinates_param.lower() in ['true', '1', 'yes']

            return filters if any([
                filters.location_types, filters.park_status, filters.ride_types,
                filters.company_roles, filters.search_query, filters.min_rating,
                filters.country, filters.state, filters.city
            ]) else None

        except (ValueError, TypeError) as e:
            raise ValidationError(f"Invalid filter parameters: {e}")

    def _parse_zoom_level(self, request: HttpRequest) -> int:
        """Parse zoom level from request with default."""
        try:
            zoom_param = request.GET.get('zoom', '10')
            zoom_level = int(zoom_param)
            return max(1, min(20, zoom_level))  # Clamp between 1 and 20
        except (ValueError, TypeError):
            return 10  # Default zoom level

    def _create_paginated_response(self, data: list, total_count: int,
                                   pagination: Dict[str, int], request: HttpRequest) -> Dict[str, Any]:
        """Create paginated response with metadata."""
        total_pages = (total_count + pagination['page_size'] - 1) // pagination['page_size']

        # Build pagination URLs
        base_url = request.build_absolute_uri(request.path)
        query_params = request.GET.copy()

        next_url = None
        if pagination['page'] < total_pages:
            query_params['page'] = pagination['page'] + 1
            next_url = f"{base_url}?{query_params.urlencode()}"

        prev_url = None
        if pagination['page'] > 1:
            query_params['page'] = pagination['page'] - 1
            prev_url = f"{base_url}?{query_params.urlencode()}"

        return {
            'status': 'success',
            'data': data,
            'pagination': {
                'page': pagination['page'],
                'page_size': pagination['page_size'],
                'total_pages': total_pages,
                'total_count': total_count,
                'has_next': pagination['page'] < total_pages,
                'has_previous': pagination['page'] > 1,
                'next_url': next_url,
                'previous_url': prev_url,
            }
        }

    def _error_response(self, message: str, status: int = 400,
                        error_code: Optional[str] = None,
                        details: Optional[Dict[str, Any]] = None) -> JsonResponse:
        """Return standardized error response with enhanced information."""
        response_data = {
            'status': 'error',
            'message': message,
            'timestamp': time.time(),
            'data': None
        }

        if error_code:
            response_data['error_code'] = error_code

        if details:
            response_data['details'] = details

        # Add request ID for debugging in production
        if hasattr(settings, 'DEBUG') and not settings.DEBUG:
            response_data['request_id'] = getattr(self.request, 'id', None)

        return JsonResponse(response_data, status=status)

    def _success_response(self, data: Any, message: Optional[str] = None,
                          metadata: Optional[Dict[str, Any]] = None) -> JsonResponse:
        """Return standardized success response."""
        response_data = {
            'status': 'success',
            'data': data,
            'timestamp': time.time(),
        }

        if message:
            response_data['message'] = message

        if metadata:
            response_data['metadata'] = metadata

        return JsonResponse(response_data)


class MapLocationsView(MapAPIView):
    """
    API endpoint for getting map locations with optional clustering.

    GET /api/map/locations/
    Parameters:
    - north, south, east, west: Bounding box coordinates
    - zoom: Zoom level (1-20)
    - types: Comma-separated location types (park,ride,company,generic)
    - cluster: Whether to enable clustering (true/false)
    - q: Search query
    - park_status: Park status filter
    - ride_types: Ride type filter
    - min_rating: Minimum rating filter
    - country, state, city: Geographic filters
    """

    @method_decorator(cache_page(300))  # Cache for 5 minutes
    @method_decorator(gzip_page)  # Compress large responses
    def get(self, request: HttpRequest) -> JsonResponse:
        """Get map locations with optional clustering and filtering."""
        try:
            # Parse parameters
            bounds = self._parse_bounds(request)
            filters = self._parse_filters(request)
            zoom_level = self._parse_zoom_level(request)
            pagination = self._parse_pagination(request)

            # Clustering preference
            cluster_param = request.GET.get('cluster', 'true')
            enable_clustering = cluster_param.lower() in ['true', '1', 'yes']

            # Cache preference
            use_cache_param = request.GET.get('cache', 'true')
            use_cache = use_cache_param.lower() in ['true', '1', 'yes']

            # Validate request
            if not enable_clustering and not bounds and not filters:
                return self._error_response(
                    "Either bounds, filters, or clustering must be specified for non-clustered requests",
                    error_code="MISSING_PARAMETERS"
                )

            # Get map data
            response = unified_map_service.get_map_data(
                bounds=bounds,
                filters=filters,
                zoom_level=zoom_level,
                cluster=enable_clustering,
                use_cache=use_cache
            )

            # Handle pagination for non-clustered results
            if not enable_clustering and response.locations:
                start_idx = pagination['offset']
                end_idx = start_idx + pagination['limit']
                paginated_locations = response.locations[start_idx:end_idx]

                return JsonResponse(self._create_paginated_response(
                    [loc.to_dict() for loc in paginated_locations],
                    len(response.locations),
                    pagination,
                    request
                ))

            # For clustered results, return as-is with metadata
            response_dict = response.to_dict()

            return self._success_response(
                response_dict,
                metadata={
                    'clustered': response.clustered,
                    'cache_hit': response.cache_hit,
                    'query_time_ms': response.query_time_ms,
                    'filters_applied': response.filters_applied
                }
            )

        except ValidationError as e:
            logger.warning(f"Validation error in MapLocationsView: {str(e)}")
            return self._error_response(str(e), 400, error_code="VALIDATION_ERROR")
        except Exception as e:
            logger.error(f"Error in MapLocationsView: {str(e)}", exc_info=True)
            return self._error_response(
                "Failed to retrieve map locations",
                500,
                error_code="INTERNAL_ERROR"
            )


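# Usage sketch (editorial addition): a bounds-limited, non-clustered request via the
# test client, assuming the API is mounted at /api/map/ (the mount point is not shown
# in this diff).
def _example_map_locations_request():
    from django.test import Client
    resp = Client().get('/api/map/locations/', {
        'north': 41.0, 'south': 40.0, 'east': -73.0, 'west': -75.0,
        'zoom': 12, 'cluster': 'false', 'page_size': 25,
    })
    return resp.json()

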
class MapLocationDetailView(MapAPIView):
    """
    API endpoint for getting detailed information about a specific location.

    GET /api/map/locations/<type>/<id>/
    """

    @method_decorator(cache_page(600))  # Cache for 10 minutes
    def get(self, request: HttpRequest, location_type: str, location_id: int) -> JsonResponse:
        """Get detailed information for a specific location."""
        try:
            # Validate location type
            valid_types = [lt.value for lt in LocationType]
            if location_type not in valid_types:
                return self._error_response(
                    f"Invalid location type: {location_type}. Valid types: {', '.join(valid_types)}",
                    400,
                    error_code="INVALID_LOCATION_TYPE"
                )

            # Validate location ID
            if location_id <= 0:
                return self._error_response(
                    "Location ID must be a positive integer",
                    400,
                    error_code="INVALID_LOCATION_ID"
                )

            # Get location details
            location = unified_map_service.get_location_details(location_type, location_id)

            if not location:
                return self._error_response(
                    f"Location not found: {location_type}/{location_id}",
                    404,
                    error_code="LOCATION_NOT_FOUND"
                )

            return self._success_response(
                location.to_dict(),
                metadata={
                    'location_type': location_type,
                    'location_id': location_id
                }
            )

        except ValueError as e:
            logger.warning(f"Value error in MapLocationDetailView: {str(e)}")
            return self._error_response(str(e), 400, error_code="INVALID_PARAMETER")
        except Exception as e:
            logger.error(f"Error in MapLocationDetailView: {str(e)}", exc_info=True)
            return self._error_response(
                "Failed to retrieve location details",
                500,
                error_code="INTERNAL_ERROR"
            )


class MapSearchView(MapAPIView):
    """
    API endpoint for searching locations by text query.

    GET /api/map/search/
    Parameters:
    - q: Search query (required)
    - north, south, east, west: Optional bounding box
    - types: Comma-separated location types
    - limit: Maximum results (default 50)
    """

    @method_decorator(gzip_page)  # Compress responses
    def get(self, request: HttpRequest) -> JsonResponse:
        """Search locations by text query with pagination."""
        try:
            # Get and validate search query
            query = request.GET.get('q', '').strip()
            if not query:
                return self._error_response(
                    "Search query 'q' parameter is required",
                    400,
                    error_code="MISSING_QUERY"
                )

            if len(query) < 2:
                return self._error_response(
                    "Search query must be at least 2 characters long",
                    400,
                    error_code="QUERY_TOO_SHORT"
                )

            # Parse parameters
            bounds = self._parse_bounds(request)
            pagination = self._parse_pagination(request)

            # Parse location types
            location_types = None
            types_param = request.GET.get('types')
            if types_param:
                try:
                    valid_types = {lt.value for lt in LocationType}
                    location_types = {
                        LocationType(t.strip()) for t in types_param.split(',')
                        if t.strip() in valid_types
                    }
                except ValueError:
                    return self._error_response(
                        "Invalid location types",
                        400,
                        error_code="INVALID_TYPES"
                    )

            # Set reasonable search limit (higher for search than general listings)
            search_limit = min(500, pagination['page'] * pagination['page_size'])

            # Perform search
            locations = unified_map_service.search_locations(
                query=query,
                bounds=bounds,
                location_types=location_types,
                limit=search_limit
            )

            # Apply pagination
            start_idx = pagination['offset']
            end_idx = start_idx + pagination['limit']
            paginated_locations = locations[start_idx:end_idx]

            return JsonResponse(self._create_paginated_response(
                [loc.to_dict() for loc in paginated_locations],
                len(locations),
                pagination,
                request
            ))

        except ValidationError as e:
            logger.warning(f"Validation error in MapSearchView: {str(e)}")
            return self._error_response(str(e), 400, error_code="VALIDATION_ERROR")
        except ValueError as e:
            logger.warning(f"Value error in MapSearchView: {str(e)}")
            return self._error_response(str(e), 400, error_code="INVALID_PARAMETER")
        except Exception as e:
            logger.error(f"Error in MapSearchView: {str(e)}", exc_info=True)
            return self._error_response(
                "Search failed due to internal error",
                500,
                error_code="SEARCH_FAILED"
            )


class MapBoundsView(MapAPIView):
    """
    API endpoint for getting locations within specific bounds.

    GET /api/map/bounds/
    Parameters:
    - north, south, east, west: Bounding box coordinates (required)
    - types: Comma-separated location types
    - zoom: Zoom level
    """

    @method_decorator(cache_page(300))  # Cache for 5 minutes
    def get(self, request: HttpRequest) -> JsonResponse:
        """Get locations within specific geographic bounds."""
        try:
            # Parse required bounds
            bounds = self._parse_bounds(request)
            if not bounds:
                return self._error_response(
                    "Bounds parameters required: north, south, east, west", 400
                )

            # Parse optional filters
            location_types = None
            types_param = request.GET.get('types')
            if types_param:
                location_types = {
                    LocationType(t.strip()) for t in types_param.split(',')
                    if t.strip() in [lt.value for lt in LocationType]
                }

            zoom_level = self._parse_zoom_level(request)

            # Get locations within bounds
            response = unified_map_service.get_locations_by_bounds(
                north=bounds.north,
                south=bounds.south,
                east=bounds.east,
                west=bounds.west,
                location_types=location_types,
                zoom_level=zoom_level
            )

            return JsonResponse(response.to_dict())

        except ValidationError as e:
            return self._error_response(str(e), 400)
        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)


class MapStatsView(MapAPIView):
    """
    API endpoint for getting map service statistics and health information.

    GET /api/map/stats/
    """

    def get(self, request: HttpRequest) -> JsonResponse:
        """Get map service statistics and performance metrics."""
        try:
            stats = unified_map_service.get_service_stats()

            return JsonResponse({
                'status': 'success',
                'data': stats
            })

        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)


class MapCacheView(MapAPIView):
    """
    API endpoint for cache management (admin only).

    DELETE /api/map/cache/
    POST /api/map/cache/invalidate/
    """

    def delete(self, request: HttpRequest) -> JsonResponse:
        """Clear all map cache (admin only)."""
        # TODO: Add admin permission check
        try:
            unified_map_service.invalidate_cache()

            return JsonResponse({
                'status': 'success',
                'message': 'Map cache cleared successfully'
            })

        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)

    def post(self, request: HttpRequest) -> JsonResponse:
        """Invalidate specific cache entries."""
        # TODO: Add admin permission check
        try:
            data = json.loads(request.body)

            location_type = data.get('location_type')
            location_id = data.get('location_id')
            bounds_data = data.get('bounds')

            bounds = None
            if bounds_data:
                bounds = GeoBounds(**bounds_data)

            unified_map_service.invalidate_cache(
                location_type=location_type,
                location_id=location_id,
                bounds=bounds
            )

            return JsonResponse({
                'status': 'success',
                'message': 'Cache invalidated successfully'
            })

        except (json.JSONDecodeError, TypeError, ValueError) as e:
            return self._error_response(f"Invalid request data: {str(e)}", 400)
        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)
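
# Usage sketch (editorial addition): targeted invalidation via the POST endpoint,
# assuming an /api/map/ mount; note the admin permission check is still a TODO above.
def _example_invalidate_park_cache():
    import json as _json
    from django.test import Client
    body = _json.dumps({'location_type': 'park', 'location_id': 42})
    return Client().post('/api/map/cache/invalidate/',
                         data=body, content_type='application/json')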
400	core/views/maps.py	Normal file
@@ -0,0 +1,400 @@
"""
HTML views for the unified map service.
Provides web interfaces for map functionality with HTMX integration.
"""

import json
from typing import Dict, Any, Optional, Set
from django.shortcuts import render, get_object_or_404
from django.http import JsonResponse, HttpRequest, HttpResponse
from django.views.generic import TemplateView, View
from django.views.decorators.http import require_http_methods
from django.utils.decorators import method_decorator
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.paginator import Paginator
from django.core.exceptions import ValidationError
from django.db.models import Q

from ..services.map_service import unified_map_service
from ..services.data_structures import GeoBounds, MapFilters, LocationType


class MapViewMixin:
    """Mixin providing common functionality for map views."""

    def get_map_context(self, request: HttpRequest) -> Dict[str, Any]:
        """Get common context data for map views."""
        return {
            'map_api_urls': {
                'locations': '/api/map/locations/',
                'search': '/api/map/search/',
                'bounds': '/api/map/bounds/',
                'location_detail': '/api/map/locations/',
            },
            'location_types': [lt.value for lt in LocationType],
            'default_zoom': 10,
            'enable_clustering': True,
            'enable_search': True,
        }

    def parse_location_types(self, request: HttpRequest) -> Optional[Set[LocationType]]:
        """Parse location types from request parameters."""
        types_param = request.GET.get('types')
        if types_param:
            try:
                return {
                    LocationType(t.strip()) for t in types_param.split(',')
                    if t.strip() in [lt.value for lt in LocationType]
                }
            except ValueError:
                return None
        return None


class UniversalMapView(MapViewMixin, TemplateView):
    """
    Main universal map view showing all location types.

    URL: /maps/
    """
    template_name = 'maps/universal_map.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Additional context for universal map
        context.update({
            'page_title': 'Interactive Map - All Locations',
            'map_type': 'universal',
            'show_all_types': True,
            'initial_location_types': [lt.value for lt in LocationType],
            'filters_enabled': True,
        })

        # Handle initial bounds from query parameters
        if all(param in self.request.GET for param in ['north', 'south', 'east', 'west']):
            try:
                context['initial_bounds'] = {
                    'north': float(self.request.GET['north']),
                    'south': float(self.request.GET['south']),
                    'east': float(self.request.GET['east']),
                    'west': float(self.request.GET['west']),
                }
            except (ValueError, TypeError):
                pass

        return context


class ParkMapView(MapViewMixin, TemplateView):
    """
    Map view focused specifically on parks.

    URL: /maps/parks/
    """
    template_name = 'maps/park_map.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Park-specific context
        context.update({
            'page_title': 'Theme Parks Map',
            'map_type': 'parks',
            'show_all_types': False,
            'initial_location_types': [LocationType.PARK.value],
            'filters_enabled': True,
            'park_specific_filters': True,
        })

        return context


class NearbyLocationsView(MapViewMixin, TemplateView):
|
||||
"""
|
||||
View for showing locations near a specific point.
|
||||
|
||||
URL: /maps/nearby/
|
||||
"""
|
||||
template_name = 'maps/nearby_locations.html'
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super().get_context_data(**kwargs)
|
||||
context.update(self.get_map_context(self.request))
|
||||
|
||||
# Parse coordinates from query parameters
|
||||
lat = self.request.GET.get('lat')
|
||||
lng = self.request.GET.get('lng')
|
||||
radius = self.request.GET.get('radius', '50') # Default 50km radius
|
||||
|
||||
if lat and lng:
|
||||
try:
|
||||
center_lat = float(lat)
|
||||
center_lng = float(lng)
|
||||
search_radius = min(200, max(1, float(radius))) # Clamp between 1-200km
|
||||
|
||||
context.update({
|
||||
'page_title': f'Locations Near {center_lat:.4f}, {center_lng:.4f}',
|
||||
'map_type': 'nearby',
|
||||
'center_coordinates': {'lat': center_lat, 'lng': center_lng},
|
||||
'search_radius': search_radius,
|
||||
'show_radius_circle': True,
|
||||
})
|
||||
except (ValueError, TypeError):
|
||||
context['error'] = 'Invalid coordinates provided'
|
||||
else:
|
||||
context.update({
|
||||
'page_title': 'Nearby Locations',
|
||||
'map_type': 'nearby',
|
||||
'prompt_for_location': True,
|
||||
})
|
||||
|
||||
return context
|
||||
|
||||
|
||||
class LocationFilterView(MapViewMixin, View):
|
||||
"""
|
||||
HTMX endpoint for updating map when filters change.
|
||||
|
||||
URL: /maps/htmx/filter/
|
||||
"""
|
||||
|
||||
def get(self, request: HttpRequest) -> HttpResponse:
|
||||
"""Return filtered location data for HTMX updates."""
|
||||
try:
|
||||
# Parse filter parameters
|
||||
location_types = self.parse_location_types(request)
|
||||
search_query = request.GET.get('q', '').strip()
|
||||
country = request.GET.get('country', '').strip()
|
||||
state = request.GET.get('state', '').strip()
|
||||
|
||||
# Create filters
|
||||
filters = None
|
||||
if any([location_types, search_query, country, state]):
|
||||
filters = MapFilters(
|
||||
location_types=location_types,
|
||||
search_query=search_query or None,
|
||||
country=country or None,
|
||||
state=state or None,
|
||||
has_coordinates=True
|
||||
)
|
||||
|
||||
# Get filtered locations
|
||||
map_response = unified_map_service.get_map_data(
|
||||
filters=filters,
|
||||
zoom_level=int(request.GET.get('zoom', '10')),
|
||||
cluster=request.GET.get('cluster', 'true').lower() == 'true'
|
||||
)
|
||||
|
||||
# Return JSON response for HTMX
|
||||
return JsonResponse({
|
||||
'status': 'success',
|
||||
'data': map_response.to_dict(),
|
||||
'filters_applied': map_response.filters_applied
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return JsonResponse({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}, status=400)
|
||||
|
||||
|
||||
class LocationSearchView(MapViewMixin, View):
|
||||
"""
|
||||
HTMX endpoint for real-time location search.
|
||||
|
||||
URL: /maps/htmx/search/
|
||||
"""
|
||||
|
||||
def get(self, request: HttpRequest) -> HttpResponse:
|
||||
"""Return search results for HTMX updates."""
|
||||
query = request.GET.get('q', '').strip()
|
||||
|
||||
if not query or len(query) < 3:
|
||||
return render(request, 'maps/partials/search_results.html', {
|
||||
'results': [],
|
||||
'query': query,
|
||||
'message': 'Enter at least 3 characters to search'
|
||||
})
|
||||
|
||||
try:
|
||||
# Parse optional location types
|
||||
location_types = self.parse_location_types(request)
|
||||
limit = min(20, max(5, int(request.GET.get('limit', '10'))))
|
||||
|
||||
# Perform search
|
||||
results = unified_map_service.search_locations(
|
||||
query=query,
|
||||
location_types=location_types,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return render(request, 'maps/partials/search_results.html', {
|
||||
'results': results,
|
||||
'query': query,
|
||||
'count': len(results)
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return render(request, 'maps/partials/search_results.html', {
|
||||
'results': [],
|
||||
'query': query,
|
||||
'error': str(e)
|
||||
})
|
||||
|
||||
|
||||
class MapBoundsUpdateView(MapViewMixin, View):
|
||||
"""
|
||||
HTMX endpoint for updating locations when map bounds change.
|
||||
|
||||
URL: /maps/htmx/bounds/
|
||||
"""
|
||||
|
||||
def post(self, request: HttpRequest) -> HttpResponse:
|
||||
"""Update map data when bounds change."""
|
||||
try:
|
||||
data = json.loads(request.body)
|
||||
|
||||
# Parse bounds
|
||||
bounds = GeoBounds(
|
||||
north=float(data['north']),
|
||||
south=float(data['south']),
|
||||
east=float(data['east']),
|
||||
west=float(data['west'])
|
||||
)
|
||||
|
||||
# Parse additional parameters
|
||||
zoom_level = int(data.get('zoom', 10))
|
||||
location_types = None
|
||||
if 'types' in data:
|
||||
location_types = {
|
||||
LocationType(t) for t in data['types']
|
||||
if t in [lt.value for lt in LocationType]
|
||||
}
|
||||
|
||||
# Create filters if needed
|
||||
filters = None
|
||||
if location_types:
|
||||
filters = MapFilters(location_types=location_types)
|
||||
|
||||
# Get updated map data
|
||||
map_response = unified_map_service.get_locations_by_bounds(
|
||||
north=bounds.north,
|
||||
south=bounds.south,
|
||||
east=bounds.east,
|
||||
west=bounds.west,
|
||||
location_types=location_types,
|
||||
zoom_level=zoom_level
|
||||
)
|
||||
|
||||
return JsonResponse({
|
||||
'status': 'success',
|
||||
'data': map_response.to_dict()
|
||||
})
|
||||
|
||||
except (json.JSONDecodeError, ValueError, KeyError) as e:
|
||||
return JsonResponse({
|
||||
'status': 'error',
|
||||
'message': f'Invalid request data: {str(e)}'
|
||||
}, status=400)
|
||||
except Exception as e:
|
||||
return JsonResponse({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}, status=500)
|
||||
|
||||
|
||||
class LocationDetailModalView(MapViewMixin, View):
|
||||
"""
|
||||
HTMX endpoint for showing location details in modal.
|
||||
|
||||
URL: /maps/htmx/location/<type>/<id>/
|
||||
"""
|
||||
|
||||
def get(self, request: HttpRequest, location_type: str, location_id: int) -> HttpResponse:
|
||||
"""Return location detail modal content."""
|
||||
try:
|
||||
# Validate location type
|
||||
if location_type not in [lt.value for lt in LocationType]:
|
||||
return render(request, 'maps/partials/location_modal.html', {
|
||||
'error': f'Invalid location type: {location_type}'
|
||||
})
|
||||
|
||||
# Get location details
|
||||
location = unified_map_service.get_location_details(location_type, location_id)
|
||||
|
||||
if not location:
|
||||
return render(request, 'maps/partials/location_modal.html', {
|
||||
'error': 'Location not found'
|
||||
})
|
||||
|
||||
return render(request, 'maps/partials/location_modal.html', {
|
||||
'location': location,
|
||||
'location_type': location_type
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return render(request, 'maps/partials/location_modal.html', {
|
||||
'error': str(e)
|
||||
})
|
||||
|
||||
|
||||
class LocationListView(MapViewMixin, TemplateView):
|
||||
"""
|
||||
View for listing locations with pagination (non-map view).
|
||||
|
||||
URL: /maps/list/
|
||||
"""
|
||||
template_name = 'maps/location_list.html'
|
||||
paginate_by = 20
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
context = super().get_context_data(**kwargs)
|
||||
|
||||
# Parse filters
|
||||
location_types = self.parse_location_types(self.request)
|
||||
search_query = self.request.GET.get('q', '').strip()
|
||||
country = self.request.GET.get('country', '').strip()
|
||||
state = self.request.GET.get('state', '').strip()
|
||||
|
||||
# Create filters
|
||||
filters = None
|
||||
if any([location_types, search_query, country, state]):
|
||||
filters = MapFilters(
|
||||
location_types=location_types,
|
||||
search_query=search_query or None,
|
||||
country=country or None,
|
||||
state=state or None,
|
||||
has_coordinates=True
|
||||
)
|
||||
|
||||
# Get locations without clustering
|
||||
map_response = unified_map_service.get_map_data(
|
||||
filters=filters,
|
||||
cluster=False,
|
||||
use_cache=True
|
||||
)
|
||||
|
||||
# Paginate results
|
||||
paginator = Paginator(map_response.locations, self.paginate_by)
|
||||
page_number = self.request.GET.get('page')
|
||||
page_obj = paginator.get_page(page_number)
|
||||
|
||||
context.update({
|
||||
'page_title': 'All Locations',
|
||||
'locations': page_obj,
|
||||
'total_count': map_response.total_count,
|
||||
'applied_filters': filters,
|
||||
'location_types': [lt.value for lt in LocationType],
|
||||
'current_filters': {
|
||||
'types': self.request.GET.getlist('types'),
|
||||
'q': search_query,
|
||||
'country': country,
|
||||
'state': state,
|
||||
}
|
||||
})
|
||||
|
||||
return context
|
||||
164
core/views/search.py
Normal file
@@ -0,0 +1,164 @@
from django.views.generic import TemplateView
from django.http import JsonResponse
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import Distance
from parks.models import Park
from parks.filters import ParkFilter
from core.services.location_search import location_search_service, LocationSearchFilters
from core.forms.search import LocationSearchForm


class AdaptiveSearchView(TemplateView):
    template_name = "core/search/results.html"

    def get_queryset(self):
        """
        Get the base queryset, optimized with select_related and prefetch_related
        """
        return Park.objects.select_related('operator', 'property_owner').prefetch_related(
            'location',
            'photos'
        ).all()

    def get_filterset(self):
        """
        Get the filterset instance
        """
        return ParkFilter(self.request.GET, queryset=self.get_queryset())

    def get_context_data(self, **kwargs):
        """
        Add filtered results and filter form to context
        """
        context = super().get_context_data(**kwargs)
        filterset = self.get_filterset()

        # Check if location-based search is being used
        location_search = self.request.GET.get('location_search', '').strip()
        near_location = self.request.GET.get('near_location', '').strip()

        # Add location search context
        context.update({
            'results': filterset.qs,
            'filters': filterset,
            'applied_filters': bool(self.request.GET),  # Check if any filters are applied
            'is_location_search': bool(location_search or near_location),
            'location_search_query': location_search or near_location,
        })

        return context


class FilterFormView(TemplateView):
    """
    View for rendering just the filter form for HTMX updates
    """
    template_name = "core/search/filters.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        filterset = ParkFilter(self.request.GET, queryset=Park.objects.all())
        context['filters'] = filterset
        return context


class LocationSearchView(TemplateView):
    """
    Enhanced search view with comprehensive location search capabilities.
    """
    template_name = "core/search/location_results.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Build search filters from request parameters
        filters = self._build_search_filters()

        # Perform search
        results = location_search_service.search(filters)

        # Group results by type for better presentation
        grouped_results = {
            'parks': [r for r in results if r.content_type == 'park'],
            'rides': [r for r in results if r.content_type == 'ride'],
            'companies': [r for r in results if r.content_type == 'company'],
        }

        context.update({
            'results': results,
            'grouped_results': grouped_results,
            'total_results': len(results),
            'search_filters': filters,
            'has_location_filter': bool(filters.location_point),
            'search_form': LocationSearchForm(self.request.GET),
        })

        return context

    def _build_search_filters(self) -> LocationSearchFilters:
        """Build LocationSearchFilters from request parameters."""
        form = LocationSearchForm(self.request.GET)
        form.is_valid()  # Populate cleaned_data

        # Parse location coordinates if provided
        location_point = None
        lat = form.cleaned_data.get('lat')
        lng = form.cleaned_data.get('lng')
        if lat and lng:
            try:
                location_point = Point(float(lng), float(lat), srid=4326)
            except (ValueError, TypeError):
                location_point = None

        # Parse location types
        location_types = set()
        if form.cleaned_data.get('search_parks'):
            location_types.add('park')
        if form.cleaned_data.get('search_rides'):
            location_types.add('ride')
        if form.cleaned_data.get('search_companies'):
            location_types.add('company')

        # If no specific types selected, search all
        if not location_types:
            location_types = {'park', 'ride', 'company'}

        # Parse radius
        radius_km = None
        radius_str = form.cleaned_data.get('radius_km', '').strip()
        if radius_str:
            try:
                radius_km = float(radius_str)
                radius_km = max(1, min(500, radius_km))  # Clamp between 1-500km
            except (ValueError, TypeError):
                radius_km = None

        return LocationSearchFilters(
            search_query=form.cleaned_data.get('q', '').strip() or None,
            location_point=location_point,
            radius_km=radius_km,
            location_types=location_types if location_types else None,
            country=form.cleaned_data.get('country', '').strip() or None,
            state=form.cleaned_data.get('state', '').strip() or None,
            city=form.cleaned_data.get('city', '').strip() or None,
            park_status=self.request.GET.getlist('park_status') or None,
            include_distance=True,
            max_results=int(self.request.GET.get('limit', 100))
        )


class LocationSuggestionsView(TemplateView):
    """
    AJAX endpoint for location search suggestions.
    """

    def get(self, request, *args, **kwargs):
        query = request.GET.get('q', '').strip()
        limit = int(request.GET.get('limit', 10))

        if len(query) < 2:
            return JsonResponse({'suggestions': []})

        try:
            suggestions = location_search_service.suggest_locations(query, limit)
            return JsonResponse({'suggestions': suggestions})
        except Exception as e:
            return JsonResponse({'error': str(e)}, status=500)
91
debug-setup-automation.sh
Executable file
@@ -0,0 +1,91 @@
#!/bin/bash
#
# Debug version of setup-automation.sh to identify non-interactive mode failures
#

set -e

# Enable verbose debugging
set -x

echo "DEBUG: Script started at $(date)"
echo "DEBUG: Arguments received: $*"
echo "DEBUG: Total argument count: $#"

# Test the exact command that's failing
echo "DEBUG: Testing setup-automation.sh with --non-interactive flag"
echo "DEBUG: NON_INTERACTIVE environment variable before: ${NON_INTERACTIVE:-unset}"

# Simulate the command line parsing logic from setup-automation.sh
echo "DEBUG: Parsing command line arguments..."

command="${1:-setup}"
echo "DEBUG: Initial command: $command"

# Parse options (mimicking the main script logic)
while [[ $# -gt 0 ]]; do
    echo "DEBUG: Processing argument: $1"
    case "$1" in
        --non-interactive)
            export NON_INTERACTIVE="true"
            echo "DEBUG: NON_INTERACTIVE flag set to: $NON_INTERACTIVE"
            shift
            ;;
        --force-rebuild)
            export FORCE_REBUILD="true"
            echo "DEBUG: FORCE_REBUILD flag set to: $FORCE_REBUILD"
            shift
            ;;
        --debug)
            export CONFIG_DEBUG="true"
            echo "DEBUG: CONFIG_DEBUG flag set to: $CONFIG_DEBUG"
            shift
            ;;
        -h|--help)
            echo "DEBUG: Help requested"
            exit 0
            ;;
        -*)
            echo "DEBUG: Unknown option: $1"
            exit 1
            ;;
        *)
            echo "DEBUG: Breaking on non-option argument: $1"
            break
            ;;
    esac
done

# Update command after option parsing (this might be the bug)
command="${1:-setup}"
echo "DEBUG: Final command after parsing: $command"
echo "DEBUG: Remaining arguments: $*"

echo "DEBUG: NON_INTERACTIVE environment variable after parsing: ${NON_INTERACTIVE:-unset}"

# Test the specific condition that shows the interactive banner
echo "DEBUG: Testing banner condition..."
if [[ "$NON_INTERACTIVE" != "true" ]]; then
    echo "DEBUG: BANNER WOULD BE SHOWN - this is the problem!"
    echo "DEBUG: NON_INTERACTIVE value: '$NON_INTERACTIVE'"
    echo "DEBUG: Comparison result: '$NON_INTERACTIVE' != 'true'"
else
    echo "DEBUG: Banner would be suppressed (correct behavior)"
fi

# Test what happens when we call the actual script
echo "DEBUG: Now calling actual setup-automation.sh with timeout..."
echo "DEBUG: Command will be: timeout 10s bash scripts/vm/setup-automation.sh setup --non-interactive"

# Add timeout to prevent hanging
if timeout 10s bash scripts/vm/setup-automation.sh setup --non-interactive 2>&1; then
    echo "DEBUG: Script completed successfully"
else
    exit_code=$?
    echo "DEBUG: Script failed with exit code: $exit_code"
    if [[ $exit_code -eq 124 ]]; then
        echo "DEBUG: Script timed out (likely hanging on interactive prompt)"
    fi
fi

echo "DEBUG: Debug script completed at $(date)"
318
demo_roadtrip_usage.py
Normal file
@@ -0,0 +1,318 @@
"""
Demonstration script showing practical usage of the RoadTripService.

This script demonstrates real-world scenarios for using the OSM Road Trip Service
in the ThrillWiki application.
"""

import os
import sys
import django

# Setup Django
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'thrillwiki.settings')
django.setup()

from parks.services import RoadTripService
from parks.services.roadtrip import Coordinates
from parks.models import Park


def demo_florida_theme_park_trip():
    """
    Demonstrate planning a Florida theme park road trip.
    """
    print("🏖️ Florida Theme Park Road Trip Planner")
    print("=" * 50)

    service = RoadTripService()

    # Define Florida theme parks with addresses
    florida_parks = [
        ("Magic Kingdom", "Magic Kingdom Dr, Orlando, FL 32830"),
        ("Universal Studios Florida", "6000 Universal Blvd, Orlando, FL 32819"),
        ("SeaWorld Orlando", "7007 Sea World Dr, Orlando, FL 32821"),
        ("Busch Gardens Tampa", "10165 McKinley Dr, Tampa, FL 33612"),
    ]

    print("Planning trip for these Florida parks:")
    park_coords = {}

    for name, address in florida_parks:
        print(f"\n📍 Geocoding {name}...")
        coords = service.geocode_address(address)
        if coords:
            park_coords[name] = coords
            print(f" ✅ Located at {coords.latitude:.4f}, {coords.longitude:.4f}")
        else:
            print(f" ❌ Could not geocode {address}")

    if len(park_coords) < 2:
        print("❌ Need at least 2 parks to plan a trip")
        return

    # Calculate distances between all parks
    print(f"\n🗺️ Distance Matrix:")
    park_names = list(park_coords.keys())

    for i, park1 in enumerate(park_names):
        for j, park2 in enumerate(park_names):
            if i < j:  # Only calculate each pair once
                route = service.calculate_route(park_coords[park1], park_coords[park2])
                if route:
                    print(f" {park1} ↔ {park2}")
                    print(f"    {route.formatted_distance}, {route.formatted_duration}")

    # Find central park for radiating searches
    print(f"\n🎢 Parks within 100km of Magic Kingdom:")
    magic_kingdom_coords = park_coords.get("Magic Kingdom")
    if magic_kingdom_coords:
        for name, coords in park_coords.items():
            if name != "Magic Kingdom":
                route = service.calculate_route(magic_kingdom_coords, coords)
                if route:
                    print(f" {name}: {route.formatted_distance} ({route.formatted_duration})")


def demo_cross_country_road_trip():
    """
    Demonstrate planning a cross-country theme park road trip.
    """
    print("\n\n🇺🇸 Cross-Country Theme Park Road Trip")
    print("=" * 50)

    service = RoadTripService()

    # Major theme parks across the US
    major_parks = [
        ("Disneyland", "1313 Disneyland Dr, Anaheim, CA 92802"),
        ("Cedar Point", "1 Cedar Point Dr, Sandusky, OH 44870"),
        ("Six Flags Magic Mountain", "26101 Magic Mountain Pkwy, Valencia, CA 91355"),
        ("Walt Disney World", "Walt Disney World Resort, Orlando, FL 32830"),
    ]

    print("Geocoding major US theme parks:")
    park_coords = {}

    for name, address in major_parks:
        print(f"\n📍 {name}...")
        coords = service.geocode_address(address)
        if coords:
            park_coords[name] = coords
            print(f" ✅ {coords.latitude:.4f}, {coords.longitude:.4f}")

    if len(park_coords) >= 3:
        # Calculate an optimized route if we have DB parks
        print(f"\n🛣️ Optimized Route Planning:")
        print("Note: This would work with actual Park objects from the database")

        # Show distances for a potential route
        route_order = ["Disneyland", "Six Flags Magic Mountain", "Cedar Point", "Walt Disney World"]
        total_distance = 0
        total_time = 0

        for i in range(len(route_order) - 1):
            from_park = route_order[i]
            to_park = route_order[i + 1]

            if from_park in park_coords and to_park in park_coords:
                route = service.calculate_route(park_coords[from_park], park_coords[to_park])
                if route:
                    total_distance += route.distance_km
                    total_time += route.duration_minutes
                    print(f" {i+1}. {from_park} → {to_park}")
                    print(f"    {route.formatted_distance}, {route.formatted_duration}")

        print(f"\n📊 Trip Summary:")
        print(f" Total Distance: {total_distance:.1f}km")
        print(f" Total Driving Time: {total_time//60}h {total_time%60}min")
        print(f" Average Distance per Leg: {total_distance/3:.1f}km")


def demo_database_integration():
    """
    Demonstrate working with actual parks from the database.
    """
    print("\n\n🗄️ Database Integration Demo")
    print("=" * 50)

    service = RoadTripService()

    # Get parks that have location data
    parks_with_location = Park.objects.filter(
        location__point__isnull=False
    ).select_related('location')[:5]

    if not parks_with_location:
        print("❌ No parks with location data found in database")
        return

    print(f"Found {len(parks_with_location)} parks with location data:")

    for park in parks_with_location:
        coords = park.coordinates
        if coords:
            print(f" 🎢 {park.name}: {coords[0]:.4f}, {coords[1]:.4f}")

    # Demonstrate nearby park search
    if len(parks_with_location) >= 1:
        center_park = parks_with_location[0]
        print(f"\n🔍 Finding parks within 500km of {center_park.name}:")

        nearby_parks = service.get_park_distances(center_park, radius_km=500)

        if nearby_parks:
            print(f" Found {len(nearby_parks)} nearby parks:")
            for result in nearby_parks[:3]:  # Show top 3
                park = result['park']
                print(f" 📍 {park.name}: {result['formatted_distance']} ({result['formatted_duration']})")
        else:
            print(" No nearby parks found (may need larger radius)")

    # Demonstrate multi-park trip planning
    if len(parks_with_location) >= 3:
        selected_parks = list(parks_with_location)[:3]
        print(f"\n🗺️ Planning optimized trip for 3 parks:")

        for park in selected_parks:
            print(f" - {park.name}")

        trip = service.create_multi_park_trip(selected_parks)

        if trip:
            print(f"\n✅ Optimized Route:")
            print(f" Total Distance: {trip.formatted_total_distance}")
            print(f" Total Duration: {trip.formatted_total_duration}")
            print(f" Route:")

            for i, leg in enumerate(trip.legs, 1):
                print(f" {i}. {leg.from_park.name} → {leg.to_park.name}")
                print(f"    {leg.route.formatted_distance}, {leg.route.formatted_duration}")
        else:
            print(" ❌ Could not optimize trip route")


def demo_geocoding_fallback():
    """
    Demonstrate geocoding parks that don't have coordinates.
    """
    print("\n\n🌍 Geocoding Demo")
    print("=" * 50)

    service = RoadTripService()

    # Get parks without location data
    parks_without_coords = Park.objects.filter(
        location__point__isnull=True
    ).select_related('location')[:3]

    if not parks_without_coords:
        print("✅ All parks already have coordinates")
        return

    print(f"Found {len(parks_without_coords)} parks without coordinates:")

    for park in parks_without_coords:
        print(f"\n🎢 {park.name}")

        if hasattr(park, 'location') and park.location:
            location = park.location
            address_parts = [
                park.name,
                location.street_address,
                location.city,
                location.state,
                location.country
            ]
            address = ", ".join(part for part in address_parts if part)
            print(f" Address: {address}")

            # Try to geocode
            success = service.geocode_park_if_needed(park)
            if success:
                coords = park.coordinates
                print(f" ✅ Geocoded to: {coords[0]:.4f}, {coords[1]:.4f}")
            else:
                print(" ❌ Geocoding failed")
        else:
            print(" ❌ No location data available")


def demo_cache_performance():
    """
    Demonstrate caching performance benefits.
    """
    print("\n\n⚡ Cache Performance Demo")
    print("=" * 50)

    service = RoadTripService()

    import time

    # Test address for geocoding
    test_address = "Disneyland, Anaheim, CA"

    print(f"Testing cache performance with: {test_address}")

    # First request (cache miss)
    print(f"\n1️⃣ First request (cache miss):")
    start_time = time.time()
    coords1 = service.geocode_address(test_address)
    first_duration = time.time() - start_time

    if coords1:
        print(f" ✅ Result: {coords1.latitude:.4f}, {coords1.longitude:.4f}")
        print(f" ⏱️ Duration: {first_duration:.2f} seconds")

    # Second request (cache hit)
    print(f"\n2️⃣ Second request (cache hit):")
    start_time = time.time()
    coords2 = service.geocode_address(test_address)
    second_duration = time.time() - start_time

    if coords2:
        print(f" ✅ Result: {coords2.latitude:.4f}, {coords2.longitude:.4f}")
        print(f" ⏱️ Duration: {second_duration:.2f} seconds")

    # Only compare when both lookups succeeded (avoids AttributeError when geocoding fails)
    if coords1 and coords2:
        if first_duration > second_duration:
            speedup = first_duration / second_duration
            print(f" 🚀 Cache speedup: {speedup:.1f}x faster")

        if coords1.latitude == coords2.latitude and coords1.longitude == coords2.longitude:
            print(" ✅ Results identical (cache working)")


def main():
    """
    Run all demonstration scenarios.
    """
    print("🎢 ThrillWiki Road Trip Service Demo")
    print("This demo shows practical usage scenarios for the OSM Road Trip Service")

    try:
        demo_florida_theme_park_trip()
        demo_cross_country_road_trip()
        demo_database_integration()
        demo_geocoding_fallback()
        demo_cache_performance()

        print("\n" + "=" * 50)
        print("🎉 Demo completed successfully!")
        print("\nThe Road Trip Service is ready for integration into ThrillWiki!")
        print("\nKey Features Demonstrated:")
        print("✅ Geocoding theme park addresses")
        print("✅ Route calculation with distance/time")
        print("✅ Multi-park trip optimization")
        print("✅ Database integration with Park models")
        print("✅ Caching for performance")
        print("✅ Rate limiting for OSM compliance")
        print("✅ Error handling and fallbacks")

    except Exception as e:
        print(f"\n❌ Demo failed with error: {e}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    main()
@@ -1,13 +0,0 @@
from django.contrib import admin
from django.utils.text import slugify
from .models import Designer

@admin.register(Designer)
class DesignerAdmin(admin.ModelAdmin):
    list_display = ('name', 'headquarters', 'founded_date', 'website')
    search_fields = ('name', 'headquarters')
    prepopulated_fields = {'slug': ('name',)}
    readonly_fields = ('created_at', 'updated_at')

    def get_queryset(self, request):
        return super().get_queryset(request).select_related()
@@ -1,6 +0,0 @@
from django.apps import AppConfig


class DesignersConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "designers"
@@ -1,105 +0,0 @@
# Generated by Django 5.1.4 on 2025-02-10 01:10

import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("pghistory", "0006_delete_aggregateevent"),
    ]

    operations = [
        migrations.CreateModel(
            name="Designer",
            fields=[
                ("id", models.BigAutoField(primary_key=True, serialize=False)),
                ("name", models.CharField(max_length=255)),
                ("slug", models.SlugField(max_length=255, unique=True)),
                ("description", models.TextField(blank=True)),
                ("website", models.URLField(blank=True)),
                ("founded_date", models.DateField(blank=True, null=True)),
                ("headquarters", models.CharField(blank=True, max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
            ],
            options={
                "ordering": ["name"],
            },
        ),
        migrations.CreateModel(
            name="DesignerEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                ("name", models.CharField(max_length=255)),
                ("slug", models.SlugField(db_index=False, max_length=255)),
                ("description", models.TextField(blank=True)),
                ("website", models.URLField(blank=True)),
                ("founded_date", models.DateField(blank=True, null=True)),
                ("headquarters", models.CharField(blank=True, max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
            ],
            options={
                "abstract": False,
            },
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="designer",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "designers_designerevent" ("created_at", "description", "founded_date", "headquarters", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_date", NEW."headquarters", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_9be65",
                    table="designers_designer",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="designer",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "designers_designerevent" ("created_at", "description", "founded_date", "headquarters", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_date", NEW."headquarters", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_b5f91",
                    table="designers_designer",
                    when="AFTER",
                ),
            ),
        ),
        migrations.AddField(
            model_name="designerevent",
            name="pgh_context",
            field=models.ForeignKey(
                db_constraint=False,
                null=True,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                to="pghistory.context",
            ),
        ),
        migrations.AddField(
            model_name="designerevent",
            name="pgh_obj",
            field=models.ForeignKey(
                db_constraint=False,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="events",
                to="designers.designer",
            ),
        ),
    ]
@@ -1,20 +0,0 @@
# Generated by Django 5.1.4 on 2025-02-21 17:55

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("designers", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="designer",
            name="id",
            field=models.BigAutoField(
                auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
            ),
        ),
    ]
@@ -1,43 +0,0 @@
from django.db import models
from django.utils.text import slugify
from history_tracking.models import TrackedModel
import pghistory

@pghistory.track()
class Designer(TrackedModel):
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True)
    description = models.TextField(blank=True)
    website = models.URLField(blank=True)
    founded_date = models.DateField(null=True, blank=True)
    headquarters = models.CharField(max_length=255, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ['name']

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        if not self.slug:
            self.slug = slugify(self.name)
        super().save(*args, **kwargs)

    @classmethod
    def get_by_slug(cls, slug):
        """Get designer by current or historical slug"""
        try:
            return cls.objects.get(slug=slug), False
        except cls.DoesNotExist:
            # Check historical slugs using pghistory
            history_model = cls.get_history_model()
            history = (
                history_model.objects.filter(slug=slug)
                .order_by('-pgh_created_at')
                .first()
            )
            if history:
                return cls.objects.get(id=history.pgh_obj_id), True
            raise cls.DoesNotExist("No designer found with this slug")
@@ -1,3 +0,0 @@
from django.test import TestCase

# Create your tests here.
@@ -1,8 +0,0 @@
from django.urls import path
from . import views

app_name = 'designers'

urlpatterns = [
    path('<slug:slug>/', views.DesignerDetailView.as_view(), name='designer_detail'),
]
@@ -1,29 +0,0 @@
from django.views.generic import DetailView
from .models import Designer
from django.db.models import Count

class DesignerDetailView(DetailView):
    model = Designer
    template_name = 'designers/designer_detail.html'
    context_object_name = 'designer'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Get all rides by this designer
        context['rides'] = self.object.rides.select_related(
            'park',
            'manufacturer',
            'coaster_stats'
        ).order_by('-opening_date')

        # Get stats
        context['stats'] = {
            'total_rides': self.object.rides.count(),
            'total_parks': self.object.rides.values('park').distinct().count(),
            'total_coasters': self.object.rides.filter(category='RC').count(),
            'total_countries': self.object.rides.values(
                'park__location__country'
            ).distinct().count(),
        }

        return context
1752
docs/THRILLWIKI_PROJECT_DOCUMENTATION.md
Normal file
File diff suppressed because it is too large
387
docs/UNRAID_COMPLETE_AUTOMATION.md
Normal file
@@ -0,0 +1,387 @@
# ThrillWiki Complete Unraid Automation Guide

This guide provides **complete automation** for ThrillWiki deployment on Unraid, including VM creation, configuration, and CI/CD setup. Everything is automated with a single command.

## 🚀 One-Command Complete Setup

Run this single command to automate everything:

```bash
./scripts/unraid/setup-complete-automation.sh
```

This will:
1. ✅ Create and configure VM on Unraid
2. ✅ Install Ubuntu Server with all dependencies
3. ✅ Configure PostgreSQL database
4. ✅ Deploy ThrillWiki application
5. ✅ Set up systemd services
6. ✅ Configure SSH access
7. ✅ Set up webhook listener
8. ✅ Test the entire system

## System Architecture

```
GitHub Push → Webhook → Local Listener → SSH → Unraid VM → Deploy & Restart
```

## Prerequisites

### Local Machine
- Python 3.8+
- SSH client
- Internet connection

### Unraid Server
- Unraid 6.8+ with VM support enabled
- SSH access to Unraid server
- Sufficient resources (4GB RAM, 50GB disk minimum)
- Ubuntu Server 22.04 ISO in `/mnt/user/isos/`

## Automated Components

### 1. VM Manager (`scripts/unraid/vm-manager.py`)
- Creates VM with proper specifications
- Configures networking and storage
- Manages VM lifecycle (start/stop/status)
- Retrieves VM IP addresses

### 2. Complete Automation (`scripts/unraid/setup-complete-automation.sh`)
- Orchestrates entire setup process
- Handles SSH key generation and distribution
- Configures all services automatically
- Performs end-to-end testing

### 3. VM Configuration
- Ubuntu Server 22.04 LTS
- PostgreSQL database
- UV package manager
- Systemd services for ThrillWiki
- Nginx (optional)

## Step-by-Step Process

### Phase 1: Initial Setup
The automation script will prompt for:
- Unraid server IP address
- Unraid credentials
- VM specifications (memory, CPU, disk)
- GitHub repository URL
- Webhook secret

### Phase 2: SSH Key Setup
- Generates SSH keys for VM access
- Generates SSH keys for Unraid access
- Configures SSH client settings
- Tests connectivity

### Phase 3: VM Creation
- Creates VM XML configuration
- Creates virtual disk (QCOW2 format)
- Defines VM in libvirt
- Starts VM with Ubuntu installation

### Phase 4: VM Configuration
- Installs Ubuntu Server 22.04
- Configures user account with SSH keys
- Installs required packages:
  - Python 3.8+
  - UV package manager
  - PostgreSQL
  - Git
  - Build tools

### Phase 5: ThrillWiki Deployment
- Clones repository
- Installs Python dependencies with UV
- Creates database and user
- Runs initial migrations
- Configures systemd services
- Starts ThrillWiki service

### Phase 6: CI/CD Setup
- Configures webhook listener
- Tests deployment pipeline
- Verifies all services

## Configuration Files Generated

### `***REMOVED***.unraid`
```bash
UNRAID_HOST=192.168.1.100
UNRAID_USER=root
VM_NAME=thrillwiki-vm
VM_MEMORY=4096
VM_VCPUS=2
VM_DISK_SIZE=50
SSH_PUBLIC_KEY=ssh-rsa AAAAB3...
```

### `***REMOVED***.webhook`
```bash
WEBHOOK_PORT=9000
WEBHOOK_SECRET=your_secret
VM_HOST=192.168.1.101
VM_USER=ubuntu
VM_KEY_PATH=/home/user/.ssh/thrillwiki_vm
VM_PROJECT_PATH=/home/ubuntu/thrillwiki
REPO_URL=https://github.com/user/repo.git
DEPLOY_BRANCH=main
```

### SSH Configuration
```
Host thrillwiki-vm
    HostName 192.168.1.101
    User ubuntu
    IdentityFile ~/.ssh/thrillwiki_vm
    StrictHostKeyChecking no

Host unraid
    HostName 192.168.1.100
    User root
    IdentityFile ~/.ssh/unraid_access
    StrictHostKeyChecking no
```

## VM Specifications

### Default Configuration
- **OS**: Ubuntu Server 22.04 LTS
- **Memory**: 4GB RAM
- **vCPUs**: 2
- **Storage**: 50GB (expandable)
- **Network**: Bridge mode (br0)
- **Boot**: UEFI with OVMF

### Customizable Options
All specifications can be customized during setup:
- Memory allocation
- CPU cores
- Disk size
- VM name
- Network configuration

## Services Installed

### On VM
- **ThrillWiki Django App**: Port 8000
- **PostgreSQL Database**: Port 5432
- **SSH Server**: Port 22
- **Systemd Services**: Auto-start on boot

### On Local Machine
- **Webhook Listener**: Configurable port (default 9000)
- **SSH Client**: Configured for VM access

## Management Commands

### VM Management
```bash
# Check VM status
python3 scripts/unraid/vm-manager.py status

# Start VM
python3 scripts/unraid/vm-manager.py start

# Stop VM
python3 scripts/unraid/vm-manager.py stop

# Get VM IP
python3 scripts/unraid/vm-manager.py ip

# Complete VM setup
python3 scripts/unraid/vm-manager.py setup
```

### Service Management
```bash
# Connect to VM
ssh thrillwiki-vm

# Check ThrillWiki service
sudo systemctl status thrillwiki

# Restart service
sudo systemctl restart thrillwiki

# View logs
journalctl -u thrillwiki -f

# Manual deployment
cd thrillwiki && ./scripts/vm-deploy.sh
```

### Webhook Management
```bash
# Start webhook listener
./start-webhook.sh

# Or manually
source ***REMOVED***.webhook && python3 scripts/webhook-listener.py

# Test webhook
curl -X GET http://localhost:9000/health
```

## Automated Testing

The setup includes comprehensive testing:

### Connectivity Tests
- SSH access to Unraid server
- SSH access to VM
- Network connectivity

### Service Tests
- ThrillWiki application startup
- Database connectivity
- Web server response

### Deployment Tests
- Git repository access
- Deployment script execution
- Service restart verification

## Security Features

### SSH Security
- Dedicated SSH keys for each connection
- No password authentication
- Key-based access only

### Network Security
- VM isolated in bridge network
- Firewall rules (configurable)
- SSH key rotation support

### Service Security
- Non-root service execution
- Systemd security features
- Log rotation and monitoring

## Troubleshooting

### Common Issues

1. **VM Creation Fails**
   ```bash
   # Check Unraid VM support
   ssh unraid "virsh list --all"

   # Verify ISO exists
   ssh unraid "ls -la /mnt/user/isos/*.iso"
   ```

2. **VM Won't Start**
   ```bash
   # Check VM configuration
   python3 scripts/unraid/vm-manager.py status

   # Check Unraid logs
   ssh unraid "tail -f /var/log/libvirt/qemu/thrillwiki-vm.log"
   ```

3. **Can't Connect to VM**
   ```bash
   # Check VM IP
   python3 scripts/unraid/vm-manager.py ip

   # Test SSH key
   ssh -i ~/.ssh/thrillwiki_vm ubuntu@VM_IP
   ```

4. **Service Won't Start**
   ```bash
   # Check service logs
   ssh thrillwiki-vm "journalctl -u thrillwiki --no-pager"

   # Manual start
   ssh thrillwiki-vm "cd thrillwiki && ./scripts/ci-start.sh"
   ```

### Log Locations

- **Setup logs**: `logs/unraid-automation.log`
- **VM logs**: SSH to VM, then `journalctl -u thrillwiki`
- **Webhook logs**: `logs/webhook.log`
- **Deployment logs**: On VM at `~/thrillwiki/logs/deploy.log`

## Advanced Configuration

### Custom VM Specifications
Edit variables in the automation script:
```bash
VM_MEMORY=8192    # 8GB RAM
VM_VCPUS=4        # 4 CPU cores
VM_DISK_SIZE=100  # 100GB disk
```

### Network Configuration
For static IP assignment, modify the VM XML template in `vm-manager.py`.

### Storage Configuration
The automation uses QCOW2 format for efficient storage. For better performance, consider:
- Raw disk format
- NVMe storage on Unraid
- Dedicated SSD for VM

## Performance Optimization

### Recommended Settings
- **Memory**: 4GB minimum, 8GB recommended
- **CPU**: 2 cores minimum, 4 cores for production
- **Storage**: SSD recommended for database
- **Network**: 1Gbps for fast deployments

### Production Considerations
- Use dedicated hardware for database
- Configure backup strategies
- Monitor resource usage
- Set up log rotation

## Backup and Recovery

### Automated Backups
The deployment script automatically creates backups before each deployment in `~/thrillwiki/backups/`.

### VM Snapshots
```bash
# Create VM snapshot
ssh unraid "virsh snapshot-create-as thrillwiki-vm snapshot-name"

# List snapshots
ssh unraid "virsh snapshot-list thrillwiki-vm"

# Restore snapshot
ssh unraid "virsh snapshot-revert thrillwiki-vm snapshot-name"
```

### Database Backups
```bash
# Manual database backup
ssh thrillwiki-vm "pg_dump thrillwiki > backup.sql"

# Automated backup (add to cron)
ssh thrillwiki-vm "crontab -e"
# Add: 0 2 * * * pg_dump thrillwiki > /home/ubuntu/db-backup-$(date +\%Y\%m\%d).sql
```

## Monitoring

### Health Checks
The system includes built-in health checks; a minimal client-side sketch follows this list:
- VM status monitoring
- Service health verification
- Network connectivity tests
- Application response checks
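
The sketch below shows how such checks can be driven from the local machine. It is an illustration, not part of the shipped scripts: it assumes the `requests` package is installed, that the `/health` endpoints and `vm-manager.py` commands described above exist, and that the example hosts and ports match your setup.

```python
#!/usr/bin/env python3
"""Minimal external health check for a ThrillWiki deployment (illustrative sketch)."""
import subprocess

import requests  # assumed to be installed locally (pip install requests)

# Hypothetical endpoints reusing this guide's example addresses; adjust to your setup.
HTTP_CHECKS = {
    "app": "http://192.168.1.101:8000/health",  # ThrillWiki on the VM
    "webhook": "http://localhost:9000/health",  # local webhook listener
}


def http_ok(url: str) -> bool:
    """Return True if the endpoint answers 200 within 5 seconds."""
    try:
        return requests.get(url, timeout=5).status_code == 200
    except requests.RequestException:
        return False


def vm_ok() -> bool:
    """Ask vm-manager.py for VM status; assumes a non-zero exit code means unhealthy."""
    result = subprocess.run(
        ["python3", "scripts/unraid/vm-manager.py", "status"],
        capture_output=True,
    )
    return result.returncode == 0


if __name__ == "__main__":
    print(f"vm: {'ok' if vm_ok() else 'DOWN'}")
    for name, url in HTTP_CHECKS.items():
        print(f"{name}: {'ok' if http_ok(url) else 'DOWN'}")
```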

### Alerts (Optional)
Configure alerts for:
- Service failures
- Resource exhaustion
- Deployment failures
- Network issues

This complete automation provides a production-ready ThrillWiki deployment with minimal manual intervention. The entire process from VM creation to application deployment is handled automatically.
359
docs/VM_DEPLOYMENT_SETUP.md
Normal file
@@ -0,0 +1,359 @@
# ThrillWiki VM Deployment Setup Guide

This guide explains how to set up a local CI/CD system that automatically deploys ThrillWiki to a Linux VM when commits are pushed to GitHub.

## System Overview

The deployment system consists of three main components:

1. **Local CI Start Script** (`scripts/ci-start.sh`) - Starts the Django server locally
2. **GitHub Webhook Listener** (`scripts/webhook-listener.py`) - Listens for GitHub push events
3. **VM Deployment Script** (`scripts/vm-deploy.sh`) - Deploys code changes to the Linux VM

## Architecture Flow

```
GitHub Push → Webhook → Local Listener → SSH to VM → Deploy Script → Restart Server
```
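
At its core, the listener's deploy step is a single SSH invocation of `vm-deploy.sh` on the VM. The sketch below illustrates that hop under this guide's example paths; it is not the actual `scripts/webhook-listener.py` code, and the host, user, and key path are placeholders.

```python
import os
import subprocess

# Placeholder values taken from this guide's examples; replace with your own.
VM_HOST = "VM_IP_ADDRESS"
VM_USER = "ubuntu"
VM_KEY = os.path.expanduser("~/.ssh/thrillwiki_vm")
PROJECT_PATH = "/home/ubuntu/thrillwiki"


def trigger_deploy() -> bool:
    """SSH into the VM and run the deployment script; True on success."""
    result = subprocess.run(
        [
            "ssh", "-i", VM_KEY, f"{VM_USER}@{VM_HOST}",
            f"cd {PROJECT_PATH} && ./scripts/vm-deploy.sh",
        ],
        capture_output=True,
        text=True,
    )
    return result.returncode == 0
```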

## Prerequisites

### Local Machine (Webhook Listener Host)
- Python 3.8+
- SSH access to the Linux VM
- Git repository with webhook access

### Linux VM (Deployment Target)
- Ubuntu 20.04+ (recommended)
- Python 3.8+
- UV package manager
- Git
- PostgreSQL (if using database)
- SSH server running
- Sudo access for the deployment user

## Step 1: Linux VM Setup

### 1.1 Create Deployment User

```bash
# On the Linux VM
sudo adduser ubuntu
sudo usermod -aG sudo ubuntu
su - ubuntu
```

### 1.2 Install Required Software

```bash
# Update system
sudo apt update && sudo apt upgrade -y

# Install essential packages
sudo apt install -y git curl build-essential python3-pip python3-venv postgresql postgresql-contrib nginx

# Install UV package manager
curl -LsSf https://astral.sh/uv/install.sh | sh
source ~/.cargo/env
```

### 1.3 Set up SSH Keys

```bash
# Generate SSH key on local machine
ssh-keygen -t rsa -b 4096 -f ~/.ssh/thrillwiki_vm

# Copy public key to VM
ssh-copy-id -i ~/.ssh/thrillwiki_vm.pub ubuntu@VM_IP_ADDRESS
```

### 1.4 Clone Repository

```bash
# On the VM
cd /home/ubuntu
git clone https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git thrillwiki
cd thrillwiki
```

### 1.5 Install Dependencies

```bash
# Install Python dependencies
uv sync

# Create required directories
mkdir -p logs backups
```

## Step 2: Configure Services

### 2.1 Install Systemd Services

```bash
# Copy service files to systemd directory
sudo cp scripts/systemd/thrillwiki.service /etc/systemd/system/
sudo cp scripts/systemd/thrillwiki-webhook.service /etc/systemd/system/

# Edit service files to match your paths
sudo nano /etc/systemd/system/thrillwiki.service
sudo nano /etc/systemd/system/thrillwiki-webhook.service

# Reload systemd and enable services
sudo systemctl daemon-reload
sudo systemctl enable thrillwiki.service
sudo systemctl enable thrillwiki-webhook.service
```

### 2.2 Configure Environment Variables

Create `/home/ubuntu/thrillwiki/***REMOVED***`:

```bash
# Database configuration
DATABASE_URL=[DATABASE-URL-REMOVED]

# Django settings
DJANGO_SECRET_KEY=your_secret_key_here
DJANGO_DEBUG=False
DJANGO_ALLOWED_HOSTS=your_domain.com,VM_IP_ADDRESS

# Webhook configuration
WEBHOOK_SECRET=your_github_webhook_secret
WEBHOOK_PORT=9000
VM_HOST=localhost
VM_USER=ubuntu
VM_PROJECT_PATH=/home/ubuntu/thrillwiki
REPO_URL=https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git
```

## Step 3: Local Machine Setup

### 3.1 Configure Webhook Listener

Create a configuration file for the webhook listener:

```bash
# Create environment file
cat > ***REMOVED***.webhook << EOF
WEBHOOK_PORT=9000
WEBHOOK_SECRET=your_github_webhook_secret
VM_HOST=VM_IP_ADDRESS
VM_PORT=22
VM_USER=ubuntu
VM_KEY_PATH=/home/your_user/.ssh/thrillwiki_vm
VM_PROJECT_PATH=/home/ubuntu/thrillwiki
REPO_URL=https://github.com/YOUR_USERNAME/thrillwiki_django_no_react.git
DEPLOY_BRANCH=main
EOF
```

### 3.2 Set up GitHub Webhook

1. Go to your GitHub repository
2. Navigate to Settings → Webhooks
3. Click "Add webhook"
4. Configure (a signature-verification sketch follows this list):
   - **Payload URL**: `http://YOUR_PUBLIC_IP:9000/webhook`
   - **Content type**: `application/json`
   - **Secret**: Your webhook secret
   - **Events**: Select "Just the push event"
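
GitHub signs every delivery with the configured secret and sends the result in the `X-Hub-Signature-256` header. Any listener should reject payloads whose signature does not match; the sketch below shows the standard check, as an illustration rather than the exact code in `scripts/webhook-listener.py`.

```python
import hashlib
import hmac


def verify_github_signature(secret: str, body: bytes, signature_header: str) -> bool:
    """Validate an X-Hub-Signature-256 header ("sha256=<hexdigest>") against the raw body."""
    if not signature_header or not signature_header.startswith("sha256="):
        return False
    expected = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    # Constant-time comparison guards against timing attacks
    return hmac.compare_digest(signature_header.split("=", 1)[1], expected)
```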
|
||||
## Step 4: Database Setup
|
||||
|
||||
### 4.1 PostgreSQL Configuration
|
||||
|
||||
```bash
|
||||
# On the VM
|
||||
sudo -u postgres psql
|
||||
|
||||
-- Create database and user
|
||||
CREATE DATABASE thrillwiki;
|
||||
CREATE USER thrillwiki_user WITH ENCRYPTED PASSWORD 'your_password';
|
||||
GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO thrillwiki_user;
|
||||
\q
|
||||
|
||||
# Install PostGIS (if using geographic features)
|
||||
sudo apt install -y postgresql-postgis postgresql-postgis-scripts
|
||||
sudo -u postgres psql -d thrillwiki -c "CREATE EXTENSION postgis;"
|
||||
```
|
||||
|
||||
### 4.2 Run Initial Migration
|
||||
|
||||
```bash
|
||||
# On the VM
|
||||
cd /home/ubuntu/thrillwiki
|
||||
uv run manage.py migrate
|
||||
uv run manage.py collectstatic --noinput
|
||||
uv run manage.py createsuperuser
|
||||
```
|
||||
|
||||
## Step 5: Start Services
|
||||
|
||||
### 5.1 Start VM Services
|
||||
|
||||
```bash
|
||||
# On the VM
|
||||
sudo systemctl start thrillwiki
|
||||
sudo systemctl start thrillwiki-webhook
|
||||
sudo systemctl status thrillwiki
|
||||
sudo systemctl status thrillwiki-webhook
|
||||
```
|
||||
|
||||
### 5.2 Start Local Webhook Listener
|
||||
|
||||
```bash
|
||||
# On local machine
|
||||
source ***REMOVED***.webhook
|
||||
python3 scripts/webhook-listener.py
|
||||
```
|
||||
|
||||
## Step 6: Testing
|
||||
|
||||
### 6.1 Test Local Server
|
||||
|
||||
```bash
|
||||
# Start local development server
|
||||
./scripts/ci-start.sh
|
||||
|
||||
# Check if server is running
|
||||
curl http://localhost:8000/health
|
||||
```
|
||||
|
||||

### 6.2 Test VM Deployment

```bash
# On the VM, test deployment script
./scripts/vm-deploy.sh

# Check service status
./scripts/vm-deploy.sh status

# View logs
journalctl -u thrillwiki -f
```

### 6.3 Test Webhook

```bash
# Test webhook endpoint
curl -X GET http://localhost:9000/health

# Make a test commit and push to trigger deployment
git add .
git commit -m "Test deployment"
git push origin main
```

## Monitoring and Logs

### Service Logs

```bash
# View service logs
journalctl -u thrillwiki -f
journalctl -u thrillwiki-webhook -f

# View deployment logs
tail -f /home/ubuntu/thrillwiki/logs/deploy.log
tail -f /home/ubuntu/thrillwiki/logs/webhook.log
```

### Health Checks

```bash
# Check service status
systemctl status thrillwiki
systemctl status thrillwiki-webhook

# Manual health check
curl http://localhost:8000/health
curl http://localhost:9000/health
```

## Troubleshooting

### Common Issues

1. **Permission Denied**

   ```bash
   # Fix file permissions
   chmod +x scripts/*.sh
   chown -R ubuntu:ubuntu /home/ubuntu/thrillwiki
   ```

2. **Service Won't Start**

   ```bash
   # Check service logs
   journalctl -u thrillwiki --no-pager

   # Verify paths in service files
   sudo systemctl edit thrillwiki
   ```

3. **Webhook Not Triggering**

   ```bash
   # Check webhook listener logs
   tail -f logs/webhook.log

   # Verify GitHub webhook configuration
   # Check firewall settings for port 9000
   ```

4. **Database Connection Issues**

   ```bash
   # Test database connection
   uv run manage.py dbshell

   # Check PostgreSQL status
   sudo systemctl status postgresql
   ```
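
Beyond `dbshell`, you can confirm connectivity from Django itself inside `uv run manage.py shell`:

```python
from django.db import connection

# Raises an OperationalError describing the root cause if the DB is unreachable
connection.ensure_connection()
print(connection.settings_dict["NAME"], "is reachable")
```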

### Rollback Procedure

If a deployment fails, you can roll back:

```bash
# On the VM
./scripts/vm-deploy.sh
# The script automatically handles rollback on failure

# Manual rollback to a specific commit
cd /home/ubuntu/thrillwiki
git reset --hard COMMIT_HASH
./scripts/vm-deploy.sh restart
```

## Security Considerations

1. **SSH Keys**: Use dedicated SSH keys for deployment
2. **Webhook Secret**: Use a strong, unique webhook secret (see the snippet below)
3. **Firewall**: Only open the necessary ports (22, 8000, 9000)
4. **User Permissions**: Use a dedicated deployment user with minimal privileges
5. **Environment Variables**: Store sensitive data in environment files, not in code
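
For item 2, Python's standard library can generate a suitably strong secret:

```python
import secrets

# 64 hex characters (~256 bits of entropy); use the same value for
# WEBHOOK_SECRET on the VM, in ***REMOVED***.webhook, and in the GitHub webhook form
print(secrets.token_hex(32))
```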

## Maintenance

### Regular Tasks

1. **Update Dependencies**: Run `uv sync` regularly
2. **Log Rotation**: Set up logrotate for application logs
3. **Backup Database**: Schedule regular database backups (see the sketch below)
4. **Monitor Disk Space**: Ensure sufficient space for logs and backups
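
A minimal backup helper for item 3, assuming `pg_dump` can authenticate as `thrillwiki_user` (for example via `~/.pgpass`); the paths are illustrative:

```python
import datetime
import pathlib
import subprocess

BACKUP_DIR = pathlib.Path("/home/ubuntu/thrillwiki/backups")

def backup_database() -> pathlib.Path:
    """Dump the thrillwiki database to a timestamped SQL file."""
    BACKUP_DIR.mkdir(parents=True, exist_ok=True)
    stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    target = BACKUP_DIR / f"thrillwiki_{stamp}.sql"
    subprocess.run(
        ["pg_dump", "-U", "thrillwiki_user", "-d", "thrillwiki", "-f", str(target)],
        check=True,  # raise if pg_dump exits non-zero
    )
    return target

if __name__ == "__main__":
    print("Wrote", backup_database())
```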

### Clean Up Old Backups

```bash
# The deployment script automatically cleans old backups
# Manual cleanup if needed:
find /home/ubuntu/thrillwiki/backups -name "backup_*.commit" -mtime +30 -delete
```

## Performance Optimization

1. **Use a Production WSGI Server**: Consider Gunicorn instead of the development server
2. **Reverse Proxy**: Set up Nginx as a reverse proxy
3. **Database Optimization**: Configure PostgreSQL for production workloads
4. **Static Files**: Serve static files through Nginx

This setup provides a robust CI/CD pipeline that automatically deploys ThrillWiki to your Linux VM whenever code is pushed to GitHub.

73
docs/consolidation_analysis.md
Normal file
@@ -0,0 +1,73 @@
# Consolidation Analysis

## Review System Implementation

### Current Implementation

- Uses Django's GenericForeignKey (confirmed)
- Single Review model handles both parks and rides
- Related models: ReviewImage, ReviewLike, ReviewReport
- Content types: Currently supports any model type

### Migration Plan

1. **Create New Models**:

   ```python
   # parks/models/reviews.py
   class ParkReview(TrackedModel):
       park = models.ForeignKey(Park, on_delete=models.CASCADE)
       # ... other review fields ...

   # rides/models/reviews.py
   class RideReview(TrackedModel):
       ride = models.ForeignKey(Ride, on_delete=models.CASCADE)
       # ... other review fields ...
   ```

2. **Data Migration Steps** (wired into a `RunPython` migration; see the sketch after this list):

   ```python
   # Migration operations
   def migrate_reviews(apps, schema_editor):
       Review = apps.get_model('reviews', 'Review')
       ParkReview = apps.get_model('parks', 'ParkReview')
       RideReview = apps.get_model('rides', 'RideReview')

       for review in Review.objects.all():
           if review.content_type.model == 'park':
               ParkReview.objects.create(
                   park_id=review.object_id,
                   # ... map other fields ...
               )
           elif review.content_type.model == 'ride':
               RideReview.objects.create(
                   ride_id=review.object_id,
                   # ... map other fields ...
               )
   ```
3. **Update Related Models**:

   ```python
   # Before (generic)
   class ReviewImage(models.Model):
       review = models.ForeignKey(Review, ...)

   # After (concrete)
   class ParkReviewImage(models.Model):
       review = models.ForeignKey(ParkReview, ...)

   class RideReviewImage(models.Model):
       review = models.ForeignKey(RideReview, ...)
   ```

4. **Backward Compatibility**:
   - Maintain old Review API during transition period
   - Phase out generic reviews after data migration
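
A sketch of how `migrate_reviews` from step 2 might be wired into a migration file (dependency names are illustrative; point them at the real latest migration nodes):

```python
from django.db import migrations

# `migrate_reviews` is the function from step 2, defined earlier in this module.

class Migration(migrations.Migration):
    # Django wraps this in a transaction on PostgreSQL by default,
    # matching the "data migration transactions" mitigation below.

    dependencies = [
        ("reviews", "0001_initial"),   # illustrative
        ("parks", "0002_parkreview"),  # illustrative
        ("rides", "0002_ridereview"),  # illustrative
    ]

    operations = [
        # The noop reverse keeps the migration reversible for staging rehearsals.
        migrations.RunPython(migrate_reviews, migrations.RunPython.noop),
    ]
```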

### Entity Relationship Compliance

- Park reviews will reference the Park model (via Operator)
- Ride reviews will reference the Ride model (via Park → Operator)
- Complies with the entity relationship rules in .clinerules

### Risk Mitigation

- Use data migration transactions
- Create database backups before migration
- Test with staging data first

96
docs/search_integration_plan.md
Normal file
@@ -0,0 +1,96 @@
# Search Integration Plan

## 1. File Structure

```plaintext
core/
├── views/
│   └── search.py          # Search views implementation
├── utils/
│   └── search.py          # Search utilities
templates/
└── core/
    └── search/            # Search templates
        ├── results.html
        ├── filters.html
        └── ...
```

## 2. View Migration

- Move `search/views.py` → `core/views/search.py`
- Update view references:

  ```python
  # Old: from search.views import AdaptiveSearchView
  # New:
  from core.views.search import AdaptiveSearchView, FilterFormView
  ```
## 3. URL Configuration Updates

Update `thrillwiki/urls.py`:

```python
# Before:
path("search/", include("search.urls", namespace="search"))

# After:
path("search/", include("core.urls.search", namespace="search"))
```

Create `core/urls/search.py`:

```python
from django.urls import path
from core.views.search import AdaptiveSearchView, FilterFormView
from rides.views import RideSearchView

urlpatterns = [
    path('parks/', AdaptiveSearchView.as_view(), name='search'),
    path('parks/filters/', FilterFormView.as_view(), name='filter_form'),
    path('rides/', RideSearchView.as_view(), name='ride_search'),
    path('rides/results/', RideSearchView.as_view(), name='ride_search_results'),
]
```
## 4. Import Cleanup Strategy

1. Update all imports:

   ```python
   # Before:
   from search.views import ...
   from search.utils import ...

   # After:
   from core.views.search import ...
   from core.utils.search import ...
   ```

2. Remove the old search app:

   ```bash
   rm -rf search/
   ```

3. Update `INSTALLED_APPS` in `thrillwiki/settings.py`:

   ```python
   # Remove 'search' from INSTALLED_APPS
   INSTALLED_APPS = [
       # ...
       # 'search',  # REMOVE THIS LINE
       # ...
   ]
   ```
## 5. Implementation Steps

1. Create the new directory structure in core
2. Move view logic to `core/views/search.py`
3. Create the URL config in `core/urls/search.py`
4. Move templates to `templates/core/search/`
5. Update all import references
6. Remove the old search app
7. Test all search functionality:
   - Park search filters
   - Ride search
   - HTMX filter updates
8. Verify URL routes

## 6. Verification Checklist

- [ ] All search endpoints respond with 200 (see the smoke test below)
- [ ] Filter forms render correctly
- [ ] HTMX updates work as expected
- [ ] No references to the old search app in the codebase
- [ ] Templates render with the correct context
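
A quick smoke test for the first item, using Django's test client and the URL names defined above (a sketch; some endpoints may additionally need fixture data to return 200):

```python
from django.test import TestCase
from django.urls import reverse

class SearchEndpointSmokeTests(TestCase):
    """Implementation step 7: every migrated search endpoint still answers."""

    def test_search_endpoints_respond(self):
        for name in ("search:search", "search:filter_form", "search:ride_search"):
            response = self.client.get(reverse(name))
            self.assertEqual(response.status_code, 200, name)
```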

@@ -1,4 +1,4 @@
-# Generated by Django 5.1.4 on 2025-02-10 01:10
+# Generated by Django 5.1.4 on 2025-08-13 21:35
 
 import django.db.models.deletion
 import pgtrigger.compiler

@@ -19,7 +19,15 @@ class Migration(migrations.Migration):
         migrations.CreateModel(
             name="EmailConfiguration",
             fields=[
-                ("id", models.BigAutoField(primary_key=True, serialize=False)),
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True,
+                        primary_key=True,
+                        serialize=False,
+                        verbose_name="ID",
+                    ),
+                ),
                 ("api_key", models.CharField(max_length=255)),
                 ("from_email", models.EmailField(max_length=254)),
                 (

@@ -1,20 +0,0 @@
-# Generated by Django 5.1.4 on 2025-02-21 17:55
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("email_service", "0001_initial"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="emailconfiguration",
-            name="id",
-            field=models.BigAutoField(
-                auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
-            ),
-        ),
-    ]

@@ -1,6 +1,6 @@
 from django.db import models
 from django.contrib.sites.models import Site
-from history_tracking.models import TrackedModel
+from core.history import TrackedModel
 import pghistory
 
 @pghistory.track()

@@ -9,7 +9,7 @@ import base64
 
 class EmailService:
     @staticmethod
-    def send_email(to, subject, text, from_email=None, html=None, reply_to=None, request=None, site=None):
+    def send_email(*, to: str, subject: str, text: str, from_email: str = None, html: str = None, reply_to: str = None, request = None, site = None):
         # Get the site configuration
         if site is None and request is not None:
             site = get_current_site(request)

@@ -1,12 +0,0 @@
-from django.apps import AppConfig
-
-class HistoryConfig(AppConfig):
-    default_auto_field = 'django.db.models.BigAutoField'
-    name = 'history'
-    verbose_name = 'History Tracking'
-
-    def ready(self):
-        """Initialize app and signal handlers"""
-        from django.dispatch import Signal
-        # Create a signal for history updates
-        self.history_updated = Signal()

Some files were not shown because too many files have changed in this diff.