Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2025-12-30 07:07:00 -05:00)

Compare commits: 34 commits, pixeebot/d... → 652ea149bd

Commits in this range (SHA1 only):
652ea149bd, 66ed4347a9, 69c07d1381, bead0654df, 37a20f83ba, 2304085c32, 31d83c8889, 46c6e45eae, f5db23a791, 78248aa892, 641fc1a253, ca7555c052, 74b45aa143, d9fc13f350, f4f8ec8f9b, 274ba650b3, cc990ee003, 63b9cf1a70, c26414ff74, 17228e9935, 32736ae660, b5bae44cb8, da7c7e3381, f6c8e0e25c, 16386deee7, 7815de158e, b871a1d396, 751cd86a31, 8360f3fd43, b570cb6848, 94736acdd5, 6781fa3564, 4b11ec112e, de05a5abda
.clinerules (34 changed lines)
@@ -4,10 +4,9 @@
 IMPORTANT: Always follow these instructions exactly when starting the development server:

 ```bash
-lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver
+lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; ./scripts/dev_server.sh
 ```

-Note: These steps must be executed in this exact order as a single command to ensure consistent behavior.
+Note: These steps must be executed in this exact order as a single command to ensure consistent behavior. If server does not start correctly, do not attempt to modify the dev_server.sh script.

 ## Package Management
 IMPORTANT: When a Python package is needed, only use UV to add it:

@@ -24,7 +23,32 @@ uv run manage.py <command>
 This applies to all management commands including but not limited to:
 - Making migrations: `uv run manage.py makemigrations`
 - Applying migrations: `uv run manage.py migrate`
-- Creating superuser: `uv run manage.py createsuperuser`
-- Starting shell: `uv run manage.py shell`
+- Creating superuser: `uv run manage.py createsuperuser` and possible echo commands before for the necessary data input.
+- Starting shell: `uv run manage.py shell` and possible echo commands before for the necessary data input.

 NEVER use `python manage.py` or `uv run python manage.py`. Always use `uv run manage.py` directly.

+## Entity Relationship Rules
+IMPORTANT: Follow these entity relationship patterns consistently:
+
+# Park Relationships
+- Parks MUST have an Operator (required relationship)
+- Parks MAY have a PropertyOwner (optional, usually same as Operator)
+- Parks CANNOT directly reference Company entities
+
+# Ride Relationships
+- Rides MUST belong to a Park (required relationship)
+- Rides MAY have a Manufacturer (optional relationship)
+- Rides MAY have a Designer (optional relationship)
+- Rides CANNOT directly reference Company entities
+
+# Entity Definitions
+- Operators: Companies that operate theme parks (replaces Company.owner)
+- PropertyOwners: Companies that own park property (new concept, optional)
+- Manufacturers: Companies that manufacture rides (replaces Company for rides)
+- Designers: Companies/individuals that design rides (existing concept)
+
+# Relationship Constraints
+- Operator and PropertyOwner are usually the same entity but CAN be different
+- Manufacturers and Designers are distinct concepts and should not be conflated
+- All entity relationships should use proper foreign keys with appropriate null/blank settings
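The entity relationship rules added above map naturally onto Django foreign keys. The following is a minimal sketch of models that would satisfy them; the model names follow the rules, but the field names, `on_delete` choices, and `related_name` values are illustrative assumptions, not the project's actual code.

```python
# Hypothetical sketch of models satisfying the entity relationship rules above.
# Field names and related_name values are assumptions, not the project's code.
from django.db import models


class Operator(models.Model):       # company that operates theme parks
    name = models.CharField(max_length=255)


class PropertyOwner(models.Model):  # company that owns park property
    name = models.CharField(max_length=255)


class Manufacturer(models.Model):   # company that manufactures rides
    name = models.CharField(max_length=255)


class Designer(models.Model):       # company/individual that designs rides
    name = models.CharField(max_length=255)


class Park(models.Model):
    name = models.CharField(max_length=255)
    # Parks MUST have an Operator (required: no null/blank)
    operator = models.ForeignKey(Operator, on_delete=models.PROTECT, related_name="parks")
    # Parks MAY have a PropertyOwner (optional, usually the same company as the operator)
    property_owner = models.ForeignKey(
        PropertyOwner, on_delete=models.SET_NULL, null=True, blank=True, related_name="parks"
    )


class Ride(models.Model):
    name = models.CharField(max_length=255)
    # Rides MUST belong to a Park (required relationship)
    park = models.ForeignKey(Park, on_delete=models.CASCADE, related_name="rides")
    # Rides MAY have a Manufacturer and/or Designer (optional relationships)
    manufacturer = models.ForeignKey(
        Manufacturer, on_delete=models.SET_NULL, null=True, blank=True, related_name="rides"
    )
    designer = models.ForeignKey(
        Designer, on_delete=models.SET_NULL, null=True, blank=True, related_name="rides"
    )
```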
.env.example (new file, 90 lines)
@@ -0,0 +1,90 @@
# =============================================================
# ThrillWiki Environment Configuration
# =============================================================
# Copy this file to ***REMOVED*** and fill in your actual values

# =============================================================
# Core Django Settings
# =============================================================
SECRET_KEY=your-secret-key-here-generate-a-new-one
DEBUG=True
ALLOWED_HOSTS=localhost,127.0.0.1,beta.thrillwiki.com
CSRF_TRUSTED_ORIGINS=https://beta.thrillwiki.com,http://localhost:8000

# =============================================================
# Database Configuration
# =============================================================
# PostgreSQL with PostGIS for production/development
DATABASE_URL=postgis://username:password@localhost:5432/thrillwiki

# SQLite for quick local development (uncomment to use)
# DATABASE_URL=spatialite:///path/to/your/db.sqlite3

# =============================================================
# Cache Configuration
# =============================================================
# Local memory cache for development
CACHE_URL=locmem://

# Redis for production (uncomment and configure for production)
# CACHE_URL=redis://localhost:6379/1
# REDIS_URL=redis://localhost:6379/0

CACHE_MIDDLEWARE_SECONDS=300
CACHE_MIDDLEWARE_KEY_PREFIX=thrillwiki

# =============================================================
# Email Configuration
# =============================================================
EMAIL_BACKEND=django.core.mail.backends.console.EmailBackend
SERVER_EMAIL=django_webmaster@thrillwiki.com

# ForwardEmail configuration (uncomment to use)
# EMAIL_BACKEND=email_service.backends.ForwardEmailBackend
# FORWARD_EMAIL_BASE_URL=https://api.forwardemail.net

# SMTP configuration (uncomment to use)
# EMAIL_URL=smtp://username:password@smtp.example.com:587

# =============================================================
# Security Settings
# =============================================================
# Cloudflare Turnstile (get keys from Cloudflare dashboard)
TURNSTILE_SITE_KEY=your-turnstile-site-key
TURNSTILE_SECRET_KEY=your-turnstile-secret-key
TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify

# Security headers (set to True for production)
SECURE_SSL_REDIRECT=False
SESSION_COOKIE_SECURE=False
CSRF_COOKIE_SECURE=False
SECURE_HSTS_SECONDS=31536000
SECURE_HSTS_INCLUDE_SUBDOMAINS=True

# =============================================================
# GeoDjango Settings (macOS with Homebrew)
# =============================================================
GDAL_LIBRARY_PATH=/opt/homebrew/lib/libgdal.dylib
GEOS_LIBRARY_PATH=/opt/homebrew/lib/libgeos_c.dylib

# Linux alternatives (uncomment if on Linux)
# GDAL_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/libgdal.so
# GEOS_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu/libgeos_c.so

# =============================================================
# Optional: Third-party Integrations
# =============================================================
# Sentry for error tracking (uncomment to use)
# SENTRY_DSN=https://your-sentry-dsn-here

# Google Analytics (uncomment to use)
# GOOGLE_ANALYTICS_ID=GA-XXXXXXXXX

# =============================================================
# Development/Debug Settings
# =============================================================
# Set to comma-separated list for debug toolbar
# INTERNAL_IPS=127.0.0.1,::1

# Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
LOG_LEVEL=INFO
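The `DATABASE_URL` value above implies that settings are assembled from the environment. How this repository wires that up is not shown in this diff; the sketch below is one standard-library way to turn a `postgis://` URL into a Django `DATABASES` entry, purely as an illustration (the spatialite variant would need its own path handling).

```python
# Illustrative only: parse a DATABASE_URL like the one in .env.example into a
# Django DATABASES dict using nothing but the standard library.
import os
from urllib.parse import urlparse


def databases_from_env(default="postgis://wiki:thrillwiki@localhost:5432/thrillwiki"):
    url = urlparse(os.environ.get("DATABASE_URL", default))
    if url.scheme != "postgis":
        raise ValueError("this sketch only handles postgis:// URLs")
    return {
        "default": {
            "ENGINE": "django.contrib.gis.db.backends.postgis",
            "NAME": url.path.lstrip("/"),
            "USER": url.username or "",
            "PASSWORD": url.password or "",
            "HOST": url.hostname or "",
            "PORT": str(url.port or ""),
        }
    }
```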
.flake8 (new file, 29 lines)
@@ -0,0 +1,29 @@
[flake8]
# Maximum line length (matches Black formatter)
max-line-length = 88

# Exclude common directories that shouldn't be linted
exclude =
    .git,
    __pycache__,
    .venv,
    venv,
    env,
    .env,
    migrations,
    node_modules,
    .tox,
    .mypy_cache,
    .pytest_cache,
    build,
    dist,
    *.egg-info

# Ignore line break style warnings which are style preferences
# W503: line break before binary operator (conflicts with PEP8 W504)
# W504: line break after binary operator (conflicts with PEP8 W503)
# These warnings contradict each other, so it's best to ignore one or both
ignore = W503,W504

# Maximum complexity for McCabe complexity checker
max-complexity = 10
.gitignore (vendored, 25 changed lines)
@@ -347,6 +347,12 @@ cython_debug/
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/

+# Pixi package manager
+.pixi/
+
+# Django Tailwind CLI
+.django_tailwind_cli/
+
 # General
 .DS_Store
 .AppleDouble
@@ -373,3 +379,22 @@ Icon
 Network Trash Folder
 Temporary Items
 .apdisk
+
+
+# ThrillWiki CI/CD Configuration
+.thrillwiki-config
+***REMOVED***.unraid
+***REMOVED***.webhook
+.github-token
+logs/
+profiles
+.thrillwiki-github-token
+.thrillwiki-template-config
+
+# Environment files with potential secrets
+scripts/systemd/thrillwiki-automation***REMOVED***
+scripts/systemd/thrillwiki-deployment***REMOVED***
+scripts/systemd/****REMOVED***
+logs/
+profiles/
+uv.lock
CI_README.md (new file, 277 lines)
@@ -0,0 +1,277 @@
|
||||
# ThrillWiki CI/CD System
|
||||
|
||||
This repository includes a **complete automated CI/CD system** that creates a Linux VM on Unraid and automatically deploys ThrillWiki when commits are pushed to GitHub.
|
||||
|
||||
## 🚀 Complete Automation (Unraid)
|
||||
|
||||
For **full automation** including VM creation on Unraid:
|
||||
|
||||
```bash
|
||||
./scripts/unraid/setup-complete-automation.sh
|
||||
```
|
||||
|
||||
This single command will:
|
||||
- ✅ Create and configure VM on Unraid
|
||||
- ✅ Install Ubuntu Server with all dependencies
|
||||
- ✅ Deploy ThrillWiki application
|
||||
- ✅ Set up automated CI/CD pipeline
|
||||
- ✅ Configure webhook listener
|
||||
- ✅ Test the entire system
|
||||
|
||||
## Manual Setup (Any Linux VM)
|
||||
|
||||
For manual setup on existing Linux VMs:
|
||||
|
||||
```bash
|
||||
./scripts/setup-vm-ci.sh
|
||||
```
|
||||
|
||||
## System Components
|
||||
|
||||
### 📁 Files Created
|
||||
|
||||
```
|
||||
scripts/
|
||||
├── ci-start.sh # Local development server startup
|
||||
├── webhook-listener.py # GitHub webhook listener
|
||||
├── vm-deploy.sh # VM deployment script
|
||||
├── setup-vm-ci.sh # Manual VM setup script
|
||||
├── unraid/
|
||||
│ ├── vm-manager.py # Unraid VM management
|
||||
│ └── setup-complete-automation.sh # Complete automation
|
||||
└── systemd/
|
||||
├── thrillwiki.service # Django app service
|
||||
└── thrillwiki-webhook.service # Webhook listener service
|
||||
|
||||
docs/
|
||||
├── VM_DEPLOYMENT_SETUP.md # Manual setup documentation
|
||||
└── UNRAID_COMPLETE_AUTOMATION.md # Complete automation guide
|
||||
```
|
||||
|
||||
### 🔄 Deployment Flow
|
||||
|
||||
**Complete Automation:**
|
||||
```
|
||||
GitHub Push → Webhook → Local Listener → SSH → Unraid VM → Deploy & Restart
|
||||
```
|
||||
|
||||
**Manual Setup:**
|
||||
```
|
||||
GitHub Push → Webhook → Local Listener → SSH to VM → Deploy Script → Server Restart
|
||||
```
|
||||
|
||||
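The "SSH to VM → Deploy Script" hop in both flows can be a single SSH invocation from the listener. The sketch below is not an excerpt from `scripts/webhook-listener.py`; the host alias and project path mirror the examples later in this document but are assumptions.

```python
# Hypothetical illustration of the "SSH to VM -> Deploy Script" hop.
# Host alias, project path, and script name follow the docs above but are assumptions.
import subprocess


def deploy_to_vm(host: str = "thrillwiki-vm", project_path: str = "thrillwiki") -> bool:
    """Run the deploy script on the VM and report success."""
    result = subprocess.run(
        ["ssh", host, f"cd {project_path} && ./scripts/vm-deploy.sh"],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        print(result.stderr)  # surface deployment errors in the webhook log
    return result.returncode == 0
```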
## Features
|
||||
|
||||
- **Complete VM Automation**: Automatically creates VMs on Unraid
|
||||
- **Automatic Deployment**: Deploys on push to main branch
|
||||
- **Health Checks**: Verifies deployment success
|
||||
- **Rollback Support**: Automatic rollback on deployment failure
|
||||
- **Service Management**: Systemd integration for reliable service management
|
||||
- **Database Setup**: Automated PostgreSQL configuration
|
||||
- **Logging**: Comprehensive logging for debugging
|
||||
- **Security**: SSH key authentication and webhook secrets
|
||||
- **One-Command Setup**: Full automation with single script
|
||||
|
||||
## Usage
|
||||
|
||||
### Complete Automation (Recommended)
|
||||
|
||||
For Unraid users, run the complete automation:
|
||||
|
||||
```bash
|
||||
./scripts/unraid/setup-complete-automation.sh
|
||||
```
|
||||
|
||||
After setup, start the webhook listener:
|
||||
```bash
|
||||
./start-webhook.sh
|
||||
```
|
||||
|
||||
### Local Development
|
||||
|
||||
Start the local development server:
|
||||
|
||||
```bash
|
||||
./scripts/ci-start.sh
|
||||
```
|
||||
|
||||
### VM Management (Unraid)
|
||||
|
||||
```bash
|
||||
# Check VM status
|
||||
python3 scripts/unraid/vm-manager.py status
|
||||
|
||||
# Start/stop VM
|
||||
python3 scripts/unraid/vm-manager.py start
|
||||
python3 scripts/unraid/vm-manager.py stop
|
||||
|
||||
# Get VM IP
|
||||
python3 scripts/unraid/vm-manager.py ip
|
||||
```
|
||||
|
||||
### Service Management
|
||||
|
||||
On the VM:
|
||||
|
||||
```bash
|
||||
# Check status
|
||||
ssh thrillwiki-vm "./scripts/vm-deploy.sh status"
|
||||
|
||||
# Restart service
|
||||
ssh thrillwiki-vm "./scripts/vm-deploy.sh restart"
|
||||
|
||||
# View logs
|
||||
ssh thrillwiki-vm "journalctl -u thrillwiki -f"
|
||||
```
|
||||
|
||||
### Manual VM Deployment
|
||||
|
||||
Deploy to VM manually:
|
||||
|
||||
```bash
|
||||
ssh thrillwiki-vm "cd thrillwiki && ./scripts/vm-deploy.sh"
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Automated Configuration
|
||||
|
||||
The complete automation script creates all necessary configuration files:
|
||||
|
||||
- `***REMOVED***.unraid` - Unraid VM configuration
|
||||
- `***REMOVED***.webhook` - Webhook listener configuration
|
||||
- SSH keys and configuration
|
||||
- Service configurations
|
||||
|
||||
### Manual Environment Variables
|
||||
|
||||
For manual setup, create `***REMOVED***.webhook` file:
|
||||
|
||||
```bash
|
||||
WEBHOOK_PORT=9000
|
||||
WEBHOOK_SECRET=your_secret_here
|
||||
VM_HOST=your_vm_ip
|
||||
VM_USER=ubuntu
|
||||
VM_KEY_PATH=/path/to/ssh/key
|
||||
VM_PROJECT_PATH=/home/ubuntu/thrillwiki
|
||||
REPO_URL=https://github.com/username/repo.git
|
||||
DEPLOY_BRANCH=main
|
||||
```
|
||||
|
||||
### GitHub Webhook
|
||||
|
||||
Configure in your GitHub repository:
|
||||
- **URL**: `http://YOUR_PUBLIC_IP:9000/webhook`
|
||||
- **Content Type**: `application/json`
|
||||
- **Secret**: Your webhook secret
|
||||
- **Events**: Push events
|
||||
|
||||
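Whatever listener sits behind that URL should verify the webhook secret before deploying. GitHub signs each delivery with an `X-Hub-Signature-256` header (an HMAC-SHA256 of the request body keyed by the secret); the snippet below is a generic verification sketch, not an excerpt from `scripts/webhook-listener.py`.

```python
# Generic GitHub webhook signature check (not taken from webhook-listener.py).
import hashlib
import hmac


def signature_is_valid(secret: str, body: bytes, signature_header: str) -> bool:
    """Compare GitHub's X-Hub-Signature-256 header against our own HMAC."""
    if not signature_header.startswith("sha256="):
        return False
    expected = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    # constant-time comparison to avoid leaking the secret via timing
    return hmac.compare_digest("sha256=" + expected, signature_header)
```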
## Requirements
|
||||
|
||||
### For Complete Automation
|
||||
- **Local Machine**: Python 3.8+, SSH client
|
||||
- **Unraid Server**: 6.8+ with VM support
|
||||
- **Resources**: 4GB RAM, 50GB disk minimum
|
||||
- **Ubuntu ISO**: Ubuntu Server 22.04 in `/mnt/user/isos/`
|
||||
|
||||
### For Manual Setup
|
||||
- **Local Machine**: Python 3.8+, SSH access to VM, Public IP
|
||||
- **Linux VM**: Ubuntu 20.04+, Python 3.8+, UV package manager, Git, SSH server
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Complete Automation Issues
|
||||
|
||||
1. **VM Creation Fails**
|
||||
```bash
|
||||
# Check Unraid VM support
|
||||
ssh unraid "virsh list --all"
|
||||
|
||||
# Verify Ubuntu ISO exists
|
||||
ssh unraid "ls -la /mnt/user/isos/ubuntu-*.iso"
|
||||
```
|
||||
|
||||
2. **VM Won't Start**
|
||||
```bash
|
||||
# Check VM status
|
||||
python3 scripts/unraid/vm-manager.py status
|
||||
|
||||
# Check Unraid logs
|
||||
ssh unraid "tail -f /var/log/libvirt/qemu/thrillwiki-vm.log"
|
||||
```
|
||||
|
||||
### General Issues
|
||||
|
||||
1. **SSH Connection Failed**
|
||||
```bash
|
||||
# Check SSH key permissions
|
||||
chmod 600 ~/.ssh/thrillwiki_vm
|
||||
|
||||
# Test connection
|
||||
ssh thrillwiki-vm
|
||||
```
|
||||
|
||||
2. **Webhook Not Receiving Events**
|
||||
```bash
|
||||
# Check if port is open
|
||||
sudo ufw allow 9000
|
||||
|
||||
# Verify webhook URL in GitHub
|
||||
curl -X GET http://localhost:9000/health
|
||||
```
|
||||
|
||||
3. **Service Won't Start**
|
||||
```bash
|
||||
# Check service logs
|
||||
ssh thrillwiki-vm "journalctl -u thrillwiki --no-pager"
|
||||
|
||||
# Manual start
|
||||
ssh thrillwiki-vm "cd thrillwiki && ./scripts/ci-start.sh"
|
||||
```
|
||||
|
||||
### Logs
|
||||
|
||||
- **Setup logs**: `logs/unraid-automation.log`
|
||||
- **Local webhook**: `logs/webhook.log`
|
||||
- **VM deployment**: `logs/deploy.log` (on VM)
|
||||
- **Django server**: `logs/django.log` (on VM)
|
||||
- **System logs**: `journalctl -u thrillwiki -f` (on VM)
|
||||
|
||||
## Security Notes
|
||||
|
||||
- Automated SSH key generation and management
|
||||
- Dedicated keys for each connection (VM access, Unraid access)
|
||||
- No password authentication
|
||||
- Systemd security features enabled
|
||||
- Firewall configuration support
|
||||
- Secret management in environment files
|
||||
|
||||
## Documentation
|
||||
|
||||
- **Complete Automation**: [`docs/UNRAID_COMPLETE_AUTOMATION.md`](docs/UNRAID_COMPLETE_AUTOMATION.md)
|
||||
- **Manual Setup**: [`docs/VM_DEPLOYMENT_SETUP.md`](docs/VM_DEPLOYMENT_SETUP.md)
|
||||
|
||||
---
|
||||
|
||||
## Quick Start Summary
|
||||
|
||||
### For Unraid Users (Complete Automation)
|
||||
```bash
|
||||
# One command to set up everything
|
||||
./scripts/unraid/setup-complete-automation.sh
|
||||
|
||||
# Start webhook listener
|
||||
./start-webhook.sh
|
||||
|
||||
# Push commits to auto-deploy!
|
||||
```
|
||||
|
||||
### For Existing VM Users
|
||||
```bash
|
||||
# Manual setup
|
||||
./scripts/setup-vm-ci.sh
|
||||
|
||||
# Configure webhook and push to deploy
|
||||
```
|
||||
|
||||
**The system will automatically deploy your Django application whenever you push commits to the main branch!** 🚀
|
||||
README.md (392 changed lines)
@@ -1 +1,391 @@
-ThrillWiki.com
+# ThrillWiki Development Environment Setup
|
||||
|
||||
ThrillWiki is a modern Django web application for theme park and roller coaster enthusiasts, featuring a sophisticated dark theme design with purple-to-blue gradients, HTMX interactivity, and comprehensive park/ride information management.
|
||||
|
||||
## 🏗️ Technology Stack
|
||||
|
||||
- **Backend**: Django 5.0+ with GeoDjango (PostGIS)
|
||||
- **Frontend**: HTMX + Alpine.js + Tailwind CSS
|
||||
- **Database**: PostgreSQL with PostGIS extension
|
||||
- **Package Management**: UV (Python package manager)
|
||||
- **Authentication**: Django Allauth with Google/Discord OAuth
|
||||
- **Styling**: Tailwind CSS with custom dark theme
|
||||
- **History Tracking**: django-pghistory for audit trails
|
||||
- **Testing**: Pytest + Playwright for E2E testing
|
||||
|
||||
## 📋 Prerequisites
|
||||
|
||||
### Required Software
|
||||
|
||||
1. **Python 3.11+**
|
||||
```bash
|
||||
python --version # Should be 3.11 or higher
|
||||
```
|
||||
|
||||
2. **UV Package Manager**
|
||||
```bash
|
||||
# Install UV if not already installed
|
||||
curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
# or
|
||||
pip install uv
|
||||
```
|
||||
|
||||
3. **PostgreSQL with PostGIS**
|
||||
```bash
|
||||
# macOS (Homebrew)
|
||||
brew install postgresql postgis
|
||||
|
||||
# Ubuntu/Debian
|
||||
sudo apt-get install postgresql postgresql-contrib postgis
|
||||
|
||||
# Start PostgreSQL service
|
||||
brew services start postgresql # macOS
|
||||
sudo systemctl start postgresql # Linux
|
||||
```
|
||||
|
||||
4. **GDAL/GEOS Libraries** (for GeoDjango)
|
||||
```bash
|
||||
# macOS (Homebrew)
|
||||
brew install gdal geos
|
||||
|
||||
# Ubuntu/Debian
|
||||
sudo apt-get install gdal-bin libgdal-dev libgeos-dev
|
||||
```
|
||||
|
||||
5. **Node.js** (for Tailwind CSS)
|
||||
```bash
|
||||
# Install Node.js 18+ for Tailwind CSS compilation
|
||||
node --version # Should be 18 or higher
|
||||
```
|
||||
|
||||
## 🚀 Quick Start
|
||||
|
||||
### 1. Clone and Setup Project
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone <repository-url>
|
||||
cd thrillwiki_django_no_react
|
||||
|
||||
# Install Python dependencies using UV
|
||||
uv sync
|
||||
```
|
||||
|
||||
### 2. Database Setup
|
||||
|
||||
```bash
|
||||
# Create PostgreSQL database and user
|
||||
createdb thrillwiki
|
||||
createuser wiki
|
||||
|
||||
# Connect to PostgreSQL and setup
|
||||
psql postgres
|
||||
```
|
||||
|
||||
In the PostgreSQL shell:
|
||||
```sql
|
||||
-- Set password for wiki user
|
||||
ALTER USER wiki WITH PASSWORD 'thrillwiki';
|
||||
|
||||
-- Grant privileges
|
||||
GRANT ALL PRIVILEGES ON DATABASE thrillwiki TO wiki;
|
||||
|
||||
-- Enable PostGIS extension
|
||||
\c thrillwiki
|
||||
CREATE EXTENSION postgis;
|
||||
\q
|
||||
```
|
||||
|
||||
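If you want to confirm from Django that the database and the PostGIS extension are actually reachable, a quick check inside `uv run manage.py shell` is enough; this is a convenience snippet, not part of the project.

```python
# Run inside `uv run manage.py shell` to confirm the PostGIS extension is active.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute("SELECT PostGIS_Version();")
    # prints something like "3.4 USE_GEOS=1 USE_PROJ=1 USE_STATS=1"
    print(cursor.fetchone()[0])
```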
### 3. Environment Configuration
|
||||
|
||||
The project uses these database settings (configured in [`thrillwiki/settings.py`](thrillwiki/settings.py)):
|
||||
```python
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.contrib.gis.db.backends.postgis",
|
||||
"NAME": "thrillwiki",
|
||||
"USER": "wiki",
|
||||
"PASSWORD": "thrillwiki",
|
||||
"HOST": "192.168.86.3", # Update to your PostgreSQL host
|
||||
"PORT": "5432",
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Important**: Update the `HOST` setting in [`thrillwiki/settings.py`](thrillwiki/settings.py) to match your PostgreSQL server location:
|
||||
- Use `"localhost"` or `"127.0.0.1"` for local development
|
||||
- Current setting is `"192.168.86.3"` - update this to your PostgreSQL server IP
|
||||
- For local development, change to `"localhost"` in settings.py
|
||||
|
||||
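Because the host is hardcoded in `settings.py`, every developer ends up editing the same line. One common alternative, shown here only as a sketch (the project does not currently do this, and `THRILLWIKI_DB_HOST` is a hypothetical variable name), is to read the host from the environment with a localhost default.

```python
# Sketch: read the database host from the environment instead of hardcoding it.
# THRILLWIKI_DB_HOST is a hypothetical variable, not one the project defines.
import os

DATABASES = {
    "default": {
        "ENGINE": "django.contrib.gis.db.backends.postgis",
        "NAME": "thrillwiki",
        "USER": "wiki",
        "PASSWORD": "thrillwiki",
        "HOST": os.environ.get("THRILLWIKI_DB_HOST", "localhost"),
        "PORT": "5432",
    }
}
```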
### 4. Database Migration
|
||||
|
||||
```bash
|
||||
# Run database migrations
|
||||
uv run manage.py migrate
|
||||
|
||||
# Create a superuser account
|
||||
uv run manage.py createsuperuser
|
||||
```
|
||||
|
||||
**Note**: If you're setting up for local development, first update the database HOST in [`thrillwiki/settings.py`](thrillwiki/settings.py) from `"192.168.86.3"` to `"localhost"` before running migrations.
|
||||
|
||||
### 5. Start Development Server
|
||||
|
||||
**CRITICAL**: Always use this exact command sequence for starting the development server:
|
||||
|
||||
```bash
|
||||
lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver
|
||||
```
|
||||
|
||||
This command:
|
||||
- Kills any existing processes on port 8000
|
||||
- Cleans Python cache files
|
||||
- Starts Tailwind CSS compilation
|
||||
- Runs the Django development server
|
||||
|
||||
The application will be available at: http://localhost:8000
|
||||
|
||||
## 🛠️ Development Workflow
|
||||
|
||||
### Package Management
|
||||
|
||||
**ALWAYS use UV for package management**:
|
||||
|
||||
```bash
|
||||
# Add new Python packages
|
||||
uv add <package-name>
|
||||
|
||||
# Add development dependencies
|
||||
uv add --dev <package-name>
|
||||
|
||||
# Never use pip install - always use UV
|
||||
```
|
||||
|
||||
### Django Management Commands
|
||||
|
||||
**ALWAYS use UV for Django commands**:
|
||||
|
||||
```bash
|
||||
# Correct way to run Django commands
|
||||
uv run manage.py <command>
|
||||
|
||||
# Examples:
|
||||
uv run manage.py makemigrations
|
||||
uv run manage.py migrate
|
||||
uv run manage.py shell
|
||||
uv run manage.py createsuperuser
|
||||
uv run manage.py collectstatic
|
||||
|
||||
# NEVER use these patterns:
|
||||
# python manage.py <command> ❌ Wrong
|
||||
# uv run python manage.py <command> ❌ Wrong
|
||||
```
|
||||
|
||||
### CSS Development
|
||||
|
||||
The project uses **Tailwind CSS v4** with a custom dark theme. CSS files are located in:
|
||||
- Source: [`static/css/src/input.css`](static/css/src/input.css)
|
||||
- Compiled: [`static/css/`](static/css/) (auto-generated)
|
||||
|
||||
Tailwind automatically compiles when using the `tailwind runserver` command.
|
||||
|
||||
#### Tailwind CSS v4 Migration
|
||||
|
||||
This project has been migrated from Tailwind CSS v3 to v4. For complete migration details:
|
||||
|
||||
- **📖 Full Migration Documentation**: [`TAILWIND_V4_MIGRATION.md`](TAILWIND_V4_MIGRATION.md)
|
||||
- **⚡ Quick Reference Guide**: [`TAILWIND_V4_QUICK_REFERENCE.md`](TAILWIND_V4_QUICK_REFERENCE.md)
|
||||
|
||||
**Key v4 Changes**:
|
||||
- New CSS-first approach with `@theme` blocks
|
||||
- Updated utility class names (e.g., `outline-none` → `outline-hidden`)
|
||||
- New opacity syntax (e.g., `bg-blue-500/50` instead of `bg-blue-500 bg-opacity-50`)
|
||||
- Enhanced performance and smaller bundle sizes
|
||||
|
||||
**Custom Theme Variables** (available in CSS):
|
||||
```css
|
||||
var(--color-primary) /* #4f46e5 - Indigo-600 */
|
||||
var(--color-secondary) /* #e11d48 - Rose-600 */
|
||||
var(--color-accent) /* #8b5cf6 - Violet-500 */
|
||||
var(--font-family-sans) /* Poppins, sans-serif */
|
||||
```
|
||||
|
||||
## 🏗️ Project Structure
|
||||
|
||||
```
|
||||
thrillwiki_django_no_react/
|
||||
├── accounts/ # User account management
|
||||
├── analytics/ # Analytics and tracking
|
||||
├── companies/ # Theme park companies
|
||||
├── core/ # Core application logic
|
||||
├── designers/ # Ride designers
|
||||
├── history/ # History timeline features
|
||||
├── location/ # Geographic location handling
|
||||
├── media/ # Media file management
|
||||
├── moderation/ # Content moderation
|
||||
├── parks/ # Theme park management
|
||||
├── reviews/ # User reviews
|
||||
├── rides/ # Roller coaster/ride management
|
||||
├── search/ # Search functionality
|
||||
├── static/ # Static assets (CSS, JS, images)
|
||||
├── templates/ # Django templates
|
||||
├── thrillwiki/ # Main Django project settings
|
||||
├── memory-bank/ # Development documentation
|
||||
└── .clinerules # Project development rules
|
||||
```
|
||||
|
||||
## 🔧 Key Features
|
||||
|
||||
### Authentication System
|
||||
- Django Allauth integration
|
||||
- Google OAuth authentication
|
||||
- Discord OAuth authentication
|
||||
- Custom user profiles with avatars
|
||||
|
||||
### Geographic Features
|
||||
- PostGIS integration for location data
|
||||
- Interactive park maps
|
||||
- Location-based search and filtering
|
||||
|
||||
### Content Management
|
||||
- Park and ride information management
|
||||
- Photo galleries with upload capabilities
|
||||
- User-generated reviews and ratings
|
||||
- Content moderation system
|
||||
|
||||
### Modern Frontend
|
||||
- HTMX for dynamic interactions
|
||||
- Alpine.js for client-side behavior
|
||||
- Tailwind CSS with custom dark theme
|
||||
- Responsive design (mobile-first)
|
||||
|
||||
## 🧪 Testing
|
||||
|
||||
### Running Tests
|
||||
|
||||
```bash
|
||||
# Run Python tests
|
||||
uv run pytest
|
||||
|
||||
# Run with coverage
|
||||
uv run coverage run -m pytest
|
||||
uv run coverage report
|
||||
|
||||
# Run E2E tests with Playwright
|
||||
uv run pytest tests/e2e/
|
||||
```
|
||||
|
||||
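A minimal test in this setup looks like the sketch below; it assumes pytest-django is configured for the project and that the home page renders for anonymous visitors, neither of which is shown in this diff.

```python
# Minimal sketch of a pytest-django test; assumes pytest-django is configured.
import pytest
from django.test import Client


@pytest.mark.django_db
def test_homepage_responds():
    # Assumes the site root renders for anonymous users.
    response = Client().get("/")
    assert response.status_code == 200
```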
### Test Structure
|
||||
- Unit tests: Located within each app's `tests/` directory
|
||||
- E2E tests: [`tests/e2e/`](tests/e2e/)
|
||||
- Test fixtures: [`tests/fixtures/`](tests/fixtures/)
|
||||
|
||||
## 📚 Documentation
|
||||
|
||||
### Memory Bank System
|
||||
The project uses a comprehensive documentation system in [`memory-bank/`](memory-bank/):
|
||||
|
||||
- [`memory-bank/activeContext.md`](memory-bank/activeContext.md) - Current development context
|
||||
- [`memory-bank/documentation/design-system.md`](memory-bank/documentation/design-system.md) - Design system documentation
|
||||
- [`memory-bank/features/`](memory-bank/features/) - Feature-specific documentation
|
||||
- [`memory-bank/testing/`](memory-bank/testing/) - Testing documentation and results
|
||||
|
||||
### Key Documentation Files
|
||||
- [Design System](memory-bank/documentation/design-system.md) - UI/UX guidelines and patterns
|
||||
- [Authentication System](memory-bank/features/auth/) - OAuth and user management
|
||||
- [Layout Optimization](memory-bank/projects/) - Responsive design implementations
|
||||
|
||||
## 🚨 Important Development Rules
|
||||
|
||||
### Critical Commands
|
||||
1. **Server Startup**: Always use the full command sequence:
|
||||
```bash
|
||||
lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver
|
||||
```
|
||||
|
||||
2. **Package Management**: Only use UV:
|
||||
```bash
|
||||
uv add <package> # ✅ Correct
|
||||
pip install <package> # ❌ Wrong
|
||||
```
|
||||
|
||||
3. **Django Commands**: Always prefix with `uv run`:
|
||||
```bash
|
||||
uv run manage.py <command> # ✅ Correct
|
||||
python manage.py <command> # ❌ Wrong
|
||||
```
|
||||
|
||||
### Database Configuration
|
||||
- Ensure PostgreSQL is running before starting development
|
||||
- PostGIS extension must be enabled
|
||||
- Update database host settings for your environment
|
||||
|
||||
### GeoDjango Requirements
|
||||
- GDAL and GEOS libraries must be properly installed
|
||||
- Library paths are configured in [`thrillwiki/settings.py`](thrillwiki/settings.py) for macOS Homebrew
|
||||
- Current paths: `/opt/homebrew/lib/libgdal.dylib` and `/opt/homebrew/lib/libgeos_c.dylib`
|
||||
- May need adjustment based on your system's library locations (Linux users will need different paths)
|
||||
|
||||
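One way to keep those paths working across macOS and Linux machines, sketched here rather than copied from the project's `settings.py`, is to fall back through the known locations and let the environment variables from `.env.example` win when set. `GDAL_LIBRARY_PATH` and `GEOS_LIBRARY_PATH` are real GeoDjango settings; the lookup order is an assumption.

```python
# Sketch: resolve GDAL/GEOS library paths per platform, env var taking precedence.
# The lookup order is an assumption, not the project's current logic.
import os
from pathlib import Path


def _first_existing(env_var, candidates):
    override = os.environ.get(env_var)
    if override:
        return override
    for candidate in candidates:
        if Path(candidate).exists():
            return candidate
    # fall through; GeoDjango will raise a clear error if nothing exists
    return candidates[0]


GDAL_LIBRARY_PATH = _first_existing(
    "GDAL_LIBRARY_PATH",
    ["/opt/homebrew/lib/libgdal.dylib", "/usr/lib/x86_64-linux-gnu/libgdal.so"],
)
GEOS_LIBRARY_PATH = _first_existing(
    "GEOS_LIBRARY_PATH",
    ["/opt/homebrew/lib/libgeos_c.dylib", "/usr/lib/x86_64-linux-gnu/libgeos_c.so"],
)
```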
## 🔍 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **PostGIS Extension Error**
|
||||
```bash
|
||||
# Connect to database and enable PostGIS
|
||||
psql thrillwiki
|
||||
CREATE EXTENSION postgis;
|
||||
```
|
||||
|
||||
2. **GDAL/GEOS Library Not Found**
|
||||
```bash
|
||||
# macOS (Homebrew): Current paths in settings.py
|
||||
GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib"
|
||||
GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib"
|
||||
|
||||
# Linux: Update paths in settings.py to something like:
|
||||
# GDAL_LIBRARY_PATH = "/usr/lib/x86_64-linux-gnu/libgdal.so"
|
||||
# GEOS_LIBRARY_PATH = "/usr/lib/x86_64-linux-gnu/libgeos_c.so"
|
||||
|
||||
# Find your library locations
|
||||
find /usr -name "libgdal*" 2>/dev/null
|
||||
find /usr -name "libgeos*" 2>/dev/null
|
||||
find /opt -name "libgdal*" 2>/dev/null
|
||||
find /opt -name "libgeos*" 2>/dev/null
|
||||
```
|
||||
|
||||
3. **Port 8000 Already in Use**
|
||||
```bash
|
||||
# Kill existing processes
|
||||
lsof -ti :8000 | xargs kill -9
|
||||
```
|
||||
|
||||
4. **Tailwind CSS Not Compiling**
|
||||
```bash
|
||||
# Ensure Node.js is installed and use the full server command
|
||||
node --version
|
||||
uv run manage.py tailwind runserver
|
||||
```
|
||||
|
||||
### Getting Help
|
||||
|
||||
1. Check the [`memory-bank/`](memory-bank/) documentation for detailed feature information
|
||||
2. Review [`memory-bank/testing/`](memory-bank/testing/) for known issues and solutions
|
||||
3. Ensure all prerequisites are properly installed
|
||||
4. Verify database connection and PostGIS extension
|
||||
|
||||
## 🎯 Next Steps
|
||||
|
||||
After successful setup:
|
||||
|
||||
1. **Explore the Admin Interface**: http://localhost:8000/admin/
|
||||
2. **Browse the Application**: http://localhost:8000/
|
||||
3. **Review Documentation**: Check [`memory-bank/`](memory-bank/) for detailed feature docs
|
||||
4. **Run Tests**: Ensure everything works with `uv run pytest`
|
||||
5. **Start Development**: Follow the development workflow guidelines above
|
||||
|
||||
---
|
||||
|
||||
**Happy Coding!** 🎢✨
|
||||
|
||||
For detailed feature documentation and development context, see the [`memory-bank/`](memory-bank/) directory.
|
||||
|
||||
TAILWIND_V4_MIGRATION.md (new file, 326 lines)
@@ -0,0 +1,326 @@
|
||||
# Tailwind CSS v3 to v4 Migration Documentation
|
||||
|
||||
## Overview
|
||||
|
||||
This document details the complete migration process from Tailwind CSS v3 to v4 for the Django ThrillWiki project. The migration was performed on August 15, 2025, and includes all changes, configurations, and verification steps.
|
||||
|
||||
## Migration Summary
|
||||
|
||||
- **From**: Tailwind CSS v3.x
|
||||
- **To**: Tailwind CSS v4.1.12
|
||||
- **Project**: Django ThrillWiki (Django + Tailwind CSS integration)
|
||||
- **Status**: ✅ Complete and Verified
|
||||
- **Breaking Changes**: None (all styling preserved)
|
||||
|
||||
## Key Changes in Tailwind CSS v4
|
||||
|
||||
### 1. CSS Import Syntax
|
||||
- **v3**: Used `@tailwind` directives
|
||||
- **v4**: Uses single `@import "tailwindcss"` statement
|
||||
|
||||
### 2. Theme Configuration
|
||||
- **v3**: Configuration in `tailwind.config.js`
|
||||
- **v4**: CSS-first approach with `@theme` blocks
|
||||
|
||||
### 3. Deprecated Utilities
|
||||
Multiple utility classes were renamed or deprecated in v4.
|
||||
|
||||
## Migration Steps Performed
|
||||
|
||||
### Step 1: Update Main CSS File
|
||||
|
||||
**File**: `static/css/src/input.css`
|
||||
|
||||
**Before (v3)**:
|
||||
```css
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
/* Custom styles... */
|
||||
```
|
||||
|
||||
**After (v4)**:
|
||||
```css
|
||||
@import "tailwindcss";
|
||||
|
||||
@theme {
|
||||
--color-primary: #4f46e5;
|
||||
--color-secondary: #e11d48;
|
||||
--color-accent: #8b5cf6;
|
||||
--font-family-sans: Poppins, sans-serif;
|
||||
}
|
||||
|
||||
/* Custom styles... */
|
||||
```
|
||||
|
||||
### Step 2: Theme Variable Migration
|
||||
|
||||
Migrated custom colors and fonts from `tailwind.config.js` to CSS variables in `@theme` block:
|
||||
|
||||
| Variable | Value | Description |
|
||||
|----------|-------|-------------|
|
||||
| `--color-primary` | `#4f46e5` | Indigo-600 (primary brand color) |
|
||||
| `--color-secondary` | `#e11d48` | Rose-600 (secondary brand color) |
|
||||
| `--color-accent` | `#8b5cf6` | Violet-500 (accent color) |
|
||||
| `--font-family-sans` | `Poppins, sans-serif` | Primary font family |
|
||||
|
||||
### Step 3: Deprecated Utility Updates
|
||||
|
||||
#### Outline Utilities
|
||||
- **Changed**: `outline-none` → `outline-hidden`
|
||||
- **Files affected**: All template files, component CSS
|
||||
|
||||
#### Ring Utilities
|
||||
- **Changed**: `ring` → `ring-3`
|
||||
- **Reason**: Default ring width now requires explicit specification
|
||||
|
||||
#### Shadow Utilities
|
||||
- **Changed**:
|
||||
- `shadow-sm` → `shadow-xs`
|
||||
- `shadow` → `shadow-sm`
|
||||
- **Files affected**: Button components, card components
|
||||
|
||||
#### Opacity Utilities
|
||||
- **Changed**: `bg-opacity-*` format → `color/opacity` format
|
||||
- **Example**: `bg-blue-500 bg-opacity-50` → `bg-blue-500/50`
|
||||
|
||||
#### Flex Utilities
|
||||
- **Changed**: `flex-shrink-0` → `shrink-0`
|
||||
|
||||
#### Important Modifier
|
||||
- **Changed**: `!important` → `!` (shorter syntax)
|
||||
- **Example**: `!outline-none` → `!outline-hidden`
|
||||
|
||||
### Step 4: Template File Updates
|
||||
|
||||
Updated the following template files with new utility classes:
|
||||
|
||||
#### Core Templates
|
||||
- `templates/base.html`
|
||||
- `templates/components/navbar.html`
|
||||
- `templates/components/footer.html`
|
||||
|
||||
#### Page Templates
|
||||
- `templates/parks/park_list.html`
|
||||
- `templates/parks/park_detail.html`
|
||||
- `templates/rides/ride_list.html`
|
||||
- `templates/rides/ride_detail.html`
|
||||
- `templates/companies/company_list.html`
|
||||
- `templates/companies/company_detail.html`
|
||||
|
||||
#### Form Templates
|
||||
- `templates/parks/park_form.html`
|
||||
- `templates/rides/ride_form.html`
|
||||
- `templates/companies/company_form.html`
|
||||
|
||||
#### Component Templates
|
||||
- `templates/components/search_results.html`
|
||||
- `templates/components/pagination.html`
|
||||
|
||||
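Template updates of this kind are easy to spot-check mechanically. The script below is not part of the tooling that was used for this migration; it is a small sketch that greps templates for the renamed v3 utilities from Step 3 and prints the v4 replacement.

```python
# Sketch: flag leftover Tailwind v3 utility classes in Django templates.
# The mapping mirrors the renames documented above; the template root is assumed.
from pathlib import Path

V3_TO_V4 = {
    "outline-none": "outline-hidden",
    "flex-shrink-0": "shrink-0",
    "bg-opacity-": "use the color/opacity syntax, e.g. bg-blue-500/50",
    # NOTE: naive substring match; "shadow-sm" is also a valid v4 class,
    # so treat hits for it as hints rather than definite problems.
    "shadow-sm": "shadow-xs",
}


def scan_templates(root: str = "templates") -> None:
    for template in Path(root).rglob("*.html"):
        text = template.read_text(encoding="utf-8")
        for old, new in V3_TO_V4.items():
            if old in text:
                print(f"{template}: '{old}' still present -> {new}")


if __name__ == "__main__":
    scan_templates()
```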
### Step 5: Component CSS Updates
|
||||
|
||||
Updated custom component classes in `static/css/src/input.css`:
|
||||
|
||||
**Button Components**:
|
||||
```css
|
||||
.btn-primary {
|
||||
@apply inline-flex items-center px-6 py-2.5 border border-transparent rounded-full shadow-md text-sm font-medium text-white bg-gradient-to-r from-primary to-secondary hover:from-primary/90 hover:to-secondary/90 focus:outline-hidden focus:ring-3 focus:ring-offset-2 focus:ring-primary/50 transform hover:scale-105 transition-all;
|
||||
}
|
||||
|
||||
.btn-secondary {
|
||||
@apply inline-flex items-center px-6 py-2.5 border border-gray-200 dark:border-gray-700 rounded-full shadow-md text-sm font-medium text-gray-700 dark:text-gray-200 bg-white dark:bg-gray-800 hover:bg-gray-50 dark:hover:bg-gray-700 focus:outline-hidden focus:ring-3 focus:ring-offset-2 focus:ring-primary/50 transform hover:scale-105 transition-all;
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration Files
|
||||
|
||||
### Tailwind Config (Preserved for Reference)
|
||||
|
||||
**File**: `tailwind.config.js`
|
||||
|
||||
The original v3 configuration was preserved for reference but is no longer the primary configuration method:
|
||||
|
||||
```javascript
|
||||
module.exports = {
|
||||
content: [
|
||||
'./templates/**/*.html',
|
||||
'./static/js/**/*.js',
|
||||
'./*/templates/**/*.html',
|
||||
],
|
||||
darkMode: 'class',
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
primary: '#4f46e5',
|
||||
secondary: '#e11d48',
|
||||
accent: '#8b5cf6',
|
||||
},
|
||||
fontFamily: {
|
||||
sans: ['Poppins', 'sans-serif'],
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [
|
||||
require('@tailwindcss/forms'),
|
||||
require('@tailwindcss/typography'),
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Package.json Updates
|
||||
|
||||
No changes required to `package.json` as the Django-Tailwind package handles version management.
|
||||
|
||||
## Verification Steps
|
||||
|
||||
### 1. Build Process Verification
|
||||
```bash
|
||||
# Clean and rebuild CSS
|
||||
lsof -ti :8000 | xargs kill -9
|
||||
find . -type d -name "__pycache__" -exec rm -r {} +
|
||||
uv run manage.py tailwind runserver
|
||||
```
|
||||
|
||||
**Result**: ✅ Build successful, no errors
|
||||
|
||||
### 2. CSS Compilation Check
|
||||
```bash
|
||||
# Check compiled CSS size and content
|
||||
ls -la static/css/tailwind.css
|
||||
head -50 static/css/tailwind.css | grep -E "(primary|secondary|accent)"
|
||||
```
|
||||
|
||||
**Result**: ✅ CSS properly compiled with theme variables
|
||||
|
||||
### 3. Server Response Check
|
||||
```bash
|
||||
curl -s -o /dev/null -w "%{http_code}" http://localhost:8000/
|
||||
```
|
||||
|
||||
**Result**: ✅ HTTP 200 - Server responding correctly
|
||||
|
||||
### 4. Visual Verification
|
||||
- ✅ Primary colors (indigo) displaying correctly
|
||||
- ✅ Secondary colors (rose) displaying correctly
|
||||
- ✅ Accent colors (violet) displaying correctly
|
||||
- ✅ Poppins font family loading correctly
|
||||
- ✅ Button styling and interactions working
|
||||
- ✅ Dark mode functionality preserved
|
||||
- ✅ Responsive design intact
|
||||
- ✅ All animations and transitions working
|
||||
|
||||
## Files Modified
|
||||
|
||||
### CSS Files
|
||||
- `static/css/src/input.css` - ✅ Major updates (import syntax, theme variables, component classes)
|
||||
|
||||
### Template Files (Updated utility classes)
|
||||
- `templates/base.html`
|
||||
- `templates/components/navbar.html`
|
||||
- `templates/components/footer.html`
|
||||
- `templates/parks/park_list.html`
|
||||
- `templates/parks/park_detail.html`
|
||||
- `templates/parks/park_form.html`
|
||||
- `templates/rides/ride_list.html`
|
||||
- `templates/rides/ride_detail.html`
|
||||
- `templates/rides/ride_form.html`
|
||||
- `templates/companies/company_list.html`
|
||||
- `templates/companies/company_detail.html`
|
||||
- `templates/companies/company_form.html`
|
||||
- `templates/components/search_results.html`
|
||||
- `templates/components/pagination.html`
|
||||
|
||||
### Configuration Files (Preserved)
|
||||
- `tailwind.config.js` - ✅ Preserved for reference
|
||||
|
||||
## Benefits of v4 Migration
|
||||
|
||||
### Performance Improvements
|
||||
- Smaller CSS bundle size
|
||||
- Faster compilation times
|
||||
- Improved CSS-in-JS performance
|
||||
|
||||
### Developer Experience
|
||||
- CSS-first configuration approach
|
||||
- Better IDE support for theme variables
|
||||
- Simplified import syntax
|
||||
|
||||
### Future Compatibility
|
||||
- Modern CSS features support
|
||||
- Better container queries support
|
||||
- Enhanced dark mode capabilities
|
||||
|
||||
## Troubleshooting Guide
|
||||
|
||||
### Common Issues and Solutions
|
||||
|
||||
#### Issue: "Cannot apply unknown utility class"
|
||||
**Solution**: Check if utility was renamed in v4 migration table above
|
||||
|
||||
#### Issue: Custom colors not working
|
||||
**Solution**: Ensure `@theme` block is properly defined with CSS variables
|
||||
|
||||
#### Issue: Build errors
|
||||
**Solution**: Run clean build process:
|
||||
```bash
|
||||
lsof -ti :8000 | xargs kill -9
|
||||
find . -type d -name "__pycache__" -exec rm -r {} +
|
||||
uv run manage.py tailwind runserver
|
||||
```
|
||||
|
||||
## Rollback Plan
|
||||
|
||||
If rollback is needed:
|
||||
|
||||
1. **Restore CSS Import Syntax**:
|
||||
```css
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
```
|
||||
|
||||
2. **Remove @theme Block**: Delete the `@theme` section from input.css
|
||||
|
||||
3. **Revert Utility Classes**: Use search/replace to revert utility class changes
|
||||
|
||||
4. **Downgrade Tailwind**: Update package to v3.x version
|
||||
|
||||
## Post-Migration Checklist
|
||||
|
||||
- [x] CSS compilation working
|
||||
- [x] Development server running
|
||||
- [x] All pages loading correctly
|
||||
- [x] Colors displaying properly
|
||||
- [x] Fonts loading correctly
|
||||
- [x] Interactive elements working
|
||||
- [x] Dark mode functioning
|
||||
- [x] Responsive design intact
|
||||
- [x] No console errors
|
||||
- [x] Performance acceptable
|
||||
|
||||
## Future Considerations
|
||||
|
||||
### New v4 Features to Explore
|
||||
- Enhanced container queries
|
||||
- Improved dark mode utilities
|
||||
- New color-mix() support
|
||||
- Advanced CSS nesting
|
||||
|
||||
### Maintenance Notes
|
||||
- Monitor for v4 updates and new features
|
||||
- Consider migrating more configuration to CSS variables
|
||||
- Evaluate new utility classes as they're released
|
||||
|
||||
## Contact and Support
|
||||
|
||||
For questions about this migration:
|
||||
- Review this documentation
|
||||
- Check Tailwind CSS v4 official documentation
|
||||
- Consult the preserved `tailwind.config.js` for original settings
|
||||
|
||||
---
|
||||
|
||||
**Migration Completed**: August 15, 2025
|
||||
**Tailwind Version**: v4.1.12
|
||||
**Status**: Production Ready ✅
|
||||
TAILWIND_V4_QUICK_REFERENCE.md (new file, 80 lines)
@@ -0,0 +1,80 @@
# Tailwind CSS v4 Quick Reference Guide

## Common v3 → v4 Utility Migrations

| v3 Utility | v4 Utility | Notes |
|------------|------------|-------|
| `outline-none` | `outline-hidden` | Accessibility improvement |
| `ring` | `ring-3` | Must specify ring width |
| `shadow-sm` | `shadow-xs` | Renamed for consistency |
| `shadow` | `shadow-sm` | Renamed for consistency |
| `flex-shrink-0` | `shrink-0` | Shortened syntax |
| `bg-blue-500 bg-opacity-50` | `bg-blue-500/50` | New opacity syntax |
| `text-gray-700 text-opacity-75` | `text-gray-700/75` | New opacity syntax |
| `!outline-none` | `!outline-hidden` | Updated important syntax |

## Theme Variables (Available in CSS)

```css
/* Colors */
var(--color-primary)     /* #4f46e5 - Indigo-600 */
var(--color-secondary)   /* #e11d48 - Rose-600 */
var(--color-accent)      /* #8b5cf6 - Violet-500 */

/* Fonts */
var(--font-family-sans)  /* Poppins, sans-serif */
```

## Usage in Templates

### Before (v3)
```html
<button class="outline-none ring hover:ring-2 shadow-sm bg-blue-500 bg-opacity-75">
  Click me
</button>
```

### After (v4)
```html
<button class="outline-hidden ring-3 hover:ring-2 shadow-xs bg-blue-500/75">
  Click me
</button>
```

## Development Commands

### Start Development Server
```bash
lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver
```

### Force CSS Rebuild
```bash
uv run manage.py tailwind build
```

## New v4 Features

- **CSS-first configuration** via `@theme` blocks
- **Improved opacity syntax** with `/` operator
- **Better color-mix() support**
- **Enhanced dark mode utilities**
- **Faster compilation**

## Troubleshooting

### Unknown utility class error
1. Check if utility was renamed (see table above)
2. Verify custom theme variables are defined
3. Run clean build process

### Colors not working
1. Ensure `@theme` block exists in `static/css/src/input.css`
2. Check CSS variable names match usage
3. Verify CSS compilation completed

## Resources

- [Full Migration Documentation](./TAILWIND_V4_MIGRATION.md)
- [Tailwind CSS v4 Official Docs](https://tailwindcss.com/docs)
- [Django-Tailwind Package](https://django-tailwind.readthedocs.io/)
@@ -6,18 +6,19 @@ from django.contrib.sites.shortcuts import get_current_site

 User = get_user_model()


 class CustomAccountAdapter(DefaultAccountAdapter):
     def is_open_for_signup(self, request):
         """
         Whether to allow sign ups.
         """
-        return getattr(settings, 'ACCOUNT_ALLOW_SIGNUPS', True)
+        return True

     def get_email_confirmation_url(self, request, emailconfirmation):
         """
         Constructs the email confirmation (activation) url.
         """
-        site = get_current_site(request)
+        get_current_site(request)
         return f"{settings.LOGIN_REDIRECT_URL}verify-email?key={emailconfirmation.key}"

     def send_confirmation_mail(self, request, emailconfirmation, signup):
@@ -27,30 +28,31 @@ class CustomAccountAdapter(DefaultAccountAdapter):
         current_site = get_current_site(request)
         activate_url = self.get_email_confirmation_url(request, emailconfirmation)
         ctx = {
-            'user': emailconfirmation.email_address.user,
-            'activate_url': activate_url,
-            'current_site': current_site,
-            'key': emailconfirmation.key,
+            "user": emailconfirmation.email_address.user,
+            "activate_url": activate_url,
+            "current_site": current_site,
+            "key": emailconfirmation.key,
         }
         if signup:
-            email_template = 'account/email/email_confirmation_signup'
+            email_template = "account/email/email_confirmation_signup"
         else:
-            email_template = 'account/email/email_confirmation'
+            email_template = "account/email/email_confirmation"
         self.send_mail(email_template, emailconfirmation.email_address.email, ctx)


 class CustomSocialAccountAdapter(DefaultSocialAccountAdapter):
     def is_open_for_signup(self, request, sociallogin):
         """
         Whether to allow social account sign ups.
         """
-        return getattr(settings, 'SOCIALACCOUNT_ALLOW_SIGNUPS', True)
+        return True

     def populate_user(self, request, sociallogin, data):
         """
         Hook that can be used to further populate the user instance.
         """
         user = super().populate_user(request, sociallogin, data)
-        if sociallogin.account.provider == 'discord':
+        if sociallogin.account.provider == "discord":
             user.discord_id = sociallogin.account.uid
         return user
@@ -1,78 +1,138 @@
|
||||
from django.contrib import admin
|
||||
from django.contrib.auth.admin import UserAdmin
|
||||
from django.utils.html import format_html
|
||||
from django.urls import reverse
|
||||
from django.contrib.auth.models import Group
|
||||
from .models import User, UserProfile, EmailVerification, TopList, TopListItem
|
||||
|
||||
|
||||
class UserProfileInline(admin.StackedInline):
|
||||
model = UserProfile
|
||||
can_delete = False
|
||||
verbose_name_plural = 'Profile'
|
||||
verbose_name_plural = "Profile"
|
||||
fieldsets = (
|
||||
('Personal Info', {
|
||||
'fields': ('display_name', 'avatar', 'pronouns', 'bio')
|
||||
}),
|
||||
('Social Media', {
|
||||
'fields': ('twitter', 'instagram', 'youtube', 'discord')
|
||||
}),
|
||||
('Ride Credits', {
|
||||
'fields': (
|
||||
'coaster_credits',
|
||||
'dark_ride_credits',
|
||||
'flat_ride_credits',
|
||||
'water_ride_credits'
|
||||
)
|
||||
}),
|
||||
(
|
||||
"Personal Info",
|
||||
{"fields": ("display_name", "avatar", "pronouns", "bio")},
|
||||
),
|
||||
(
|
||||
"Social Media",
|
||||
{"fields": ("twitter", "instagram", "youtube", "discord")},
|
||||
),
|
||||
(
|
||||
"Ride Credits",
|
||||
{
|
||||
"fields": (
|
||||
"coaster_credits",
|
||||
"dark_ride_credits",
|
||||
"flat_ride_credits",
|
||||
"water_ride_credits",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class TopListItemInline(admin.TabularInline):
|
||||
model = TopListItem
|
||||
extra = 1
|
||||
fields = ('content_type', 'object_id', 'rank', 'notes')
|
||||
ordering = ('rank',)
|
||||
fields = ("content_type", "object_id", "rank", "notes")
|
||||
ordering = ("rank",)
|
||||
|
||||
|
||||
@admin.register(User)
|
||||
class CustomUserAdmin(UserAdmin):
|
||||
list_display = ('username', 'email', 'get_avatar', 'get_status', 'role', 'date_joined', 'last_login', 'get_credits')
|
||||
list_filter = ('is_active', 'is_staff', 'role', 'is_banned', 'groups', 'date_joined')
|
||||
search_fields = ('username', 'email')
|
||||
ordering = ('-date_joined',)
|
||||
actions = ['activate_users', 'deactivate_users', 'ban_users', 'unban_users']
|
||||
list_display = (
|
||||
"username",
|
||||
"email",
|
||||
"get_avatar",
|
||||
"get_status",
|
||||
"role",
|
||||
"date_joined",
|
||||
"last_login",
|
||||
"get_credits",
|
||||
)
|
||||
list_filter = (
|
||||
"is_active",
|
||||
"is_staff",
|
||||
"role",
|
||||
"is_banned",
|
||||
"groups",
|
||||
"date_joined",
|
||||
)
|
||||
search_fields = ("username", "email")
|
||||
ordering = ("-date_joined",)
|
||||
actions = [
|
||||
"activate_users",
|
||||
"deactivate_users",
|
||||
"ban_users",
|
||||
"unban_users",
|
||||
]
|
||||
inlines = [UserProfileInline]
|
||||
|
||||
fieldsets = (
|
||||
(None, {'fields': ('username', 'password')}),
|
||||
('Personal info', {'fields': ('email', 'pending_email')}),
|
||||
('Roles and Permissions', {
|
||||
'fields': ('role', 'groups', 'user_permissions'),
|
||||
'description': 'Role determines group membership. Groups determine permissions.',
|
||||
}),
|
||||
('Status', {
|
||||
'fields': ('is_active', 'is_staff', 'is_superuser'),
|
||||
'description': 'These are automatically managed based on role.',
|
||||
}),
|
||||
('Ban Status', {
|
||||
'fields': ('is_banned', 'ban_reason', 'ban_date'),
|
||||
}),
|
||||
('Preferences', {
|
||||
'fields': ('theme_preference',),
|
||||
}),
|
||||
('Important dates', {'fields': ('last_login', 'date_joined')}),
|
||||
(None, {"fields": ("username", "password")}),
|
||||
("Personal info", {"fields": ("email", "pending_email")}),
|
||||
(
|
||||
"Roles and Permissions",
|
||||
{
|
||||
"fields": ("role", "groups", "user_permissions"),
|
||||
"description": (
|
||||
"Role determines group membership. Groups determine permissions."
|
||||
),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Status",
|
||||
{
|
||||
"fields": ("is_active", "is_staff", "is_superuser"),
|
||||
"description": "These are automatically managed based on role.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"Ban Status",
|
||||
{
|
||||
"fields": ("is_banned", "ban_reason", "ban_date"),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Preferences",
|
||||
{
|
||||
"fields": ("theme_preference",),
|
||||
},
|
||||
),
|
||||
("Important dates", {"fields": ("last_login", "date_joined")}),
|
||||
)
|
||||
add_fieldsets = (
|
||||
(None, {
|
||||
'classes': ('wide',),
|
||||
'fields': ('username', 'email', 'password1', 'password2', 'role'),
|
||||
}),
|
||||
(
|
||||
None,
|
||||
{
|
||||
"classes": ("wide",),
|
||||
"fields": (
|
||||
"username",
|
||||
"email",
|
||||
"password1",
|
||||
"password2",
|
||||
"role",
|
||||
),
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@admin.display(description="Avatar")
|
||||
def get_avatar(self, obj):
|
||||
if obj.profile.avatar:
|
||||
return format_html('<img src="{}" width="30" height="30" style="border-radius:50%;" />', obj.profile.avatar.url)
|
||||
return format_html('<div style="width:30px; height:30px; border-radius:50%; background-color:#007bff; color:white; display:flex; align-items:center; justify-content:center;">{}</div>', obj.username[0].upper())
|
||||
get_avatar.short_description = 'Avatar'
|
||||
return format_html(
|
||||
'<img src="{}" width="30" height="30" style="border-radius:50%;" />',
|
||||
obj.profile.avatar.url,
|
||||
)
|
||||
return format_html(
|
||||
'<div style="width:30px; height:30px; border-radius:50%; '
|
||||
"background-color:#007bff; color:white; display:flex; "
|
||||
'align-items:center; justify-content:center;">{}</div>',
|
||||
obj.username[0].upper(),
|
||||
)
|
||||
|
||||
@admin.display(description="Status")
|
||||
def get_status(self, obj):
|
||||
if obj.is_banned:
|
||||
return format_html('<span style="color: red;">Banned</span>')
|
||||
@@ -83,38 +143,38 @@ class CustomUserAdmin(UserAdmin):
|
||||
if obj.is_staff:
|
||||
return format_html('<span style="color: blue;">Staff</span>')
|
||||
return format_html('<span style="color: green;">Active</span>')
|
||||
get_status.short_description = 'Status'
|
||||
|
||||
@admin.display(description="Ride Credits")
|
||||
def get_credits(self, obj):
|
||||
try:
|
||||
profile = obj.profile
|
||||
return format_html(
|
||||
'RC: {}<br>DR: {}<br>FR: {}<br>WR: {}',
|
||||
"RC: {}<br>DR: {}<br>FR: {}<br>WR: {}",
|
||||
profile.coaster_credits,
|
||||
profile.dark_ride_credits,
|
||||
profile.flat_ride_credits,
|
||||
profile.water_ride_credits
|
||||
profile.water_ride_credits,
|
||||
)
|
||||
except UserProfile.DoesNotExist:
|
||||
return '-'
|
||||
get_credits.short_description = 'Ride Credits'
|
||||
return "-"
|
||||
|
||||
@admin.action(description="Activate selected users")
|
||||
def activate_users(self, request, queryset):
|
||||
queryset.update(is_active=True)
|
||||
activate_users.short_description = "Activate selected users"
|
||||
|
||||
@admin.action(description="Deactivate selected users")
|
||||
def deactivate_users(self, request, queryset):
|
||||
queryset.update(is_active=False)
|
||||
deactivate_users.short_description = "Deactivate selected users"
|
||||
|
||||
@admin.action(description="Ban selected users")
|
||||
def ban_users(self, request, queryset):
|
||||
from django.utils import timezone
|
||||
queryset.update(is_banned=True, ban_date=timezone.now())
|
||||
ban_users.short_description = "Ban selected users"
|
||||
|
||||
queryset.update(is_banned=True, ban_date=timezone.now())
|
||||
|
||||
@admin.action(description="Unban selected users")
|
||||
def unban_users(self, request, queryset):
|
||||
queryset.update(is_banned=False, ban_date=None, ban_reason='')
|
||||
unban_users.short_description = "Unban selected users"
|
||||
queryset.update(is_banned=False, ban_date=None, ban_reason="")
|
||||
|
||||
def save_model(self, request, obj, form, change):
|
||||
creating = not obj.pk
|
||||
@@ -125,83 +185,98 @@ class CustomUserAdmin(UserAdmin):
|
||||
if group:
|
||||
obj.groups.add(group)
|
||||
|
||||
|
||||
@admin.register(UserProfile)
|
||||
class UserProfileAdmin(admin.ModelAdmin):
|
||||
list_display = ('user', 'display_name', 'coaster_credits', 'dark_ride_credits', 'flat_ride_credits', 'water_ride_credits')
|
||||
list_filter = ('coaster_credits', 'dark_ride_credits', 'flat_ride_credits', 'water_ride_credits')
|
||||
search_fields = ('user__username', 'user__email', 'display_name', 'bio')
|
||||
list_display = (
|
||||
"user",
|
||||
"display_name",
|
||||
"coaster_credits",
|
||||
"dark_ride_credits",
|
||||
"flat_ride_credits",
|
||||
"water_ride_credits",
|
||||
)
|
||||
list_filter = (
|
||||
"coaster_credits",
|
||||
"dark_ride_credits",
|
||||
"flat_ride_credits",
|
||||
"water_ride_credits",
|
||||
)
|
||||
search_fields = ("user__username", "user__email", "display_name", "bio")
|
||||
|
||||
fieldsets = (
|
||||
('User Information', {
|
||||
'fields': ('user', 'display_name', 'avatar', 'pronouns', 'bio')
|
||||
}),
|
||||
('Social Media', {
|
||||
'fields': ('twitter', 'instagram', 'youtube', 'discord')
|
||||
}),
|
||||
('Ride Credits', {
|
||||
'fields': (
|
||||
'coaster_credits',
|
||||
'dark_ride_credits',
|
||||
'flat_ride_credits',
|
||||
'water_ride_credits'
|
||||
)
|
||||
}),
|
||||
(
|
||||
"User Information",
|
||||
{"fields": ("user", "display_name", "avatar", "pronouns", "bio")},
|
||||
),
|
||||
(
|
||||
"Social Media",
|
||||
{"fields": ("twitter", "instagram", "youtube", "discord")},
|
||||
),
|
||||
(
|
||||
"Ride Credits",
|
||||
{
|
||||
"fields": (
|
||||
"coaster_credits",
|
||||
"dark_ride_credits",
|
||||
"flat_ride_credits",
|
||||
"water_ride_credits",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@admin.register(EmailVerification)
|
||||
class EmailVerificationAdmin(admin.ModelAdmin):
|
||||
list_display = ('user', 'created_at', 'last_sent', 'is_expired')
|
||||
list_filter = ('created_at', 'last_sent')
|
||||
search_fields = ('user__username', 'user__email', 'token')
|
||||
readonly_fields = ('created_at', 'last_sent')
|
||||
list_display = ("user", "created_at", "last_sent", "is_expired")
|
||||
list_filter = ("created_at", "last_sent")
|
||||
search_fields = ("user__username", "user__email", "token")
|
||||
readonly_fields = ("created_at", "last_sent")
|
||||
|
||||
fieldsets = (
|
||||
('Verification Details', {
|
||||
'fields': ('user', 'token')
|
||||
}),
|
||||
('Timing', {
|
||||
'fields': ('created_at', 'last_sent')
|
||||
}),
|
||||
("Verification Details", {"fields": ("user", "token")}),
|
||||
("Timing", {"fields": ("created_at", "last_sent")}),
|
||||
)
|
||||
|
||||
@admin.display(description="Status")
|
||||
def is_expired(self, obj):
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
if timezone.now() - obj.last_sent > timedelta(days=1):
|
||||
return format_html('<span style="color: red;">Expired</span>')
|
||||
return format_html('<span style="color: green;">Valid</span>')
|
||||
is_expired.short_description = 'Status'
|
||||
|
||||
|
||||
@admin.register(TopList)
|
||||
class TopListAdmin(admin.ModelAdmin):
|
||||
list_display = ('title', 'user', 'category', 'created_at', 'updated_at')
|
||||
list_filter = ('category', 'created_at', 'updated_at')
|
||||
search_fields = ('title', 'user__username', 'description')
|
||||
list_display = ("title", "user", "category", "created_at", "updated_at")
|
||||
list_filter = ("category", "created_at", "updated_at")
|
||||
search_fields = ("title", "user__username", "description")
|
||||
inlines = [TopListItemInline]
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('user', 'title', 'category', 'description')
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('created_at', 'updated_at'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
(
|
||||
"Basic Information",
|
||||
{"fields": ("user", "title", "category", "description")},
|
||||
),
|
||||
(
|
||||
"Timestamps",
|
||||
{"fields": ("created_at", "updated_at"), "classes": ("collapse",)},
|
||||
),
|
||||
)
|
||||
readonly_fields = ('created_at', 'updated_at')
|
||||
readonly_fields = ("created_at", "updated_at")
|
||||
|
||||
|
||||
@admin.register(TopListItem)
|
||||
class TopListItemAdmin(admin.ModelAdmin):
|
||||
list_display = ('top_list', 'content_type', 'object_id', 'rank')
|
||||
list_filter = ('top_list__category', 'rank')
|
||||
search_fields = ('top_list__title', 'notes')
|
||||
ordering = ('top_list', 'rank')
|
||||
list_display = ("top_list", "content_type", "object_id", "rank")
|
||||
list_filter = ("top_list__category", "rank")
|
||||
search_fields = ("top_list__title", "notes")
|
||||
ordering = ("top_list", "rank")
|
||||
|
||||
fieldsets = (
|
||||
('List Information', {
|
||||
'fields': ('top_list', 'rank')
|
||||
}),
|
||||
('Item Details', {
|
||||
'fields': ('content_type', 'object_id', 'notes')
|
||||
}),
|
||||
("List Information", {"fields": ("top_list", "rank")}),
|
||||
("Item Details", {"fields": ("content_type", "object_id", "notes")}),
|
||||
)
|
||||
|
||||
@@ -2,29 +2,45 @@ from django.core.management.base import BaseCommand
|
||||
from allauth.socialaccount.models import SocialApp, SocialAccount, SocialToken
|
||||
from django.contrib.sites.models import Site
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Check all social auth related tables'
|
||||
help = "Check all social auth related tables"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Check SocialApp
|
||||
self.stdout.write('\nChecking SocialApp table:')
|
||||
self.stdout.write("\nChecking SocialApp table:")
|
||||
for app in SocialApp.objects.all():
|
||||
self.stdout.write(f'ID: {app.id}, Provider: {app.provider}, Name: {app.name}, Client ID: {app.client_id}')
|
||||
self.stdout.write('Sites:')
|
||||
self.stdout.write(
|
||||
f"ID: {
|
||||
app.pk}, Provider: {
|
||||
app.provider}, Name: {
|
||||
app.name}, Client ID: {
|
||||
app.client_id}"
|
||||
)
|
||||
self.stdout.write("Sites:")
|
||||
for site in app.sites.all():
|
||||
self.stdout.write(f' - {site.domain}')
|
||||
self.stdout.write(f" - {site.domain}")
|
||||
|
||||
# Check SocialAccount
|
||||
self.stdout.write('\nChecking SocialAccount table:')
|
||||
self.stdout.write("\nChecking SocialAccount table:")
|
||||
for account in SocialAccount.objects.all():
|
||||
self.stdout.write(f'ID: {account.id}, Provider: {account.provider}, UID: {account.uid}')
|
||||
self.stdout.write(
|
||||
f"ID: {
|
||||
account.pk}, Provider: {
|
||||
account.provider}, UID: {
|
||||
account.uid}"
|
||||
)
|
||||
|
||||
# Check SocialToken
|
||||
self.stdout.write('\nChecking SocialToken table:')
|
||||
self.stdout.write("\nChecking SocialToken table:")
|
||||
for token in SocialToken.objects.all():
|
||||
self.stdout.write(f'ID: {token.id}, Account: {token.account}, App: {token.app}')
|
||||
self.stdout.write(
|
||||
f"ID: {token.pk}, Account: {token.account}, App: {token.app}"
|
||||
)
|
||||
|
||||
# Check Site
|
||||
self.stdout.write('\nChecking Site table:')
|
||||
self.stdout.write("\nChecking Site table:")
|
||||
for site in Site.objects.all():
|
||||
self.stdout.write(f'ID: {site.id}, Domain: {site.domain}, Name: {site.name}')
|
||||
self.stdout.write(
|
||||
f"ID: {site.pk}, Domain: {site.domain}, Name: {site.name}"
|
||||
)
|
||||
|
||||
@@ -1,19 +1,27 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Check social app configurations'
|
||||
help = "Check social app configurations"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
social_apps = SocialApp.objects.all()
|
||||
|
||||
if not social_apps:
|
||||
self.stdout.write(self.style.ERROR('No social apps found'))
|
||||
self.stdout.write(self.style.ERROR("No social apps found"))
|
||||
return
|
||||
|
||||
for app in social_apps:
|
||||
self.stdout.write(self.style.SUCCESS(f'\nProvider: {app.provider}'))
|
||||
self.stdout.write(f'Name: {app.name}')
|
||||
self.stdout.write(f'Client ID: {app.client_id}')
|
||||
self.stdout.write(f'Secret: {app.secret}')
|
||||
self.stdout.write(f'Sites: {", ".join(str(site.domain) for site in app.sites.all())}')
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"\nProvider: {
|
||||
app.provider}"
|
||||
)
|
||||
)
|
||||
self.stdout.write(f"Name: {app.name}")
|
||||
self.stdout.write(f"Client ID: {app.client_id}")
|
||||
self.stdout.write(f"Secret: {app.secret}")
|
||||
self.stdout.write(
|
||||
f'Sites: {", ".join(str(site.domain) for site in app.sites.all())}'
|
||||
)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Clean up social auth tables and migrations'
|
||||
help = "Clean up social auth tables and migrations"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
with connection.cursor() as cursor:
|
||||
@@ -14,9 +15,14 @@ class Command(BaseCommand):
|
||||
|
||||
# Remove migration records
|
||||
cursor.execute("DELETE FROM django_migrations WHERE app='socialaccount'")
|
||||
cursor.execute("DELETE FROM django_migrations WHERE app='accounts' AND name LIKE '%social%'")
|
||||
cursor.execute(
|
||||
"DELETE FROM django_migrations WHERE app='accounts' "
|
||||
"AND name LIKE '%social%'"
|
||||
)
|
||||
|
||||
# Reset sequences
|
||||
cursor.execute("DELETE FROM sqlite_sequence WHERE name LIKE '%social%'")
|
||||
|
||||
self.stdout.write(self.style.SUCCESS('Successfully cleaned up social auth configuration'))
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully cleaned up social auth configuration")
|
||||
)
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import Group
|
||||
from reviews.models import Review
|
||||
from parks.models import Park
|
||||
from parks.models import ParkReview, Park
|
||||
from rides.models import Ride
|
||||
from media.models import Photo
|
||||
|
||||
@@ -20,7 +18,9 @@ class Command(BaseCommand):
|
||||
self.stdout.write(self.style.SUCCESS(f"Deleted {count} test users"))
|
||||
|
||||
# Delete test reviews
|
||||
reviews = Review.objects.filter(user__username__in=["testuser", "moderator"])
|
||||
reviews = ParkReview.objects.filter(
|
||||
user__username__in=["testuser", "moderator"]
|
||||
)
|
||||
count = reviews.count()
|
||||
reviews.delete()
|
||||
self.stdout.write(self.style.SUCCESS(f"Deleted {count} test reviews"))
|
||||
|
||||
@@ -2,47 +2,54 @@ from django.core.management.base import BaseCommand
|
||||
from django.contrib.sites.models import Site
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Create social apps for authentication'
|
||||
help = "Create social apps for authentication"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Get the default site
|
||||
site = Site.objects.get_or_create(
|
||||
id=1,
|
||||
defaults={
|
||||
'domain': 'localhost:8000',
|
||||
'name': 'ThrillWiki Development'
|
||||
}
|
||||
"domain": "localhost:8000",
|
||||
"name": "ThrillWiki Development",
|
||||
},
|
||||
)[0]
|
||||
|
||||
# Create Discord app
|
||||
discord_app, created = SocialApp.objects.get_or_create(
|
||||
provider='discord',
|
||||
provider="discord",
|
||||
defaults={
|
||||
'name': 'Discord',
|
||||
'client_id': '1299112802274902047',
|
||||
'secret': 'ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11',
|
||||
}
|
||||
"name": "Discord",
|
||||
"client_id": "1299112802274902047",
|
||||
"secret": "ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11",
|
||||
},
|
||||
)
|
||||
if not created:
|
||||
discord_app.client_id = '1299112802274902047'
|
||||
discord_app.secret = 'ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11'
|
||||
discord_app.client_id = "1299112802274902047"
|
||||
discord_app.secret = "ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11"
|
||||
discord_app.save()
|
||||
discord_app.sites.add(site)
|
||||
self.stdout.write(f'{"Created" if created else "Updated"} Discord app')
|
||||
|
||||
# Create Google app
|
||||
google_app, created = SocialApp.objects.get_or_create(
|
||||
provider='google',
|
||||
provider="google",
|
||||
defaults={
|
||||
'name': 'Google',
|
||||
'client_id': '135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com',
|
||||
'secret': 'GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue',
|
||||
}
|
||||
"name": "Google",
|
||||
"client_id": (
|
||||
"135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2."
|
||||
"apps.googleusercontent.com"
|
||||
),
|
||||
"secret": "GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue",
|
||||
},
|
||||
)
|
||||
if not created:
|
||||
google_app.client_id = '135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com'
|
||||
google_app.secret = 'GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue'
|
||||
google_app.client_id = (
|
||||
"135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2."
|
||||
"apps.googleusercontent.com"
|
||||
)
|
||||
google_app.secret = "GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue"
|
||||
google_app.save()
|
||||
google_app.sites.add(site)
|
||||
self.stdout.write(f'{"Created" if created else "Updated"} Google app')
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import Group, Permission
|
||||
|
||||
User = get_user_model()
|
||||
from django.contrib.auth.models import Group, Permission, User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -11,22 +8,25 @@ class Command(BaseCommand):
|
||||
def handle(self, *args, **kwargs):
|
||||
# Create regular test user
|
||||
if not User.objects.filter(username="testuser").exists():
|
||||
user = User.objects.create_user(
|
||||
user = User.objects.create(
|
||||
username="testuser",
|
||||
email="testuser@example.com",
|
||||
[PASSWORD-REMOVED]",
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(f"Created test user: {user.username}"))
|
||||
user.set_password("testpass123")
|
||||
user.save()
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Created test user: {user.get_username()}")
|
||||
)
|
||||
else:
|
||||
self.stdout.write(self.style.WARNING("Test user already exists"))
|
||||
|
||||
# Create moderator user
|
||||
if not User.objects.filter(username="moderator").exists():
|
||||
moderator = User.objects.create_user(
|
||||
moderator = User.objects.create(
|
||||
username="moderator",
|
||||
email="moderator@example.com",
|
||||
[PASSWORD-REMOVED]",
|
||||
)
|
||||
moderator.set_password("modpass123")
|
||||
moderator.save()
|
||||
|
||||
# Create moderator group if it doesn't exist
|
||||
moderator_group, created = Group.objects.get_or_create(name="Moderators")
|
||||
@@ -48,7 +48,9 @@ class Command(BaseCommand):
|
||||
moderator.groups.add(moderator_group)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Created moderator user: {moderator.username}")
|
||||
self.style.SUCCESS(
|
||||
f"Created moderator user: {moderator.get_username()}"
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.stdout.write(self.style.WARNING("Moderator user already exists"))
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Fix migration history by removing rides.0001_initial'
|
||||
help = "Fix migration history by removing rides.0001_initial"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("DELETE FROM django_migrations WHERE app='rides' AND name='0001_initial';")
|
||||
self.stdout.write(self.style.SUCCESS('Successfully removed rides.0001_initial from migration history'))
|
||||
cursor.execute(
|
||||
"DELETE FROM django_migrations WHERE app='rides' "
|
||||
"AND name='0001_initial';"
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"Successfully removed rides.0001_initial from migration history"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -3,33 +3,39 @@ from allauth.socialaccount.models import SocialApp
|
||||
from django.contrib.sites.models import Site
|
||||
import os
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Fix social app configurations'
|
||||
help = "Fix social app configurations"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Delete all existing social apps
|
||||
SocialApp.objects.all().delete()
|
||||
self.stdout.write('Deleted all existing social apps')
|
||||
self.stdout.write("Deleted all existing social apps")
|
||||
|
||||
# Get the default site
|
||||
site = Site.objects.get(id=1)
|
||||
|
||||
# Create Google provider
|
||||
google_app = SocialApp.objects.create(
|
||||
provider='google',
|
||||
name='Google',
|
||||
client_id=os.getenv('GOOGLE_CLIENT_ID'),
|
||||
secret=os.getenv('GOOGLE_CLIENT_SECRET'),
|
||||
provider="google",
|
||||
name="Google",
|
||||
client_id=os.getenv("GOOGLE_CLIENT_ID"),
|
||||
secret=os.getenv("GOOGLE_CLIENT_SECRET"),
|
||||
)
|
||||
google_app.sites.add(site)
|
||||
self.stdout.write(f'Created Google app with client_id: {google_app.client_id}')
|
||||
self.stdout.write(
|
||||
f"Created Google app with client_id: {
|
||||
google_app.client_id}"
|
||||
)
|
||||
|
||||
# Create Discord provider
|
||||
discord_app = SocialApp.objects.create(
|
||||
provider='discord',
|
||||
name='Discord',
|
||||
client_id=os.getenv('DISCORD_CLIENT_ID'),
|
||||
secret=os.getenv('DISCORD_CLIENT_SECRET'),
|
||||
provider="discord",
|
||||
name="Discord",
|
||||
client_id=os.getenv("DISCORD_CLIENT_ID"),
|
||||
secret=os.getenv("DISCORD_CLIENT_SECRET"),
|
||||
)
|
||||
discord_app.sites.add(site)
|
||||
self.stdout.write(f'Created Discord app with client_id: {discord_app.client_id}')
|
||||
self.stdout.write(
|
||||
f"Created Discord app with client_id: {discord_app.client_id}"
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@ from django.core.management.base import BaseCommand
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
import os
|
||||
|
||||
|
||||
def generate_avatar(letter):
|
||||
"""Generate an avatar for a given letter or number"""
|
||||
avatar_size = (100, 100)
|
||||
@@ -10,7 +11,7 @@ def generate_avatar(letter):
|
||||
font_size = 100
|
||||
|
||||
# Create a blank image with background color
|
||||
image = Image.new('RGB', avatar_size, background_color)
|
||||
image = Image.new("RGB", avatar_size, background_color)
|
||||
draw = ImageDraw.Draw(image)
|
||||
|
||||
# Load a font
|
||||
@@ -19,8 +20,14 @@ def generate_avatar(letter):
|
||||
|
||||
# Calculate text size and position using textbbox
|
||||
text_bbox = draw.textbbox((0, 0), letter, font=font)
|
||||
text_width, text_height = text_bbox[2] - text_bbox[0], text_bbox[3] - text_bbox[1]
|
||||
text_position = ((avatar_size[0] - text_width) / 2, (avatar_size[1] - text_height) / 2)
|
||||
text_width, text_height = (
|
||||
text_bbox[2] - text_bbox[0],
|
||||
text_bbox[3] - text_bbox[1],
|
||||
)
|
||||
text_position = (
|
||||
(avatar_size[0] - text_width) / 2,
|
||||
(avatar_size[1] - text_height) / 2,
|
||||
)
|
||||
|
||||
# Draw the text on the image
|
||||
draw.text(text_position, letter, font=font, fill=text_color)
|
||||
@@ -34,11 +41,14 @@ def generate_avatar(letter):
|
||||
avatar_path = os.path.join(avatar_dir, f"{letter}_avatar.png")
|
||||
image.save(avatar_path)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Generate avatars for letters A-Z and numbers 0-9'
|
||||
help = "Generate avatars for letters A-Z and numbers 0-9"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
characters = [chr(i) for i in range(65, 91)] + [str(i) for i in range(10)] # A-Z and 0-9
|
||||
characters = [chr(i) for i in range(65, 91)] + [
|
||||
str(i) for i in range(10)
|
||||
] # A-Z and 0-9
|
||||
for char in characters:
|
||||
generate_avatar(char)
|
||||
self.stdout.write(self.style.SUCCESS(f"Generated avatar for {char}"))
|
||||
|
||||
@@ -1,11 +1,18 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from accounts.models import UserProfile
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Regenerate default avatars for users without an uploaded avatar'
|
||||
help = "Regenerate default avatars for users without an uploaded avatar"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
profiles = UserProfile.objects.filter(avatar='')
|
||||
profiles = UserProfile.objects.filter(avatar="")
|
||||
for profile in profiles:
|
||||
profile.save() # This will trigger the avatar generation logic in the save method
|
||||
self.stdout.write(self.style.SUCCESS(f"Regenerated avatar for {profile.user.username}"))
|
||||
# This will trigger the avatar generation logic in the save method
|
||||
profile.save()
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f"Regenerated avatar for {
|
||||
profile.user.username}"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -3,49 +3,64 @@ from django.db import connection
|
||||
from django.contrib.auth.hashers import make_password
|
||||
import uuid
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Reset database and create admin user'
|
||||
help = "Reset database and create admin user"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('Resetting database...')
|
||||
self.stdout.write("Resetting database...")
|
||||
|
||||
# Drop all tables
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("""
|
||||
cursor.execute(
|
||||
"""
|
||||
DO $$ DECLARE
|
||||
r RECORD;
|
||||
BEGIN
|
||||
FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP
|
||||
EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE';
|
||||
FOR r IN (
|
||||
SELECT tablename FROM pg_tables
|
||||
WHERE schemaname = current_schema()
|
||||
) LOOP
|
||||
EXECUTE 'DROP TABLE IF EXISTS ' || \
|
||||
quote_ident(r.tablename) || ' CASCADE';
|
||||
END LOOP;
|
||||
END $$;
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
# Reset sequences
|
||||
cursor.execute("""
|
||||
cursor.execute(
|
||||
"""
|
||||
DO $$ DECLARE
|
||||
r RECORD;
|
||||
BEGIN
|
||||
FOR r IN (SELECT sequencename FROM pg_sequences WHERE schemaname = current_schema()) LOOP
|
||||
EXECUTE 'ALTER SEQUENCE ' || quote_ident(r.sequencename) || ' RESTART WITH 1';
|
||||
FOR r IN (
|
||||
SELECT sequencename FROM pg_sequences
|
||||
WHERE schemaname = current_schema()
|
||||
) LOOP
|
||||
EXECUTE 'ALTER SEQUENCE ' || \
|
||||
quote_ident(r.sequencename) || ' RESTART WITH 1';
|
||||
END LOOP;
|
||||
END $$;
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
self.stdout.write('All tables dropped and sequences reset.')
|
||||
self.stdout.write("All tables dropped and sequences reset.")
|
||||
|
||||
# Run migrations
|
||||
from django.core.management import call_command
|
||||
call_command('migrate')
|
||||
|
||||
self.stdout.write('Migrations applied.')
|
||||
call_command("migrate")
|
||||
|
||||
self.stdout.write("Migrations applied.")
|
||||
|
||||
# Create superuser using raw SQL
|
||||
try:
|
||||
with connection.cursor() as cursor:
|
||||
# Create user
|
||||
user_id = str(uuid.uuid4())[:10]
|
||||
cursor.execute("""
|
||||
cursor.execute(
|
||||
"""
|
||||
INSERT INTO accounts_user (
|
||||
username, password, email, is_superuser, is_staff,
|
||||
is_active, date_joined, user_id, first_name,
|
||||
@@ -56,13 +71,19 @@ class Command(BaseCommand):
|
||||
true, NOW(), %s, '', '', 'SUPERUSER', false, '',
|
||||
'light'
|
||||
) RETURNING id;
|
||||
""", [make_password('admin'), user_id])
|
||||
""",
|
||||
[make_password("admin"), user_id],
|
||||
)
|
||||
|
||||
user_db_id = cursor.fetchone()[0]
|
||||
result = cursor.fetchone()
|
||||
if result is None:
|
||||
raise Exception("Failed to create user - no ID returned")
|
||||
user_db_id = result[0]
|
||||
|
||||
# Create profile
|
||||
profile_id = str(uuid.uuid4())[:10]
|
||||
cursor.execute("""
|
||||
cursor.execute(
|
||||
"""
|
||||
INSERT INTO accounts_userprofile (
|
||||
profile_id, display_name, pronouns, bio,
|
||||
twitter, instagram, youtube, discord,
|
||||
@@ -75,11 +96,18 @@ class Command(BaseCommand):
|
||||
0, 0, 0, 0,
|
||||
%s, ''
|
||||
);
|
||||
""", [profile_id, user_db_id])
|
||||
""",
|
||||
[profile_id, user_db_id],
|
||||
)
|
||||
|
||||
self.stdout.write('Superuser created.')
|
||||
self.stdout.write("Superuser created.")
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f'Error creating superuser: {str(e)}'))
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
f"Error creating superuser: {
|
||||
str(e)}"
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
self.stdout.write(self.style.SUCCESS('Database reset complete.'))
|
||||
self.stdout.write(self.style.SUCCESS("Database reset complete."))
|
||||
|
||||
@@ -3,8 +3,9 @@ from allauth.socialaccount.models import SocialApp
|
||||
from django.contrib.sites.models import Site
|
||||
from django.db import connection
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Reset social apps configuration'
|
||||
help = "Reset social apps configuration"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Delete all social apps using raw SQL to bypass Django's ORM
|
||||
@@ -17,20 +18,22 @@ class Command(BaseCommand):
|
||||
|
||||
# Create Discord app
|
||||
discord_app = SocialApp.objects.create(
|
||||
provider='discord',
|
||||
name='Discord',
|
||||
client_id='1299112802274902047',
|
||||
secret='ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11',
|
||||
provider="discord",
|
||||
name="Discord",
|
||||
client_id="1299112802274902047",
|
||||
secret="ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11",
|
||||
)
|
||||
discord_app.sites.add(site)
|
||||
self.stdout.write(f'Created Discord app with ID: {discord_app.id}')
|
||||
self.stdout.write(f"Created Discord app with ID: {discord_app.pk}")
|
||||
|
||||
# Create Google app
|
||||
google_app = SocialApp.objects.create(
|
||||
provider='google',
|
||||
name='Google',
|
||||
client_id='135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com',
|
||||
secret='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',
|
||||
provider="google",
|
||||
name="Google",
|
||||
client_id=(
|
||||
"135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com"
|
||||
),
|
||||
secret="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",
|
||||
)
|
||||
google_app.sites.add(site)
|
||||
self.stdout.write(f'Created Google app with ID: {google_app.id}')
|
||||
self.stdout.write(f"Created Google app with ID: {google_app.pk}")
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Reset social auth configuration'
|
||||
help = "Reset social auth configuration"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
with connection.cursor() as cursor:
|
||||
@@ -11,7 +12,13 @@ class Command(BaseCommand):
|
||||
cursor.execute("DELETE FROM socialaccount_socialapp_sites")
|
||||
|
||||
# Reset sequences
|
||||
cursor.execute("DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp'")
|
||||
cursor.execute("DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp_sites'")
|
||||
cursor.execute(
|
||||
"DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp'"
|
||||
)
|
||||
cursor.execute(
|
||||
"DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp_sites'"
|
||||
)
|
||||
|
||||
self.stdout.write(self.style.SUCCESS('Successfully reset social auth configuration'))
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully reset social auth configuration")
|
||||
)
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.contrib.auth.models import Group, Permission
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.contrib.auth.models import Group
|
||||
from accounts.models import User
|
||||
from accounts.signals import create_default_groups
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Set up default groups and permissions for user roles'
|
||||
help = "Set up default groups and permissions for user roles"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write('Creating default groups and permissions...')
|
||||
self.stdout.write("Creating default groups and permissions...")
|
||||
|
||||
try:
|
||||
# Create default groups with permissions
|
||||
@@ -29,14 +29,21 @@ class Command(BaseCommand):
|
||||
user.is_staff = True
|
||||
user.save()
|
||||
|
||||
self.stdout.write(self.style.SUCCESS('Successfully set up groups and permissions'))
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully set up groups and permissions")
|
||||
)
|
||||
|
||||
# Print summary
|
||||
for group in Group.objects.all():
|
||||
self.stdout.write(f'\nGroup: {group.name}')
|
||||
self.stdout.write('Permissions:')
|
||||
self.stdout.write(f"\nGroup: {group.name}")
|
||||
self.stdout.write("Permissions:")
|
||||
for perm in group.permissions.all():
|
||||
self.stdout.write(f' - {perm.codename}')
|
||||
self.stdout.write(f" - {perm.codename}")
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f'Error setting up groups: {str(e)}'))
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
f"Error setting up groups: {
|
||||
str(e)}"
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.contrib.sites.models import Site
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Set up default site'
|
||||
help = "Set up default site"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Delete any existing sites
|
||||
@@ -10,8 +11,6 @@ class Command(BaseCommand):
|
||||
|
||||
# Create default site
|
||||
site = Site.objects.create(
|
||||
id=1,
|
||||
domain='localhost:8000',
|
||||
name='ThrillWiki Development'
|
||||
id=1, domain="localhost:8000", name="ThrillWiki Development"
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(f'Created site: {site.domain}'))
|
||||
self.stdout.write(self.style.SUCCESS(f"Created site: {site.domain}"))
|
||||
|
||||
@@ -4,60 +4,123 @@ from allauth.socialaccount.models import SocialApp
|
||||
from dotenv import load_dotenv
|
||||
import os
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Sets up social authentication apps'
|
||||
help = "Sets up social authentication apps"
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
# Load environment variables
|
||||
load_dotenv()
|
||||
|
||||
# Get environment variables
|
||||
google_client_id = os.getenv('GOOGLE_CLIENT_ID')
|
||||
google_client_secret = os.getenv('GOOGLE_CLIENT_SECRET')
|
||||
discord_client_id = os.getenv('DISCORD_CLIENT_ID')
|
||||
discord_client_secret = os.getenv('DISCORD_CLIENT_SECRET')
|
||||
google_client_id = os.getenv("GOOGLE_CLIENT_ID")
|
||||
google_client_secret = os.getenv("GOOGLE_CLIENT_SECRET")
|
||||
discord_client_id = os.getenv("DISCORD_CLIENT_ID")
|
||||
discord_client_secret = os.getenv("DISCORD_CLIENT_SECRET")
|
||||
|
||||
if not all([google_client_id, google_client_secret, discord_client_id, discord_client_secret]):
|
||||
self.stdout.write(self.style.ERROR('Missing required environment variables'))
|
||||
# DEBUG: Log environment variable values
|
||||
self.stdout.write(
|
||||
f"DEBUG: google_client_id type: {
|
||||
type(google_client_id)}, value: {google_client_id}"
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: google_client_secret type: {
|
||||
type(google_client_secret)}, value: {google_client_secret}"
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: discord_client_id type: {
|
||||
type(discord_client_id)}, value: {discord_client_id}"
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: discord_client_secret type: {
|
||||
type(discord_client_secret)}, value: {discord_client_secret}"
|
||||
)
|
||||
|
||||
if not all(
|
||||
[
|
||||
google_client_id,
|
||||
google_client_secret,
|
||||
discord_client_id,
|
||||
discord_client_secret,
|
||||
]
|
||||
):
|
||||
self.stdout.write(
|
||||
self.style.ERROR("Missing required environment variables")
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: google_client_id is None: {google_client_id is None}"
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: google_client_secret is None: {
|
||||
google_client_secret is None}"
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: discord_client_id is None: {
|
||||
discord_client_id is None}"
|
||||
)
|
||||
self.stdout.write(
|
||||
f"DEBUG: discord_client_secret is None: {
|
||||
discord_client_secret is None}"
|
||||
)
|
||||
return
|
||||
|
||||
# Get or create the default site
|
||||
site, _ = Site.objects.get_or_create(
|
||||
id=1,
|
||||
defaults={
|
||||
'domain': 'localhost:8000',
|
||||
'name': 'localhost'
|
||||
}
|
||||
id=1, defaults={"domain": "localhost:8000", "name": "localhost"}
|
||||
)
|
||||
|
||||
# Set up Google
|
||||
google_app, created = SocialApp.objects.get_or_create(
|
||||
provider='google',
|
||||
provider="google",
|
||||
defaults={
|
||||
'name': 'Google',
|
||||
'client_id': google_client_id,
|
||||
'secret': google_client_secret,
|
||||
}
|
||||
"name": "Google",
|
||||
"client_id": google_client_id,
|
||||
"secret": google_client_secret,
|
||||
},
|
||||
)
|
||||
if not created:
|
||||
google_app.client_id = google_client_id
|
||||
google_app.[SECRET-REMOVED]
|
||||
google_app.save()
|
||||
self.stdout.write(
|
||||
f"DEBUG: About to assign google_client_id: {google_client_id} (type: {
|
||||
type(google_client_id)})"
|
||||
)
|
||||
if google_client_id is not None and google_client_secret is not None:
|
||||
google_app.client_id = google_client_id
|
||||
google_app.secret = google_client_secret
|
||||
google_app.save()
|
||||
self.stdout.write("DEBUG: Successfully updated Google app")
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Google client_id or secret is None, skipping update."
|
||||
)
|
||||
)
|
||||
google_app.sites.add(site)
|
||||
|
||||
# Set up Discord
|
||||
discord_app, created = SocialApp.objects.get_or_create(
|
||||
provider='discord',
|
||||
provider="discord",
|
||||
defaults={
|
||||
'name': 'Discord',
|
||||
'client_id': discord_client_id,
|
||||
'secret': discord_client_secret,
|
||||
}
|
||||
"name": "Discord",
|
||||
"client_id": discord_client_id,
|
||||
"secret": discord_client_secret,
|
||||
},
|
||||
)
|
||||
if not created:
|
||||
discord_app.client_id = discord_client_id
|
||||
discord_app.[SECRET-REMOVED]
|
||||
discord_app.save()
|
||||
self.stdout.write(
|
||||
f"DEBUG: About to assign discord_client_id: {discord_client_id} (type: {
|
||||
type(discord_client_id)})"
|
||||
)
|
||||
if discord_client_id is not None and discord_client_secret is not None:
|
||||
discord_app.client_id = discord_client_id
|
||||
discord_app.secret = discord_client_secret
|
||||
discord_app.save()
|
||||
self.stdout.write("DEBUG: Successfully updated Discord app")
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Discord client_id or secret is None, skipping update."
|
||||
)
|
||||
)
|
||||
discord_app.sites.add(site)
|
||||
|
||||
self.stdout.write(self.style.SUCCESS('Successfully set up social auth apps'))
|
||||
self.stdout.write(self.style.SUCCESS("Successfully set up social auth apps"))
|
||||
|
||||
@@ -1,35 +1,43 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.contrib.sites.models import Site
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import Permission
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Set up social authentication through admin interface'
|
||||
help = "Set up social authentication through admin interface"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Get or create the default site
|
||||
site, _ = Site.objects.get_or_create(
|
||||
id=1,
|
||||
defaults={
|
||||
'domain': 'localhost:8000',
|
||||
'name': 'ThrillWiki Development'
|
||||
}
|
||||
"domain": "localhost:8000",
|
||||
"name": "ThrillWiki Development",
|
||||
},
|
||||
)
|
||||
if not _:
|
||||
site.domain = 'localhost:8000'
|
||||
site.name = 'ThrillWiki Development'
|
||||
site.domain = "localhost:8000"
|
||||
site.name = "ThrillWiki Development"
|
||||
site.save()
|
||||
self.stdout.write(f'{"Created" if _ else "Updated"} site: {site.domain}')
|
||||
|
||||
# Create superuser if it doesn't exist
|
||||
if not User.objects.filter(username='admin').exists():
|
||||
User.objects.create_superuser('admin', 'admin@example.com', 'admin')
|
||||
self.stdout.write('Created superuser: admin/admin')
|
||||
if not User.objects.filter(username="admin").exists():
|
||||
admin_user = User.objects.create(
|
||||
username="admin",
|
||||
email="admin@example.com",
|
||||
is_staff=True,
|
||||
is_superuser=True,
|
||||
)
|
||||
admin_user.set_password("admin")
|
||||
admin_user.save()
|
||||
self.stdout.write("Created superuser: admin/admin")
|
||||
|
||||
self.stdout.write(self.style.SUCCESS('''
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
"""
|
||||
Social auth setup instructions:
|
||||
|
||||
1. Run the development server:
|
||||
@@ -57,4 +65,6 @@ Social auth setup instructions:
|
||||
Client id: 135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com
|
||||
Secret key: GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue
|
||||
Sites: Add "localhost:8000"
|
||||
'''))
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,60 +1,61 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.urls import reverse
|
||||
from django.test import Client
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
from urllib.parse import urljoin
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Test Discord OAuth2 authentication flow'
|
||||
help = "Test Discord OAuth2 authentication flow"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
client = Client(HTTP_HOST='localhost:8000')
|
||||
client = Client(HTTP_HOST="localhost:8000")
|
||||
|
||||
# Get Discord app
|
||||
try:
|
||||
discord_app = SocialApp.objects.get(provider='discord')
|
||||
self.stdout.write('Found Discord app configuration:')
|
||||
self.stdout.write(f'Client ID: {discord_app.client_id}')
|
||||
discord_app = SocialApp.objects.get(provider="discord")
|
||||
self.stdout.write("Found Discord app configuration:")
|
||||
self.stdout.write(f"Client ID: {discord_app.client_id}")
|
||||
|
||||
# Test login URL
|
||||
login_url = '/accounts/discord/login/'
|
||||
response = client.get(login_url, HTTP_HOST='localhost:8000')
|
||||
self.stdout.write(f'\nTesting login URL: {login_url}')
|
||||
self.stdout.write(f'Status code: {response.status_code}')
|
||||
login_url = "/accounts/discord/login/"
|
||||
response = client.get(login_url, HTTP_HOST="localhost:8000")
|
||||
self.stdout.write(f"\nTesting login URL: {login_url}")
|
||||
self.stdout.write(f"Status code: {response.status_code}")
|
||||
|
||||
if response.status_code == 302:
|
||||
redirect_url = response['Location']
|
||||
self.stdout.write(f'Redirects to: {redirect_url}')
|
||||
redirect_url = response["Location"]
|
||||
self.stdout.write(f"Redirects to: {redirect_url}")
|
||||
|
||||
# Parse OAuth2 parameters
|
||||
self.stdout.write('\nOAuth2 Parameters:')
|
||||
if 'client_id=' in redirect_url:
|
||||
self.stdout.write('✓ client_id parameter present')
|
||||
if 'redirect_uri=' in redirect_url:
|
||||
self.stdout.write('✓ redirect_uri parameter present')
|
||||
if 'scope=' in redirect_url:
|
||||
self.stdout.write('✓ scope parameter present')
|
||||
if 'response_type=' in redirect_url:
|
||||
self.stdout.write('✓ response_type parameter present')
|
||||
if 'code_challenge=' in redirect_url:
|
||||
self.stdout.write('✓ PKCE enabled (code_challenge present)')
|
||||
self.stdout.write("\nOAuth2 Parameters:")
|
||||
if "client_id=" in redirect_url:
|
||||
self.stdout.write("✓ client_id parameter present")
|
||||
if "redirect_uri=" in redirect_url:
|
||||
self.stdout.write("✓ redirect_uri parameter present")
|
||||
if "scope=" in redirect_url:
|
||||
self.stdout.write("✓ scope parameter present")
|
||||
if "response_type=" in redirect_url:
|
||||
self.stdout.write("✓ response_type parameter present")
|
||||
if "code_challenge=" in redirect_url:
|
||||
self.stdout.write("✓ PKCE enabled (code_challenge present)")
|
||||
|
||||
# Show callback URL
|
||||
callback_url = 'http://localhost:8000/accounts/discord/login/callback/'
|
||||
self.stdout.write('\nCallback URL to configure in Discord Developer Portal:')
|
||||
callback_url = "http://localhost:8000/accounts/discord/login/callback/"
|
||||
self.stdout.write(
|
||||
"\nCallback URL to configure in Discord Developer Portal:"
|
||||
)
|
||||
self.stdout.write(callback_url)
|
||||
|
||||
# Show frontend login URL
|
||||
frontend_url = 'http://localhost:5173'
|
||||
self.stdout.write('\nFrontend configuration:')
|
||||
self.stdout.write(f'Frontend URL: {frontend_url}')
|
||||
self.stdout.write('Discord login button should use:')
|
||||
self.stdout.write('/accounts/discord/login/?process=login')
|
||||
frontend_url = "http://localhost:5173"
|
||||
self.stdout.write("\nFrontend configuration:")
|
||||
self.stdout.write(f"Frontend URL: {frontend_url}")
|
||||
self.stdout.write("Discord login button should use:")
|
||||
self.stdout.write("/accounts/discord/login/?process=login")
|
||||
|
||||
# Show allauth URLs
|
||||
self.stdout.write('\nAllauth URLs:')
|
||||
self.stdout.write('Login URL: /accounts/discord/login/?process=login')
|
||||
self.stdout.write('Callback URL: /accounts/discord/login/callback/')
|
||||
self.stdout.write("\nAllauth URLs:")
|
||||
self.stdout.write("Login URL: /accounts/discord/login/?process=login")
|
||||
self.stdout.write("Callback URL: /accounts/discord/login/callback/")
|
||||
|
||||
except SocialApp.DoesNotExist:
|
||||
self.stdout.write(self.style.ERROR('Discord app not found'))
|
||||
self.stdout.write(self.style.ERROR("Discord app not found"))
|
||||
|
||||
@@ -2,8 +2,9 @@ from django.core.management.base import BaseCommand
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
from django.contrib.sites.models import Site
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Update social apps to be associated with all sites'
|
||||
help = "Update social apps to be associated with all sites"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Get all sites
|
||||
@@ -11,10 +12,12 @@ class Command(BaseCommand):
|
||||
|
||||
# Update each social app
|
||||
for app in SocialApp.objects.all():
|
||||
self.stdout.write(f'Updating {app.provider} app...')
|
||||
self.stdout.write(f"Updating {app.provider} app...")
|
||||
# Clear existing sites
|
||||
app.sites.clear()
|
||||
# Add all sites
|
||||
for site in sites:
|
||||
app.sites.add(site)
|
||||
self.stdout.write(f'Added sites: {", ".join(site.domain for site in sites)}')
|
||||
self.stdout.write(
|
||||
f'Added sites: {", ".join(site.domain for site in sites)}'
|
||||
)
|
||||
|
||||
@@ -1,36 +1,42 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
from django.contrib.sites.models import Site
|
||||
from django.urls import reverse
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Verify Discord OAuth2 settings'
|
||||
help = "Verify Discord OAuth2 settings"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Get Discord app
|
||||
try:
|
||||
discord_app = SocialApp.objects.get(provider='discord')
|
||||
self.stdout.write('Found Discord app configuration:')
|
||||
self.stdout.write(f'Client ID: {discord_app.client_id}')
|
||||
self.stdout.write(f'Secret: {discord_app.secret}')
|
||||
discord_app = SocialApp.objects.get(provider="discord")
|
||||
self.stdout.write("Found Discord app configuration:")
|
||||
self.stdout.write(f"Client ID: {discord_app.client_id}")
|
||||
self.stdout.write(f"Secret: {discord_app.secret}")
|
||||
|
||||
# Get sites
|
||||
sites = discord_app.sites.all()
|
||||
self.stdout.write('\nAssociated sites:')
|
||||
self.stdout.write("\nAssociated sites:")
|
||||
for site in sites:
|
||||
self.stdout.write(f'- {site.domain} ({site.name})')
|
||||
self.stdout.write(f"- {site.domain} ({site.name})")
|
||||
|
||||
# Show callback URL
|
||||
callback_url = 'http://localhost:8000/accounts/discord/login/callback/'
|
||||
self.stdout.write('\nCallback URL to configure in Discord Developer Portal:')
|
||||
callback_url = "http://localhost:8000/accounts/discord/login/callback/"
|
||||
self.stdout.write(
|
||||
"\nCallback URL to configure in Discord Developer Portal:"
|
||||
)
|
||||
self.stdout.write(callback_url)
|
||||
|
||||
# Show OAuth2 settings
|
||||
self.stdout.write('\nOAuth2 settings in settings.py:')
|
||||
discord_settings = settings.SOCIALACCOUNT_PROVIDERS.get('discord', {})
|
||||
self.stdout.write(f'PKCE Enabled: {discord_settings.get("OAUTH_PKCE_ENABLED", False)}')
|
||||
self.stdout.write("\nOAuth2 settings in settings.py:")
|
||||
discord_settings = settings.SOCIALACCOUNT_PROVIDERS.get("discord", {})
|
||||
self.stdout.write(
|
||||
f'PKCE Enabled: {
|
||||
discord_settings.get(
|
||||
"OAUTH_PKCE_ENABLED",
|
||||
False)}'
|
||||
)
|
||||
self.stdout.write(f'Scopes: {discord_settings.get("SCOPE", [])}')
|
||||
|
||||
except SocialApp.DoesNotExist:
|
||||
self.stdout.write(self.style.ERROR('Discord app not found'))
|
||||
self.stdout.write(self.style.ERROR("Discord app not found"))
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-10 01:10
|
||||
# Generated by Django 5.1.4 on 2025-08-13 21:35
|
||||
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
@@ -33,7 +33,10 @@ class Migration(migrations.Migration):
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("password", models.CharField(max_length=128, verbose_name="password")),
|
||||
(
|
||||
"password",
|
||||
models.CharField(max_length=128, verbose_name="password"),
|
||||
),
|
||||
(
|
||||
"last_login",
|
||||
models.DateTimeField(
|
||||
@@ -78,7 +81,9 @@ class Migration(migrations.Migration):
|
||||
(
|
||||
"email",
|
||||
models.EmailField(
|
||||
blank=True, max_length=254, verbose_name="email address"
|
||||
blank=True,
|
||||
max_length=254,
|
||||
verbose_name="email address",
|
||||
),
|
||||
),
|
||||
(
|
||||
@@ -100,7 +105,8 @@ class Migration(migrations.Migration):
|
||||
(
|
||||
"date_joined",
|
||||
models.DateTimeField(
|
||||
default=django.utils.timezone.now, verbose_name="date joined"
|
||||
default=django.utils.timezone.now,
|
||||
verbose_name="date joined",
|
||||
),
|
||||
),
|
||||
(
|
||||
@@ -232,7 +238,15 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name="TopList",
|
||||
fields=[
|
||||
("id", models.BigAutoField(primary_key=True, serialize=False)),
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("title", models.CharField(max_length=100)),
|
||||
(
|
||||
"category",
|
||||
@@ -266,7 +280,10 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name="TopListEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
(
|
||||
"pgh_id",
|
||||
models.AutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
@@ -324,7 +341,17 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name="TopListItem",
|
||||
fields=[
|
||||
("id", models.BigAutoField(primary_key=True, serialize=False)),
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("rank", models.PositiveIntegerField()),
|
||||
("notes", models.TextField(blank=True)),
|
||||
@@ -351,10 +378,15 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name="TopListItemEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
(
|
||||
"pgh_id",
|
||||
models.AutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("rank", models.PositiveIntegerField()),
|
||||
("notes", models.TextField(blank=True)),
|
||||
@@ -431,7 +463,10 @@ class Migration(migrations.Migration):
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
("avatar", models.ImageField(blank=True, upload_to="avatars/")),
|
||||
(
|
||||
"avatar",
|
||||
models.ImageField(blank=True, upload_to="avatars/"),
|
||||
),
|
||||
("pronouns", models.CharField(blank=True, max_length=50)),
|
||||
("bio", models.TextField(blank=True, max_length=500)),
|
||||
("twitter", models.URLField(blank=True)),
|
||||
@@ -490,7 +525,7 @@ class Migration(migrations.Migration):
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id") VALUES (NEW."content_type_id", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id"); RETURN NULL;',
|
||||
func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_56dfc",
|
||||
@@ -505,7 +540,7 @@ class Migration(migrations.Migration):
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id") VALUES (NEW."content_type_id", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id"); RETURN NULL;',
|
||||
func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_2b6e3",
|
||||
|
||||
@@ -1,93 +0,0 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-21 17:55
|
||||
|
||||
import django.utils.timezone
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="toplistitem",
|
||||
name="insert_insert",
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="toplistitem",
|
||||
name="update_update",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="toplistitem",
|
||||
name="created_at",
|
||||
field=models.DateTimeField(
|
||||
auto_now_add=True, default=django.utils.timezone.now
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="toplistitem",
|
||||
name="updated_at",
|
||||
field=models.DateTimeField(auto_now=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="toplistitemevent",
|
||||
name="created_at",
|
||||
field=models.DateTimeField(
|
||||
auto_now_add=True, default=django.utils.timezone.now
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="toplistitemevent",
|
||||
name="updated_at",
|
||||
field=models.DateTimeField(auto_now=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="toplist",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="toplistitem",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="toplistitem",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_56dfc",
|
||||
table="accounts_toplistitem",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="toplistitem",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_2b6e3",
|
||||
table="accounts_toplistitem",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -2,11 +2,13 @@ import requests
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
|
||||
class TurnstileMixin:
|
||||
"""
|
||||
Mixin to handle Cloudflare Turnstile validation.
|
||||
Bypasses validation when DEBUG is True.
|
||||
"""
|
||||
|
||||
def validate_turnstile(self, request):
|
||||
"""
|
||||
Validate the Turnstile response token.
|
||||
@@ -15,19 +17,19 @@ class TurnstileMixin:
|
||||
if settings.DEBUG:
|
||||
return
|
||||
|
||||
token = request.POST.get('cf-turnstile-response')
|
||||
token = request.POST.get("cf-turnstile-response")
|
||||
if not token:
|
||||
raise ValidationError('Please complete the Turnstile challenge.')
|
||||
raise ValidationError("Please complete the Turnstile challenge.")
|
||||
|
||||
# Verify the token with Cloudflare
|
||||
data = {
|
||||
'secret': settings.TURNSTILE_SECRET_KEY,
|
||||
'response': token,
|
||||
'remoteip': request.META.get('REMOTE_ADDR'),
|
||||
"secret": settings.TURNSTILE_SECRET_KEY,
|
||||
"response": token,
|
||||
"remoteip": request.META.get("REMOTE_ADDR"),
|
||||
}
|
||||
|
||||
response = requests.post(settings.TURNSTILE_VERIFY_URL, data=data, timeout=60)
|
||||
result = response.json()
|
||||
|
||||
if not result.get('success'):
|
||||
raise ValidationError('Turnstile validation failed. Please try again.')
|
||||
if not result.get("success"):
|
||||
raise ValidationError("Turnstile validation failed. Please try again.")
|
||||
|
||||
@@ -2,13 +2,12 @@ from django.contrib.auth.models import AbstractUser
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
from io import BytesIO
|
||||
import base64
|
||||
import os
|
||||
import secrets
|
||||
from history_tracking.models import TrackedModel
|
||||
import pghistory
|
||||
from core.history import TrackedModel
|
||||
|
||||
# import pghistory
|
||||
|
||||
|
||||
def generate_random_id(model_class, id_field):
|
||||
"""Generate a random ID starting at 4 digits, expanding to 5 if needed"""
|
||||
@@ -23,23 +22,27 @@ def generate_random_id(model_class, id_field):
|
||||
if not model_class.objects.filter(**{id_field: new_id}).exists():
|
||||
return new_id
|
||||
|
||||
|
||||
class User(AbstractUser):
|
||||
class Roles(models.TextChoices):
|
||||
USER = 'USER', _('User')
|
||||
MODERATOR = 'MODERATOR', _('Moderator')
|
||||
ADMIN = 'ADMIN', _('Admin')
|
||||
SUPERUSER = 'SUPERUSER', _('Superuser')
|
||||
USER = "USER", _("User")
|
||||
MODERATOR = "MODERATOR", _("Moderator")
|
||||
ADMIN = "ADMIN", _("Admin")
|
||||
SUPERUSER = "SUPERUSER", _("Superuser")
|
||||
|
||||
class ThemePreference(models.TextChoices):
|
||||
LIGHT = 'light', _('Light')
|
||||
DARK = 'dark', _('Dark')
|
||||
LIGHT = "light", _("Light")
|
||||
DARK = "dark", _("Dark")
|
||||
|
||||
# Read-only ID
|
||||
user_id = models.CharField(
|
||||
max_length=10,
|
||||
unique=True,
|
||||
editable=False,
|
||||
help_text='Unique identifier for this user that remains constant even if the username changes'
|
||||
help_text=(
|
||||
"Unique identifier for this user that remains constant even if the "
|
||||
"username changes"
|
||||
),
|
||||
)
|
||||
|
||||
role = models.CharField(
|
||||
@@ -61,40 +64,37 @@ class User(AbstractUser):
|
||||
return self.get_display_name()
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('profile', kwargs={'username': self.username})
|
||||
return reverse("profile", kwargs={"username": self.username})
|
||||
|
||||
def get_display_name(self):
|
||||
"""Get the user's display name, falling back to username if not set"""
|
||||
profile = getattr(self, 'profile', None)
|
||||
profile = getattr(self, "profile", None)
|
||||
if profile and profile.display_name:
|
||||
return profile.display_name
|
||||
return self.username
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.user_id:
|
||||
self.user_id = generate_random_id(User, 'user_id')
|
||||
self.user_id = generate_random_id(User, "user_id")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
|
||||
class UserProfile(models.Model):
|
||||
# Read-only ID
|
||||
profile_id = models.CharField(
|
||||
max_length=10,
|
||||
unique=True,
|
||||
editable=False,
|
||||
help_text='Unique identifier for this profile that remains constant'
|
||||
help_text="Unique identifier for this profile that remains constant",
|
||||
)
|
||||
|
||||
user = models.OneToOneField(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='profile'
|
||||
)
|
||||
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="profile")
|
||||
display_name = models.CharField(
|
||||
max_length=50,
|
||||
unique=True,
|
||||
help_text="This is the name that will be displayed on the site"
|
||||
help_text="This is the name that will be displayed on the site",
|
||||
)
|
||||
avatar = models.ImageField(upload_to='avatars/', blank=True)
|
||||
avatar = models.ImageField(upload_to="avatars/", blank=True)
|
||||
pronouns = models.CharField(max_length=50, blank=True)
|
||||
|
||||
bio = models.TextField(max_length=500, blank=True)
|
||||
@@ -112,10 +112,13 @@ class UserProfile(models.Model):
|
||||
water_ride_credits = models.IntegerField(default=0)
|
||||
|
||||
def get_avatar(self):
|
||||
"""Return the avatar URL or serve a pre-generated avatar based on the first letter of the username"""
|
||||
"""
|
||||
Return the avatar URL or serve a pre-generated avatar based on the
|
||||
first letter of the username
|
||||
"""
|
||||
if self.avatar:
|
||||
return self.avatar.url
|
||||
first_letter = self.user.username[0].upper()
|
||||
first_letter = self.user.username[0].upper()
|
||||
avatar_path = f"avatars/letters/{first_letter}_avatar.png"
|
||||
if os.path.exists(avatar_path):
|
||||
return f"/{avatar_path}"
|
||||
@@ -127,12 +130,13 @@ class UserProfile(models.Model):
|
||||
self.display_name = self.user.username
|
||||
|
||||
if not self.profile_id:
|
||||
self.profile_id = generate_random_id(UserProfile, 'profile_id')
|
||||
self.profile_id = generate_random_id(UserProfile, "profile_id")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return self.display_name
|
||||
|
||||
|
||||
class EmailVerification(models.Model):
|
||||
user = models.OneToOneField(User, on_delete=models.CASCADE)
|
||||
token = models.CharField(max_length=64, unique=True)
|
||||
@@ -146,6 +150,7 @@ class EmailVerification(models.Model):
|
||||
verbose_name = "Email Verification"
|
||||
verbose_name_plural = "Email Verifications"
|
||||
|
||||
|
||||
class PasswordReset(models.Model):
|
||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
token = models.CharField(max_length=64)
|
||||
@@ -160,53 +165,55 @@ class PasswordReset(models.Model):
|
||||
verbose_name = "Password Reset"
|
||||
verbose_name_plural = "Password Resets"
|
||||
|
||||
@pghistory.track()
|
||||
|
||||
# @pghistory.track()
|
||||
|
||||
|
||||
class TopList(TrackedModel):
|
||||
class Categories(models.TextChoices):
|
||||
ROLLER_COASTER = 'RC', _('Roller Coaster')
|
||||
DARK_RIDE = 'DR', _('Dark Ride')
|
||||
FLAT_RIDE = 'FR', _('Flat Ride')
|
||||
WATER_RIDE = 'WR', _('Water Ride')
|
||||
PARK = 'PK', _('Park')
|
||||
ROLLER_COASTER = "RC", _("Roller Coaster")
|
||||
DARK_RIDE = "DR", _("Dark Ride")
|
||||
FLAT_RIDE = "FR", _("Flat Ride")
|
||||
WATER_RIDE = "WR", _("Water Ride")
|
||||
PARK = "PK", _("Park")
|
||||
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='top_lists' # Added related_name for User model access
|
||||
related_name="top_lists", # Added related_name for User model access
|
||||
)
|
||||
title = models.CharField(max_length=100)
|
||||
category = models.CharField(
|
||||
max_length=2,
|
||||
choices=Categories.choices
|
||||
)
|
||||
category = models.CharField(max_length=2, choices=Categories.choices)
|
||||
description = models.TextField(blank=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ['-updated_at']
|
||||
class Meta(TrackedModel.Meta):
|
||||
ordering = ["-updated_at"]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"
|
||||
return (
|
||||
f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"
|
||||
)
|
||||
|
||||
|
||||
# @pghistory.track()
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class TopListItem(TrackedModel):
|
||||
top_list = models.ForeignKey(
|
||||
TopList,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='items'
|
||||
TopList, on_delete=models.CASCADE, related_name="items"
|
||||
)
|
||||
content_type = models.ForeignKey(
|
||||
'contenttypes.ContentType',
|
||||
on_delete=models.CASCADE
|
||||
"contenttypes.ContentType", on_delete=models.CASCADE
|
||||
)
|
||||
object_id = models.PositiveIntegerField()
|
||||
rank = models.PositiveIntegerField()
|
||||
notes = models.TextField(blank=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ['rank']
|
||||
unique_together = [['top_list', 'rank']]
|
||||
class Meta(TrackedModel.Meta):
|
||||
ordering = ["rank"]
|
||||
unique_together = [["top_list", "rank"]]
|
||||
|
||||
def __str__(self):
|
||||
return f"#{self.rank} in {self.top_list.title}"
|
||||
|
||||
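One behavioural note on the `class Meta(TrackedModel.Meta)` change above: inheriting the base `Meta` keeps whatever options the abstract `core.history.TrackedModel` declares while adding the model-specific ordering, instead of silently discarding them with a bare `Meta`. The base class is not shown in this diff; the sketch below is an assumption that only illustrates the inheritance point:

```python
# Assumed shape of core.history.TrackedModel (its source is not in this diff).
# The point shown: subclassing TrackedModel.Meta preserves inherited options.
from django.db import models


class TrackedModel(models.Model):
    class Meta:
        abstract = True
        ordering = ["pk"]  # illustrative option; the real base options are unknown


class Example(TrackedModel):
    class Meta(TrackedModel.Meta):
        ordering = ["-pk"]  # overrides ordering while keeping other inherited options
```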
208 accounts/models_temp.py Normal file
@@ -0,0 +1,208 @@
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
import os
|
||||
import secrets
|
||||
from core.history import TrackedModel
|
||||
import pghistory
|
||||
|
||||
|
||||
def generate_random_id(model_class, id_field):
|
||||
"""Generate a random ID starting at 4 digits, expanding to 5 if needed"""
|
||||
while True:
|
||||
# Try to get a 4-digit number first
|
||||
new_id = str(secrets.SystemRandom().randint(1000, 9999))
|
||||
if not model_class.objects.filter(**{id_field: new_id}).exists():
|
||||
return new_id
|
||||
|
||||
# If all 4-digit numbers are taken, try 5 digits
|
||||
new_id = str(secrets.SystemRandom().randint(10000, 99999))
|
||||
if not model_class.objects.filter(**{id_field: new_id}).exists():
|
||||
return new_id
|
||||
|
||||
|
||||
class User(AbstractUser):
|
||||
class Roles(models.TextChoices):
|
||||
USER = "USER", _("User")
|
||||
MODERATOR = "MODERATOR", _("Moderator")
|
||||
ADMIN = "ADMIN", _("Admin")
|
||||
SUPERUSER = "SUPERUSER", _("Superuser")
|
||||
|
||||
class ThemePreference(models.TextChoices):
|
||||
LIGHT = "light", _("Light")
|
||||
DARK = "dark", _("Dark")
|
||||
|
||||
# Read-only ID
|
||||
user_id = models.CharField(
|
||||
max_length=10,
|
||||
unique=True,
|
||||
editable=False,
|
||||
help_text="Unique identifier for this user that remains constant even if the username changes",
|
||||
)
|
||||
|
||||
role = models.CharField(
|
||||
max_length=10,
|
||||
choices=Roles.choices,
|
||||
default=Roles.USER,
|
||||
)
|
||||
is_banned = models.BooleanField(default=False)
|
||||
ban_reason = models.TextField(blank=True)
|
||||
ban_date = models.DateTimeField(null=True, blank=True)
|
||||
pending_email = models.EmailField(blank=True, null=True)
|
||||
theme_preference = models.CharField(
|
||||
max_length=5,
|
||||
choices=ThemePreference.choices,
|
||||
default=ThemePreference.LIGHT,
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return self.get_display_name()
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse("profile", kwargs={"username": self.username})
|
||||
|
||||
def get_display_name(self):
|
||||
"""Get the user's display name, falling back to username if not set"""
|
||||
profile = getattr(self, "profile", None)
|
||||
if profile and profile.display_name:
|
||||
return profile.display_name
|
||||
return self.username
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.user_id:
|
||||
self.user_id = generate_random_id(User, "user_id")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
|
||||
class UserProfile(models.Model):
|
||||
# Read-only ID
|
||||
profile_id = models.CharField(
|
||||
max_length=10,
|
||||
unique=True,
|
||||
editable=False,
|
||||
help_text="Unique identifier for this profile that remains constant",
|
||||
)
|
||||
|
||||
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="profile")
|
||||
display_name = models.CharField(
|
||||
max_length=50,
|
||||
unique=True,
|
||||
help_text="This is the name that will be displayed on the site",
|
||||
)
|
||||
avatar = models.ImageField(upload_to="avatars/", blank=True)
|
||||
pronouns = models.CharField(max_length=50, blank=True)
|
||||
|
||||
bio = models.TextField(max_length=500, blank=True)
|
||||
|
||||
# Social media links
|
||||
twitter = models.URLField(blank=True)
|
||||
instagram = models.URLField(blank=True)
|
||||
youtube = models.URLField(blank=True)
|
||||
discord = models.CharField(max_length=100, blank=True)
|
||||
|
||||
# Ride statistics
|
||||
coaster_credits = models.IntegerField(default=0)
|
||||
dark_ride_credits = models.IntegerField(default=0)
|
||||
flat_ride_credits = models.IntegerField(default=0)
|
||||
water_ride_credits = models.IntegerField(default=0)
|
||||
|
||||
def get_avatar(self):
|
||||
"""Return the avatar URL or serve a pre-generated avatar based on the first letter of the username"""
|
||||
if self.avatar:
|
||||
return self.avatar.url
|
||||
first_letter = self.user.username[0].upper()
|
||||
avatar_path = f"avatars/letters/{first_letter}_avatar.png"
|
||||
if os.path.exists(avatar_path):
|
||||
return f"/{avatar_path}"
|
||||
return "/static/images/default-avatar.png"
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# If no display name is set, use the username
|
||||
if not self.display_name:
|
||||
self.display_name = self.user.username
|
||||
|
||||
if not self.profile_id:
|
||||
self.profile_id = generate_random_id(UserProfile, "profile_id")
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return self.display_name
|
||||
|
||||
|
||||
class EmailVerification(models.Model):
|
||||
user = models.OneToOneField(User, on_delete=models.CASCADE)
|
||||
token = models.CharField(max_length=64, unique=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
last_sent = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"Email verification for {self.user.username}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = "Email Verification"
|
||||
verbose_name_plural = "Email Verifications"
|
||||
|
||||
|
||||
class PasswordReset(models.Model):
|
||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
||||
token = models.CharField(max_length=64)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
expires_at = models.DateTimeField()
|
||||
used = models.BooleanField(default=False)
|
||||
|
||||
def __str__(self):
|
||||
return f"Password reset for {self.user.username}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = "Password Reset"
|
||||
verbose_name_plural = "Password Resets"
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class TopList(TrackedModel):
|
||||
class Categories(models.TextChoices):
|
||||
ROLLER_COASTER = "RC", _("Roller Coaster")
|
||||
DARK_RIDE = "DR", _("Dark Ride")
|
||||
FLAT_RIDE = "FR", _("Flat Ride")
|
||||
WATER_RIDE = "WR", _("Water Ride")
|
||||
PARK = "PK", _("Park")
|
||||
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="top_lists", # Added related_name for User model access
|
||||
)
|
||||
title = models.CharField(max_length=100)
|
||||
category = models.CharField(max_length=2, choices=Categories.choices)
|
||||
description = models.TextField(blank=True)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
ordering = ["-updated_at"]
|
||||
|
||||
def __str__(self):
|
||||
return (
|
||||
f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"
|
||||
)
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class TopListItem(TrackedModel):
|
||||
top_list = models.ForeignKey(
|
||||
TopList, on_delete=models.CASCADE, related_name="items"
|
||||
)
|
||||
content_type = models.ForeignKey(
|
||||
"contenttypes.ContentType", on_delete=models.CASCADE
|
||||
)
|
||||
object_id = models.PositiveIntegerField()
|
||||
rank = models.PositiveIntegerField()
|
||||
notes = models.TextField(blank=True)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
ordering = ["rank"]
|
||||
unique_together = [["top_list", "rank"]]
|
||||
|
||||
def __str__(self):
|
||||
return f"#{self.rank} in {self.top_list.title}"
|
||||
273 accounts/selectors.py Normal file
@@ -0,0 +1,273 @@
|
||||
"""
|
||||
Selectors for user and account-related data retrieval.
|
||||
Following Django styleguide pattern for separating data access from business logic.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any
|
||||
from django.db.models import QuerySet, Q, F, Count
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
def user_profile_optimized(*, user_id: int) -> Any:
|
||||
"""
|
||||
Get a user with optimized queries for profile display.
|
||||
|
||||
Args:
|
||||
user_id: User ID
|
||||
|
||||
Returns:
|
||||
User instance with prefetched related data
|
||||
|
||||
Raises:
|
||||
User.DoesNotExist: If user doesn't exist
|
||||
"""
|
||||
return (
|
||||
User.objects.prefetch_related(
|
||||
"park_reviews", "ride_reviews", "socialaccount_set"
|
||||
)
|
||||
.annotate(
|
||||
park_review_count=Count(
|
||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
||||
),
|
||||
ride_review_count=Count(
|
||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
||||
),
|
||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
||||
)
|
||||
.get(id=user_id)
|
||||
)
|
||||
|
||||
|
||||
def active_users_with_stats() -> QuerySet:
|
||||
"""
|
||||
Get active users with review statistics.
|
||||
|
||||
Returns:
|
||||
QuerySet of active users with review counts
|
||||
"""
|
||||
return (
|
||||
User.objects.filter(is_active=True)
|
||||
.annotate(
|
||||
park_review_count=Count(
|
||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
||||
),
|
||||
ride_review_count=Count(
|
||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
||||
),
|
||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
||||
)
|
||||
.order_by("-total_review_count")
|
||||
)
|
||||
|
||||
|
||||
def users_with_recent_activity(*, days: int = 30) -> QuerySet:
|
||||
"""
|
||||
Get users who have been active in the last N days.
|
||||
|
||||
Args:
|
||||
days: Number of days to look back for activity
|
||||
|
||||
Returns:
|
||||
QuerySet of recently active users
|
||||
"""
|
||||
cutoff_date = timezone.now() - timedelta(days=days)
|
||||
|
||||
return (
|
||||
User.objects.filter(
|
||||
Q(last_login__gte=cutoff_date)
|
||||
| Q(park_reviews__created_at__gte=cutoff_date)
|
||||
| Q(ride_reviews__created_at__gte=cutoff_date)
|
||||
)
|
||||
.annotate(
|
||||
recent_park_reviews=Count(
|
||||
"park_reviews",
|
||||
filter=Q(park_reviews__created_at__gte=cutoff_date),
|
||||
),
|
||||
recent_ride_reviews=Count(
|
||||
"ride_reviews",
|
||||
filter=Q(ride_reviews__created_at__gte=cutoff_date),
|
||||
),
|
||||
recent_total_reviews=F("recent_park_reviews") + F("recent_ride_reviews"),
|
||||
)
|
||||
.order_by("-last_login")
|
||||
.distinct()
|
||||
)
|
||||
|
||||
|
||||
def top_reviewers(*, limit: int = 10) -> QuerySet:
|
||||
"""
|
||||
Get top users by review count.
|
||||
|
||||
Args:
|
||||
limit: Maximum number of users to return
|
||||
|
||||
Returns:
|
||||
QuerySet of top reviewers
|
||||
"""
|
||||
return (
|
||||
User.objects.filter(is_active=True)
|
||||
.annotate(
|
||||
park_review_count=Count(
|
||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
||||
),
|
||||
ride_review_count=Count(
|
||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
||||
),
|
||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
||||
)
|
||||
.filter(total_review_count__gt=0)
|
||||
.order_by("-total_review_count")[:limit]
|
||||
)
|
||||
|
||||
|
||||
def moderator_users() -> QuerySet:
|
||||
"""
|
||||
Get users with moderation permissions.
|
||||
|
||||
Returns:
|
||||
QuerySet of users who can moderate content
|
||||
"""
|
||||
return (
|
||||
User.objects.filter(
|
||||
Q(is_staff=True)
|
||||
| Q(groups__name="Moderators")
|
||||
| Q(
|
||||
user_permissions__codename__in=[
|
||||
"change_parkreview",
|
||||
"change_ridereview",
|
||||
]
|
||||
)
|
||||
)
|
||||
.distinct()
|
||||
.order_by("username")
|
||||
)
|
||||
|
||||
|
||||
def users_by_registration_date(*, start_date, end_date) -> QuerySet:
|
||||
"""
|
||||
Get users who registered within a date range.
|
||||
|
||||
Args:
|
||||
start_date: Start of date range
|
||||
end_date: End of date range
|
||||
|
||||
Returns:
|
||||
QuerySet of users registered in the date range
|
||||
"""
|
||||
return User.objects.filter(
|
||||
date_joined__date__gte=start_date, date_joined__date__lte=end_date
|
||||
).order_by("-date_joined")
|
||||
|
||||
|
||||
def user_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet:
|
||||
"""
|
||||
Get users matching a search query for autocomplete functionality.
|
||||
|
||||
Args:
|
||||
query: Search string
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
QuerySet of matching users for autocomplete
|
||||
"""
|
||||
return User.objects.filter(
|
||||
Q(username__icontains=query)
|
||||
| Q(first_name__icontains=query)
|
||||
| Q(last_name__icontains=query),
|
||||
is_active=True,
|
||||
).order_by("username")[:limit]
|
||||
|
||||
|
||||
def users_with_social_accounts() -> QuerySet:
|
||||
"""
|
||||
Get users who have connected social accounts.
|
||||
|
||||
Returns:
|
||||
QuerySet of users with social account connections
|
||||
"""
|
||||
return (
|
||||
User.objects.filter(socialaccount__isnull=False)
|
||||
.prefetch_related("socialaccount_set")
|
||||
.distinct()
|
||||
.order_by("username")
|
||||
)
|
||||
|
||||
|
||||
def user_statistics_summary() -> Dict[str, Any]:
|
||||
"""
|
||||
Get overall user statistics for dashboard/analytics.
|
||||
|
||||
Returns:
|
||||
Dictionary containing user statistics
|
||||
"""
|
||||
total_users = User.objects.count()
|
||||
active_users = User.objects.filter(is_active=True).count()
|
||||
staff_users = User.objects.filter(is_staff=True).count()
|
||||
|
||||
# Users with reviews
|
||||
users_with_reviews = (
|
||||
User.objects.filter(
|
||||
Q(park_reviews__isnull=False) | Q(ride_reviews__isnull=False)
|
||||
)
|
||||
.distinct()
|
||||
.count()
|
||||
)
|
||||
|
||||
# Recent registrations (last 30 days)
|
||||
cutoff_date = timezone.now() - timedelta(days=30)
|
||||
recent_registrations = User.objects.filter(date_joined__gte=cutoff_date).count()
|
||||
|
||||
return {
|
||||
"total_users": total_users,
|
||||
"active_users": active_users,
|
||||
"inactive_users": total_users - active_users,
|
||||
"staff_users": staff_users,
|
||||
"users_with_reviews": users_with_reviews,
|
||||
"recent_registrations": recent_registrations,
|
||||
"review_participation_rate": (
|
||||
(users_with_reviews / total_users * 100) if total_users > 0 else 0
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def users_needing_email_verification() -> QuerySet:
|
||||
"""
|
||||
Get users who haven't verified their email addresses.
|
||||
|
||||
Returns:
|
||||
QuerySet of users with unverified emails
|
||||
"""
|
||||
return (
|
||||
User.objects.filter(is_active=True, emailaddress__verified=False)
|
||||
.distinct()
|
||||
.order_by("date_joined")
|
||||
)
|
||||
|
||||
|
||||
def users_by_review_activity(*, min_reviews: int = 1) -> QuerySet:
|
||||
"""
|
||||
Get users who have written at least a minimum number of reviews.
|
||||
|
||||
Args:
|
||||
min_reviews: Minimum number of reviews required
|
||||
|
||||
Returns:
|
||||
QuerySet of users with sufficient review activity
|
||||
"""
|
||||
return (
|
||||
User.objects.annotate(
|
||||
park_review_count=Count(
|
||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
||||
),
|
||||
ride_review_count=Count(
|
||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
||||
),
|
||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
||||
)
|
||||
.filter(total_review_count__gte=min_reviews)
|
||||
.order_by("-total_review_count")
|
||||
)
|
||||
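A short usage sketch for the selectors above, in the spirit of the data-access/business-logic split the module docstring describes. The view class and template name are hypothetical; only the selector functions come from the file itself:

```python
# Hypothetical thin view that only orchestrates; the query logic stays in
# accounts/selectors.py as defined above (note the keyword-only arguments).
from django.views.generic import TemplateView

from accounts.selectors import top_reviewers, user_statistics_summary


class CommunityDashboardView(TemplateView):
    template_name = "accounts/community_dashboard.html"  # placeholder template

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["top_reviewers"] = top_reviewers(limit=5)
        context["stats"] = user_statistics_summary()
        return context
```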
@@ -5,7 +5,8 @@ from django.db import transaction
|
||||
from django.core.files import File
|
||||
from django.core.files.temp import NamedTemporaryFile
|
||||
import requests
|
||||
from .models import User, UserProfile, EmailVerification
|
||||
from .models import User, UserProfile
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def create_user_profile(sender, instance, created, **kwargs):
|
||||
@@ -21,13 +22,13 @@ def create_user_profile(sender, instance, created, **kwargs):
|
||||
extra_data = social_account.extra_data
|
||||
avatar_url = None
|
||||
|
||||
if social_account.provider == 'google':
|
||||
avatar_url = extra_data.get('picture')
|
||||
elif social_account.provider == 'discord':
|
||||
avatar = extra_data.get('avatar')
|
||||
discord_id = extra_data.get('id')
|
||||
if social_account.provider == "google":
|
||||
avatar_url = extra_data.get("picture")
|
||||
elif social_account.provider == "discord":
|
||||
avatar = extra_data.get("avatar")
|
||||
discord_id = extra_data.get("id")
|
||||
if avatar:
|
||||
avatar_url = f'https://cdn.discordapp.com/avatars/{discord_id}/{avatar}.png'
|
||||
avatar_url = f"https://cdn.discordapp.com/avatars/{discord_id}/{avatar}.png"
|
||||
|
||||
if avatar_url:
|
||||
try:
|
||||
@@ -38,26 +39,32 @@ def create_user_profile(sender, instance, created, **kwargs):
|
||||
img_temp.flush()
|
||||
|
||||
file_name = f"avatar_{instance.username}.png"
|
||||
profile.avatar.save(
|
||||
file_name,
|
||||
File(img_temp),
|
||||
save=True
|
||||
)
|
||||
profile.avatar.save(file_name, File(img_temp), save=True)
|
||||
except Exception as e:
|
||||
print(f"Error downloading avatar for user {instance.username}: {str(e)}")
|
||||
print(
    f"Error downloading avatar for user {instance.username}: {str(e)}"
)
|
||||
except Exception as e:
|
||||
print(f"Error creating profile for user {instance.username}: {str(e)}")
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def save_user_profile(sender, instance, **kwargs):
|
||||
"""Ensure UserProfile exists and is saved"""
|
||||
try:
|
||||
if not hasattr(instance, 'profile'):
|
||||
# Try to get existing profile first
|
||||
try:
|
||||
profile = instance.profile
|
||||
profile.save()
|
||||
except UserProfile.DoesNotExist:
|
||||
# Profile doesn't exist, create it
|
||||
UserProfile.objects.create(user=instance)
|
||||
instance.profile.save()
|
||||
except Exception as e:
|
||||
print(f"Error saving profile for user {instance.username}: {str(e)}")
|
||||
|
||||
|
||||
@receiver(pre_save, sender=User)
|
||||
def sync_user_role_with_groups(sender, instance, **kwargs):
|
||||
"""Sync user role with Django groups"""
|
||||
@@ -83,22 +90,38 @@ def sync_user_role_with_groups(sender, instance, **kwargs):
|
||||
instance.is_superuser = True
|
||||
instance.is_staff = True
|
||||
elif old_instance.role == User.Roles.SUPERUSER:
|
||||
# If removing superuser role, remove superuser status
|
||||
# If removing superuser role, remove superuser
|
||||
# status
|
||||
instance.is_superuser = False
|
||||
if instance.role not in [User.Roles.ADMIN, User.Roles.MODERATOR]:
|
||||
if instance.role not in [
|
||||
User.Roles.ADMIN,
|
||||
User.Roles.MODERATOR,
|
||||
]:
|
||||
instance.is_staff = False
|
||||
|
||||
# Handle staff status for admin and moderator roles
|
||||
if instance.role in [User.Roles.ADMIN, User.Roles.MODERATOR]:
|
||||
if instance.role in [
|
||||
User.Roles.ADMIN,
|
||||
User.Roles.MODERATOR,
|
||||
]:
|
||||
instance.is_staff = True
|
||||
elif old_instance.role in [User.Roles.ADMIN, User.Roles.MODERATOR]:
|
||||
# If removing admin/moderator role, remove staff status
|
||||
elif old_instance.role in [
|
||||
User.Roles.ADMIN,
|
||||
User.Roles.MODERATOR,
|
||||
]:
|
||||
# If removing admin/moderator role, remove staff
|
||||
# status
|
||||
if instance.role not in [User.Roles.SUPERUSER]:
|
||||
instance.is_staff = False
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
except Exception as e:
|
||||
print(f"Error syncing role with groups for user {instance.username}: {str(e)}")
|
||||
print(
    f"Error syncing role with groups for user {instance.username}: {str(e)}"
)
|
||||
|
||||
|
||||
def create_default_groups():
|
||||
"""
|
||||
@@ -107,31 +130,45 @@ def create_default_groups():
|
||||
"""
|
||||
try:
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
# Create Moderator group
|
||||
moderator_group, _ = Group.objects.get_or_create(name=User.Roles.MODERATOR)
|
||||
moderator_permissions = [
|
||||
# Review moderation permissions
|
||||
'change_review', 'delete_review',
|
||||
'change_reviewreport', 'delete_reviewreport',
|
||||
"change_review",
|
||||
"delete_review",
|
||||
"change_reviewreport",
|
||||
"delete_reviewreport",
|
||||
# Edit moderation permissions
|
||||
'change_parkedit', 'delete_parkedit',
|
||||
'change_rideedit', 'delete_rideedit',
|
||||
'change_companyedit', 'delete_companyedit',
|
||||
'change_manufactureredit', 'delete_manufactureredit',
|
||||
"change_parkedit",
|
||||
"delete_parkedit",
|
||||
"change_rideedit",
|
||||
"delete_rideedit",
|
||||
"change_companyedit",
|
||||
"delete_companyedit",
|
||||
"change_manufactureredit",
|
||||
"delete_manufactureredit",
|
||||
]
|
||||
|
||||
# Create Admin group
|
||||
admin_group, _ = Group.objects.get_or_create(name=User.Roles.ADMIN)
|
||||
admin_permissions = moderator_permissions + [
|
||||
# User management permissions
|
||||
'change_user', 'delete_user',
|
||||
"change_user",
|
||||
"delete_user",
|
||||
# Content management permissions
|
||||
'add_park', 'change_park', 'delete_park',
|
||||
'add_ride', 'change_ride', 'delete_ride',
|
||||
'add_company', 'change_company', 'delete_company',
|
||||
'add_manufacturer', 'change_manufacturer', 'delete_manufacturer',
|
||||
"add_park",
|
||||
"change_park",
|
||||
"delete_park",
|
||||
"add_ride",
|
||||
"change_ride",
|
||||
"delete_ride",
|
||||
"add_company",
|
||||
"change_company",
|
||||
"delete_company",
|
||||
"add_manufacturer",
|
||||
"change_manufacturer",
|
||||
"delete_manufacturer",
|
||||
]
|
||||
|
||||
# Assign permissions to groups
|
||||
|
||||
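The hunk above stops right at the `# Assign permissions to groups` comment, so the actual assignment lines are not shown in this diff. A plausible sketch of that step, offered purely as an assumption built from names the hunk already defines (`Permission`, `moderator_group`, `admin_group`, and the two codename lists):

```python
# Assumed continuation (not shown in the diff): attach the codename lists
# defined above to their groups by codename lookup.
moderator_group.permissions.set(
    Permission.objects.filter(codename__in=moderator_permissions)
)
admin_group.permissions.set(
    Permission.objects.filter(codename__in=admin_permissions)
)
```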
@@ -4,6 +4,7 @@ from django.template.loader import render_to_string

register = template.Library()


@register.simple_tag
def turnstile_widget():
    """
@@ -13,12 +14,10 @@ def turnstile_widget():
    Usage: {% load turnstile_tags %}{% turnstile_widget %}
    """
    if settings.DEBUG:
        template_name = 'accounts/turnstile_widget_empty.html'
        template_name = "accounts/turnstile_widget_empty.html"
        context = {}
    else:
        template_name = 'accounts/turnstile_widget.html'
        context = {
            'site_key': settings.TURNSTILE_SITE_KEY
        }
        template_name = "accounts/turnstile_widget.html"
        context = {"site_key": settings.TURNSTILE_SITE_KEY}

    return render_to_string(template_name, context)
@@ -1,3 +1,126 @@
|
||||
from django.test import TestCase
|
||||
from django.contrib.auth.models import Group, Permission
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from unittest.mock import patch, MagicMock
|
||||
from .models import User, UserProfile
|
||||
from .signals import create_default_groups
|
||||
|
||||
# Create your tests here.
|
||||
|
||||
class SignalsTestCase(TestCase):
|
||||
def setUp(self):
|
||||
self.user = User.objects.create_user(
|
||||
username="testuser",
|
||||
email="testuser@example.com",
|
||||
password="password",
|
||||
)
|
||||
|
||||
def test_create_user_profile(self):
|
||||
# Refresh user from database to ensure signals have been processed
|
||||
self.user.refresh_from_db()
|
||||
|
||||
# Check if profile exists in database first
|
||||
profile_exists = UserProfile.objects.filter(user=self.user).exists()
|
||||
self.assertTrue(profile_exists, "UserProfile should be created by signals")
|
||||
|
||||
# Now safely access the profile
|
||||
profile = UserProfile.objects.get(user=self.user)
|
||||
self.assertIsInstance(profile, UserProfile)
|
||||
|
||||
# Test the reverse relationship
|
||||
self.assertTrue(hasattr(self.user, "profile"))
|
||||
# Test that we can access the profile through the user relationship
|
||||
user_profile = getattr(self.user, "profile", None)
|
||||
self.assertEqual(user_profile, profile)
|
||||
|
||||
@patch("accounts.signals.requests.get")
|
||||
def test_create_user_profile_with_social_avatar(self, mock_get):
|
||||
# Mock the response from requests.get
|
||||
mock_response = MagicMock()
|
||||
mock_response.status_code = 200
|
||||
mock_response.content = b"fake-image-content"
|
||||
mock_get.return_value = mock_response
|
||||
|
||||
# Create a social account for the user (we'll skip this test since socialaccount_set requires allauth setup)
|
||||
# This test would need proper allauth configuration to work
|
||||
self.skipTest("Requires proper allauth socialaccount setup")
|
||||
|
||||
def test_save_user_profile(self):
|
||||
# Get the profile safely first
|
||||
profile = UserProfile.objects.get(user=self.user)
|
||||
profile.delete()
|
||||
|
||||
# Refresh user to clear cached profile relationship
|
||||
self.user.refresh_from_db()
|
||||
|
||||
# Check that profile no longer exists
|
||||
self.assertFalse(UserProfile.objects.filter(user=self.user).exists())
|
||||
|
||||
# Trigger save to recreate profile via signal
|
||||
self.user.save()
|
||||
|
||||
# Verify profile was recreated
|
||||
self.assertTrue(UserProfile.objects.filter(user=self.user).exists())
|
||||
new_profile = UserProfile.objects.get(user=self.user)
|
||||
self.assertIsInstance(new_profile, UserProfile)
|
||||
|
||||
def test_sync_user_role_with_groups(self):
|
||||
self.user.role = User.Roles.MODERATOR
|
||||
self.user.save()
|
||||
self.assertTrue(self.user.groups.filter(name=User.Roles.MODERATOR).exists())
|
||||
self.assertTrue(self.user.is_staff)
|
||||
|
||||
self.user.role = User.Roles.ADMIN
|
||||
self.user.save()
|
||||
self.assertFalse(self.user.groups.filter(name=User.Roles.MODERATOR).exists())
|
||||
self.assertTrue(self.user.groups.filter(name=User.Roles.ADMIN).exists())
|
||||
self.assertTrue(self.user.is_staff)
|
||||
|
||||
self.user.role = User.Roles.SUPERUSER
|
||||
self.user.save()
|
||||
self.assertFalse(self.user.groups.filter(name=User.Roles.ADMIN).exists())
|
||||
self.assertTrue(self.user.groups.filter(name=User.Roles.SUPERUSER).exists())
|
||||
self.assertTrue(self.user.is_superuser)
|
||||
self.assertTrue(self.user.is_staff)
|
||||
|
||||
self.user.role = User.Roles.USER
|
||||
self.user.save()
|
||||
self.assertFalse(self.user.groups.exists())
|
||||
self.assertFalse(self.user.is_superuser)
|
||||
self.assertFalse(self.user.is_staff)
|
||||
|
||||
def test_create_default_groups(self):
|
||||
# Create some permissions for testing
|
||||
content_type = ContentType.objects.get_for_model(User)
|
||||
Permission.objects.create(
|
||||
codename="change_review",
|
||||
name="Can change review",
|
||||
content_type=content_type,
|
||||
)
|
||||
Permission.objects.create(
|
||||
codename="delete_review",
|
||||
name="Can delete review",
|
||||
content_type=content_type,
|
||||
)
|
||||
Permission.objects.create(
|
||||
codename="change_user",
|
||||
name="Can change user",
|
||||
content_type=content_type,
|
||||
)
|
||||
|
||||
create_default_groups()
|
||||
|
||||
moderator_group = Group.objects.get(name=User.Roles.MODERATOR)
|
||||
self.assertIsNotNone(moderator_group)
|
||||
self.assertTrue(
|
||||
moderator_group.permissions.filter(codename="change_review").exists()
|
||||
)
|
||||
self.assertFalse(
|
||||
moderator_group.permissions.filter(codename="change_user").exists()
|
||||
)
|
||||
|
||||
admin_group = Group.objects.get(name=User.Roles.ADMIN)
|
||||
self.assertIsNotNone(admin_group)
|
||||
self.assertTrue(
|
||||
admin_group.permissions.filter(codename="change_review").exists()
|
||||
)
|
||||
self.assertTrue(admin_group.permissions.filter(codename="change_user").exists())
|
||||
|
||||
@@ -3,23 +3,46 @@ from django.contrib.auth import views as auth_views
|
||||
from allauth.account.views import LogoutView
|
||||
from . import views
|
||||
|
||||
app_name = 'accounts'
|
||||
app_name = "accounts"
|
||||
|
||||
urlpatterns = [
|
||||
# Override allauth's login and signup views with our Turnstile-enabled versions
|
||||
path('login/', views.CustomLoginView.as_view(), name='account_login'),
|
||||
path('signup/', views.CustomSignupView.as_view(), name='account_signup'),
|
||||
|
||||
# Override allauth's login and signup views with our Turnstile-enabled
|
||||
# versions
|
||||
path("login/", views.CustomLoginView.as_view(), name="account_login"),
|
||||
path("signup/", views.CustomSignupView.as_view(), name="account_signup"),
|
||||
# Authentication views
|
||||
path('logout/', LogoutView.as_view(), name='logout'),
|
||||
path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'),
|
||||
path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'),
|
||||
path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'),
|
||||
path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'),
|
||||
path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
|
||||
path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),
|
||||
|
||||
path("logout/", LogoutView.as_view(), name="logout"),
|
||||
path(
|
||||
"password_change/",
|
||||
auth_views.PasswordChangeView.as_view(),
|
||||
name="password_change",
|
||||
),
|
||||
path(
|
||||
"password_change/done/",
|
||||
auth_views.PasswordChangeDoneView.as_view(),
|
||||
name="password_change_done",
|
||||
),
|
||||
path(
|
||||
"password_reset/",
|
||||
auth_views.PasswordResetView.as_view(),
|
||||
name="password_reset",
|
||||
),
|
||||
path(
|
||||
"password_reset/done/",
|
||||
auth_views.PasswordResetDoneView.as_view(),
|
||||
name="password_reset_done",
|
||||
),
|
||||
path(
|
||||
"reset/<uidb64>/<token>/",
|
||||
auth_views.PasswordResetConfirmView.as_view(),
|
||||
name="password_reset_confirm",
|
||||
),
|
||||
path(
|
||||
"reset/done/",
|
||||
auth_views.PasswordResetCompleteView.as_view(),
|
||||
name="password_reset_complete",
|
||||
),
|
||||
# Profile views
|
||||
path('profile/', views.user_redirect_view, name='profile_redirect'),
|
||||
path('settings/', views.SettingsView.as_view(), name='settings'),
|
||||
path("profile/", views.user_redirect_view, name="profile_redirect"),
|
||||
path("settings/", views.SettingsView.as_view(), name="settings"),
|
||||
]
|
||||
|
||||
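Since the urls.py hunk above sets `app_name = "accounts"`, route names are reversed with that namespace once the project URLconf includes this module. A small reversal sketch; the literal kwargs values are placeholders:

```python
# Illustrative reversals for the namespaced routes defined above, assuming the
# project-level urls.py includes this module under the "accounts" namespace.
from django.urls import reverse

settings_url = reverse("accounts:settings")  # resolves under the include() prefix
confirm_url = reverse(
    "accounts:password_reset_confirm",
    kwargs={"uidb64": "Mg", "token": "set-password"},  # placeholder values
)
```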
@@ -5,39 +5,38 @@ from django.contrib.auth.decorators import login_required
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.contrib import messages
|
||||
from django.core.exceptions import ValidationError
|
||||
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
|
||||
from allauth.socialaccount.providers.discord.views import DiscordOAuth2Adapter
|
||||
from allauth.socialaccount.providers.oauth2.client import OAuth2Client
|
||||
from django.conf import settings
|
||||
from django.core.mail import send_mail
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.crypto import get_random_string
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
from django.contrib.sites.shortcuts import get_current_site
|
||||
from django.db.models import Prefetch, QuerySet
|
||||
from django.contrib.sites.models import Site
|
||||
from django.contrib.sites.requests import RequestSite
|
||||
from django.db.models import QuerySet
|
||||
from django.http import HttpResponseRedirect, HttpResponse, HttpRequest
|
||||
from django.urls import reverse
|
||||
from django.contrib.auth import login
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from accounts.models import User, PasswordReset, TopList, EmailVerification, UserProfile
|
||||
from reviews.models import Review
|
||||
from accounts.models import (
|
||||
User,
|
||||
PasswordReset,
|
||||
TopList,
|
||||
EmailVerification,
|
||||
UserProfile,
|
||||
)
|
||||
from email_service.services import EmailService
|
||||
from parks.models import ParkReview
|
||||
from rides.models import RideReview
|
||||
from allauth.account.views import LoginView, SignupView
|
||||
from .mixins import TurnstileMixin
|
||||
from typing import Dict, Any, Optional, Union, cast, TYPE_CHECKING
|
||||
from django_htmx.http import HttpResponseClientRefresh
|
||||
from django.contrib.sites.models import Site
|
||||
from django.contrib.sites.requests import RequestSite
|
||||
from contextlib import suppress
|
||||
import re
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.contrib.sites.models import Site
|
||||
from django.contrib.sites.requests import RequestSite
|
||||
|
||||
UserModel = get_user_model()
|
||||
|
||||
|
||||
class CustomLoginView(TurnstileMixin, LoginView):
|
||||
def form_valid(self, form):
|
||||
try:
|
||||
@@ -47,26 +46,31 @@ class CustomLoginView(TurnstileMixin, LoginView):
|
||||
return self.form_invalid(form)
|
||||
|
||||
response = super().form_valid(form)
|
||||
return HttpResponseClientRefresh() if getattr(self.request, 'htmx', False) else response
|
||||
return (
|
||||
HttpResponseClientRefresh()
|
||||
if getattr(self.request, "htmx", False)
|
||||
else response
|
||||
)
|
||||
|
||||
def form_invalid(self, form):
|
||||
if getattr(self.request, 'htmx', False):
|
||||
if getattr(self.request, "htmx", False):
|
||||
return render(
|
||||
self.request,
|
||||
'account/partials/login_form.html',
|
||||
self.get_context_data(form=form)
|
||||
"account/partials/login_form.html",
|
||||
self.get_context_data(form=form),
|
||||
)
|
||||
return super().form_invalid(form)
|
||||
|
||||
def get(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
|
||||
if getattr(request, 'htmx', False):
|
||||
if getattr(request, "htmx", False):
|
||||
return render(
|
||||
request,
|
||||
'account/partials/login_modal.html',
|
||||
self.get_context_data()
|
||||
"account/partials/login_modal.html",
|
||||
self.get_context_data(),
|
||||
)
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
|
||||
class CustomSignupView(TurnstileMixin, SignupView):
|
||||
def form_valid(self, form):
|
||||
try:
|
||||
@@ -76,253 +80,283 @@ class CustomSignupView(TurnstileMixin, SignupView):
|
||||
return self.form_invalid(form)
|
||||
|
||||
response = super().form_valid(form)
|
||||
return HttpResponseClientRefresh() if getattr(self.request, 'htmx', False) else response
|
||||
return (
|
||||
HttpResponseClientRefresh()
|
||||
if getattr(self.request, "htmx", False)
|
||||
else response
|
||||
)
|
||||
|
||||
def form_invalid(self, form):
|
||||
if getattr(self.request, 'htmx', False):
|
||||
if getattr(self.request, "htmx", False):
|
||||
return render(
|
||||
self.request,
|
||||
'account/partials/signup_modal.html',
|
||||
self.get_context_data(form=form)
|
||||
"account/partials/signup_modal.html",
|
||||
self.get_context_data(form=form),
|
||||
)
|
||||
return super().form_invalid(form)
|
||||
|
||||
def get(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
|
||||
if getattr(request, 'htmx', False):
|
||||
if getattr(request, "htmx", False):
|
||||
return render(
|
||||
request,
|
||||
'account/partials/signup_modal.html',
|
||||
self.get_context_data()
|
||||
"account/partials/signup_modal.html",
|
||||
self.get_context_data(),
|
||||
)
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
|
||||
@login_required
|
||||
def user_redirect_view(request: HttpRequest) -> HttpResponse:
|
||||
user = cast(User, request.user)
|
||||
return redirect('profile', username=user.username)
|
||||
return redirect("profile", username=user.username)
|
||||
|
||||
|
||||
def handle_social_login(request: HttpRequest, email: str) -> HttpResponse:
|
||||
if sociallogin := request.session.get('socialaccount_sociallogin'):
|
||||
if sociallogin := request.session.get("socialaccount_sociallogin"):
|
||||
sociallogin.user.email = email
|
||||
sociallogin.save()
|
||||
login(request, sociallogin.user)
|
||||
del request.session['socialaccount_sociallogin']
|
||||
messages.success(request, 'Successfully logged in')
|
||||
return redirect('/')
|
||||
del request.session["socialaccount_sociallogin"]
|
||||
messages.success(request, "Successfully logged in")
|
||||
return redirect("/")
|
||||
|
||||
|
||||
def email_required(request: HttpRequest) -> HttpResponse:
|
||||
if not request.session.get('socialaccount_sociallogin'):
|
||||
messages.error(request, 'No social login in progress')
|
||||
return redirect('/')
|
||||
if not request.session.get("socialaccount_sociallogin"):
|
||||
messages.error(request, "No social login in progress")
|
||||
return redirect("/")
|
||||
|
||||
if request.method == 'POST':
|
||||
if email := request.POST.get('email'):
|
||||
if request.method == "POST":
|
||||
if email := request.POST.get("email"):
|
||||
return handle_social_login(request, email)
|
||||
messages.error(request, 'Email is required')
|
||||
return render(request, 'accounts/email_required.html', {'error': 'Email is required'})
|
||||
messages.error(request, "Email is required")
|
||||
return render(
|
||||
request,
|
||||
"accounts/email_required.html",
|
||||
{"error": "Email is required"},
|
||||
)
|
||||
|
||||
return render(request, "accounts/email_required.html")
|
||||
|
||||
return render(request, 'accounts/email_required.html')
|
||||
|
||||
class ProfileView(DetailView):
|
||||
model = User
|
||||
template_name = 'accounts/profile.html'
|
||||
context_object_name = 'profile_user'
|
||||
slug_field = 'username'
|
||||
slug_url_kwarg = 'username'
|
||||
template_name = "accounts/profile.html"
|
||||
context_object_name = "profile_user"
|
||||
slug_field = "username"
|
||||
slug_url_kwarg = "username"
|
||||
|
||||
def get_queryset(self) -> QuerySet[User]:
|
||||
return User.objects.select_related('profile')
|
||||
return User.objects.select_related("profile")
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
user = cast(User, self.get_object())
|
||||
|
||||
context['recent_reviews'] = self._get_user_reviews(user)
|
||||
context['top_lists'] = self._get_user_top_lists(user)
|
||||
context["park_reviews"] = self._get_user_park_reviews(user)
|
||||
context["ride_reviews"] = self._get_user_ride_reviews(user)
|
||||
context["top_lists"] = self._get_user_top_lists(user)
|
||||
|
||||
return context
|
||||
|
||||
def _get_user_reviews(self, user: User) -> QuerySet[Review]:
|
||||
return Review.objects.filter(
|
||||
user=user,
|
||||
is_published=True
|
||||
).select_related(
|
||||
'user',
|
||||
'user__profile',
|
||||
'content_type'
|
||||
).prefetch_related(
|
||||
'content_object'
|
||||
).order_by('-created_at')[:5]
|
||||
def _get_user_park_reviews(self, user: User) -> QuerySet[ParkReview]:
|
||||
return (
|
||||
ParkReview.objects.filter(user=user, is_published=True)
|
||||
.select_related("user", "user__profile", "park")
|
||||
.order_by("-created_at")[:5]
|
||||
)
|
||||
|
||||
def _get_user_ride_reviews(self, user: User) -> QuerySet[RideReview]:
|
||||
return (
|
||||
RideReview.objects.filter(user=user, is_published=True)
|
||||
.select_related("user", "user__profile", "ride")
|
||||
.order_by("-created_at")[:5]
|
||||
)
|
||||
|
||||
def _get_user_top_lists(self, user: User) -> QuerySet[TopList]:
|
||||
return TopList.objects.filter(
|
||||
user=user
|
||||
).select_related(
|
||||
'user',
|
||||
'user__profile'
|
||||
).prefetch_related(
|
||||
'items'
|
||||
).order_by('-created_at')[:5]
|
||||
return (
|
||||
TopList.objects.filter(user=user)
|
||||
.select_related("user", "user__profile")
|
||||
.prefetch_related("items")
|
||||
.order_by("-created_at")[:5]
|
||||
)
|
||||
|
||||
|
||||
class SettingsView(LoginRequiredMixin, TemplateView):
|
||||
template_name = 'accounts/settings.html'
|
||||
template_name = "accounts/settings.html"
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
context['user'] = self.request.user
|
||||
context["user"] = self.request.user
|
||||
return context
|
||||
|
||||
def _handle_profile_update(self, request: HttpRequest) -> None:
|
||||
user = cast(User, request.user)
|
||||
profile = get_object_or_404(UserProfile, user=user)
|
||||
|
||||
if display_name := request.POST.get('display_name'):
|
||||
if display_name := request.POST.get("display_name"):
|
||||
profile.display_name = display_name
|
||||
|
||||
if 'avatar' in request.FILES:
|
||||
avatar_file = cast(UploadedFile, request.FILES['avatar'])
|
||||
if "avatar" in request.FILES:
|
||||
avatar_file = cast(UploadedFile, request.FILES["avatar"])
|
||||
profile.avatar.save(avatar_file.name, avatar_file, save=False)
|
||||
profile.save()
|
||||
|
||||
user.save()
|
||||
messages.success(request, 'Profile updated successfully')
|
||||
messages.success(request, "Profile updated successfully")
|
||||
|
||||
def _validate_password(self, password: str) -> bool:
|
||||
"""Validate password meets requirements."""
|
||||
return (
|
||||
len(password) >= 8 and
|
||||
bool(re.search(r'[A-Z]', password)) and
|
||||
bool(re.search(r'[a-z]', password)) and
|
||||
bool(re.search(r'[0-9]', password))
|
||||
len(password) >= 8
|
||||
and bool(re.search(r"[A-Z]", password))
|
||||
and bool(re.search(r"[a-z]", password))
|
||||
and bool(re.search(r"[0-9]", password))
|
||||
)
|
||||
|
||||
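For clarity, the rule `_validate_password` enforces above is: at least 8 characters, containing at least one uppercase letter, one lowercase letter, and one digit. A few illustrative checks with sample values only:

```python
# Sample inputs against the rule above; _validate_password uses no instance
# state, so calling it on a bare SettingsView() works for illustration.
view = SettingsView()
assert view._validate_password("Thrill2024")           # >=8 chars, upper, lower, digit
assert not view._validate_password("short1A")          # only 7 characters
assert not view._validate_password("alllowercase1")    # no uppercase letter
```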
def _send_password_change_confirmation(self, request: HttpRequest, user: User) -> None:
|
||||
def _send_password_change_confirmation(
|
||||
self, request: HttpRequest, user: User
|
||||
) -> None:
|
||||
"""Send password change confirmation email."""
|
||||
site = get_current_site(request)
|
||||
context = {
|
||||
'user': user,
|
||||
'site_name': site.name,
|
||||
"user": user,
|
||||
"site_name": site.name,
|
||||
}
|
||||
|
||||
email_html = render_to_string('accounts/email/password_change_confirmation.html', context)
|
||||
email_html = render_to_string(
|
||||
"accounts/email/password_change_confirmation.html", context
|
||||
)
|
||||
|
||||
EmailService.send_email(
|
||||
to=user.email,
|
||||
subject='Password Changed Successfully',
|
||||
text='Your password has been changed successfully.',
|
||||
subject="Password Changed Successfully",
|
||||
text="Your password has been changed successfully.",
|
||||
site=site,
|
||||
html=email_html
|
||||
html=email_html,
|
||||
)
|
||||
|
||||
def _handle_password_change(self, request: HttpRequest) -> Optional[HttpResponseRedirect]:
|
||||
def _handle_password_change(
|
||||
self, request: HttpRequest
|
||||
) -> Optional[HttpResponseRedirect]:
|
||||
user = cast(User, request.user)
|
||||
old_password = request.POST.get('old_password', '')
|
||||
new_password = request.POST.get('new_password', '')
|
||||
confirm_password = request.POST.get('confirm_password', '')
|
||||
old_password = request.POST.get("old_password", "")
|
||||
new_password = request.POST.get("new_password", "")
|
||||
confirm_password = request.POST.get("confirm_password", "")
|
||||
|
||||
if not user.check_password(old_password):
|
||||
messages.error(request, 'Current password is incorrect')
|
||||
messages.error(request, "Current password is incorrect")
|
||||
return None
|
||||
|
||||
if new_password != confirm_password:
|
||||
messages.error(request, 'New passwords do not match')
|
||||
messages.error(request, "New passwords do not match")
|
||||
return None
|
||||
|
||||
if not self._validate_password(new_password):
|
||||
messages.error(request, 'Password must be at least 8 characters and contain uppercase, lowercase, and numbers')
|
||||
messages.error(
|
||||
request,
|
||||
"Password must be at least 8 characters and contain uppercase, lowercase, and numbers",
|
||||
)
|
||||
return None
|
||||
|
||||
user.set_password(new_password)
|
||||
user.save()
|
||||
|
||||
self._send_password_change_confirmation(request, user)
|
||||
messages.success(request, 'Password changed successfully. Please check your email for confirmation.')
|
||||
return HttpResponseRedirect(reverse('account_login'))
|
||||
messages.success(
|
||||
request,
|
||||
"Password changed successfully. Please check your email for confirmation.",
|
||||
)
|
||||
return HttpResponseRedirect(reverse("account_login"))
|
||||
|
||||
def _handle_email_change(self, request: HttpRequest) -> None:
|
||||
if new_email := request.POST.get('new_email'):
|
||||
if new_email := request.POST.get("new_email"):
|
||||
self._send_email_verification(request, new_email)
|
||||
messages.success(request, 'Verification email sent to your new email address')
|
||||
messages.success(
|
||||
request, "Verification email sent to your new email address"
|
||||
)
|
||||
else:
|
||||
messages.error(request, 'New email is required')
|
||||
messages.error(request, "New email is required")
|
||||
|
||||
def _send_email_verification(self, request: HttpRequest, new_email: str) -> None:
|
||||
user = cast(User, request.user)
|
||||
token = get_random_string(64)
|
||||
EmailVerification.objects.update_or_create(
|
||||
user=user,
|
||||
defaults={'token': token}
|
||||
)
|
||||
EmailVerification.objects.update_or_create(user=user, defaults={"token": token})
|
||||
|
||||
site = cast(Site, get_current_site(request))
|
||||
verification_url = reverse('verify_email', kwargs={'token': token})
|
||||
verification_url = reverse("verify_email", kwargs={"token": token})
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'verification_url': verification_url,
|
||||
'site_name': site.name,
|
||||
"user": user,
|
||||
"verification_url": verification_url,
|
||||
"site_name": site.name,
|
||||
}
|
||||
|
||||
email_html = render_to_string('accounts/email/verify_email.html', context)
|
||||
email_html = render_to_string("accounts/email/verify_email.html", context)
|
||||
EmailService.send_email(
|
||||
to=new_email,
|
||||
subject='Verify your new email address',
|
||||
text='Click the link to verify your new email address',
|
||||
subject="Verify your new email address",
|
||||
text="Click the link to verify your new email address",
|
||||
site=site,
|
||||
html=email_html
|
||||
html=email_html,
|
||||
)
|
||||
|
||||
         user.pending_email = new_email
         user.save()

     def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
-        action = request.POST.get('action')
+        action = request.POST.get("action")

-        if action == 'update_profile':
+        if action == "update_profile":
             self._handle_profile_update(request)
-        elif action == 'change_password':
+        elif action == "change_password":
             if response := self._handle_password_change(request):
                 return response
-        elif action == 'change_email':
+        elif action == "change_email":
             self._handle_email_change(request)

         return self.get(request, *args, **kwargs)


 def create_password_reset_token(user: User) -> str:
     token = get_random_string(64)
     PasswordReset.objects.update_or_create(
         user=user,
         defaults={
-            'token': token,
-            'expires_at': timezone.now() + timedelta(hours=24)
-        }
+            "token": token,
+            "expires_at": timezone.now() + timedelta(hours=24),
+        },
     )
     return token

-def send_password_reset_email(user: User, site: Union[Site, RequestSite], token: str) -> None:
-    reset_url = reverse('password_reset_confirm', kwargs={'token': token})
+
+def send_password_reset_email(
+    user: User, site: Union[Site, RequestSite], token: str
+) -> None:
+    reset_url = reverse("password_reset_confirm", kwargs={"token": token})
     context = {
-        'user': user,
-        'reset_url': reset_url,
-        'site_name': site.name,
+        "user": user,
+        "reset_url": reset_url,
+        "site_name": site.name,
     }
-    email_html = render_to_string('accounts/email/password_reset.html', context)
+    email_html = render_to_string("accounts/email/password_reset.html", context)

     EmailService.send_email(
         to=user.email,
-        subject='Reset your password',
-        text='Click the link to reset your password',
+        subject="Reset your password",
+        text="Click the link to reset your password",
         site=site,
-        html=email_html
+        html=email_html,
     )

-def request_password_reset(request: HttpRequest) -> HttpResponse:
-    if request.method != 'POST':
-        return render(request, 'accounts/password_reset.html')
-
-    if not (email := request.POST.get('email')):
-        messages.error(request, 'Email is required')
-        return redirect('account_reset_password')
+def request_password_reset(request: HttpRequest) -> HttpResponse:
+    if request.method != "POST":
+        return render(request, "accounts/password_reset.html")
+
+    if not (email := request.POST.get("email")):
+        messages.error(request, "Email is required")
+        return redirect("account_reset_password")

     with suppress(User.DoesNotExist):
         user = User.objects.get(email=email)
@@ -330,10 +364,17 @@ def request_password_reset(request: HttpRequest) -> HttpResponse:
             site = get_current_site(request)
             send_password_reset_email(user, site, token)

-    messages.success(request, 'Password reset email sent')
-    return redirect('account_login')
+    messages.success(request, "Password reset email sent")
+    return redirect("account_login")

-def handle_password_reset(request: HttpRequest, user: User, new_password: str, reset: PasswordReset, site: Union[Site, RequestSite]) -> None:
+
+def handle_password_reset(
+    request: HttpRequest,
+    user: User,
+    new_password: str,
+    reset: PasswordReset,
+    site: Union[Site, RequestSite],
+) -> None:
     user.set_password(new_password)
     user.save()

@@ -341,41 +382,45 @@ def handle_password_reset(request: HttpRequest, user: User, new_password: str, r
     reset.save()

     send_password_reset_confirmation(user, site)
-    messages.success(request, 'Password reset successfully')
+    messages.success(request, "Password reset successfully")

-def send_password_reset_confirmation(user: User, site: Union[Site, RequestSite]) -> None:
+
+def send_password_reset_confirmation(
+    user: User, site: Union[Site, RequestSite]
+) -> None:
     context = {
-        'user': user,
-        'site_name': site.name,
+        "user": user,
+        "site_name": site.name,
     }
-    email_html = render_to_string('accounts/email/password_reset_complete.html', context)
+    email_html = render_to_string(
+        "accounts/email/password_reset_complete.html", context
+    )

     EmailService.send_email(
         to=user.email,
-        subject='Password Reset Complete',
-        text='Your password has been reset successfully.',
+        subject="Password Reset Complete",
+        text="Your password has been reset successfully.",
         site=site,
-        html=email_html
+        html=email_html,
     )


 def reset_password(request: HttpRequest, token: str) -> HttpResponse:
     try:
-        reset = PasswordReset.objects.select_related('user').get(
-            token=token,
-            expires_at__gt=timezone.now(),
-            used=False
+        reset = PasswordReset.objects.select_related("user").get(
+            token=token, expires_at__gt=timezone.now(), used=False
         )

-        if request.method == 'POST':
-            if new_password := request.POST.get('new_password'):
+        if request.method == "POST":
+            if new_password := request.POST.get("new_password"):
                 site = get_current_site(request)
                 handle_password_reset(request, reset.user, new_password, reset, site)
-                return redirect('account_login')
+                return redirect("account_login")

-            messages.error(request, 'New password is required')
+            messages.error(request, "New password is required")

-        return render(request, 'accounts/password_reset_confirm.html', {'token': token})
+        return render(request, "accounts/password_reset_confirm.html", {"token": token})

     except PasswordReset.DoesNotExist:
-        messages.error(request, 'Invalid or expired reset token')
-        return redirect('account_reset_password')
+        messages.error(request, "Invalid or expired reset token")
+        return redirect("account_reset_password")
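The views above assume a URL configuration exposing the names they reverse and redirect to ("password_reset_confirm", "account_reset_password", "account_login"). A minimal sketch of that wiring, assuming an accounts/urls.py module and that these functions live in accounts.views (both paths are assumptions, not shown in this diff):

# accounts/urls.py -- hypothetical wiring for the password-reset views above.
from django.urls import path

from . import views

urlpatterns = [
    # GET renders the request form; POST sends the reset email.
    path("password/reset/", views.request_password_reset, name="account_reset_password"),
    # Consumes the token produced by create_password_reset_token().
    path("password/reset/<str:token>/", views.reset_password, name="password_reset_confirm"),
]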
@@ -1 +0,0 @@
default_app_config = 'analytics.apps.AnalyticsConfig'
@@ -1,3 +0,0 @@
from django.contrib import admin

# Register your models here.
@@ -1,5 +0,0 @@
from django.apps import AppConfig

class AnalyticsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'analytics'
@@ -1,39 +0,0 @@
from django.utils.deprecation import MiddlewareMixin
from django.contrib.contenttypes.models import ContentType
from django.views.generic.detail import DetailView
from .models import PageView

class PageViewMiddleware(MiddlewareMixin):
    def process_view(self, request, view_func, view_args, view_kwargs):
        # Only track GET requests
        if request.method != 'GET':
            return None

        # Get view class if it exists
        view_class = getattr(view_func, 'view_class', None)
        if not view_class or not issubclass(view_class, DetailView):
            return None

        # Get the object if it's a detail view
        try:
            view_instance = view_class()
            view_instance.request = request
            view_instance.args = view_args
            view_instance.kwargs = view_kwargs
            obj = view_instance.get_object()
        except (AttributeError, Exception):
            return None

        # Record the page view
        try:
            PageView.objects.create(
                content_type=ContentType.objects.get_for_model(obj.__class__),
                object_id=obj.pk,
                ip_address=request.META.get('REMOTE_ADDR', ''),
                user_agent=request.META.get('HTTP_USER_AGENT', '')[:512]
            )
        except Exception:
            # Fail silently to not interrupt the request
            pass

        return None
@@ -1,53 +0,0 @@
# Generated by Django 5.1.4 on 2025-02-10 01:10

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
    ]

    operations = [
        migrations.CreateModel(
            name="PageView",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                ("timestamp", models.DateTimeField(auto_now_add=True, db_index=True)),
                ("ip_address", models.GenericIPAddressField()),
                ("user_agent", models.CharField(blank=True, max_length=512)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="page_views",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["timestamp"], name="analytics_p_timesta_835321_idx"
                    ),
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="analytics_p_content_73920a_idx",
                    ),
                ],
            },
        ),
    ]
@@ -1,3 +0,0 @@
from django.test import TestCase

# Create your tests here.
@@ -1,3 +0,0 @@
from django.shortcuts import render

# Create your views here.
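The PageViewMiddleware removed above only records anything when it is listed in the project's MIDDLEWARE setting; a minimal sketch of how it would have been enabled (the settings module path and the surrounding entries are assumptions):

# thrillwiki/settings.py -- hypothetical excerpt; only the last entry is specific to this app.
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    # ... remaining default middleware ...
    "analytics.middleware.PageViewMiddleware",  # logs a PageView for each DetailView GET
]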
1 backups/config/.github-pat.20250818_210101.backup Normal file
@@ -0,0 +1 @@
[GITHUB-TOKEN-REMOVED]

203 backups/config/thrillwiki-automation.env.20250818_210101.backup Normal file
@@ -0,0 +1,203 @@
|
||||
# ThrillWiki Automation Service Environment Configuration
|
||||
# Copy this file to thrillwiki-automation***REMOVED*** and customize for your environment
|
||||
#
|
||||
# Security Note: This file should have restricted permissions (600) as it may contain
|
||||
# sensitive information like GitHub Personal Access Tokens
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# PROJECT CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Base project directory (usually auto-detected)
|
||||
# PROJECT_DIR=/home/ubuntu/thrillwiki
|
||||
|
||||
# Service name for systemd integration
|
||||
# SERVICE_NAME=thrillwiki
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# GITHUB REPOSITORY CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# GitHub repository remote name
|
||||
# GITHUB_REPO=origin
|
||||
|
||||
# Branch to pull from
|
||||
# GITHUB_BRANCH=main
|
||||
|
||||
# GitHub Personal Access Token (PAT) - Required for private repositories
|
||||
# Generate at: https://github.com/settings/tokens
|
||||
# Required permissions: repo (Full control of private repositories)
|
||||
# GITHUB_TOKEN=ghp_your_personal_access_token_here
|
||||
|
||||
# GitHub token file location (alternative to GITHUB_TOKEN)
|
||||
# GITHUB_TOKEN_FILE=/home/ubuntu/thrillwiki/.github-pat
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# AUTOMATION TIMING CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Repository pull interval in seconds (default: 300 = 5 minutes)
|
||||
# PULL_INTERVAL=300
|
||||
|
||||
# Health check interval in seconds (default: 60 = 1 minute)
|
||||
# HEALTH_CHECK_INTERVAL=60
|
||||
|
||||
# Server startup timeout in seconds (default: 120 = 2 minutes)
|
||||
# STARTUP_TIMEOUT=120
|
||||
|
||||
# Restart delay after failure in seconds (default: 10)
|
||||
# RESTART_DELAY=10
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# LOGGING CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Log directory (default: project_dir/logs)
|
||||
# LOG_DIR=/home/ubuntu/thrillwiki/logs
|
||||
|
||||
# Log file path
|
||||
# LOG_[AWS-SECRET-REMOVED]proof-automation.log
|
||||
|
||||
# Maximum log file size in bytes (default: 10485760 = 10MB)
|
||||
# MAX_LOG_SIZE=10485760
|
||||
|
||||
# Lock file location to prevent multiple instances
|
||||
# LOCK_FILE=/tmp/thrillwiki-bulletproof.lock
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# DEVELOPMENT SERVER CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Server host address (default: 0.0.0.0 for all interfaces)
|
||||
# SERVER_HOST=0.0.0.0
|
||||
|
||||
# Server port (default: 8000)
|
||||
# SERVER_PORT=8000
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# DJANGO CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Django settings module
|
||||
# DJANGO_SETTINGS_MODULE=thrillwiki.settings
|
||||
|
||||
# Python path
|
||||
# PYTHONPATH=/home/ubuntu/thrillwiki
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# ADVANCED CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# GitHub authentication script location
|
||||
# GITHUB_AUTH_[AWS-SECRET-REMOVED]ithub-auth.py
|
||||
|
||||
# Enable verbose logging (true/false)
|
||||
# VERBOSE_LOGGING=false
|
||||
|
||||
# Enable debug mode for troubleshooting (true/false)
|
||||
# DEBUG_MODE=false
|
||||
|
||||
# Custom git remote URL (overrides GITHUB_REPO if set)
|
||||
# CUSTOM_GIT_REMOTE=https://github.com/username/repository.git
|
||||
|
||||
# Email notifications for critical failures (requires email configuration)
|
||||
# NOTIFICATION_EMAIL=admin@example.com
|
||||
|
||||
# Maximum consecutive failures before alerting (default: 5)
|
||||
# MAX_CONSECUTIVE_FAILURES=5
|
||||
|
||||
# Enable automatic dependency updates (true/false, default: true)
|
||||
# AUTO_UPDATE_DEPENDENCIES=true
|
||||
|
||||
# Enable automatic migrations on code changes (true/false, default: true)
|
||||
# AUTO_MIGRATE=true
|
||||
|
||||
# Enable automatic static file collection (true/false, default: true)
|
||||
# AUTO_COLLECTSTATIC=true
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# SECURITY CONFIGURATION
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# GitHub authentication method (token|ssh|https)
|
||||
# Default: token (uses GITHUB_TOKEN or GITHUB_TOKEN_FILE)
|
||||
# GITHUB_AUTH_METHOD=token
|
||||
|
||||
# SSH key path for git operations (when using ssh auth method)
|
||||
# SSH_KEY_PATH=/home/ubuntu/.ssh/***REMOVED***
|
||||
|
||||
# Git user configuration for commits
|
||||
# GIT_USER_NAME="ThrillWiki Automation"
|
||||
# GIT_USER_EMAIL="automation@thrillwiki.local"
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# MONITORING AND HEALTH CHECKS
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Health check URL to verify server is running
|
||||
# HEALTH_CHECK_URL=http://localhost:8000/health/
|
||||
|
||||
# Health check timeout in seconds
|
||||
# HEALTH_CHECK_TIMEOUT=30
|
||||
|
||||
# Enable system resource monitoring (true/false)
|
||||
# MONITOR_RESOURCES=true
|
||||
|
||||
# Memory usage threshold for warnings (in MB)
|
||||
# MEMORY_WARNING_THRESHOLD=1024
|
||||
|
||||
# CPU usage threshold for warnings (percentage)
|
||||
# CPU_WARNING_THRESHOLD=80
|
||||
|
||||
# Disk usage threshold for warnings (percentage)
|
||||
# DISK_WARNING_THRESHOLD=90
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# INTEGRATION SETTINGS
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Webhook integration (if using thrillwiki-webhook service)
|
||||
# WEBHOOK_INTEGRATION=true
|
||||
|
||||
# Slack webhook URL for notifications (optional)
|
||||
# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook/url
|
||||
|
||||
# Discord webhook URL for notifications (optional)
|
||||
# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/your/webhook/url
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# USAGE EXAMPLES
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# Example 1: Basic setup with GitHub PAT
|
||||
# GITHUB_TOKEN=ghp_your_token_here
|
||||
# PULL_INTERVAL=300
|
||||
# AUTO_MIGRATE=true
|
||||
|
||||
# Example 2: Enhanced monitoring setup
|
||||
# HEALTH_CHECK_INTERVAL=30
|
||||
# MONITOR_RESOURCES=true
|
||||
# NOTIFICATION_EMAIL=admin@thrillwiki.com
|
||||
# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook
|
||||
|
||||
# Example 3: Development environment with frequent pulls
|
||||
# PULL_INTERVAL=60
|
||||
# DEBUG_MODE=true
|
||||
# VERBOSE_LOGGING=true
|
||||
# AUTO_UPDATE_DEPENDENCIES=true
|
||||
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
# INSTALLATION NOTES
|
||||
# [AWS-SECRET-REMOVED]====================================
|
||||
|
||||
# 1. Copy this file: cp thrillwiki-automation***REMOVED***.example thrillwiki-automation***REMOVED***
|
||||
# 2. Set secure permissions: chmod 600 thrillwiki-automation***REMOVED***
|
||||
# 3. Customize the settings above for your environment
|
||||
# 4. Enable the service: sudo systemctl enable thrillwiki-automation
|
||||
# 5. Start the service: sudo systemctl start thrillwiki-automation
|
||||
# 6. Check status: sudo systemctl status thrillwiki-automation
|
||||
# 7. View logs: sudo journalctl -u thrillwiki-automation -f
|
||||
|
||||
# For security, ensure only the ubuntu user can read this file:
|
||||
# sudo chown ubuntu:ubuntu thrillwiki-automation***REMOVED***
|
||||
# sudo chmod 600 thrillwiki-automation***REMOVED***
|
||||
@@ -1,16 +0,0 @@
from django.contrib import admin
from .models import Company, Manufacturer

@admin.register(Company)
class CompanyAdmin(admin.ModelAdmin):
    list_display = ('id', 'name', 'headquarters', 'website', 'created_at')
    search_fields = ('name', 'headquarters', 'description')
    prepopulated_fields = {'slug': ('name',)}
    readonly_fields = ('created_at', 'updated_at')

@admin.register(Manufacturer)
class ManufacturerAdmin(admin.ModelAdmin):
    list_display = ('id', 'name', 'headquarters', 'website', 'created_at')
    search_fields = ('name', 'headquarters', 'description')
    prepopulated_fields = {'slug': ('name',)}
    readonly_fields = ('created_at', 'updated_at')
@@ -1,9 +0,0 @@
from django.apps import AppConfig

class CompaniesConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'companies'
    verbose_name = 'Companies'

    def ready(self):
        import companies.signals  # noqa
@@ -1,46 +0,0 @@
|
||||
from django import forms
|
||||
from .models import Company, Manufacturer
|
||||
|
||||
class CompanyForm(forms.ModelForm):
|
||||
class Meta:
|
||||
model = Company
|
||||
fields = ['name', 'headquarters', 'website', 'description']
|
||||
widgets = {
|
||||
'name': forms.TextInput(attrs={
|
||||
'class': 'w-full border-gray-300 rounded-lg form-input dark:border-gray-600 dark:bg-gray-700 dark:text-white'
|
||||
}),
|
||||
'headquarters': forms.TextInput(attrs={
|
||||
'class': 'w-full border-gray-300 rounded-lg form-input dark:border-gray-600 dark:bg-gray-700 dark:text-white',
|
||||
'placeholder': 'e.g., Orlando, Florida, United States'
|
||||
}),
|
||||
'website': forms.URLInput(attrs={
|
||||
'class': 'w-full border-gray-300 rounded-lg form-input dark:border-gray-600 dark:bg-gray-700 dark:text-white',
|
||||
'placeholder': 'https://example.com'
|
||||
}),
|
||||
'description': forms.Textarea(attrs={
|
||||
'rows': 4,
|
||||
'class': 'w-full border-gray-300 rounded-lg form-textarea dark:border-gray-600 dark:bg-gray-700 dark:text-white'
|
||||
}),
|
||||
}
|
||||
|
||||
class ManufacturerForm(forms.ModelForm):
|
||||
class Meta:
|
||||
model = Manufacturer
|
||||
fields = ['name', 'headquarters', 'website', 'description']
|
||||
widgets = {
|
||||
'name': forms.TextInput(attrs={
|
||||
'class': 'w-full border-gray-300 rounded-lg form-input dark:border-gray-600 dark:bg-gray-700 dark:text-white'
|
||||
}),
|
||||
'headquarters': forms.TextInput(attrs={
|
||||
'class': 'w-full border-gray-300 rounded-lg form-input dark:border-gray-600 dark:bg-gray-700 dark:text-white',
|
||||
'placeholder': 'e.g., Altoona, Pennsylvania, United States'
|
||||
}),
|
||||
'website': forms.URLInput(attrs={
|
||||
'class': 'w-full border-gray-300 rounded-lg form-input dark:border-gray-600 dark:bg-gray-700 dark:text-white',
|
||||
'placeholder': 'https://example.com'
|
||||
}),
|
||||
'description': forms.Textarea(attrs={
|
||||
'rows': 4,
|
||||
'class': 'w-full border-gray-300 rounded-lg form-textarea dark:border-gray-600 dark:bg-gray-700 dark:text-white'
|
||||
}),
|
||||
}
|
||||
@@ -1,197 +0,0 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-10 01:10
|
||||
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Company",
|
||||
fields=[
|
||||
("id", models.BigAutoField(primary_key=True, serialize=False)),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(max_length=255, unique=True)),
|
||||
("website", models.URLField(blank=True)),
|
||||
("headquarters", models.CharField(blank=True, max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("total_parks", models.IntegerField(default=0)),
|
||||
("total_rides", models.IntegerField(default=0)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "companies",
|
||||
"ordering": ["name"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CompanyEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(db_index=False, max_length=255)),
|
||||
("website", models.URLField(blank=True)),
|
||||
("headquarters", models.CharField(blank=True, max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("total_parks", models.IntegerField(default=0)),
|
||||
("total_rides", models.IntegerField(default=0)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Manufacturer",
|
||||
fields=[
|
||||
("id", models.BigAutoField(primary_key=True, serialize=False)),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(max_length=255, unique=True)),
|
||||
("website", models.URLField(blank=True)),
|
||||
("headquarters", models.CharField(blank=True, max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("total_rides", models.IntegerField(default=0)),
|
||||
("total_roller_coasters", models.IntegerField(default=0)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
"ordering": ["name"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ManufacturerEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(db_index=False, max_length=255)),
|
||||
("website", models.URLField(blank=True)),
|
||||
("headquarters", models.CharField(blank=True, max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("total_rides", models.IntegerField(default=0)),
|
||||
("total_roller_coasters", models.IntegerField(default=0)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="company",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "companies_companyevent" ("created_at", "description", "headquarters", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "total_parks", "total_rides", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."headquarters", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."total_parks", NEW."total_rides", NEW."updated_at", NEW."website"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_a4101",
|
||||
table="companies_company",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="company",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "companies_companyevent" ("created_at", "description", "headquarters", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "total_parks", "total_rides", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."headquarters", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."total_parks", NEW."total_rides", NEW."updated_at", NEW."website"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_3d5ae",
|
||||
table="companies_company",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companyevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companyevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="companies.company",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="manufacturer",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "companies_manufacturerevent" ("created_at", "description", "headquarters", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "total_rides", "total_roller_coasters", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."headquarters", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."total_rides", NEW."total_roller_coasters", NEW."updated_at", NEW."website"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_5c0b6",
|
||||
table="companies_manufacturer",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="manufacturer",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "companies_manufacturerevent" ("created_at", "description", "headquarters", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "total_rides", "total_roller_coasters", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."headquarters", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."total_rides", NEW."total_roller_coasters", NEW."updated_at", NEW."website"); RETURN NULL;',
|
||||
hash="[AWS-SECRET-REMOVED]",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_81971",
|
||||
table="companies_manufacturer",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="manufacturerevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="manufacturerevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="companies.manufacturer",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,27 +0,0 @@
|
||||
# Generated by Django 5.1.4 on 2025-02-21 17:55
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("companies", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="manufacturer",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,111 +0,0 @@
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
from django.urls import reverse
|
||||
from typing import Tuple, Optional, ClassVar, TYPE_CHECKING
|
||||
import pghistory
|
||||
from history_tracking.models import TrackedModel, HistoricalSlug
|
||||
|
||||
@pghistory.track()
|
||||
class Company(TrackedModel):
|
||||
name = models.CharField(max_length=255)
|
||||
slug = models.SlugField(max_length=255, unique=True)
|
||||
website = models.URLField(blank=True)
|
||||
headquarters = models.CharField(max_length=255, blank=True)
|
||||
description = models.TextField(blank=True)
|
||||
total_parks = models.IntegerField(default=0)
|
||||
total_rides = models.IntegerField(default=0)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
objects: ClassVar[models.Manager['Company']]
|
||||
|
||||
class Meta:
|
||||
verbose_name_plural = 'companies'
|
||||
ordering = ['name']
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs) -> None:
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def get_by_slug(cls, slug: str) -> Tuple['Company', bool]:
|
||||
"""Get company by slug, checking historical slugs if needed"""
|
||||
try:
|
||||
return cls.objects.get(slug=slug), False
|
||||
except cls.DoesNotExist:
|
||||
# Check pghistory first
|
||||
history_model = cls.get_history_model()
|
||||
history_entry = (
|
||||
history_model.objects.filter(slug=slug)
|
||||
.order_by('-pgh_created_at')
|
||||
.first()
|
||||
)
|
||||
|
||||
if history_entry:
|
||||
return cls.objects.get(id=history_entry.pgh_obj_id), True
|
||||
|
||||
# Check manual slug history as fallback
|
||||
try:
|
||||
historical = HistoricalSlug.objects.get(
|
||||
content_type__model='company',
|
||||
slug=slug
|
||||
)
|
||||
return cls.objects.get(pk=historical.object_id), True
|
||||
except (HistoricalSlug.DoesNotExist, cls.DoesNotExist):
|
||||
raise cls.DoesNotExist()
|
||||
|
||||
@pghistory.track()
|
||||
class Manufacturer(TrackedModel):
|
||||
name = models.CharField(max_length=255)
|
||||
slug = models.SlugField(max_length=255, unique=True)
|
||||
website = models.URLField(blank=True)
|
||||
headquarters = models.CharField(max_length=255, blank=True)
|
||||
description = models.TextField(blank=True)
|
||||
total_rides = models.IntegerField(default=0)
|
||||
total_roller_coasters = models.IntegerField(default=0)
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
objects: ClassVar[models.Manager['Manufacturer']]
|
||||
|
||||
class Meta:
|
||||
ordering = ['name']
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.name
|
||||
|
||||
def save(self, *args, **kwargs) -> None:
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def get_by_slug(cls, slug: str) -> Tuple['Manufacturer', bool]:
|
||||
"""Get manufacturer by slug, checking historical slugs if needed"""
|
||||
try:
|
||||
return cls.objects.get(slug=slug), False
|
||||
except cls.DoesNotExist:
|
||||
# Check pghistory first
|
||||
history_model = cls.get_history_model()
|
||||
history_entry = (
|
||||
history_model.objects.filter(slug=slug)
|
||||
.order_by('-pgh_created_at')
|
||||
.first()
|
||||
)
|
||||
|
||||
if history_entry:
|
||||
return cls.objects.get(id=history_entry.pgh_obj_id), True
|
||||
|
||||
# Check manual slug history as fallback
|
||||
try:
|
||||
historical = HistoricalSlug.objects.get(
|
||||
content_type__model='manufacturer',
|
||||
slug=slug
|
||||
)
|
||||
return cls.objects.get(pk=historical.object_id), True
|
||||
except (HistoricalSlug.DoesNotExist, cls.DoesNotExist):
|
||||
raise cls.DoesNotExist()
|
||||
@@ -1,55 +0,0 @@
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.db.utils import ProgrammingError
from parks.models import Park
from rides.models import Ride
from .models import Company, Manufacturer

@receiver([post_save, post_delete], sender=Park)
def update_company_stats(sender, instance, **kwargs):
    """Update company statistics when a park is added, modified, or deleted."""
    if instance.owner:
        try:
            # Update total parks
            total_parks = Park.objects.filter(owner=instance.owner).count()
            total_rides = Ride.objects.filter(park__owner=instance.owner).count()

            Company.objects.filter(id=instance.owner.id).update(
                total_parks=total_parks,
                total_rides=total_rides
            )
        except ProgrammingError:
            # If rides table doesn't exist yet, just update parks count
            total_parks = Park.objects.filter(owner=instance.owner).count()
            Company.objects.filter(id=instance.owner.id).update(
                total_parks=total_parks
            )

@receiver([post_save, post_delete], sender=Ride)
def update_manufacturer_stats(sender, instance, **kwargs):
    """Update manufacturer statistics when a ride is added, modified, or deleted."""
    if instance.manufacturer:
        try:
            # Update total rides and roller coasters
            total_rides = Ride.objects.filter(manufacturer=instance.manufacturer).count()
            total_roller_coasters = Ride.objects.filter(
                manufacturer=instance.manufacturer,
                category='RC'
            ).count()

            Manufacturer.objects.filter(id=instance.manufacturer.id).update(
                total_rides=total_rides,
                total_roller_coasters=total_roller_coasters
            )
        except ProgrammingError:
            pass  # Skip if rides table doesn't exist yet

@receiver(post_save, sender=Ride)
def update_company_ride_stats(sender, instance, **kwargs):
    """Update company ride statistics when a ride is added or modified."""
    if instance.park and instance.park.owner:
        try:
            total_rides = Ride.objects.filter(park__owner=instance.park.owner).count()
            Company.objects.filter(id=instance.park.owner.id).update(total_rides=total_rides)
        except ProgrammingError:
            pass  # Skip if rides table doesn't exist yet
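These receivers kept the denormalized total_parks / total_rides / total_roller_coasters counters in sync whenever parks or rides changed. A minimal illustration of the intended behaviour of the removed code (a sketch only: extra required fields on Park are omitted and assumed to have defaults, and it would need to run against a test database):

# Hypothetical shell session exercising update_company_stats above.
from companies.models import Company
from parks.models import Park

operator = Company.objects.create(name="Example Operator")
Park.objects.create(name="Example Park", owner=operator)  # post_save fires the receiver

operator.refresh_from_db()
assert operator.total_parks == 1  # counter recomputed by the signal handler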
@@ -1,429 +0,0 @@
|
||||
from django.test import TestCase, Client
|
||||
from django.urls import reverse
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.contrib.gis.geos import Point
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from django.http import HttpResponse
|
||||
from typing import cast, Tuple, Optional
|
||||
from .models import Company, Manufacturer
|
||||
from location.models import Location
|
||||
from moderation.models import EditSubmission, PhotoSubmission
|
||||
from media.models import Photo
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
class CompanyModelTests(TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.company = Company.objects.create(
|
||||
name='Test Company',
|
||||
website='http://example.com',
|
||||
headquarters='Test HQ',
|
||||
description='Test Description',
|
||||
total_parks=5,
|
||||
total_rides=100
|
||||
)
|
||||
|
||||
self.location = Location.objects.create(
|
||||
content_type=ContentType.objects.get_for_model(Company),
|
||||
object_id=self.company.pk,
|
||||
name='Test Company HQ',
|
||||
location_type='business',
|
||||
street_address='123 Company St',
|
||||
city='Company City',
|
||||
state='CS',
|
||||
country='Test Country',
|
||||
postal_code='12345',
|
||||
point=Point(-118.2437, 34.0522)
|
||||
)
|
||||
|
||||
def test_company_creation(self) -> None:
|
||||
"""Test company instance creation and field values"""
|
||||
self.assertEqual(self.company.name, 'Test Company')
|
||||
self.assertEqual(self.company.website, 'http://example.com')
|
||||
self.assertEqual(self.company.headquarters, 'Test HQ')
|
||||
self.assertEqual(self.company.description, 'Test Description')
|
||||
self.assertEqual(self.company.total_parks, 5)
|
||||
self.assertEqual(self.company.total_rides, 100)
|
||||
self.assertTrue(self.company.slug)
|
||||
|
||||
def test_company_str_representation(self) -> None:
|
||||
"""Test string representation of company"""
|
||||
self.assertEqual(str(self.company), 'Test Company')
|
||||
|
||||
def test_company_get_by_slug(self) -> None:
|
||||
"""Test get_by_slug class method"""
|
||||
company, is_historical = Company.get_by_slug(self.company.slug)
|
||||
self.assertEqual(company, self.company)
|
||||
self.assertFalse(is_historical)
|
||||
|
||||
def test_company_get_by_invalid_slug(self) -> None:
|
||||
"""Test get_by_slug with invalid slug"""
|
||||
with self.assertRaises(Company.DoesNotExist):
|
||||
Company.get_by_slug('invalid-slug')
|
||||
|
||||
def test_company_stats(self) -> None:
|
||||
"""Test company statistics fields"""
|
||||
self.company.total_parks = 10
|
||||
self.company.total_rides = 200
|
||||
self.company.save()
|
||||
|
||||
company = Company.objects.get(pk=self.company.pk)
|
||||
self.assertEqual(company.total_parks, 10)
|
||||
self.assertEqual(company.total_rides, 200)
|
||||
|
||||
class ManufacturerModelTests(TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.manufacturer = Manufacturer.objects.create(
|
||||
name='Test Manufacturer',
|
||||
website='http://example.com',
|
||||
headquarters='Test HQ',
|
||||
description='Test Description',
|
||||
total_rides=50,
|
||||
total_roller_coasters=20
|
||||
)
|
||||
|
||||
self.location = Location.objects.create(
|
||||
content_type=ContentType.objects.get_for_model(Manufacturer),
|
||||
object_id=self.manufacturer.pk,
|
||||
name='Test Manufacturer HQ',
|
||||
location_type='business',
|
||||
street_address='123 Manufacturer St',
|
||||
city='Manufacturer City',
|
||||
state='MS',
|
||||
country='Test Country',
|
||||
postal_code='12345',
|
||||
point=Point(-118.2437, 34.0522)
|
||||
)
|
||||
|
||||
def test_manufacturer_creation(self) -> None:
|
||||
"""Test manufacturer instance creation and field values"""
|
||||
self.assertEqual(self.manufacturer.name, 'Test Manufacturer')
|
||||
self.assertEqual(self.manufacturer.website, 'http://example.com')
|
||||
self.assertEqual(self.manufacturer.headquarters, 'Test HQ')
|
||||
self.assertEqual(self.manufacturer.description, 'Test Description')
|
||||
self.assertEqual(self.manufacturer.total_rides, 50)
|
||||
self.assertEqual(self.manufacturer.total_roller_coasters, 20)
|
||||
self.assertTrue(self.manufacturer.slug)
|
||||
|
||||
def test_manufacturer_str_representation(self) -> None:
|
||||
"""Test string representation of manufacturer"""
|
||||
self.assertEqual(str(self.manufacturer), 'Test Manufacturer')
|
||||
|
||||
def test_manufacturer_get_by_slug(self) -> None:
|
||||
"""Test get_by_slug class method"""
|
||||
manufacturer, is_historical = Manufacturer.get_by_slug(self.manufacturer.slug)
|
||||
self.assertEqual(manufacturer, self.manufacturer)
|
||||
self.assertFalse(is_historical)
|
||||
|
||||
def test_manufacturer_get_by_invalid_slug(self) -> None:
|
||||
"""Test get_by_slug with invalid slug"""
|
||||
with self.assertRaises(Manufacturer.DoesNotExist):
|
||||
Manufacturer.get_by_slug('invalid-slug')
|
||||
|
||||
def test_manufacturer_stats(self) -> None:
|
||||
"""Test manufacturer statistics fields"""
|
||||
self.manufacturer.total_rides = 100
|
||||
self.manufacturer.total_roller_coasters = 40
|
||||
self.manufacturer.save()
|
||||
|
||||
manufacturer = Manufacturer.objects.get(pk=self.manufacturer.pk)
|
||||
self.assertEqual(manufacturer.total_rides, 100)
|
||||
self.assertEqual(manufacturer.total_roller_coasters, 40)
|
||||
|
||||
class CompanyViewTests(TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.client = Client()
|
||||
self.user = User.objects.create_user(
|
||||
username='testuser',
|
||||
email='test@example.com',
|
||||
password='testpass123'
|
||||
)
|
||||
self.moderator = User.objects.create_user(
|
||||
username='moderator',
|
||||
email='moderator@example.com',
|
||||
password='modpass123',
|
||||
role='MODERATOR'
|
||||
)
|
||||
self.company = Company.objects.create(
|
||||
name='Test Company',
|
||||
website='http://example.com',
|
||||
headquarters='Test HQ',
|
||||
description='Test Description'
|
||||
)
|
||||
|
||||
self.location = Location.objects.create(
|
||||
content_type=ContentType.objects.get_for_model(Company),
|
||||
object_id=self.company.pk,
|
||||
name='Test Company HQ',
|
||||
location_type='business',
|
||||
street_address='123 Company St',
|
||||
city='Company City',
|
||||
state='CS',
|
||||
country='Test Country',
|
||||
postal_code='12345',
|
||||
point=Point(-118.2437, 34.0522)
|
||||
)
|
||||
|
||||
def test_company_list_view(self) -> None:
|
||||
"""Test company list view"""
|
||||
response = self.client.get(reverse('companies:company_list'))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.company.name)
|
||||
|
||||
def test_company_list_view_with_search(self) -> None:
|
||||
"""Test company list view with search"""
|
||||
response = self.client.get(reverse('companies:company_list') + '?search=Test')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.company.name)
|
||||
|
||||
response = self.client.get(reverse('companies:company_list') + '?search=NonExistent')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertNotContains(response, self.company.name)
|
||||
|
||||
def test_company_list_view_with_country_filter(self) -> None:
|
||||
"""Test company list view with country filter"""
|
||||
response = self.client.get(reverse('companies:company_list') + '?country=Test Country')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.company.name)
|
||||
|
||||
response = self.client.get(reverse('companies:company_list') + '?country=NonExistent')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertNotContains(response, self.company.name)
|
||||
|
||||
def test_company_detail_view(self) -> None:
|
||||
"""Test company detail view"""
|
||||
response = self.client.get(
|
||||
reverse('companies:company_detail', kwargs={'slug': self.company.slug})
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.company.name)
|
||||
self.assertContains(response, self.company.website)
|
||||
self.assertContains(response, self.company.headquarters)
|
||||
|
||||
def test_company_detail_view_invalid_slug(self) -> None:
|
||||
"""Test company detail view with invalid slug"""
|
||||
response = self.client.get(
|
||||
reverse('companies:company_detail', kwargs={'slug': 'invalid-slug'})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_company_create_view_unauthenticated(self) -> None:
|
||||
"""Test company create view when not logged in"""
|
||||
response = self.client.get(reverse('companies:company_create'))
|
||||
self.assertEqual(response.status_code, 302) # Redirects to login
|
||||
|
||||
def test_company_create_view_authenticated(self) -> None:
|
||||
"""Test company create view when logged in"""
|
||||
self.client.login(username='testuser', password='testpass123')
|
||||
response = self.client.get(reverse('companies:company_create'))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_company_create_submission_regular_user(self) -> None:
|
||||
"""Test creating a company submission as regular user"""
|
||||
self.client.login(username='testuser', password='testpass123')
|
||||
data = {
|
||||
'name': 'New Company',
|
||||
'website': 'http://newcompany.com',
|
||||
'headquarters': 'New HQ',
|
||||
'description': 'New Description',
|
||||
'reason': 'Adding new company',
|
||||
'source': 'Company website'
|
||||
}
|
||||
response = self.client.post(reverse('companies:company_create'), data)
|
||||
self.assertEqual(response.status_code, 302) # Redirects after submission
|
||||
self.assertTrue(EditSubmission.objects.filter(
|
||||
submission_type='CREATE',
|
||||
changes__name='New Company',
|
||||
status='NEW'
|
||||
).exists())
|
||||
|
||||
def test_company_create_submission_moderator(self) -> None:
|
||||
"""Test creating a company submission as moderator"""
|
||||
self.client.login(username='moderator', password='modpass123')
|
||||
data = {
|
||||
'name': 'New Company',
|
||||
'website': 'http://newcompany.com',
|
||||
'headquarters': 'New HQ',
|
||||
'description': 'New Description',
|
||||
'reason': 'Adding new company',
|
||||
'source': 'Company website'
|
||||
}
|
||||
response = self.client.post(reverse('companies:company_create'), data)
|
||||
self.assertEqual(response.status_code, 302) # Redirects after submission
|
||||
submission = EditSubmission.objects.get(
|
||||
submission_type='CREATE',
|
||||
changes__name='New Company'
|
||||
)
|
||||
self.assertEqual(submission.status, 'APPROVED')
|
||||
self.assertEqual(submission.handled_by, self.moderator)
|
||||
|
||||
def test_company_photo_submission(self) -> None:
|
||||
"""Test photo submission for company"""
|
||||
self.client.login(username='testuser', password='testpass123')
|
||||
image_content = b'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;'
|
||||
image = SimpleUploadedFile('test.gif', image_content, content_type='image/gif')
|
||||
data = {
|
||||
'photo': image,
|
||||
'caption': 'Test Photo',
|
||||
'date_taken': '2024-01-01'
|
||||
}
|
||||
response = cast(HttpResponse, self.client.post(
|
||||
reverse('companies:company_detail', kwargs={'slug': self.company.slug}),
|
||||
data,
|
||||
HTTP_X_REQUESTED_WITH='XMLHttpRequest' # Simulate AJAX request
|
||||
))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTrue(PhotoSubmission.objects.filter(
|
||||
content_type=ContentType.objects.get_for_model(Company),
|
||||
object_id=self.company.pk
|
||||
).exists())
|
||||
|
||||
class ManufacturerViewTests(TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.client = Client()
|
||||
self.user = User.objects.create_user(
|
||||
username='testuser',
|
||||
email='test@example.com',
|
||||
password='testpass123'
|
||||
)
|
||||
self.moderator = User.objects.create_user(
|
||||
username='moderator',
|
||||
email='moderator@example.com',
|
||||
password='modpass123',
|
||||
role='MODERATOR'
|
||||
)
|
||||
self.manufacturer = Manufacturer.objects.create(
|
||||
name='Test Manufacturer',
|
||||
website='http://example.com',
|
||||
headquarters='Test HQ',
|
||||
description='Test Description'
|
||||
)
|
||||
|
||||
self.location = Location.objects.create(
|
||||
content_type=ContentType.objects.get_for_model(Manufacturer),
|
||||
object_id=self.manufacturer.pk,
|
||||
name='Test Manufacturer HQ',
|
||||
location_type='business',
|
||||
street_address='123 Manufacturer St',
|
||||
city='Manufacturer City',
|
||||
state='MS',
|
||||
country='Test Country',
|
||||
postal_code='12345',
|
||||
point=Point(-118.2437, 34.0522)
|
||||
)
|
||||
|
||||
def test_manufacturer_list_view(self) -> None:
|
||||
"""Test manufacturer list view"""
|
||||
response = self.client.get(reverse('companies:manufacturer_list'))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.manufacturer.name)
|
||||
|
||||
def test_manufacturer_list_view_with_search(self) -> None:
|
||||
"""Test manufacturer list view with search"""
|
||||
response = self.client.get(reverse('companies:manufacturer_list') + '?search=Test')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.manufacturer.name)
|
||||
|
||||
response = self.client.get(reverse('companies:manufacturer_list') + '?search=NonExistent')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertNotContains(response, self.manufacturer.name)
|
||||
|
||||
def test_manufacturer_list_view_with_country_filter(self) -> None:
|
||||
"""Test manufacturer list view with country filter"""
|
||||
response = self.client.get(reverse('companies:manufacturer_list') + '?country=Test Country')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.manufacturer.name)
|
||||
|
||||
response = self.client.get(reverse('companies:manufacturer_list') + '?country=NonExistent')
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertNotContains(response, self.manufacturer.name)
|
||||
|
||||
def test_manufacturer_detail_view(self) -> None:
|
||||
"""Test manufacturer detail view"""
|
||||
response = self.client.get(
|
||||
reverse('companies:manufacturer_detail', kwargs={'slug': self.manufacturer.slug})
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, self.manufacturer.name)
|
||||
self.assertContains(response, self.manufacturer.website)
|
||||
self.assertContains(response, self.manufacturer.headquarters)
|
||||
|
||||
def test_manufacturer_detail_view_invalid_slug(self) -> None:
|
||||
"""Test manufacturer detail view with invalid slug"""
|
||||
response = self.client.get(
|
||||
reverse('companies:manufacturer_detail', kwargs={'slug': 'invalid-slug'})
|
||||
)
|
||||
self.assertEqual(response.status_code, 404)
|
||||
|
||||
def test_manufacturer_create_view_unauthenticated(self) -> None:
|
||||
"""Test manufacturer create view when not logged in"""
|
||||
response = self.client.get(reverse('companies:manufacturer_create'))
|
||||
self.assertEqual(response.status_code, 302) # Redirects to login
|
||||
|
||||
def test_manufacturer_create_view_authenticated(self) -> None:
|
||||
"""Test manufacturer create view when logged in"""
|
||||
self.client.login(username='testuser', password='testpass123')
|
||||
response = self.client.get(reverse('companies:manufacturer_create'))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_manufacturer_create_submission_regular_user(self) -> None:
|
||||
"""Test creating a manufacturer submission as regular user"""
|
||||
self.client.login(username='testuser', password='testpass123')
|
||||
data = {
|
||||
'name': 'New Manufacturer',
|
||||
'website': 'http://newmanufacturer.com',
|
||||
'headquarters': 'New HQ',
|
||||
'description': 'New Description',
|
||||
'reason': 'Adding new manufacturer',
|
||||
'source': 'Manufacturer website'
|
||||
}
|
||||
response = self.client.post(reverse('companies:manufacturer_create'), data)
|
||||
self.assertEqual(response.status_code, 302) # Redirects after submission
|
||||
self.assertTrue(EditSubmission.objects.filter(
|
||||
submission_type='CREATE',
|
||||
changes__name='New Manufacturer',
|
||||
status='NEW'
|
||||
).exists())
|
||||
|
||||
def test_manufacturer_create_submission_moderator(self) -> None:
|
||||
"""Test creating a manufacturer submission as moderator"""
|
||||
self.client.login(username='moderator', password='modpass123')
|
||||
data = {
|
||||
'name': 'New Manufacturer',
|
||||
'website': 'http://newmanufacturer.com',
|
||||
'headquarters': 'New HQ',
|
||||
'description': 'New Description',
|
||||
'reason': 'Adding new manufacturer',
|
||||
'source': 'Manufacturer website'
|
||||
}
|
||||
response = self.client.post(reverse('companies:manufacturer_create'), data)
|
||||
self.assertEqual(response.status_code, 302) # Redirects after submission
|
||||
submission = EditSubmission.objects.get(
|
||||
submission_type='CREATE',
|
||||
changes__name='New Manufacturer'
|
||||
)
|
||||
self.assertEqual(submission.status, 'APPROVED')
|
||||
self.assertEqual(submission.handled_by, self.moderator)
|
||||
|
||||
def test_manufacturer_photo_submission(self) -> None:
|
||||
"""Test photo submission for manufacturer"""
|
||||
self.client.login(username='testuser', password='testpass123')
|
||||
image_content = b'GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;'
|
||||
image = SimpleUploadedFile('test.gif', image_content, content_type='image/gif')
|
||||
data = {
|
||||
'photo': image,
|
||||
'caption': 'Test Photo',
|
||||
'date_taken': '2024-01-01'
|
||||
}
|
||||
response = cast(HttpResponse, self.client.post(
|
||||
reverse('companies:manufacturer_detail', kwargs={'slug': self.manufacturer.slug}),
|
||||
data,
|
||||
HTTP_X_REQUESTED_WITH='XMLHttpRequest' # Simulate AJAX request
|
||||
))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTrue(PhotoSubmission.objects.filter(
|
||||
content_type=ContentType.objects.get_for_model(Manufacturer),
|
||||
object_id=self.manufacturer.pk
|
||||
).exists())
|
||||
@@ -1,22 +0,0 @@
from django.urls import path
from . import views

app_name = 'companies'

urlpatterns = [
    # List views first
    path('', views.CompanyListView.as_view(), name='company_list'),
    path('manufacturers/', views.ManufacturerListView.as_view(), name='manufacturer_list'),

    # Create views
    path('create/', views.CompanyCreateView.as_view(), name='company_create'),
    path('manufacturers/create/', views.ManufacturerCreateView.as_view(), name='manufacturer_create'),

    # Update views
    path('<slug:slug>/edit/', views.CompanyUpdateView.as_view(), name='company_edit'),
    path('manufacturers/<slug:slug>/edit/', views.ManufacturerUpdateView.as_view(), name='manufacturer_edit'),

    # Detail views last (to avoid conflicts with other URL patterns)
    path('<slug:slug>/', views.CompanyDetailView.as_view(), name='company_detail'),
    path('manufacturers/<slug:slug>/', views.ManufacturerDetailView.as_view(), name='manufacturer_detail'),
]
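Because the patterns above are namespaced under app_name = 'companies', callers resolve them by name; a small usage sketch (the slug values are placeholders, and the resulting path prefix depends on where this URLconf is included in the project):

# Hypothetical reverse() calls against the URL names declared above.
from django.urls import reverse

company_url = reverse("companies:company_detail", kwargs={"slug": "example-company"})
edit_url = reverse("companies:manufacturer_edit", kwargs={"slug": "example-manufacturer"})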
@@ -1,366 +0,0 @@
|
||||
from typing import Any, Optional, Tuple, Type, cast, Union, Dict, Callable
|
||||
from django.views.generic import DetailView, ListView, CreateView, UpdateView
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.urls import reverse
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.contrib import messages
|
||||
from django.http import HttpResponseRedirect, Http404, JsonResponse, HttpResponse
|
||||
from django.db.models import Count, Sum, Q, QuerySet, Model
|
||||
from django.contrib.auth import get_user_model
|
||||
from .models import Company, Manufacturer
|
||||
from .forms import CompanyForm, ManufacturerForm
|
||||
from rides.models import Ride
|
||||
from parks.models import Park
|
||||
from location.models import Location
|
||||
from core.views import SlugRedirectMixin
|
||||
from moderation.mixins import EditSubmissionMixin, PhotoSubmissionMixin, HistoryMixin
|
||||
from moderation.models import EditSubmission
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
ModelType = Union[Type[Company], Type[Manufacturer]]
|
||||
|
||||
def get_company_parks(company: Company) -> QuerySet[Park]:
|
||||
"""Get parks owned by a company with related data."""
|
||||
return Park.objects.filter(
|
||||
owner=company
|
||||
).select_related('owner')
|
||||
|
||||
def get_company_ride_count(parks: QuerySet[Park]) -> int:
|
||||
"""Get total number of rides across all parks."""
|
||||
return Ride.objects.filter(park__in=parks).count()
|
||||
|
||||
def get_manufacturer_rides(manufacturer: Manufacturer) -> QuerySet[Ride]:
|
||||
"""Get rides made by a manufacturer with related data."""
|
||||
return Ride.objects.filter(
|
||||
manufacturer=manufacturer
|
||||
).select_related('park', 'coaster_stats')
|
||||
|
||||
def get_manufacturer_stats(rides: QuerySet[Ride]) -> Dict[str, int]:
|
||||
"""Get statistics for manufacturer rides."""
|
||||
return {
|
||||
'coaster_count': rides.filter(category='ROLLER_COASTER').count(),
|
||||
'parks_count': rides.values('park').distinct().count()
|
||||
}
|
||||
|
||||
def handle_submission_post(
|
||||
request: Any,
|
||||
handle_photo_submission: Callable[[Any], HttpResponse],
|
||||
super_post: Callable[..., HttpResponse],
|
||||
*args: Any,
|
||||
**kwargs: Any
|
||||
) -> HttpResponse:
|
||||
"""Handle POST requests for photos and edits."""
|
||||
if request.FILES:
|
||||
# Handle photo submission
|
||||
return handle_photo_submission(request)
|
||||
# Handle edit submission
|
||||
return super_post(request, *args, **kwargs)
|
||||
|
||||
# List Views
|
||||
class CompanyListView(ListView):
|
||||
model: Type[Company] = Company
|
||||
template_name = "companies/company_list.html"
|
||||
context_object_name = "companies"
|
||||
paginate_by = 12
|
||||
|
||||
def get_queryset(self) -> QuerySet[Company]:
|
||||
queryset = self.model.objects.all()
|
||||
|
||||
if country := self.request.GET.get("country"):
|
||||
# Get companies that have locations in the specified country
|
||||
company_ids = Location.objects.filter(
|
||||
content_type=ContentType.objects.get_for_model(Company),
|
||||
country__iexact=country,
|
||||
).values_list("object_id", flat=True)
|
||||
queryset = queryset.filter(pk__in=company_ids)
|
||||
|
||||
if search := self.request.GET.get("search"):
|
||||
queryset = queryset.filter(name__icontains=search)
|
||||
|
||||
return queryset.order_by("name")
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
# Add filter values to context
|
||||
context["country"] = self.request.GET.get("country", "")
|
||||
context["search"] = self.request.GET.get("search", "")
|
||||
return context
|
||||
|
||||
|
||||
class ManufacturerListView(ListView):
|
||||
model: Type[Manufacturer] = Manufacturer
|
||||
template_name = "companies/manufacturer_list.html"
|
||||
context_object_name = "manufacturers"
|
||||
paginate_by = 12
|
||||
|
||||
def get_queryset(self) -> QuerySet[Manufacturer]:
|
||||
queryset = self.model.objects.all()
|
||||
|
||||
if country := self.request.GET.get("country"):
|
||||
# Get manufacturers that have locations in the specified country
|
||||
manufacturer_ids = Location.objects.filter(
|
||||
content_type=ContentType.objects.get_for_model(Manufacturer),
|
||||
country__iexact=country,
|
||||
).values_list("object_id", flat=True)
|
||||
queryset = queryset.filter(pk__in=manufacturer_ids)
|
||||
|
||||
if search := self.request.GET.get("search"):
|
||||
queryset = queryset.filter(name__icontains=search)
|
||||
|
||||
return queryset.order_by("name")
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
# Add stats for filtering
|
||||
context["total_manufacturers"] = self.model.objects.count()
|
||||
context["total_rides"] = Ride.objects.filter(manufacturer__isnull=False).count()
|
||||
context["total_roller_coasters"] = Ride.objects.filter(
|
||||
manufacturer__isnull=False, category="ROLLER_COASTER"
|
||||
).count()
|
||||
# Add filter values to context
|
||||
context["country"] = self.request.GET.get("country", "")
|
||||
context["search"] = self.request.GET.get("search", "")
|
||||
return context
|
||||
|
||||
|
||||
# Detail Views
|
||||
class CompanyDetailView(SlugRedirectMixin, EditSubmissionMixin, PhotoSubmissionMixin, HistoryMixin, DetailView):
|
||||
model: Type[Company] = Company
|
||||
template_name = 'companies/company_detail.html'
|
||||
context_object_name = 'company'
|
||||
|
||||
def get_object(self, queryset: Optional[QuerySet[Company]] = None) -> Company:
|
||||
if queryset is None:
|
||||
queryset = self.get_queryset()
|
||||
slug = self.kwargs.get(self.slug_url_kwarg)
|
||||
try:
|
||||
# Try to get by current or historical slug
|
||||
model = cast(Type[Company], self.model)
|
||||
obj, _ = model.get_by_slug(slug)
|
||||
return obj
|
||||
except model.DoesNotExist as e:
|
||||
raise Http404(f"No {model._meta.verbose_name} found matching the query") from e
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
company = cast(Company, self.object)
|
||||
|
||||
parks = get_company_parks(company)
|
||||
context['parks'] = parks
|
||||
context['total_rides'] = get_company_ride_count(parks)
|
||||
return context
|
||||
|
||||
def get_redirect_url_pattern(self) -> str:
|
||||
return 'companies:company_detail'
|
||||
|
||||
def post(self, request: Any, *args: Any, **kwargs: Any) -> HttpResponse:
|
||||
"""Handle POST requests for photos and edits."""
|
||||
return handle_submission_post(
|
||||
request,
|
||||
self.handle_photo_submission,
|
||||
super().post,
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
class ManufacturerDetailView(SlugRedirectMixin, EditSubmissionMixin, PhotoSubmissionMixin, HistoryMixin, DetailView):
|
||||
model: Type[Manufacturer] = Manufacturer
|
||||
template_name = 'companies/manufacturer_detail.html'
|
||||
context_object_name = 'manufacturer'
|
||||
|
||||
def get_object(self, queryset: Optional[QuerySet[Manufacturer]] = None) -> Manufacturer:
|
||||
if queryset is None:
|
||||
queryset = self.get_queryset()
|
||||
slug = self.kwargs.get(self.slug_url_kwarg)
|
||||
try:
|
||||
# Try to get by current or historical slug
|
||||
model = cast(Type[Manufacturer], self.model)
|
||||
obj, _ = model.get_by_slug(slug)
|
||||
return obj
|
||||
except model.DoesNotExist as e:
|
||||
raise Http404(f"No {model._meta.verbose_name} found matching the query") from e
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
manufacturer = cast(Manufacturer, self.object)
|
||||
|
||||
rides = get_manufacturer_rides(manufacturer)
|
||||
context['rides'] = rides
|
||||
context.update(get_manufacturer_stats(rides))
|
||||
return context
|
||||
|
||||
def get_redirect_url_pattern(self) -> str:
|
||||
return 'companies:manufacturer_detail'
|
||||
|
||||
def post(self, request: Any, *args: Any, **kwargs: Any) -> HttpResponse:
|
||||
"""Handle POST requests for photos and edits."""
|
||||
return handle_submission_post(
|
||||
request,
|
||||
self.handle_photo_submission,
|
||||
super().post,
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
def _handle_submission(
|
||||
request: Any, form: Any, model: ModelType, success_url: str = ""
|
||||
) -> HttpResponseRedirect:
|
||||
"""Helper method to handle form submissions"""
|
||||
cleaned_data = form.cleaned_data.copy()
|
||||
submission = EditSubmission.objects.create(
|
||||
user=request.user,
|
||||
content_type=ContentType.objects.get_for_model(model),
|
||||
submission_type="CREATE",
|
||||
status="NEW",
|
||||
changes=cleaned_data,
|
||||
reason=request.POST.get("reason", ""),
|
||||
source=request.POST.get("source", ""),
|
||||
)
|
||||
|
||||
# Get user role safely
|
||||
user_role = getattr(request.user, "role", None)
|
||||
|
||||
# If user is moderator or above, auto-approve
|
||||
if user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]:
|
||||
obj = form.save()
|
||||
submission.object_id = obj.pk
|
||||
submission.status = "APPROVED"
|
||||
submission.handled_by = request.user
|
||||
submission.save()
|
||||
|
||||
# Generate success URL if not provided
|
||||
if not success_url:
|
||||
success_url = reverse(
|
||||
f"companies:{model.__name__.lower()}_detail", kwargs={"slug": obj.slug}
|
||||
)
|
||||
messages.success(request, f'Successfully created {getattr(obj, "name", "")}')
|
||||
return HttpResponseRedirect(success_url)
|
||||
|
||||
messages.success(request, "Your submission has been sent for review")
|
||||
return HttpResponseRedirect(reverse(f"companies:{model.__name__.lower()}_list"))
|
||||
|
||||
|
||||
# Create Views
|
||||
class CompanyCreateView(LoginRequiredMixin, CreateView):
|
||||
model: Type[Company] = Company
|
||||
form_class = CompanyForm
|
||||
template_name = "companies/company_form.html"
|
||||
object: Optional[Company]
|
||||
|
||||
def form_valid(self, form: CompanyForm) -> HttpResponseRedirect:
|
||||
return _handle_submission(self.request, form, self.model, "")
|
||||
|
||||
def get_success_url(self) -> str:
|
||||
if self.object is None:
|
||||
return reverse("companies:company_list")
|
||||
return reverse("companies:company_detail", kwargs={"slug": self.object.slug})
|
||||
|
||||
|
||||
class ManufacturerCreateView(LoginRequiredMixin, CreateView):
|
||||
model: Type[Manufacturer] = Manufacturer
|
||||
form_class = ManufacturerForm
|
||||
template_name = "companies/manufacturer_form.html"
|
||||
object: Optional[Manufacturer]
|
||||
|
||||
def form_valid(self, form: ManufacturerForm) -> HttpResponseRedirect:
|
||||
return _handle_submission(self.request, form, self.model, "")
|
||||
|
||||
def get_success_url(self) -> str:
|
||||
if self.object is None:
|
||||
return reverse("companies:manufacturer_list")
|
||||
return reverse(
|
||||
"companies:manufacturer_detail", kwargs={"slug": self.object.slug}
|
||||
)
|
||||
|
||||
|
||||
def _handle_update(
|
||||
request: Any, form: Any, obj: Union[Company, Manufacturer], model: ModelType
|
||||
) -> HttpResponseRedirect:
|
||||
"""Helper method to handle update submissions"""
|
||||
cleaned_data = form.cleaned_data.copy()
|
||||
submission = EditSubmission.objects.create(
|
||||
user=request.user,
|
||||
content_type=ContentType.objects.get_for_model(model),
|
||||
object_id=obj.pk,
|
||||
submission_type="EDIT",
|
||||
changes=cleaned_data,
|
||||
reason=request.POST.get("reason", ""),
|
||||
source=request.POST.get("source", ""),
|
||||
)
|
||||
|
||||
# Get user role safely
|
||||
user_role = getattr(request.user, "role", None)
|
||||
|
||||
# If user is moderator or above, auto-approve
|
||||
if user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]:
|
||||
obj = form.save()
|
||||
submission.status = "APPROVED"
|
||||
submission.handled_by = request.user
|
||||
submission.save()
|
||||
messages.success(request, f'Successfully updated {getattr(obj, "name", "")}')
|
||||
return HttpResponseRedirect(
|
||||
reverse(
|
||||
f"companies:{model.__name__.lower()}_detail",
|
||||
kwargs={"slug": getattr(obj, "slug", "")},
|
||||
)
|
||||
)
|
||||
|
||||
messages.success(
|
||||
request, f'Your changes to {getattr(obj, "name", "")} have been sent for review'
|
||||
)
|
||||
return HttpResponseRedirect(
|
||||
reverse(
|
||||
f"companies:{model.__name__.lower()}_detail",
|
||||
kwargs={"slug": getattr(obj, "slug", "")},
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
# Update Views
|
||||
class CompanyUpdateView(LoginRequiredMixin, UpdateView):
|
||||
model: Type[Company] = Company
|
||||
form_class = CompanyForm
|
||||
template_name = "companies/company_form.html"
|
||||
object: Optional[Company]
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
context["is_edit"] = True
|
||||
return context
|
||||
|
||||
def form_valid(self, form: CompanyForm) -> HttpResponseRedirect:
|
||||
if self.object is None:
|
||||
return HttpResponseRedirect(reverse("companies:company_list"))
|
||||
return _handle_update(self.request, form, self.object, self.model)
|
||||
|
||||
def get_success_url(self) -> str:
|
||||
if self.object is None:
|
||||
return reverse("companies:company_list")
|
||||
return reverse("companies:company_detail", kwargs={"slug": self.object.slug})
|
||||
|
||||
|
||||
class ManufacturerUpdateView(LoginRequiredMixin, UpdateView):
|
||||
model: Type[Manufacturer] = Manufacturer
|
||||
form_class = ManufacturerForm
|
||||
template_name = "companies/manufacturer_form.html"
|
||||
object: Optional[Manufacturer]
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
context = super().get_context_data(**kwargs)
|
||||
context["is_edit"] = True
|
||||
return context
|
||||
|
||||
def form_valid(self, form: ManufacturerForm) -> HttpResponseRedirect:
|
||||
if self.object is None:
|
||||
return HttpResponseRedirect(reverse("companies:manufacturer_list"))
|
||||
return _handle_update(self.request, form, self.object, self.model)
|
||||
|
||||
def get_success_url(self) -> str:
|
||||
if self.object is None:
|
||||
return reverse("companies:manufacturer_list")
|
||||
return reverse(
|
||||
"companies:manufacturer_detail", kwargs={"slug": self.object.slug}
|
||||
)
|
||||
435
complete-project-review-2025-01-05.md
Normal file
@@ -0,0 +1,435 @@
|
||||
# ThrillWiki Django Project - Complete Technical Review
|
||||
**Date:** January 5, 2025
|
||||
**Reviewer:** Roo (Architect Mode)
|
||||
**Review Type:** Exhaustive Code Analysis
|
||||
**Status:** COMPLETED - Comprehensive analysis of entire codebase
|
||||
|
||||
> **CRITICAL MEMORY BANK DOCUMENT** - This exhaustive review represents the most comprehensive analysis of the ThrillWiki project to date. All future architectural decisions should reference this document.
|
||||
|
||||
## Executive Summary
|
||||
|
||||
ThrillWiki is a comprehensive Django-based theme park and ride database application with advanced features including user authentication, content moderation, media management, location services, analytics, and history tracking. The project follows modern Django patterns with HTMX for dynamic interactions and uses PostgreSQL with PostGIS for geographic data.
|
||||
|
||||
## Technical Stack Analysis
|
||||
|
||||
### Core Framework & Dependencies
|
||||
- **Django 5.0+** - Modern Django framework
|
||||
- **Python 3.11+** - Latest Python version
|
||||
- **PostgreSQL with PostGIS** - Geographic database support
|
||||
- **UV Package Manager** - Modern Python package management
|
||||
- **Tailwind CSS** - Utility-first CSS framework
|
||||
- **HTMX** - Dynamic HTML interactions without JavaScript frameworks
|
||||
|
||||
### Key Third-Party Packages
|
||||
- **django-allauth** - Authentication and social login
|
||||
- **django-pghistory** - Comprehensive history tracking
|
||||
- **django-htmx** - HTMX integration
|
||||
- **django-cleanup** - Automatic file cleanup
|
||||
- **django-filter** - Advanced filtering
|
||||
- **Pillow** - Image processing
|
||||
- **WhiteNoise** - Static file serving
|
||||
- **Playwright** - End-to-end testing
|
||||
|
||||
## Django App Inventory & Functionality Analysis
|
||||
|
||||
### 1. Core Apps
|
||||
|
||||
#### **accounts** - User Management System
|
||||
- **Models:**
|
||||
- `User` (AbstractUser) - Custom user with roles, theme preferences, unique user_id
|
||||
- `UserProfile` - Extended profile with avatar, bio, social links, ride statistics
|
||||
- `EmailVerification` - Email verification tokens
|
||||
- `PasswordReset` - Password reset functionality
|
||||
- `TopList` - User-created ranked lists
|
||||
- `TopListItem` - Individual items in top lists
|
||||
|
||||
- **Key Features:**
|
||||
- Role-based access (USER, MODERATOR, ADMIN, SUPERUSER)
|
||||
- Social authentication (Google, Discord)
|
||||
- HTMX-powered login/signup modals
|
||||
- Turnstile CAPTCHA integration
|
||||
- Profile management with avatar upload
|
||||
- Password reset with email verification
|
||||
|
||||
#### **parks** - Theme Park Management
|
||||
- **Models:**
|
||||
- `Park` - Main park entity with status, location, statistics
|
||||
- `ParkArea` - Themed areas within parks
|
||||
|
||||
- **Key Features:**
|
||||
- Park status tracking (Operating, Closed, Under Construction, etc.)
|
||||
- Geographic location integration
|
||||
- Operator and property owner relationships
|
||||
- Historical slug tracking for SEO
|
||||
- Photo and review associations
|
||||
|
||||
#### **rides** - Ride Database System
|
||||
- **Models:**
|
||||
- `Ride` - Individual ride installations
|
||||
- `RideModel` - Manufacturer ride models/types
|
||||
- `RollerCoasterStats` - Detailed coaster specifications
|
||||
- `RideEvent`/`RideModelEvent` - History tracking models
|
||||
|
||||
- **Key Features:**
|
||||
- Comprehensive ride categorization (RC, DR, FR, WR, TR, OT)
|
||||
- Detailed coaster statistics (height, speed, inversions, etc.)
|
||||
- Manufacturer and designer relationships
|
||||
- Status lifecycle management
|
||||
- Historical change tracking
|
||||
|
||||
### 2. Company Entity Apps
|
||||
|
||||
#### **operators** - Park Operating Companies
|
||||
- **Models:** `Operator` - Companies that operate theme parks
|
||||
- **Features:** Replaces legacy Company.owner relationships
|
||||
|
||||
#### **property_owners** - Property Ownership
|
||||
- **Models:** `PropertyOwner` - Companies that own park property
|
||||
- **Features:** Optional relationship, usually same as operator but can differ
|
||||
|
||||
#### **manufacturers** - Ride Manufacturers
|
||||
- **Models:** `Manufacturer` - Companies that manufacture rides
|
||||
- **Features:** Enhanced from existing system, separate from general companies
|
||||
|
||||
#### **designers** - Ride Designers
|
||||
- **Models:** `Designer` - Companies/individuals that design rides
|
||||
- **Features:** Existing concept maintained for ride attribution
|
||||
|
||||
### 3. Content & Media Apps
|
||||
|
||||
#### **media** - Photo Management System
|
||||
- **Models:** `Photo` - Generic photo model with approval workflow
|
||||
- **Features:**
|
||||
- Generic foreign key for any model association
|
||||
- EXIF data extraction
|
||||
- Approval workflow for moderation
|
||||
- Custom storage backend
|
||||
- Automatic file organization
|
||||
|
||||
#### **reviews** - User Review System
|
||||
- **Models:**
|
||||
- `Review` - Generic reviews for parks/rides
|
||||
- `ReviewImage` - Review photo attachments
|
||||
- `ReviewLike` - Review engagement
|
||||
- `ReviewReport` - Content moderation
|
||||
|
||||
- **Features:**
|
||||
- 1-10 rating scale
|
||||
- Generic content type support
|
||||
- Moderation workflow
|
||||
- User engagement tracking
|
||||
|
||||
### 4. Supporting Systems
|
||||
|
||||
#### **moderation** - Content Moderation System
|
||||
- **Models:**
|
||||
- `EditSubmission` - User-submitted edits/additions
|
||||
- `PhotoSubmission` - User-submitted photos
|
||||
|
||||
- **Features:**
|
||||
- Comprehensive edit approval workflow
|
||||
- Moderator edit capabilities
|
||||
- Duplicate detection
|
||||
- Status tracking (PENDING, APPROVED, REJECTED, ESCALATED)
|
||||
- Auto-approval for moderators
|
||||
|
#### **location** - Geographic Services
- **Models:** `Location` - Generic location model with PostGIS support
- **Features:**
  - Full address components
  - Geographic coordinates (legacy decimal + PostGIS Point)
  - Distance calculations
  - Nearby location queries (see the sketch after this list)
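A hedged sketch of how such a nearby-location query could be expressed with GeoDjango. The `point` field name is an assumption for illustration; this review does not show the actual `Location` field definitions.

```python
# Illustrative GeoDjango query; the `point` field name is assumed.
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D

from location.models import Location

origin = Point(-75.16, 39.95, srid=4326)  # lon, lat
nearby = (
    Location.objects.filter(point__distance_lte=(origin, D(km=50)))
    .annotate(distance=Distance("point", origin))  # PostGIS distance in meters
    .order_by("distance")
)
```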
#### **analytics** - Usage Analytics
- **Models:** `PageView` - Generic page view tracking
- **Features:**
  - Trending content calculation (see the sketch after this list)
  - IP and user agent tracking
  - Time-based analytics
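A minimal sketch of what the trending calculation might look like. The `viewed_at` field name and the seven-day window are assumptions for illustration, not taken from the actual `PageView` model.

```python
# Hypothetical trending query; `viewed_at` is an assumed timestamp field.
from datetime import timedelta

from django.db.models import Count
from django.utils import timezone

from analytics.models import PageView


def trending_content(days: int = 7, limit: int = 10):
    """Return the most-viewed (content_type, object_id) pairs in the window."""
    since = timezone.now() - timedelta(days=days)
    return (
        PageView.objects.filter(viewed_at__gte=since)
        .values("content_type", "object_id")
        .annotate(views=Count("id"))
        .order_by("-views")[:limit]
    )
```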
||||
#### **search** - Search Functionality
|
||||
- **Models:** None (view-based search)
|
||||
- **Features:** Global search across parks, rides, operators, manufacturers
|
||||
|
||||
### 5. Infrastructure Apps
|
||||
|
#### **history_tracking** - Change Management
- **Models:**
  - `TrackedModel` - Abstract base for history tracking
  - `HistoricalSlug` - Manual slug history tracking
  - `DiffMixin` - Change comparison utilities

- **Features:**
  - Comprehensive change tracking via pghistory (see the sketch after this list)
  - Slug history for SEO preservation
  - Diff generation for changes
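For orientation, registering a model with django-pghistory is roughly this shape. This is a minimal sketch only, assuming django-pghistory 3.x where a bare `@pghistory.track()` records the default events; the event API has changed between major versions, so treat it as an outline rather than the project's exact usage.

```python
# Sketch only - not the project's actual tracking setup.
import pghistory
from django.db import models


@pghistory.track()  # assumes pghistory 3.x default event registration
class Park(models.Model):
    name = models.CharField(max_length=255)
    # Changes are written to an auto-generated event table,
    # matching the "*_*event" history tables described later in this review.
```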
||||
#### **email_service** - Email Management
|
||||
- **Models:** `EmailConfiguration` - Site-specific email settings
|
||||
- **Features:** Forward Email API integration
|
||||
|
||||
#### **core** - Shared Utilities
|
||||
- **Models:**
|
||||
- `SlugHistory` - Generic slug tracking
|
||||
- `SluggedModel` - Abstract slugged model base
|
||||
|
## Entity Relationship Analysis

### Primary Entity Relationships

```
Park (1) ←→ (1) Operator [REQUIRED]
Park (1) ←→ (0..1) PropertyOwner [OPTIONAL]
Park (1) ←→ (*) ParkArea
Park (1) ←→ (*) Ride
Park (1) ←→ (*) Location [Generic]
Park (1) ←→ (*) Photo [Generic]
Park (1) ←→ (*) Review [Generic]

Ride (1) ←→ (1) Park [REQUIRED]
Ride (1) ←→ (0..1) ParkArea [OPTIONAL]
Ride (1) ←→ (0..1) Manufacturer [OPTIONAL]
Ride (1) ←→ (0..1) Designer [OPTIONAL]
Ride (1) ←→ (0..1) RideModel [OPTIONAL]
Ride (1) ←→ (0..1) RollerCoasterStats [OPTIONAL]
Ride (1) ←→ (*) Photo [Generic]
Ride (1) ←→ (*) Review [Generic]

RideModel (1) ←→ (0..1) Manufacturer
RideModel (1) ←→ (*) Ride

User (1) ←→ (1) UserProfile
User (1) ←→ (*) Review
User (1) ←→ (*) TopList
User (1) ←→ (*) EditSubmission
User (1) ←→ (*) PhotoSubmission
```
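To make the cardinalities above concrete, here is a deliberately condensed model sketch. It is illustrative only: the app labels come from the app inventory earlier in this review, but field names, `on_delete` behaviour, and `related_name` values are assumptions, and the real models carry many more fields.

```python
# Hypothetical condensed sketch of the core relationships; not the actual models.
from django.db import models


class Park(models.Model):
    name = models.CharField(max_length=255)
    operator = models.ForeignKey(                      # REQUIRED
        "operators.Operator", on_delete=models.PROTECT, related_name="parks"
    )
    property_owner = models.ForeignKey(                # OPTIONAL, often same company
        "property_owners.PropertyOwner",
        null=True, blank=True, on_delete=models.SET_NULL, related_name="parks",
    )


class Ride(models.Model):
    name = models.CharField(max_length=255)
    park = models.ForeignKey(                          # REQUIRED
        Park, on_delete=models.CASCADE, related_name="rides"
    )
    manufacturer = models.ForeignKey(                  # OPTIONAL
        "manufacturers.Manufacturer",
        null=True, blank=True, on_delete=models.SET_NULL, related_name="rides",
    )
    designer = models.ForeignKey(                      # OPTIONAL
        "designers.Designer",
        null=True, blank=True, on_delete=models.SET_NULL, related_name="rides",
    )
```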

### Key Architectural Patterns

1. **Generic Foreign Keys** - Extensive use for flexible relationships (Photos, Reviews, Locations); see the sketch after this list
2. **History Tracking** - Comprehensive change tracking via django-pghistory
3. **Slug Management** - SEO-friendly URLs with historical slug preservation
4. **Moderation Workflow** - User-generated content approval system
5. **Role-Based Access** - Hierarchical user permissions
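The generic-photo relationship called out in item 1 can be sketched as follows. This is a minimal illustration, not the actual `media.Photo` model, which also carries the approval workflow, EXIF extraction, and custom storage described earlier.

```python
# Minimal generic-relationship sketch; the real model has more fields.
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models


class Photo(models.Model):
    image = models.ImageField(upload_to="photos/")
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey("content_type", "object_id")  # Park, Ride, ...
```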
## Database Schema Analysis
|
||||
|
||||
### Core Tables Structure
|
||||
|
||||
#### User Management
|
||||
- `accounts_user` - Extended Django user model
|
||||
- `accounts_userprofile` - User profile extensions
|
||||
- `accounts_toplist` / `accounts_toplistitem` - User rankings
|
||||
|
||||
#### Content Tables
|
||||
- `parks_park` / `parks_parkarea` - Park hierarchy
|
||||
- `rides_ride` / `rides_ridemodel` / `rides_rollercoasterstats` - Ride data
|
||||
- `operators_operator` / `property_owners_propertyowner` - Ownership
|
||||
- `manufacturers_manufacturer` / `designers_designer` - Attribution
|
||||
|
||||
#### Supporting Tables
|
||||
- `media_photo` - Generic photo storage
|
||||
- `reviews_review` + related - Review system
|
||||
- `location_location` - Geographic data
|
||||
- `moderation_editsubmission` / `moderation_photosubmission` - Moderation
|
||||
- `analytics_pageview` - Usage tracking
|
||||
|
||||
#### History Tables (pghistory)
|
||||
- `*_*event` tables for comprehensive change tracking
|
||||
- Automatic creation via pghistory decorators
|
||||
|
||||
## URL Routing Analysis
|
||||
|
||||
### Main URL Structure
|
||||
```
|
||||
/ - Home page with trending content
|
||||
/admin/ - Django admin interface
|
||||
/ac/ - Autocomplete endpoints
|
||||
/parks/ - Park browsing and details
|
||||
/rides/ - Ride browsing and details
|
||||
/operators/ - Operator profiles
|
||||
/property-owners/ - Property owner profiles
|
||||
/manufacturers/ - Manufacturer profiles
|
||||
/designers/ - Designer profiles
|
||||
/photos/ - Media management
|
||||
/search/ - Global search
|
||||
/accounts/ - Authentication (custom + allauth)
|
||||
/moderation/ - Content moderation
|
||||
/history/ - Change history
|
||||
```
|
||||
|
||||
### URL Patterns
|
||||
- SEO-friendly slugs for all content
|
||||
- Historical slug support for redirects
|
||||
- HTMX-compatible endpoints
|
||||
- RESTful resource organization
|
||||
|
||||
## Form Analysis
|
||||
|
||||
### Key Forms Identified
|
||||
- User authentication (login/signup with Turnstile)
|
||||
- Profile management
|
||||
- Content submission (parks, rides)
|
||||
- Photo uploads
|
||||
- Review submission
|
||||
- Moderation workflows
|
||||
|
||||
### Form Features
|
||||
- HTMX integration for dynamic interactions
|
||||
- Comprehensive validation
|
||||
- File upload handling
|
||||
- CAPTCHA protection
|
||||
|
||||
## Admin Interface Analysis
|
||||
|
||||
### Django Admin Customization
|
||||
- Custom admin interfaces for all models
|
||||
- Bulk operations support
|
||||
- Advanced filtering and search
|
||||
- Moderation workflow integration
|
||||
- History tracking display
|
||||
|
||||
## Template Structure Analysis
|
||||
|
||||
### Template Organization
|
||||
```
|
||||
templates/
|
||||
├── base/ - Base templates and layouts
|
||||
├── account/ - Authentication templates
|
||||
├── accounts/ - User profile templates
|
||||
├── parks/ - Park-related templates
|
||||
├── rides/ - Ride-related templates
|
||||
├── operators/ - Operator templates
|
||||
├── manufacturers/ - Manufacturer templates
|
||||
├── designers/ - Designer templates
|
||||
├── property_owners/ - Property owner templates
|
||||
├── media/ - Photo management templates
|
||||
├── moderation/ - Moderation interface templates
|
||||
├── location/ - Location templates
|
||||
└── pages/ - Static pages
|
||||
```
|
||||
|
||||
### Template Features
|
||||
- HTMX partial templates for dynamic updates
|
||||
- Responsive design with Tailwind CSS
|
||||
- Component-based architecture
|
||||
- SEO optimization
|
||||
- Accessibility considerations
|
||||
|
||||
## Static Asset Analysis
|
||||
|
||||
### CSS Architecture
|
||||
- Tailwind CSS utility-first approach
|
||||
- Custom CSS in `static/css/src/`
|
||||
- Compiled output in `static/css/`
|
||||
- Component-specific styles
|
||||
|
||||
### JavaScript
|
||||
- Minimal custom JavaScript
|
||||
- HTMX for dynamic interactions
|
||||
- Alpine.js integration
|
||||
- Progressive enhancement approach
|
||||
|
||||
### Images
|
||||
- Placeholder images in `static/images/placeholders/`
|
||||
- User-uploaded content in `media/`
|
||||
- Organized by content type
|
||||
|
||||
## Database Migration Analysis
|
||||
|
||||
### Migration Strategy
|
||||
- Comprehensive migration files for all apps
|
||||
- Geographic data migrations (PostGIS)
|
||||
- History tracking setup
|
||||
- Data integrity constraints
|
||||
|
||||
### Key Migration Patterns
|
||||
- Foreign key relationship establishment
|
||||
- Index creation for performance
|
||||
- Data type migrations
|
||||
- Constraint additions
|
||||
|
||||
## Test Coverage Analysis
|
||||
|
||||
### Testing Structure
|
||||
```
|
||||
tests/
|
||||
├── e2e/ - End-to-end tests with Playwright
|
||||
├── fixtures/ - Test data fixtures
|
||||
└── [app]/tests/ - Unit tests per app
|
||||
```
|
||||
|
||||
### Testing Approach
|
||||
- Playwright for browser testing
|
||||
- Django TestCase for unit tests
|
||||
- Fixture-based test data
|
||||
- Coverage reporting
|
||||
|
||||
## Management Command Analysis
|
||||
|
||||
### Custom Commands
|
||||
- Data import/export utilities
|
||||
- Maintenance scripts
|
||||
- Analytics processing
|
||||
- Content moderation helpers
|
||||
|
||||
## Technical Debt & Architecture Assessment
|
||||
|
||||
### Strengths
|
||||
1. **Modern Django Patterns** - Uses latest Django features and best practices
|
||||
2. **Comprehensive History Tracking** - Full audit trail via pghistory
|
||||
3. **Flexible Content System** - Generic foreign keys for extensibility
|
||||
4. **Geographic Support** - PostGIS integration for location features
|
||||
5. **Moderation Workflow** - Robust user-generated content management
|
||||
6. **Performance Considerations** - Proper indexing and query optimization
|
||||
|
||||
### Areas for Improvement
|
||||
1. **API Layer** - No REST API for mobile/external access
|
||||
2. **Caching Strategy** - Limited caching implementation
|
||||
3. **Search Optimization** - Basic search, could benefit from Elasticsearch
|
||||
4. **Image Optimization** - No automatic image resizing/optimization
|
||||
5. **Internationalization** - No i18n support currently
|
||||
|
||||
### Security Analysis
|
||||
1. **Authentication** - Robust with social login and 2FA options
|
||||
2. **Authorization** - Role-based access control
|
||||
3. **Input Validation** - Comprehensive form validation
|
||||
4. **CSRF Protection** - Django built-in protection
|
||||
5. **SQL Injection** - ORM usage prevents issues
|
||||
6. **File Upload Security** - Proper validation and storage
|
||||
|
||||
## Performance Considerations
|
||||
|
### Database Optimization
- Proper indexing on frequently queried fields
- Select/prefetch related for query optimization (see the sketch after this list)
- Generic foreign key indexing
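As a concrete illustration of the select/prefetch pattern (mirroring the companies views earlier in this diff; the `photos` related name here is an assumption):

```python
# Illustrative only; avoids per-row queries when rendering ride lists.
from rides.models import Ride

rides = (
    Ride.objects.filter(manufacturer__isnull=False)
    .select_related("park", "manufacturer")  # JOINs instead of N+1 FK lookups
    .prefetch_related("photos")              # "photos" related name is assumed
)
```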
||||
### Caching Strategy
|
||||
- Basic cache implementation
|
||||
- Trending content caching
|
||||
- Static file optimization with WhiteNoise
|
||||
|
||||
### Media Handling
|
||||
- Custom storage backend
|
||||
- Organized file structure
|
||||
- EXIF data extraction
|
||||
|
||||
## Deployment Architecture
|
||||
|
||||
### Production Considerations
|
||||
- PostgreSQL with PostGIS extensions
|
||||
- Static file serving via WhiteNoise
|
||||
- Media file storage (local/cloud)
|
||||
- Email service integration
|
||||
- Geographic library dependencies (GDAL, GEOS)
|
||||
|
||||
## Conclusion
|
||||
|
||||
ThrillWiki represents a well-architected Django application with modern patterns and comprehensive functionality. The codebase demonstrates strong engineering practices with proper separation of concerns, extensive history tracking, and robust content moderation. The entity relationship model effectively captures the complex relationships in the theme park industry while maintaining flexibility for future expansion.
|
||||
|
||||
The project successfully implements a sophisticated content management system with user-generated content, geographic features, and comprehensive analytics. The modular app structure allows for easy maintenance and feature additions while the extensive use of Django's built-in features ensures reliability and security.
|
||||
|
||||
**Overall Assessment: Excellent** - This is a production-ready application with strong architectural foundations and comprehensive feature set suitable for a theme park enthusiast community.
|
||||
1
config/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Configuration package for thrillwiki project
|
||||
1
config/django/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Django settings package
|
||||
377
config/django/base.py
Normal file
@@ -0,0 +1,377 @@
|
||||
"""
|
||||
Base Django settings for thrillwiki project.
|
||||
Common settings shared across all environments.
|
||||
"""
|
||||
|
||||
import environ # type: ignore[import]
|
||||
from pathlib import Path
|
||||
|
||||
# Initialize environment variables
|
||||
env = environ.Env(
|
||||
DEBUG=(bool, False),
|
||||
SECRET_KEY=(str, ""),
|
||||
ALLOWED_HOSTS=(list, []),
|
||||
DATABASE_URL=(str, ""),
|
||||
CACHE_URL=(str, "locmem://"),
|
||||
EMAIL_URL=(str, ""),
|
||||
REDIS_URL=(str, ""),
|
||||
)
|
||||
|
||||
# Build paths inside the project like this: BASE_DIR / 'subdir'.
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent.parent
|
||||
|
||||
# Read environment file if it exists
|
||||
environ.Env.read_env(BASE_DIR / ".env")
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = env("SECRET_KEY")
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = env("DEBUG")
|
||||
|
||||
# Allowed hosts
|
||||
ALLOWED_HOSTS = env("ALLOWED_HOSTS")
|
||||
|
||||
# CSRF trusted origins
|
||||
CSRF_TRUSTED_ORIGINS = env("CSRF_TRUSTED_ORIGINS", default=[]) # type: ignore[arg-type]
|
||||
|
||||
# Application definition
|
||||
DJANGO_APPS = [
|
||||
"django.contrib.admin",
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.staticfiles",
|
||||
"django.contrib.sites",
|
||||
"django.contrib.gis", # GeoDjango
|
||||
]
|
||||
|
||||
THIRD_PARTY_APPS = [
|
||||
"rest_framework", # Django REST Framework
|
||||
"drf_spectacular", # OpenAPI 3.0 documentation
|
||||
"corsheaders", # CORS headers for API
|
||||
"pghistory", # django-pghistory
|
||||
"pgtrigger", # Required by django-pghistory
|
||||
"allauth",
|
||||
"allauth.account",
|
||||
"allauth.socialaccount",
|
||||
"allauth.socialaccount.providers.google",
|
||||
"allauth.socialaccount.providers.discord",
|
||||
"django_cleanup",
|
||||
"django_filters",
|
||||
"django_htmx",
|
||||
"whitenoise",
|
||||
"django_tailwind_cli",
|
||||
"autocomplete", # Django HTMX Autocomplete
|
||||
"health_check", # Health checks
|
||||
"health_check.db",
|
||||
"health_check.cache",
|
||||
"health_check.storage",
|
||||
"health_check.contrib.migrations",
|
||||
"health_check.contrib.redis",
|
||||
]
|
||||
|
||||
LOCAL_APPS = [
|
||||
"core",
|
||||
"accounts",
|
||||
"parks",
|
||||
"rides",
|
||||
"email_service",
|
||||
"media.apps.MediaConfig",
|
||||
"moderation",
|
||||
"location",
|
||||
]
|
||||
|
||||
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
||||
|
||||
MIDDLEWARE = [
|
||||
"django.middleware.cache.UpdateCacheMiddleware",
|
||||
"corsheaders.middleware.CorsMiddleware", # CORS middleware for API
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"whitenoise.middleware.WhiteNoiseMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
"core.middleware.PgHistoryContextMiddleware", # Add history context tracking
|
||||
"allauth.account.middleware.AccountMiddleware",
|
||||
"django.middleware.cache.FetchFromCacheMiddleware",
|
||||
"django_htmx.middleware.HtmxMiddleware",
|
||||
"core.middleware.PageViewMiddleware", # Add our page view tracking
|
||||
]
|
||||
|
||||
ROOT_URLCONF = "thrillwiki.urls"
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [BASE_DIR / "templates"],
|
||||
"APP_DIRS": True,
|
||||
"OPTIONS": {
|
||||
"context_processors": [
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.request",
|
||||
"django.contrib.auth.context_processors.auth",
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
"moderation.context_processors.moderation_access",
|
||||
]
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = "thrillwiki.wsgi.application"
|
||||
|
||||
# Password validation
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
"NAME": (
|
||||
"django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
|
||||
),
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
||||
},
|
||||
]
|
||||
|
||||
# Internationalization
|
||||
LANGUAGE_CODE = "en-us"
|
||||
TIME_ZONE = "America/New_York"
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
|
||||
# Static files (CSS, JavaScript, Images)
|
||||
STATIC_URL = "static/"
|
||||
STATICFILES_DIRS = [BASE_DIR / "static"]
|
||||
STATIC_ROOT = BASE_DIR / "staticfiles"
|
||||
|
||||
# Media files
|
||||
MEDIA_URL = "/media/"
|
||||
MEDIA_ROOT = BASE_DIR / "media"
|
||||
|
||||
# Default primary key field type
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
||||
|
||||
# Authentication settings
|
||||
AUTHENTICATION_BACKENDS = [
|
||||
"django.contrib.auth.backends.ModelBackend",
|
||||
"allauth.account.auth_backends.AuthenticationBackend",
|
||||
]
|
||||
|
||||
# django-allauth settings
|
||||
SITE_ID = 1
|
||||
ACCOUNT_SIGNUP_FIELDS = ["email*", "username*", "password1*", "password2*"]
|
||||
ACCOUNT_LOGIN_METHODS = {"email", "username"}
|
||||
ACCOUNT_EMAIL_VERIFICATION = "optional"
|
||||
LOGIN_REDIRECT_URL = "/"
|
||||
ACCOUNT_LOGOUT_REDIRECT_URL = "/"
|
||||
|
||||
# Custom adapters
|
||||
ACCOUNT_ADAPTER = "accounts.adapters.CustomAccountAdapter"
|
||||
SOCIALACCOUNT_ADAPTER = "accounts.adapters.CustomSocialAccountAdapter"
|
||||
|
||||
# Social account settings
|
||||
SOCIALACCOUNT_PROVIDERS = {
|
||||
"google": {
|
||||
"SCOPE": [
|
||||
"profile",
|
||||
"email",
|
||||
],
|
||||
"AUTH_PARAMS": {"access_type": "online"},
|
||||
},
|
||||
"discord": {
|
||||
"SCOPE": ["identify", "email"],
|
||||
"OAUTH_PKCE_ENABLED": True,
|
||||
},
|
||||
}
|
||||
|
||||
# Additional social account settings
|
||||
SOCIALACCOUNT_LOGIN_ON_GET = True
|
||||
SOCIALACCOUNT_AUTO_SIGNUP = False
|
||||
SOCIALACCOUNT_STORE_TOKENS = True
|
||||
|
||||
# Custom User Model
|
||||
AUTH_USER_MODEL = "accounts.User"
|
||||
|
||||
# Autocomplete configuration
|
||||
AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False
|
||||
|
||||
# Tailwind configuration
|
||||
TAILWIND_CLI_CONFIG_FILE = BASE_DIR / "tailwind.config.js"
|
||||
TAILWIND_CLI_SRC_CSS = BASE_DIR / "static/css/src/input.css"
|
||||
TAILWIND_CLI_DIST_CSS = BASE_DIR / "static/css/tailwind.css"
|
||||
|
||||
# Test runner
|
||||
TEST_RUNNER = "django.test.runner.DiscoverRunner"
|
||||
|
||||
# Road Trip Service Settings
|
||||
ROADTRIP_CACHE_TIMEOUT = 3600 * 24 # 24 hours for geocoding
|
||||
ROADTRIP_ROUTE_CACHE_TIMEOUT = 3600 * 6 # 6 hours for routes
|
||||
ROADTRIP_MAX_REQUESTS_PER_SECOND = 1 # Respect OSM rate limits
|
||||
ROADTRIP_USER_AGENT = "ThrillWiki Road Trip Planner (https://thrillwiki.com)"
|
||||
ROADTRIP_REQUEST_TIMEOUT = 10 # seconds
|
||||
ROADTRIP_MAX_RETRIES = 3
|
||||
ROADTRIP_BACKOFF_FACTOR = 2
|
||||
|
||||
# Django REST Framework Settings
|
||||
REST_FRAMEWORK = {
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": [
|
||||
"rest_framework.authentication.SessionAuthentication",
|
||||
"rest_framework.authentication.TokenAuthentication",
|
||||
],
|
||||
"DEFAULT_PERMISSION_CLASSES": [
|
||||
"rest_framework.permissions.IsAuthenticated",
|
||||
],
|
||||
"DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination",
|
||||
"PAGE_SIZE": 20,
|
||||
"DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.AcceptHeaderVersioning",
|
||||
"DEFAULT_VERSION": "v1",
|
||||
"ALLOWED_VERSIONS": ["v1"],
|
||||
"DEFAULT_RENDERER_CLASSES": [
|
||||
"rest_framework.renderers.JSONRenderer",
|
||||
"rest_framework.renderers.BrowsableAPIRenderer",
|
||||
],
|
||||
"DEFAULT_PARSER_CLASSES": [
|
||||
"rest_framework.parsers.JSONParser",
|
||||
"rest_framework.parsers.FormParser",
|
||||
"rest_framework.parsers.MultiPartParser",
|
||||
],
|
||||
"EXCEPTION_HANDLER": "core.api.exceptions.custom_exception_handler",
|
||||
"DEFAULT_FILTER_BACKENDS": [
|
||||
"django_filters.rest_framework.DjangoFilterBackend",
|
||||
"rest_framework.filters.SearchFilter",
|
||||
"rest_framework.filters.OrderingFilter",
|
||||
],
|
||||
"TEST_REQUEST_DEFAULT_FORMAT": "json",
|
||||
"NON_FIELD_ERRORS_KEY": "non_field_errors",
|
||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
|
||||
}
|
||||
|
||||
# CORS Settings for API
|
||||
CORS_ALLOWED_ORIGINS = env("CORS_ALLOWED_ORIGINS", default=[]) # type: ignore[arg-type]
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
CORS_ALLOW_ALL_ORIGINS = env(
|
||||
"CORS_ALLOW_ALL_ORIGINS", default=False
|
||||
) # type: ignore[arg-type]
|
||||
|
||||
# API-specific settings
|
||||
API_RATE_LIMIT_PER_MINUTE = env.int(
|
||||
"API_RATE_LIMIT_PER_MINUTE", default=60
|
||||
) # type: ignore[arg-type]
|
||||
API_RATE_LIMIT_PER_HOUR = env.int(
|
||||
"API_RATE_LIMIT_PER_HOUR", default=1000
|
||||
) # type: ignore[arg-type]
|
||||
|
||||
# drf-spectacular settings
|
||||
SPECTACULAR_SETTINGS = {
|
||||
"TITLE": "ThrillWiki API",
|
||||
"DESCRIPTION": "Comprehensive theme park and ride information API",
|
||||
"VERSION": "1.0.0",
|
||||
"SERVE_INCLUDE_SCHEMA": False,
|
||||
"COMPONENT_SPLIT_REQUEST": True,
|
||||
"TAGS": [
|
||||
{"name": "parks", "description": "Theme park operations"},
|
||||
{"name": "rides", "description": "Ride information and management"},
|
||||
{"name": "locations", "description": "Geographic location services"},
|
||||
{"name": "accounts", "description": "User account management"},
|
||||
{"name": "media", "description": "Media and image management"},
|
||||
{"name": "moderation", "description": "Content moderation"},
|
||||
],
|
||||
"SCHEMA_PATH_PREFIX": "/api/",
|
||||
"DEFAULT_GENERATOR_CLASS": "drf_spectacular.generators.SchemaGenerator",
|
||||
"SERVE_PERMISSIONS": ["rest_framework.permissions.AllowAny"],
|
||||
"SWAGGER_UI_SETTINGS": {
|
||||
"deepLinking": True,
|
||||
"persistAuthorization": True,
|
||||
"displayOperationId": False,
|
||||
"displayRequestDuration": True,
|
||||
},
|
||||
"REDOC_UI_SETTINGS": {
|
||||
"hideDownloadButton": False,
|
||||
"hideHostname": False,
|
||||
"hideLoading": False,
|
||||
"hideSchemaPattern": True,
|
||||
"scrollYOffset": 0,
|
||||
"theme": {"colors": {"primary": {"main": "#1976d2"}}},
|
||||
},
|
||||
}
|
||||
|
||||
# Health Check Configuration
|
||||
HEALTH_CHECK = {
|
||||
"DISK_USAGE_MAX": 90, # Fail if disk usage is over 90%
|
||||
"MEMORY_MIN": 100, # Fail if less than 100MB available memory
|
||||
}
|
||||
|
||||
# Custom health check backends
|
||||
HEALTH_CHECK_BACKENDS = [
|
||||
"health_check.db",
|
||||
"health_check.cache",
|
||||
"health_check.storage",
|
||||
"core.health_checks.custom_checks.CacheHealthCheck",
|
||||
"core.health_checks.custom_checks.DatabasePerformanceCheck",
|
||||
"core.health_checks.custom_checks.ApplicationHealthCheck",
|
||||
"core.health_checks.custom_checks.ExternalServiceHealthCheck",
|
||||
"core.health_checks.custom_checks.DiskSpaceHealthCheck",
|
||||
]
|
||||
|
||||
# Enhanced Cache Configuration
|
||||
DJANGO_REDIS_CACHE_BACKEND = "django_redis.cache.RedisCache"
|
||||
DJANGO_REDIS_CLIENT_CLASS = "django_redis.client.DefaultClient"
|
||||
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": DJANGO_REDIS_CACHE_BACKEND,
|
||||
# type: ignore[arg-type]
|
||||
# pyright: ignore[reportArgumentType]
|
||||
# pyright: ignore[reportArgumentType]
|
||||
# type: ignore
|
||||
"LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/1"),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS,
|
||||
"PARSER_CLASS": "redis.connection.HiredisParser",
|
||||
"CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
|
||||
"CONNECTION_POOL_CLASS_KWARGS": {
|
||||
"max_connections": 50,
|
||||
"timeout": 20,
|
||||
},
|
||||
"COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor",
|
||||
"IGNORE_EXCEPTIONS": True,
|
||||
},
|
||||
"KEY_PREFIX": "thrillwiki",
|
||||
"VERSION": 1,
|
||||
},
|
||||
"sessions": {
|
||||
"BACKEND": DJANGO_REDIS_CACHE_BACKEND,
|
||||
# type: ignore[arg-type]
|
||||
# type: ignore
|
||||
"LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/2"),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS,
|
||||
},
|
||||
},
|
||||
"api": {
|
||||
"BACKEND": DJANGO_REDIS_CACHE_BACKEND,
|
||||
# type: ignore[arg-type]
|
||||
"LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/3"),
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
# Use Redis for sessions
|
||||
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
|
||||
SESSION_CACHE_ALIAS = "sessions"
|
||||
SESSION_COOKIE_AGE = 86400 # 24 hours
|
||||
|
||||
# Cache middleware settings
|
||||
CACHE_MIDDLEWARE_SECONDS = 300 # 5 minutes
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki"
|
||||
189
config/django/local.py
Normal file
@@ -0,0 +1,189 @@
|
||||
"""
|
||||
Local development settings for thrillwiki project.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from .base import *
|
||||
from ..settings import database
|
||||
|
||||
# Import the module and use its members, e.g., email.EMAIL_HOST
|
||||
|
||||
# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
|
||||
|
||||
# Import database configuration
|
||||
DATABASES = database.DATABASES
|
||||
|
||||
# Development-specific settings
|
||||
DEBUG = True
|
||||
|
||||
# For local development, allow all hosts
|
||||
ALLOWED_HOSTS = ["*"]
|
||||
|
||||
# CSRF trusted origins for local development
|
||||
CSRF_TRUSTED_ORIGINS = [
|
||||
"http://localhost:8000",
|
||||
"http://127.0.0.1:8000",
|
||||
"https://beta.thrillwiki.com",
|
||||
]
|
||||
|
||||
GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib"
|
||||
GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib"
|
||||
|
||||
# Local cache configuration
|
||||
LOC_MEM_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"
|
||||
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": LOC_MEM_CACHE_BACKEND,
|
||||
"LOCATION": "unique-snowflake",
|
||||
"TIMEOUT": 300, # 5 minutes
|
||||
"OPTIONS": {"MAX_ENTRIES": 1000},
|
||||
},
|
||||
"sessions": {
|
||||
"BACKEND": LOC_MEM_CACHE_BACKEND,
|
||||
"LOCATION": "sessions-cache",
|
||||
"TIMEOUT": 86400, # 24 hours (same as SESSION_COOKIE_AGE)
|
||||
"OPTIONS": {"MAX_ENTRIES": 5000},
|
||||
},
|
||||
"api": {
|
||||
"BACKEND": LOC_MEM_CACHE_BACKEND,
|
||||
"LOCATION": "api-cache",
|
||||
"TIMEOUT": 300, # 5 minutes
|
||||
"OPTIONS": {"MAX_ENTRIES": 2000},
|
||||
},
|
||||
}
|
||||
|
||||
# Development-friendly cache settings
|
||||
CACHE_MIDDLEWARE_SECONDS = 1 # Very short cache for development
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki_dev"
|
||||
|
||||
# Development email backend
|
||||
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
||||
|
||||
# Security settings for development
|
||||
SECURE_SSL_REDIRECT = False
|
||||
SESSION_COOKIE_SECURE = False
|
||||
CSRF_COOKIE_SECURE = False
|
||||
|
||||
# Development monitoring tools
|
||||
DEVELOPMENT_APPS = [
|
||||
"silk",
|
||||
"debug_toolbar",
|
||||
"nplusone.ext.django",
|
||||
"django_extensions",
|
||||
]
|
||||
|
||||
# Add development apps if available
|
||||
for app in DEVELOPMENT_APPS:
|
||||
if app not in INSTALLED_APPS:
|
||||
INSTALLED_APPS.append(app)
|
||||
|
||||
# Development middleware
|
||||
DEVELOPMENT_MIDDLEWARE = [
|
||||
"silk.middleware.SilkyMiddleware",
|
||||
"debug_toolbar.middleware.DebugToolbarMiddleware",
|
||||
"nplusone.ext.django.NPlusOneMiddleware",
|
||||
"core.middleware.performance_middleware.PerformanceMiddleware",
|
||||
"core.middleware.performance_middleware.QueryCountMiddleware",
|
||||
]
|
||||
|
||||
# Add development middleware
|
||||
for middleware in DEVELOPMENT_MIDDLEWARE:
|
||||
if middleware not in MIDDLEWARE:
|
||||
MIDDLEWARE.insert(1, middleware) # Insert after security middleware
|
||||
|
||||
# Debug toolbar configuration
|
||||
INTERNAL_IPS = ["127.0.0.1", "::1"]
|
||||
|
||||
# Silk configuration for development
|
||||
# Disable profiler to avoid silk_profile installation issues
|
||||
SILKY_PYTHON_PROFILER = False
|
||||
SILKY_PYTHON_PROFILER_BINARY = False # Disable binary profiler
|
||||
SILKY_PYTHON_PROFILER_RESULT_PATH = (
|
||||
BASE_DIR / "profiles"
|
||||
) # Not needed when profiler is disabled
|
||||
SILKY_AUTHENTICATION = True # Require login to access Silk
|
||||
SILKY_AUTHORISATION = True # Enable authorization
|
||||
SILKY_MAX_REQUEST_BODY_SIZE = -1 # Don't limit request body size
|
||||
# Limit response body size to 1KB for performance
|
||||
SILKY_MAX_RESPONSE_BODY_SIZE = 1024
|
||||
SILKY_META = True # Record metadata about requests
|
||||
|
||||
# NPlusOne configuration
|
||||
NPLUSONE_LOGGER = logging.getLogger("nplusone")
|
||||
NPLUSONE_LOG_LEVEL = logging.WARN
|
||||
|
||||
# Enhanced development logging
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"formatters": {
|
||||
"verbose": {
|
||||
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
|
||||
"style": "{",
|
||||
},
|
||||
"json": {
|
||||
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
|
||||
"format": (
|
||||
"%(levelname)s %(asctime)s %(module)s %(process)d "
|
||||
"%(thread)d %(message)s"
|
||||
),
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"console": {
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
},
|
||||
"file": {
|
||||
"class": "logging.handlers.RotatingFileHandler",
|
||||
"filename": BASE_DIR / "logs" / "thrillwiki.log",
|
||||
"maxBytes": 1024 * 1024 * 10, # 10MB
|
||||
"backupCount": 5,
|
||||
"formatter": "json",
|
||||
},
|
||||
"performance": {
|
||||
"class": "logging.handlers.RotatingFileHandler",
|
||||
"filename": BASE_DIR / "logs" / "performance.log",
|
||||
"maxBytes": 1024 * 1024 * 10, # 10MB
|
||||
"backupCount": 5,
|
||||
"formatter": "json",
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"level": "INFO",
|
||||
"handlers": ["console"],
|
||||
},
|
||||
"loggers": {
|
||||
"django": {
|
||||
"handlers": ["file"],
|
||||
"level": "INFO",
|
||||
"propagate": False,
|
||||
},
|
||||
"django.db.backends": {
|
||||
"handlers": ["console"],
|
||||
"level": "DEBUG",
|
||||
"propagate": False,
|
||||
},
|
||||
"thrillwiki": {
|
||||
"handlers": ["console", "file"],
|
||||
"level": "DEBUG",
|
||||
"propagate": False,
|
||||
},
|
||||
"performance": {
|
||||
"handlers": ["performance"],
|
||||
"level": "INFO",
|
||||
"propagate": False,
|
||||
},
|
||||
"query_optimization": {
|
||||
"handlers": ["console", "file"],
|
||||
"level": "WARNING",
|
||||
"propagate": False,
|
||||
},
|
||||
"nplusone": {
|
||||
"handlers": ["console"],
|
||||
"level": "WARNING",
|
||||
"propagate": False,
|
||||
},
|
||||
},
|
||||
}
|
||||
103
config/django/production.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""
|
||||
Production settings for thrillwiki project.
|
||||
"""
|
||||
|
||||
# Import the module and use its members, e.g., base.BASE_DIR, base***REMOVED***
|
||||
from . import base
|
||||
|
||||
# Import the module and use its members, e.g., database.DATABASES
|
||||
|
||||
# Import the module and use its members, e.g., email.EMAIL_HOST
|
||||
|
||||
# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
|
||||
|
||||
# Import the module and use its members, e.g., email.EMAIL_HOST
|
||||
|
||||
# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
|
||||
|
||||
# Production settings
|
||||
DEBUG = False
|
||||
|
||||
# Allowed hosts must be explicitly set in production
|
||||
ALLOWED_HOSTS = base.env.list("ALLOWED_HOSTS")
|
||||
|
||||
# CSRF trusted origins for production
|
||||
CSRF_TRUSTED_ORIGINS = base.env.list("CSRF_TRUSTED_ORIGINS")
|
||||
|
||||
# Security settings for production
|
||||
SECURE_SSL_REDIRECT = True
|
||||
SESSION_COOKIE_SECURE = True
|
||||
CSRF_COOKIE_SECURE = True
|
||||
SECURE_HSTS_SECONDS = 31536000 # 1 year
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
|
||||
SECURE_HSTS_PRELOAD = True
|
||||
|
||||
# Production logging
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": False,
|
||||
"formatters": {
|
||||
"verbose": {
|
||||
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
|
||||
"style": "{",
|
||||
},
|
||||
"simple": {
|
||||
"format": "{levelname} {message}",
|
||||
"style": "{",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"file": {
|
||||
"level": "INFO",
|
||||
"class": "logging.handlers.RotatingFileHandler",
|
||||
"filename": base.BASE_DIR / "logs" / "django.log",
|
||||
"maxBytes": 1024 * 1024 * 15, # 15MB
|
||||
"backupCount": 10,
|
||||
"formatter": "verbose",
|
||||
},
|
||||
"error_file": {
|
||||
"level": "ERROR",
|
||||
"class": "logging.handlers.RotatingFileHandler",
|
||||
"filename": base.BASE_DIR / "logs" / "django_error.log",
|
||||
"maxBytes": 1024 * 1024 * 15, # 15MB
|
||||
"backupCount": 10,
|
||||
"formatter": "verbose",
|
||||
},
|
||||
},
|
||||
"root": {
|
||||
"handlers": ["file"],
|
||||
"level": "INFO",
|
||||
},
|
||||
"loggers": {
|
||||
"django": {
|
||||
"handlers": ["file", "error_file"],
|
||||
"level": "INFO",
|
||||
"propagate": False,
|
||||
},
|
||||
"thrillwiki": {
|
||||
"handlers": ["file", "error_file"],
|
||||
"level": "INFO",
|
||||
"propagate": False,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
# Static files collection for production
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
|
||||
# Cache settings for production (Redis recommended)
|
||||
redis_url = base.env.str("REDIS_URL", default=None)
|
||||
if redis_url:
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": redis_url,
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
# Use Redis for sessions in production
|
||||
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
|
||||
SESSION_CACHE_ALIAS = "default"
|
||||
65
config/django/test.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""
|
||||
Test settings for thrillwiki project.
|
||||
"""
|
||||
|
||||
from .base import *
|
||||
|
||||
# Test-specific settings
|
||||
DEBUG = False
|
||||
|
||||
# Use in-memory database for faster tests
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.contrib.gis.db.backends.spatialite",
|
||||
"NAME": ":memory:",
|
||||
}
|
||||
}
|
||||
|
||||
# Use in-memory cache for tests
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
||||
"LOCATION": "test-cache",
|
||||
}
|
||||
}
|
||||
|
||||
# Disable migrations for faster tests
|
||||
|
||||
|
||||
class DisableMigrations:
|
||||
def __contains__(self, item):
|
||||
return True
|
||||
|
||||
def __getitem__(self, item):
|
||||
return None
|
||||
|
||||
|
||||
MIGRATION_MODULES = DisableMigrations()
|
||||
|
||||
# Email backend for tests
|
||||
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
|
||||
|
||||
# Password hashers for faster tests
|
||||
PASSWORD_HASHERS = [
|
||||
"django.contrib.auth.hashers.MD5PasswordHasher",
|
||||
]
|
||||
|
||||
# Disable logging during tests
|
||||
LOGGING_CONFIG = None
|
||||
|
||||
# Media files for tests
|
||||
MEDIA_ROOT = BASE_DIR / "test_media"
|
||||
|
||||
# Static files for tests
|
||||
STATIC_ROOT = BASE_DIR / "test_static"
|
||||
|
||||
# Disable Turnstile for tests
|
||||
TURNSTILE_SITE_KEY = "test-key"
|
||||
TURNSTILE_SECRET_KEY = "test-secret"
|
||||
|
||||
# Test-specific middleware (remove caching middleware)
|
||||
MIDDLEWARE = [m for m in MIDDLEWARE if "cache" not in m.lower()]
|
||||
|
||||
# Celery settings for tests (if Celery is used)
|
||||
CELERY_TASK_ALWAYS_EAGER = True
|
||||
CELERY_TASK_EAGER_PROPAGATES = True
|
||||
44
config/django/test_accounts.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
Test Django settings for thrillwiki accounts app.
|
||||
"""
|
||||
|
||||
# Use in-memory database for tests
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "django.contrib.gis.db.backends.postgis",
|
||||
"NAME": "test_db",
|
||||
}
|
||||
}
|
||||
|
||||
# Use a faster password hasher for tests
|
||||
PASSWORD_HASHERS = [
|
||||
"django.contrib.auth.hashers.MD5PasswordHasher",
|
||||
]
|
||||
|
||||
# Disable whitenoise for tests
|
||||
WHITENOISE_AUTOREFRESH = True
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
|
||||
INSTALLED_APPS = [
|
||||
"django.contrib.admin",
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.staticfiles",
|
||||
"django.contrib.sites",
|
||||
"allauth",
|
||||
"allauth.account",
|
||||
"allauth.socialaccount",
|
||||
"accounts",
|
||||
"core",
|
||||
"pghistory",
|
||||
"pgtrigger",
|
||||
"email_service",
|
||||
"parks",
|
||||
"rides",
|
||||
"media.apps.MediaConfig",
|
||||
]
|
||||
|
||||
GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib"
|
||||
GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib"
|
||||
1
config/settings/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Settings modules package
|
||||
28
config/settings/database.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""
|
||||
Database configuration for thrillwiki project.
|
||||
"""
|
||||
|
||||
import environ
|
||||
|
||||
env = environ.Env()
|
||||
|
||||
# Database configuration
|
||||
db_config = env.db("DATABASE_URL")
|
||||
|
||||
|
||||
# Force PostGIS backend for spatial data support
|
||||
db_config["ENGINE"] = "django.contrib.gis.db.backends.postgis"
|
||||
|
||||
DATABASES = {
|
||||
"default": db_config,
|
||||
}
|
||||
|
||||
# GeoDjango Settings - Environment specific
|
||||
GDAL_LIBRARY_PATH = env("GDAL_LIBRARY_PATH", default=None)
|
||||
GEOS_LIBRARY_PATH = env("GEOS_LIBRARY_PATH", default=None)
|
||||
|
||||
# Cache settings
|
||||
CACHES = {"default": env.cache("CACHE_URL", default="locmemcache://")}
|
||||
|
||||
CACHE_MIDDLEWARE_SECONDS = env.int("CACHE_MIDDLEWARE_SECONDS", default=300) # 5 minutes
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX = env("CACHE_MIDDLEWARE_KEY_PREFIX", default="thrillwiki")
|
||||
24
config/settings/email.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""
|
||||
Email configuration for thrillwiki project.
|
||||
"""
|
||||
|
||||
import environ
|
||||
|
||||
env = environ.Env()
|
||||
|
||||
# Email settings
|
||||
EMAIL_BACKEND = env(
|
||||
"EMAIL_BACKEND", default="email_service.backends.ForwardEmailBackend"
|
||||
)
|
||||
FORWARD_EMAIL_BASE_URL = env(
|
||||
"FORWARD_EMAIL_BASE_URL", default="https://api.forwardemail.net"
|
||||
)
|
||||
SERVER_EMAIL = env("SERVER_EMAIL", default="django_webmaster@thrillwiki.com")
|
||||
|
||||
# Email URLs can be configured using EMAIL_URL environment variable
|
||||
# Example: EMAIL_URL=smtp://user:pass@localhost:587
|
||||
EMAIL_URL = env("EMAIL_URL", default=None)
|
||||
|
||||
if EMAIL_URL:
|
||||
email_config = env.email(EMAIL_URL)
|
||||
vars().update(email_config)
|
||||
36
config/settings/security.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""
|
||||
Security configuration for thrillwiki project.
|
||||
"""
|
||||
|
||||
import environ
|
||||
|
||||
env = environ.Env()
|
||||
|
||||
# Cloudflare Turnstile settings
|
||||
TURNSTILE_SITE_KEY = env("TURNSTILE_SITE_KEY", default="")
|
||||
TURNSTILE_SECRET_KEY = env("TURNSTILE_SECRET_KEY", default="")
|
||||
TURNSTILE_VERIFY_URL = env(
|
||||
"TURNSTILE_VERIFY_URL",
|
||||
default="https://challenges.cloudflare.com/turnstile/v0/siteverify",
|
||||
)
|
||||
|
||||
# Security headers and settings (for production)
|
||||
SECURE_BROWSER_XSS_FILTER = env.bool("SECURE_BROWSER_XSS_FILTER", default=True)
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("SECURE_CONTENT_TYPE_NOSNIFF", default=True)
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
|
||||
"SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True
|
||||
)
|
||||
SECURE_HSTS_SECONDS = env.int("SECURE_HSTS_SECONDS", default=31536000) # 1 year
|
||||
SECURE_REDIRECT_EXEMPT = env.list("SECURE_REDIRECT_EXEMPT", default=[])
|
||||
SECURE_SSL_REDIRECT = env.bool("SECURE_SSL_REDIRECT", default=False)
|
||||
SECURE_PROXY_SSL_HEADER = env.tuple("SECURE_PROXY_SSL_HEADER", default=None)
|
||||
|
||||
# Session security
|
||||
SESSION_COOKIE_SECURE = env.bool("SESSION_COOKIE_SECURE", default=False)
|
||||
SESSION_COOKIE_HTTPONLY = env.bool("SESSION_COOKIE_HTTPONLY", default=True)
|
||||
SESSION_COOKIE_SAMESITE = env("SESSION_COOKIE_SAMESITE", default="Lax")
|
||||
|
||||
# CSRF security
|
||||
CSRF_COOKIE_SECURE = env.bool("CSRF_COOKIE_SECURE", default=False)
|
||||
CSRF_COOKIE_HTTPONLY = env.bool("CSRF_COOKIE_HTTPONLY", default=True)
|
||||
CSRF_COOKIE_SAMESITE = env("CSRF_COOKIE_SAMESITE", default="Lax")
|
||||
@@ -1,29 +1,26 @@
from django.contrib import admin
from django.contrib.contenttypes.models import ContentType
from django.utils.html import format_html
from .models import SlugHistory


@admin.register(SlugHistory)
class SlugHistoryAdmin(admin.ModelAdmin):
    list_display = ['content_object_link', 'old_slug', 'created_at']
    list_filter = ['content_type', 'created_at']
    search_fields = ['old_slug', 'object_id']
    readonly_fields = ['content_type', 'object_id', 'old_slug', 'created_at']
    date_hierarchy = 'created_at'
    ordering = ['-created_at']
    list_display = ["content_object_link", "old_slug", "created_at"]
    list_filter = ["content_type", "created_at"]
    search_fields = ["old_slug", "object_id"]
    readonly_fields = ["content_type", "object_id", "old_slug", "created_at"]
    date_hierarchy = "created_at"
    ordering = ["-created_at"]

    def content_object_link(self, obj):
        """Create a link to the related object's admin page"""
        try:
            url = obj.content_object.get_absolute_url()
            return format_html(
                '<a href="{}">{}</a>',
                url,
                str(obj.content_object)
            )
            return format_html('<a href="{}">{}</a>', url, str(obj.content_object))
        except (AttributeError, ValueError):
            return str(obj.content_object)
    content_object_link.short_description = 'Object'

    content_object_link.short_description = "Object"

    def has_add_permission(self, request):
        """Disable manual creation of slug history records"""

@@ -3,12 +3,14 @@ from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from django.db.models import Count
from django.conf import settings


class PageView(models.Model):
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, related_name='page_views')
    content_type = models.ForeignKey(
        ContentType, on_delete=models.CASCADE, related_name="page_views"
    )
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    content_object = GenericForeignKey("content_type", "object_id")

    timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
    ip_address = models.GenericIPAddressField()
@@ -16,8 +18,8 @@ class PageView(models.Model):

    class Meta:
        indexes = [
            models.Index(fields=['timestamp']),
            models.Index(fields=['content_type', 'object_id']),
            models.Index(fields=["timestamp"]),
            models.Index(fields=["content_type", "object_id"]),
        ]

    @classmethod
@@ -36,14 +38,14 @@ class PageView(models.Model):
        cutoff = timezone.now() - timezone.timedelta(hours=hours)

        # Query through the ContentType relationship
        item_ids = cls.objects.filter(
            content_type=content_type,
            timestamp__gte=cutoff
        ).values('object_id').annotate(
            view_count=Count('id')
        ).filter(
            view_count__gt=0
        ).order_by('-view_count').values_list('object_id', flat=True)[:limit]
        item_ids = (
            cls.objects.filter(content_type=content_type, timestamp__gte=cutoff)
            .values("object_id")
            .annotate(view_count=Count("id"))
            .filter(view_count__gt=0)
            .order_by("-view_count")
            .values_list("object_id", flat=True)[:limit]
        )

        # Get the actual items in the correct order
        if item_ids:
@@ -51,6 +53,7 @@ class PageView(models.Model):
            id_list = list(item_ids)
            # Use Case/When to preserve the ordering
            from django.db.models import Case, When

            preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(id_list)])
            return model_class.objects.filter(pk__in=id_list).order_by(preserved)

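The Case/When construct used above is what preserves the popularity ranking when the IDs are re-fetched; a self-contained sketch of the same idiom (the model and ID list in the usage comment are placeholders):

# Standalone sketch of the ordering-preservation idiom above; any model with
# integer primary keys works the same way.
from django.db.models import Case, When


def fetch_in_order(model_class, id_list):
    """Return rows for id_list while keeping the list's own ordering."""
    preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(id_list)])
    return model_class.objects.filter(pk__in=id_list).order_by(preserved)


# fetch_in_order(Park, [42, 7, 13]) yields the rows with pk 42, 7, 13, in that order.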
core/api/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# Core API infrastructure for ThrillWiki

core/api/exceptions.py (new file, 205 lines)
@@ -0,0 +1,205 @@
|
||||
"""
|
||||
Custom exception handling for ThrillWiki API.
|
||||
Provides standardized error responses following Django styleguide patterns.
|
||||
"""
|
||||
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from django.http import Http404
|
||||
from django.core.exceptions import (
|
||||
PermissionDenied,
|
||||
ValidationError as DjangoValidationError,
|
||||
)
|
||||
from rest_framework import status
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import exception_handler
|
||||
from rest_framework.exceptions import (
|
||||
ValidationError as DRFValidationError,
|
||||
NotFound,
|
||||
PermissionDenied as DRFPermissionDenied,
|
||||
)
|
||||
|
||||
from ..exceptions import ThrillWikiException
|
||||
from ..logging import get_logger, log_exception
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
def custom_exception_handler(
|
||||
exc: Exception, context: Dict[str, Any]
|
||||
) -> Optional[Response]:
|
||||
"""
|
||||
Custom exception handler for DRF that provides standardized error responses.
|
||||
|
||||
Returns:
|
||||
Response with standardized error format or None to fallback to default handler
|
||||
"""
|
||||
# Call REST framework's default exception handler first
|
||||
response = exception_handler(exc, context)
|
||||
|
||||
if response is not None:
|
||||
# Standardize the error response format
|
||||
custom_response_data = {
|
||||
"status": "error",
|
||||
"error": {
|
||||
"code": _get_error_code(exc),
|
||||
"message": _get_error_message(exc, response.data),
|
||||
"details": _get_error_details(exc, response.data),
|
||||
},
|
||||
"data": None,
|
||||
}
|
||||
|
||||
# Add request context for debugging
|
||||
if hasattr(context.get("request"), "user"):
|
||||
custom_response_data["error"]["request_user"] = str(context["request"].user)
|
||||
|
||||
# Log the error for monitoring
|
||||
log_exception(
|
||||
logger,
|
||||
exc,
|
||||
context={"response_status": response.status_code},
|
||||
request=context.get("request"),
|
||||
)
|
||||
|
||||
response.data = custom_response_data
|
||||
|
||||
# Handle ThrillWiki custom exceptions
|
||||
elif isinstance(exc, ThrillWikiException):
|
||||
custom_response_data = {
|
||||
"status": "error",
|
||||
"error": exc.to_dict(),
|
||||
"data": None,
|
||||
}
|
||||
|
||||
log_exception(
|
||||
logger,
|
||||
exc,
|
||||
context={"response_status": exc.status_code},
|
||||
request=context.get("request"),
|
||||
)
|
||||
response = Response(custom_response_data, status=exc.status_code)
|
||||
|
||||
# Handle specific Django exceptions that DRF doesn't catch
|
||||
elif isinstance(exc, DjangoValidationError):
|
||||
custom_response_data = {
|
||||
"status": "error",
|
||||
"error": {
|
||||
"code": "VALIDATION_ERROR",
|
||||
"message": "Validation failed",
|
||||
"details": _format_django_validation_errors(exc),
|
||||
},
|
||||
"data": None,
|
||||
}
|
||||
|
||||
log_exception(
|
||||
logger,
|
||||
exc,
|
||||
context={"response_status": status.HTTP_400_BAD_REQUEST},
|
||||
request=context.get("request"),
|
||||
)
|
||||
response = Response(custom_response_data, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
elif isinstance(exc, Http404):
|
||||
custom_response_data = {
|
||||
"status": "error",
|
||||
"error": {
|
||||
"code": "NOT_FOUND",
|
||||
"message": "Resource not found",
|
||||
"details": str(exc) if str(exc) else None,
|
||||
},
|
||||
"data": None,
|
||||
}
|
||||
|
||||
log_exception(
|
||||
logger,
|
||||
exc,
|
||||
context={"response_status": status.HTTP_404_NOT_FOUND},
|
||||
request=context.get("request"),
|
||||
)
|
||||
response = Response(custom_response_data, status=status.HTTP_404_NOT_FOUND)
|
||||
|
||||
elif isinstance(exc, PermissionDenied):
|
||||
custom_response_data = {
|
||||
"status": "error",
|
||||
"error": {
|
||||
"code": "PERMISSION_DENIED",
|
||||
"message": "Permission denied",
|
||||
"details": str(exc) if str(exc) else None,
|
||||
},
|
||||
"data": None,
|
||||
}
|
||||
|
||||
log_exception(
|
||||
logger,
|
||||
exc,
|
||||
context={"response_status": status.HTTP_403_FORBIDDEN},
|
||||
request=context.get("request"),
|
||||
)
|
||||
response = Response(custom_response_data, status=status.HTTP_403_FORBIDDEN)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
def _get_error_code(exc: Exception) -> str:
|
||||
"""Extract or determine error code from exception."""
|
||||
if hasattr(exc, "default_code"):
|
||||
return exc.default_code.upper()
|
||||
|
||||
if isinstance(exc, DRFValidationError):
|
||||
return "VALIDATION_ERROR"
|
||||
elif isinstance(exc, NotFound):
|
||||
return "NOT_FOUND"
|
||||
elif isinstance(exc, DRFPermissionDenied):
|
||||
return "PERMISSION_DENIED"
|
||||
|
||||
return exc.__class__.__name__.upper()
|
||||
|
||||
|
||||
def _get_error_message(exc: Exception, response_data: Any) -> str:
|
||||
"""Extract user-friendly error message."""
|
||||
if isinstance(response_data, dict):
|
||||
# Handle DRF validation errors
|
||||
if "detail" in response_data:
|
||||
return str(response_data["detail"])
|
||||
elif "non_field_errors" in response_data:
|
||||
errors = response_data["non_field_errors"]
|
||||
return errors[0] if isinstance(errors, list) and errors else str(errors)
|
||||
elif isinstance(response_data, dict) and len(response_data) == 1:
|
||||
key, value = next(iter(response_data.items()))
|
||||
if isinstance(value, list) and value:
|
||||
return f"{key}: {value[0]}"
|
||||
return f"{key}: {value}"
|
||||
|
||||
# Fallback to exception message
|
||||
return str(exc) if str(exc) else "An error occurred"
|
||||
|
||||
|
||||
def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str, Any]]:
|
||||
"""Extract detailed error information for debugging."""
|
||||
if isinstance(response_data, dict) and len(response_data) > 1:
|
||||
return response_data
|
||||
|
||||
if hasattr(exc, "detail") and isinstance(exc.detail, dict):
|
||||
return exc.detail
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _format_django_validation_errors(
|
||||
exc: DjangoValidationError,
|
||||
) -> Dict[str, Any]:
|
||||
"""Format Django ValidationError for API response."""
|
||||
if hasattr(exc, "error_dict"):
|
||||
# Field-specific errors
|
||||
return {
|
||||
field: [str(error) for error in errors]
|
||||
for field, errors in exc.error_dict.items()
|
||||
}
|
||||
elif hasattr(exc, "error_list"):
|
||||
# Non-field errors
|
||||
return {"non_field_errors": [str(error) for error in exc.error_list]}
|
||||
|
||||
return {"non_field_errors": [str(exc)]}
|
||||
|
||||
|
||||
# Removed _log_api_error - using centralized logging instead
|
||||
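For reference, the handler above shapes every error the same way; an Http404 raised inside a DRF view would serialize to roughly the payload below (values are illustrative):

# Illustrative response body produced by custom_exception_handler for an Http404;
# the exact message/details depend on the view that raised the error.
example_not_found_payload = {
    "status": "error",
    "error": {
        "code": "NOT_FOUND",
        "message": "Resource not found",
        "details": None,
    },
    "data": None,
}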
core/api/mixins.py (new file, 260 lines)
@@ -0,0 +1,260 @@
|
||||
"""
|
||||
Common mixins for API views following Django styleguide patterns.
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, Optional
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
|
||||
class ApiMixin:
|
||||
"""
|
||||
Base mixin for API views providing standardized response formatting.
|
||||
"""
|
||||
|
||||
def create_response(
|
||||
self,
|
||||
*,
|
||||
data: Any = None,
|
||||
message: Optional[str] = None,
|
||||
status_code: int = status.HTTP_200_OK,
|
||||
pagination: Optional[Dict[str, Any]] = None,
|
||||
metadata: Optional[Dict[str, Any]] = None,
|
||||
) -> Response:
|
||||
"""
|
||||
Create standardized API response.
|
||||
|
||||
Args:
|
||||
data: Response data
|
||||
message: Optional success message
|
||||
status_code: HTTP status code
|
||||
pagination: Pagination information
|
||||
metadata: Additional metadata
|
||||
|
||||
Returns:
|
||||
Standardized Response object
|
||||
"""
|
||||
response_data = {
|
||||
"status": "success" if status_code < 400 else "error",
|
||||
"data": data,
|
||||
}
|
||||
|
||||
if message:
|
||||
response_data["message"] = message
|
||||
|
||||
if pagination:
|
||||
response_data["pagination"] = pagination
|
||||
|
||||
if metadata:
|
||||
response_data["metadata"] = metadata
|
||||
|
||||
return Response(response_data, status=status_code)
|
||||
|
||||
def create_error_response(
|
||||
self,
|
||||
*,
|
||||
message: str,
|
||||
status_code: int = status.HTTP_400_BAD_REQUEST,
|
||||
error_code: Optional[str] = None,
|
||||
details: Optional[Dict[str, Any]] = None,
|
||||
) -> Response:
|
||||
"""
|
||||
Create standardized error response.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
status_code: HTTP status code
|
||||
error_code: Optional error code
|
||||
details: Additional error details
|
||||
|
||||
Returns:
|
||||
Standardized error Response object
|
||||
"""
|
||||
error_data = {
|
||||
"code": error_code or "GENERIC_ERROR",
|
||||
"message": message,
|
||||
}
|
||||
|
||||
if details:
|
||||
error_data["details"] = details
|
||||
|
||||
response_data = {
|
||||
"status": "error",
|
||||
"error": error_data,
|
||||
"data": None,
|
||||
}
|
||||
|
||||
return Response(response_data, status=status_code)
|
||||
|
||||
|
||||
class CreateApiMixin(ApiMixin):
|
||||
"""
|
||||
Mixin for create API endpoints with standardized input/output handling.
|
||||
"""
|
||||
|
||||
def create(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Handle POST requests for creating resources."""
|
||||
serializer = self.get_input_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Create the object using the service layer
|
||||
obj = self.perform_create(**serializer.validated_data)
|
||||
|
||||
# Serialize the output
|
||||
output_serializer = self.get_output_serializer(obj)
|
||||
|
||||
return self.create_response(
|
||||
data=output_serializer.data,
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
message="Resource created successfully",
|
||||
)
|
||||
|
||||
def perform_create(self, **validated_data):
|
||||
"""
|
||||
Override this method to implement object creation logic.
|
||||
Should use service layer methods.
|
||||
"""
|
||||
raise NotImplementedError("Subclasses must implement perform_create")
|
||||
|
||||
def get_input_serializer(self, *args, **kwargs):
|
||||
"""Get the input serializer for validation."""
|
||||
return self.InputSerializer(*args, **kwargs)
|
||||
|
||||
def get_output_serializer(self, *args, **kwargs):
|
||||
"""Get the output serializer for response."""
|
||||
return self.OutputSerializer(*args, **kwargs)
|
||||
|
||||
|
||||
class UpdateApiMixin(ApiMixin):
|
||||
"""
|
||||
Mixin for update API endpoints with standardized input/output handling.
|
||||
"""
|
||||
|
||||
def update(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Handle PUT/PATCH requests for updating resources."""
|
||||
instance = self.get_object()
|
||||
serializer = self.get_input_serializer(
|
||||
data=request.data, partial=kwargs.get("partial", False)
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Update the object using the service layer
|
||||
updated_obj = self.perform_update(instance, **serializer.validated_data)
|
||||
|
||||
# Serialize the output
|
||||
output_serializer = self.get_output_serializer(updated_obj)
|
||||
|
||||
return self.create_response(
|
||||
data=output_serializer.data,
|
||||
message="Resource updated successfully",
|
||||
)
|
||||
|
||||
def perform_update(self, instance, **validated_data):
|
||||
"""
|
||||
Override this method to implement object update logic.
|
||||
Should use service layer methods.
|
||||
"""
|
||||
raise NotImplementedError("Subclasses must implement perform_update")
|
||||
|
||||
def get_input_serializer(self, *args, **kwargs):
|
||||
"""Get the input serializer for validation."""
|
||||
return self.InputSerializer(*args, **kwargs)
|
||||
|
||||
def get_output_serializer(self, *args, **kwargs):
|
||||
"""Get the output serializer for response."""
|
||||
return self.OutputSerializer(*args, **kwargs)
|
||||
|
||||
|
||||
class ListApiMixin(ApiMixin):
|
||||
"""
|
||||
Mixin for list API endpoints with pagination and filtering.
|
||||
"""
|
||||
|
||||
def list(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Handle GET requests for listing resources."""
|
||||
# Use selector to get filtered queryset
|
||||
queryset = self.get_queryset()
|
||||
|
||||
# Apply pagination
|
||||
page = self.paginate_queryset(queryset)
|
||||
if page is not None:
|
||||
serializer = self.get_output_serializer(page, many=True)
|
||||
return self.get_paginated_response(serializer.data)
|
||||
|
||||
# No pagination
|
||||
serializer = self.get_output_serializer(queryset, many=True)
|
||||
return self.create_response(data=serializer.data)
|
||||
|
||||
def get_queryset(self):
|
||||
"""
|
||||
Override this method to use selector patterns.
|
||||
Should call selector functions, not access model managers directly.
|
||||
"""
|
||||
raise NotImplementedError(
|
||||
"Subclasses must implement get_queryset using selectors"
|
||||
)
|
||||
|
||||
def get_output_serializer(self, *args, **kwargs):
|
||||
"""Get the output serializer for response."""
|
||||
return self.OutputSerializer(*args, **kwargs)
|
||||
|
||||
|
||||
class RetrieveApiMixin(ApiMixin):
|
||||
"""
|
||||
Mixin for retrieve API endpoints.
|
||||
"""
|
||||
|
||||
def retrieve(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Handle GET requests for retrieving a single resource."""
|
||||
instance = self.get_object()
|
||||
serializer = self.get_output_serializer(instance)
|
||||
|
||||
return self.create_response(data=serializer.data)
|
||||
|
||||
def get_object(self):
|
||||
"""
|
||||
Override this method to use selector patterns.
|
||||
Should call selector functions for optimized queries.
|
||||
"""
|
||||
raise NotImplementedError(
|
||||
"Subclasses must implement get_object using selectors"
|
||||
)
|
||||
|
||||
def get_output_serializer(self, *args, **kwargs):
|
||||
"""Get the output serializer for response."""
|
||||
return self.OutputSerializer(*args, **kwargs)
|
||||
|
||||
|
||||
class DestroyApiMixin(ApiMixin):
|
||||
"""
|
||||
Mixin for delete API endpoints.
|
||||
"""
|
||||
|
||||
def destroy(self, request: Request, *args, **kwargs) -> Response:
|
||||
"""Handle DELETE requests for destroying resources."""
|
||||
instance = self.get_object()
|
||||
|
||||
# Delete using service layer
|
||||
self.perform_destroy(instance)
|
||||
|
||||
return self.create_response(
|
||||
status_code=status.HTTP_204_NO_CONTENT,
|
||||
message="Resource deleted successfully",
|
||||
)
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
"""
|
||||
Override this method to implement object deletion logic.
|
||||
Should use service layer methods.
|
||||
"""
|
||||
raise NotImplementedError("Subclasses must implement perform_destroy")
|
||||
|
||||
def get_object(self):
|
||||
"""
|
||||
Override this method to use selector patterns.
|
||||
Should call selector functions for optimized queries.
|
||||
"""
|
||||
raise NotImplementedError(
|
||||
"Subclasses must implement get_object using selectors"
|
||||
)
|
||||
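A hedged usage sketch for the mixins above: the view name, serializer fields, and park_create() service are hypothetical and only illustrate the intended InputSerializer / OutputSerializer / service-layer wiring.

# Hypothetical usage sketch; ParkCreateApi, the serializer fields, and park_create
# are illustrative names and not part of this commit.
from rest_framework import serializers
from rest_framework.views import APIView

from core.api.mixins import CreateApiMixin


def park_create(*, name: str):
    """Placeholder for a service-layer function (assumed, not shown in this diff)."""
    raise NotImplementedError


class ParkCreateApi(CreateApiMixin, APIView):
    class InputSerializer(serializers.Serializer):
        name = serializers.CharField(max_length=255)

    class OutputSerializer(serializers.Serializer):
        id = serializers.IntegerField()
        name = serializers.CharField()

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def perform_create(self, **validated_data):
        return park_create(**validated_data)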
@@ -1,5 +1,6 @@
from django.apps import AppConfig


class CoreConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'core'
    default_auto_field = "django.db.models.BigAutoField"
    name = "core"

core/decorators/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# Decorators module

core/decorators/cache_decorators.py (new file, 409 lines)
@@ -0,0 +1,409 @@
|
||||
"""
|
||||
Advanced caching decorators for API views and functions.
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import time
|
||||
from functools import wraps
|
||||
from typing import Optional, List, Callable
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.vary import vary_on_headers
|
||||
from core.services.enhanced_cache_service import EnhancedCacheService
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def cache_api_response(
|
||||
timeout=1800, vary_on=None, key_prefix="api", cache_backend="api"
|
||||
):
|
||||
"""
|
||||
Advanced decorator for caching API responses with flexible configuration
|
||||
|
||||
Args:
|
||||
timeout: Cache timeout in seconds
|
||||
vary_on: List of request attributes to vary cache on
|
||||
key_prefix: Prefix for cache keys
|
||||
cache_backend: Cache backend to use
|
||||
"""
|
||||
|
||||
def decorator(view_func):
|
||||
@wraps(view_func)
|
||||
def wrapper(self, request, *args, **kwargs):
|
||||
# Only cache GET requests
|
||||
if request.method != "GET":
|
||||
return view_func(self, request, *args, **kwargs)
|
||||
|
||||
# Generate cache key based on view, user, and parameters
|
||||
cache_key_parts = [
|
||||
key_prefix,
|
||||
view_func.__name__,
|
||||
(
|
||||
str(request.user.id)
|
||||
if request.user.is_authenticated
|
||||
else "anonymous"
|
||||
),
|
||||
str(hash(frozenset(request.GET.items()))),
|
||||
]
|
||||
|
||||
# Add URL parameters to cache key
|
||||
if args:
|
||||
cache_key_parts.append(str(hash(args)))
|
||||
if kwargs:
|
||||
cache_key_parts.append(str(hash(frozenset(kwargs.items()))))
|
||||
|
||||
# Add custom vary_on fields
|
||||
if vary_on:
|
||||
for field in vary_on:
|
||||
value = getattr(request, field, "")
|
||||
cache_key_parts.append(str(value))
|
||||
|
||||
cache_key = ":".join(cache_key_parts)
|
||||
|
||||
# Try to get from cache
|
||||
cache_service = EnhancedCacheService()
|
||||
cached_response = getattr(cache_service, cache_backend + "_cache").get(
|
||||
cache_key
|
||||
)
|
||||
|
||||
if cached_response:
|
||||
logger.debug(
|
||||
f"Cache hit for API view {view_func.__name__}",
|
||||
extra={
|
||||
"cache_key": cache_key,
|
||||
"view": view_func.__name__,
|
||||
"cache_hit": True,
|
||||
},
|
||||
)
|
||||
return cached_response
|
||||
|
||||
# Execute view and cache result
|
||||
start_time = time.time()
|
||||
response = view_func(self, request, *args, **kwargs)
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
# Only cache successful responses
|
||||
if hasattr(response, "status_code") and response.status_code == 200:
|
||||
getattr(cache_service, cache_backend + "_cache").set(
|
||||
cache_key, response, timeout
|
||||
)
|
||||
logger.debug(
|
||||
f"Cached API response for view {view_func.__name__}",
|
||||
extra={
|
||||
"cache_key": cache_key,
|
||||
"view": view_func.__name__,
|
||||
"execution_time": execution_time,
|
||||
"cache_timeout": timeout,
|
||||
"cache_miss": True,
|
||||
},
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
f"Not caching response for view {
|
||||
view_func.__name__} (status: {
|
||||
getattr(
|
||||
response,
|
||||
'status_code',
|
||||
'unknown')})"
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def cache_queryset_result(
|
||||
cache_key_template: str, timeout: int = 3600, cache_backend="default"
|
||||
):
|
||||
"""
|
||||
Decorator for caching expensive queryset operations
|
||||
|
||||
Args:
|
||||
cache_key_template: Template for cache key (can use format placeholders)
|
||||
timeout: Cache timeout in seconds
|
||||
cache_backend: Cache backend to use
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
# Generate cache key from template and arguments
|
||||
try:
|
||||
cache_key = cache_key_template.format(*args, **kwargs)
|
||||
except (KeyError, IndexError):
|
||||
# Fallback to simpler key generation
|
||||
cache_key = f"{cache_key_template}:{
|
||||
hash(
|
||||
str(args) +
|
||||
str(kwargs))}"
|
||||
|
||||
cache_service = EnhancedCacheService()
|
||||
cached_result = getattr(cache_service, cache_backend + "_cache").get(
|
||||
cache_key
|
||||
)
|
||||
|
||||
if cached_result is not None:
|
||||
logger.debug(
|
||||
f"Cache hit for queryset operation: {
|
||||
func.__name__}"
|
||||
)
|
||||
return cached_result
|
||||
|
||||
# Execute function and cache result
|
||||
start_time = time.time()
|
||||
result = func(*args, **kwargs)
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
getattr(cache_service, cache_backend + "_cache").set(
|
||||
cache_key, result, timeout
|
||||
)
|
||||
logger.debug(
|
||||
f"Cached queryset result for {func.__name__}",
|
||||
extra={
|
||||
"cache_key": cache_key,
|
||||
"function": func.__name__,
|
||||
"execution_time": execution_time,
|
||||
"cache_timeout": timeout,
|
||||
},
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def invalidate_cache_on_save(model_name: str, cache_patterns: List[str] = None):
|
||||
"""
|
||||
Decorator to invalidate cache when model instances are saved
|
||||
|
||||
Args:
|
||||
model_name: Name of the model
|
||||
cache_patterns: List of cache key patterns to invalidate
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
result = func(self, *args, **kwargs)
|
||||
|
||||
# Invalidate related cache entries
|
||||
cache_service = EnhancedCacheService()
|
||||
|
||||
# Standard model cache invalidation
|
||||
instance_id = getattr(self, "id", None)
|
||||
cache_service.invalidate_model_cache(model_name, instance_id)
|
||||
|
||||
# Custom pattern invalidation
|
||||
if cache_patterns:
|
||||
for pattern in cache_patterns:
|
||||
if instance_id:
|
||||
pattern = pattern.format(model=model_name, id=instance_id)
|
||||
cache_service.invalidate_pattern(pattern)
|
||||
|
||||
logger.info(
|
||||
f"Invalidated cache for {model_name} after save",
|
||||
extra={
|
||||
"model": model_name,
|
||||
"instance_id": instance_id,
|
||||
"patterns": cache_patterns,
|
||||
},
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
class CachedAPIViewMixin:
|
||||
"""Mixin to add caching capabilities to API views"""
|
||||
|
||||
cache_timeout = 1800 # 30 minutes default
|
||||
cache_vary_on = ["version"]
|
||||
cache_key_prefix = "api"
|
||||
cache_backend = "api"
|
||||
|
||||
@method_decorator(vary_on_headers("User-Agent", "Accept-Language"))
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
"""Add caching to the dispatch method"""
|
||||
if request.method == "GET" and getattr(self, "enable_caching", True):
|
||||
return self._cached_dispatch(request, *args, **kwargs)
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def _cached_dispatch(self, request, *args, **kwargs):
|
||||
"""Handle cached dispatch for GET requests"""
|
||||
cache_key = self._generate_cache_key(request, *args, **kwargs)
|
||||
|
||||
cache_service = EnhancedCacheService()
|
||||
cached_response = getattr(cache_service, self.cache_backend + "_cache").get(
|
||||
cache_key
|
||||
)
|
||||
|
||||
if cached_response:
|
||||
logger.debug(f"Cache hit for view {self.__class__.__name__}")
|
||||
return cached_response
|
||||
|
||||
# Execute view
|
||||
response = super().dispatch(request, *args, **kwargs)
|
||||
|
||||
# Cache successful responses
|
||||
if hasattr(response, "status_code") and response.status_code == 200:
|
||||
getattr(cache_service, self.cache_backend + "_cache").set(
|
||||
cache_key, response, self.cache_timeout
|
||||
)
|
||||
logger.debug(f"Cached response for view {self.__class__.__name__}")
|
||||
|
||||
return response
|
||||
|
||||
def _generate_cache_key(self, request, *args, **kwargs):
|
||||
"""Generate cache key for the request"""
|
||||
key_parts = [
|
||||
self.cache_key_prefix,
|
||||
self.__class__.__name__,
|
||||
request.method,
|
||||
(str(request.user.id) if request.user.is_authenticated else "anonymous"),
|
||||
str(hash(frozenset(request.GET.items()))),
|
||||
]
|
||||
|
||||
if args:
|
||||
key_parts.append(str(hash(args)))
|
||||
if kwargs:
|
||||
key_parts.append(str(hash(frozenset(kwargs.items()))))
|
||||
|
||||
# Add vary_on fields
|
||||
for field in self.cache_vary_on:
|
||||
value = getattr(request, field, "")
|
||||
key_parts.append(str(value))
|
||||
|
||||
return ":".join(key_parts)
|
||||
|
||||
|
||||
def smart_cache(
|
||||
timeout: int = 3600,
|
||||
key_func: Optional[Callable] = None,
|
||||
invalidate_on: Optional[List[str]] = None,
|
||||
cache_backend: str = "default",
|
||||
):
|
||||
"""
|
||||
Smart caching decorator that adapts to function arguments
|
||||
|
||||
Args:
|
||||
timeout: Cache timeout in seconds
|
||||
key_func: Custom function to generate cache key
|
||||
invalidate_on: List of signals to invalidate cache on
|
||||
cache_backend: Cache backend to use
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
# Generate cache key
|
||||
if key_func:
|
||||
cache_key = key_func(*args, **kwargs)
|
||||
else:
|
||||
# Default key generation
|
||||
key_data = {
|
||||
"func": f"{func.__module__}.{func.__name__}",
|
||||
"args": str(args),
|
||||
"kwargs": json.dumps(kwargs, sort_keys=True, default=str),
|
||||
}
|
||||
key_string = json.dumps(key_data, sort_keys=True)
|
||||
cache_key = f"smart_cache:{
|
||||
hashlib.md5(
|
||||
key_string.encode()).hexdigest()}"
|
||||
|
||||
# Try to get from cache
|
||||
cache_service = EnhancedCacheService()
|
||||
cached_result = getattr(cache_service, cache_backend + "_cache").get(
|
||||
cache_key
|
||||
)
|
||||
|
||||
if cached_result is not None:
|
||||
logger.debug(f"Smart cache hit for {func.__name__}")
|
||||
return cached_result
|
||||
|
||||
# Execute function
|
||||
start_time = time.time()
|
||||
result = func(*args, **kwargs)
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
# Cache result
|
||||
getattr(cache_service, cache_backend + "_cache").set(
|
||||
cache_key, result, timeout
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
f"Smart cached result for {func.__name__}",
|
||||
extra={
|
||||
"cache_key": cache_key,
|
||||
"execution_time": execution_time,
|
||||
"function": func.__name__,
|
||||
},
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
# Add cache invalidation if specified
|
||||
if invalidate_on:
|
||||
wrapper._cache_invalidate_on = invalidate_on
|
||||
wrapper._cache_backend = cache_backend
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def conditional_cache(condition_func: Callable, **cache_kwargs):
|
||||
"""
|
||||
Cache decorator that only caches when condition is met
|
||||
|
||||
Args:
|
||||
condition_func: Function that returns True if caching should be applied
|
||||
**cache_kwargs: Arguments passed to smart_cache
|
||||
"""
|
||||
|
||||
def decorator(func):
|
||||
cached_func = smart_cache(**cache_kwargs)(func)
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
if condition_func(*args, **kwargs):
|
||||
return cached_func(*args, **kwargs)
|
||||
else:
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
# Utility functions for cache key generation
|
||||
def generate_user_cache_key(user, suffix: str = ""):
|
||||
"""Generate cache key based on user"""
|
||||
user_id = user.id if user.is_authenticated else "anonymous"
|
||||
return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}"
|
||||
|
||||
|
||||
def generate_model_cache_key(model_instance, suffix: str = ""):
|
||||
"""Generate cache key based on model instance"""
|
||||
model_name = model_instance._meta.model_name
|
||||
instance_id = model_instance.id
|
||||
return (
|
||||
f"{model_name}:{instance_id}:{suffix}"
|
||||
if suffix
|
||||
else f"{model_name}:{instance_id}"
|
||||
)
|
||||
|
||||
|
||||
def generate_queryset_cache_key(queryset, params: dict = None):
|
||||
"""Generate cache key for queryset with parameters"""
|
||||
model_name = queryset.model._meta.model_name
|
||||
params_str = json.dumps(params or {}, sort_keys=True, default=str)
|
||||
params_hash = hashlib.md5(params_str.encode()).hexdigest()
|
||||
return f"queryset:{model_name}:{params_hash}"
|
||||
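A hedged usage sketch for the decorators above; the view, the cached helper, and their bodies are placeholders, and the sketch assumes EnhancedCacheService exposes the api_cache/default_cache attributes the decorators call (that service is not shown in this diff).

# Hypothetical usage sketch; TrendingParksApi and popular_park_ids are illustrative.
from rest_framework.response import Response
from rest_framework.views import APIView

from core.decorators.cache_decorators import cache_api_response, smart_cache


class TrendingParksApi(APIView):
    @cache_api_response(timeout=600, key_prefix="parks", cache_backend="api")
    def get(self, request, *args, **kwargs):
        # Expensive work runs only on cache misses, and only for GET requests.
        return Response({"results": []})


@smart_cache(timeout=3600)
def popular_park_ids(limit: int = 10):
    # Placeholder body; a real implementation would query the database.
    return list(range(limit))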
core/exceptions.py (new file, 224 lines)
@@ -0,0 +1,224 @@
|
||||
"""
|
||||
Custom exception classes for ThrillWiki.
|
||||
Provides domain-specific exceptions with proper error codes and messages.
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
|
||||
class ThrillWikiException(Exception):
|
||||
"""Base exception for all ThrillWiki-specific errors."""
|
||||
|
||||
default_message = "An error occurred"
|
||||
error_code = "THRILLWIKI_ERROR"
|
||||
status_code = 500
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: Optional[str] = None,
|
||||
error_code: Optional[str] = None,
|
||||
details: Optional[Dict[str, Any]] = None,
|
||||
):
|
||||
self.message = message or self.default_message
|
||||
self.error_code = error_code or self.error_code
|
||||
self.details = details or {}
|
||||
super().__init__(self.message)
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert exception to dictionary for API responses."""
|
||||
return {
|
||||
"error_code": self.error_code,
|
||||
"message": self.message,
|
||||
"details": self.details,
|
||||
}
|
||||
|
||||
|
||||
class ValidationException(ThrillWikiException):
|
||||
"""Raised when data validation fails."""
|
||||
|
||||
default_message = "Validation failed"
|
||||
error_code = "VALIDATION_ERROR"
|
||||
status_code = 400
|
||||
|
||||
|
||||
class NotFoundError(ThrillWikiException):
|
||||
"""Raised when a requested resource is not found."""
|
||||
|
||||
default_message = "Resource not found"
|
||||
error_code = "NOT_FOUND"
|
||||
status_code = 404
|
||||
|
||||
|
||||
class PermissionDeniedError(ThrillWikiException):
|
||||
"""Raised when user lacks permission for an operation."""
|
||||
|
||||
default_message = "Permission denied"
|
||||
error_code = "PERMISSION_DENIED"
|
||||
status_code = 403
|
||||
|
||||
|
||||
class BusinessLogicError(ThrillWikiException):
|
||||
"""Raised when business logic constraints are violated."""
|
||||
|
||||
default_message = "Business logic violation"
|
||||
error_code = "BUSINESS_LOGIC_ERROR"
|
||||
status_code = 400
|
||||
|
||||
|
||||
class ExternalServiceError(ThrillWikiException):
|
||||
"""Raised when external service calls fail."""
|
||||
|
||||
default_message = "External service error"
|
||||
error_code = "EXTERNAL_SERVICE_ERROR"
|
||||
status_code = 502
|
||||
|
||||
|
||||
# Domain-specific exceptions
|
||||
|
||||
|
||||
class ParkError(ThrillWikiException):
|
||||
"""Base exception for park-related errors."""
|
||||
|
||||
error_code = "PARK_ERROR"
|
||||
|
||||
|
||||
class ParkNotFoundError(NotFoundError):
|
||||
"""Raised when a park is not found."""
|
||||
|
||||
default_message = "Park not found"
|
||||
error_code = "PARK_NOT_FOUND"
|
||||
|
||||
def __init__(self, park_slug: Optional[str] = None, **kwargs):
|
||||
if park_slug:
|
||||
kwargs["details"] = {"park_slug": park_slug}
|
||||
kwargs["message"] = f"Park with slug '{park_slug}' not found"
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
class ParkOperationError(BusinessLogicError):
|
||||
"""Raised when park operation constraints are violated."""
|
||||
|
||||
default_message = "Invalid park operation"
|
||||
error_code = "PARK_OPERATION_ERROR"
|
||||
|
||||
|
||||
class RideError(ThrillWikiException):
|
||||
"""Base exception for ride-related errors."""
|
||||
|
||||
error_code = "RIDE_ERROR"
|
||||
|
||||
|
||||
class RideNotFoundError(NotFoundError):
|
||||
"""Raised when a ride is not found."""
|
||||
|
||||
default_message = "Ride not found"
|
||||
error_code = "RIDE_NOT_FOUND"
|
||||
|
||||
def __init__(self, ride_slug: Optional[str] = None, **kwargs):
|
||||
if ride_slug:
|
||||
kwargs["details"] = {"ride_slug": ride_slug}
|
||||
kwargs["message"] = f"Ride with slug '{ride_slug}' not found"
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
class RideOperationError(BusinessLogicError):
|
||||
"""Raised when ride operation constraints are violated."""
|
||||
|
||||
default_message = "Invalid ride operation"
|
||||
error_code = "RIDE_OPERATION_ERROR"
|
||||
|
||||
|
||||
class LocationError(ThrillWikiException):
|
||||
"""Base exception for location-related errors."""
|
||||
|
||||
error_code = "LOCATION_ERROR"
|
||||
|
||||
|
||||
class InvalidCoordinatesError(ValidationException):
|
||||
"""Raised when geographic coordinates are invalid."""
|
||||
|
||||
default_message = "Invalid geographic coordinates"
|
||||
error_code = "INVALID_COORDINATES"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
latitude: Optional[float] = None,
|
||||
longitude: Optional[float] = None,
|
||||
**kwargs,
|
||||
):
|
||||
if latitude is not None or longitude is not None:
|
||||
kwargs["details"] = {"latitude": latitude, "longitude": longitude}
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
class GeolocationError(ExternalServiceError):
|
||||
"""Raised when geolocation services fail."""
|
||||
|
||||
default_message = "Geolocation service unavailable"
|
||||
error_code = "GEOLOCATION_ERROR"
|
||||
|
||||
|
||||
class ReviewError(ThrillWikiException):
|
||||
"""Base exception for review-related errors."""
|
||||
|
||||
error_code = "REVIEW_ERROR"
|
||||
|
||||
|
||||
class ReviewModerationError(BusinessLogicError):
|
||||
"""Raised when review moderation constraints are violated."""
|
||||
|
||||
default_message = "Review moderation error"
|
||||
error_code = "REVIEW_MODERATION_ERROR"
|
||||
|
||||
|
||||
class DuplicateReviewError(BusinessLogicError):
|
||||
"""Raised when user tries to create duplicate reviews."""
|
||||
|
||||
default_message = "User has already reviewed this item"
|
||||
error_code = "DUPLICATE_REVIEW"
|
||||
|
||||
|
||||
class AccountError(ThrillWikiException):
|
||||
"""Base exception for account-related errors."""
|
||||
|
||||
error_code = "ACCOUNT_ERROR"
|
||||
|
||||
|
||||
class InsufficientPermissionsError(PermissionDeniedError):
|
||||
"""Raised when user lacks required permissions."""
|
||||
|
||||
default_message = "Insufficient permissions"
|
||||
error_code = "INSUFFICIENT_PERMISSIONS"
|
||||
|
||||
def __init__(self, required_permission: Optional[str] = None, **kwargs):
|
||||
if required_permission:
|
||||
kwargs["details"] = {"required_permission": required_permission}
|
||||
kwargs["message"] = f"Permission '{required_permission}' required"
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
class EmailError(ExternalServiceError):
|
||||
"""Raised when email operations fail."""
|
||||
|
||||
default_message = "Email service error"
|
||||
error_code = "EMAIL_ERROR"
|
||||
|
||||
|
||||
class CacheError(ThrillWikiException):
|
||||
"""Raised when cache operations fail."""
|
||||
|
||||
default_message = "Cache operation failed"
|
||||
error_code = "CACHE_ERROR"
|
||||
status_code = 500
|
||||
|
||||
|
||||
class RoadTripError(ExternalServiceError):
|
||||
"""Raised when road trip planning fails."""
|
||||
|
||||
default_message = "Road trip planning error"
|
||||
error_code = "ROADTRIP_ERROR"
|
||||
|
||||
def __init__(self, service_name: Optional[str] = None, **kwargs):
|
||||
if service_name:
|
||||
kwargs["details"] = {"service": service_name}
|
||||
super().__init__(**kwargs)
|
||||
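A brief sketch of how these exceptions are meant to surface through the API handler: a selector-style helper (park_get is a hypothetical name, and a slug field on Park is assumed) raises ParkNotFoundError, which the DRF exception handler converts into a 404 with code PARK_NOT_FOUND.

# Hypothetical selector sketch; park_get is an illustrative name and a slug field
# on Park is assumed.
from core.exceptions import ParkNotFoundError
from parks.models import Park


def park_get(*, slug: str) -> Park:
    try:
        return Park.objects.get(slug=slug)
    except Park.DoesNotExist:
        # custom_exception_handler turns this into a 404 with code PARK_NOT_FOUND.
        raise ParkNotFoundError(park_slug=slug)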
@@ -1,4 +1,5 @@
"""Core forms and form components."""

from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.utils.translation import gettext_lazy as _
@@ -15,13 +16,16 @@ class BaseAutocomplete(Autocomplete):
    - Authentication enforcement
    - Sensible search configuration
    """

    # Search configuration
    minimum_search_length = 2  # More responsive than default 3
    max_results = 10  # Reasonable limit for performance

    # UI text configuration using gettext for i18n
    no_result_text = _("No matches found")
    narrow_search_text = _("Showing %(page_size)s of %(total)s matches. Please refine your search.")
    narrow_search_text = _(
        "Showing %(page_size)s of %(total)s matches. Please refine your search."
    )
    type_at_least_n_characters = _("Type at least %(n)s characters...")

    # Project-wide component settings
@@ -34,6 +38,6 @@ class BaseAutocomplete(Autocomplete):
        This can be overridden in subclasses if public access is needed.
        Configure AUTOCOMPLETE_BLOCK_UNAUTHENTICATED in settings to disable.
        """
        block_unauth = getattr(settings, 'AUTOCOMPLETE_BLOCK_UNAUTHENTICATED', True)
        block_unauth = getattr(settings, "AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", True)
        if block_unauth and not request.user.is_authenticated:
            raise PermissionDenied(_("Authentication required"))

core/forms/search.py (new file, 168 lines)
@@ -0,0 +1,168 @@
|
||||
from django import forms
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
class LocationSearchForm(forms.Form):
|
||||
"""
|
||||
A comprehensive search form that includes text search, location-based
|
||||
search, and content type filtering for a unified search experience.
|
||||
"""
|
||||
|
||||
# Text search query
|
||||
q = forms.CharField(
|
||||
required=False,
|
||||
label=_("Search Query"),
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"placeholder": _("Search parks, rides, companies..."),
|
||||
"class": (
|
||||
"w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
|
||||
"focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
|
||||
"dark:border-gray-600 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# Location-based search
|
||||
location = forms.CharField(
|
||||
required=False,
|
||||
label=_("Near Location"),
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"placeholder": _("City, address, or coordinates..."),
|
||||
"id": "location-input",
|
||||
"class": (
|
||||
"w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
|
||||
"focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
|
||||
"dark:border-gray-600 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# Hidden fields for coordinates
|
||||
lat = forms.FloatField(
|
||||
required=False, widget=forms.HiddenInput(attrs={"id": "lat-input"})
|
||||
)
|
||||
lng = forms.FloatField(
|
||||
required=False, widget=forms.HiddenInput(attrs={"id": "lng-input"})
|
||||
)
|
||||
|
||||
# Search radius
|
||||
radius_km = forms.ChoiceField(
|
||||
required=False,
|
||||
label=_("Search Radius"),
|
||||
choices=[
|
||||
("", _("Any distance")),
|
||||
("5", _("5 km")),
|
||||
("10", _("10 km")),
|
||||
("25", _("25 km")),
|
||||
("50", _("50 km")),
|
||||
("100", _("100 km")),
|
||||
("200", _("200 km")),
|
||||
],
|
||||
widget=forms.Select(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
|
||||
"focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
|
||||
"dark:border-gray-600 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# Content type filters
|
||||
search_parks = forms.BooleanField(
|
||||
required=False,
|
||||
initial=True,
|
||||
label=_("Search Parks"),
|
||||
widget=forms.CheckboxInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
|
||||
"dark:border-gray-600 dark:bg-gray-700"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
search_rides = forms.BooleanField(
|
||||
required=False,
|
||||
label=_("Search Rides"),
|
||||
widget=forms.CheckboxInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
|
||||
"dark:border-gray-600 dark:bg-gray-700"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
search_companies = forms.BooleanField(
|
||||
required=False,
|
||||
label=_("Search Companies"),
|
||||
widget=forms.CheckboxInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
|
||||
"dark:border-gray-600 dark:bg-gray-700"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# Geographic filters
|
||||
country = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"placeholder": _("Country"),
|
||||
"class": (
|
||||
"w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
|
||||
"shadow-sm focus:ring-blue-500 focus:border-blue-500 "
|
||||
"dark:bg-gray-700 dark:border-gray-600 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
state = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"placeholder": _("State/Region"),
|
||||
"class": (
|
||||
"w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
|
||||
"shadow-sm focus:ring-blue-500 focus:border-blue-500 "
|
||||
"dark:bg-gray-700 dark:border-gray-600 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
city = forms.CharField(
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"placeholder": _("City"),
|
||||
"class": (
|
||||
"w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
|
||||
"shadow-sm focus:ring-blue-500 focus:border-blue-500 "
|
||||
"dark:bg-gray-700 dark:border-gray-600 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
cleaned_data = super().clean()
|
||||
|
||||
# If lat/lng are provided, ensure location field is populated for
|
||||
# display
|
||||
lat = cleaned_data.get("lat")
|
||||
lng = cleaned_data.get("lng")
|
||||
location = cleaned_data.get("location")
|
||||
|
||||
if lat and lng and not location:
|
||||
cleaned_data["location"] = f"{lat}, {lng}"
|
||||
|
||||
return cleaned_data
|
||||
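A hedged sketch of a view consuming LocationSearchForm; the view name and template path are illustrative, and a real implementation would hand the cleaned data to the project's search selectors/services.

# Hypothetical view sketch; "core/search.html" and unified_search are illustrative names.
from django.shortcuts import render

from core.forms.search import LocationSearchForm


def unified_search(request):
    form = LocationSearchForm(request.GET or None)
    results = {}
    if form.is_valid():
        # cleaned_data carries q, location, lat, lng, radius_km and the type filters;
        # pass it to search services here.
        results = {"criteria": form.cleaned_data}
    return render(request, "core/search.html", {"form": form, "results": results})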
core/health_checks/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# Health checks module

core/health_checks/custom_checks.py (new file, 325 lines)
@@ -0,0 +1,325 @@
|
||||
"""
|
||||
Custom health checks for ThrillWiki application.
|
||||
"""
|
||||
|
||||
import time
|
||||
import logging
|
||||
from django.core.cache import cache
|
||||
from django.db import connection
|
||||
from health_check.backends import BaseHealthCheckBackend
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CacheHealthCheck(BaseHealthCheckBackend):
|
||||
"""Check Redis cache connectivity and performance"""
|
||||
|
||||
critical_service = True
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
# Test cache write/read performance
|
||||
test_key = "health_check_test"
|
||||
test_value = "test_value_" + str(int(time.time()))
|
||||
|
||||
start_time = time.time()
|
||||
cache.set(test_key, test_value, timeout=30)
|
||||
cached_value = cache.get(test_key)
|
||||
cache_time = time.time() - start_time
|
||||
|
||||
if cached_value != test_value:
|
||||
self.add_error("Cache read/write test failed - values don't match")
|
||||
return
|
||||
|
||||
# Check cache performance
|
||||
if cache_time > 0.1: # Warn if cache operations take more than 100ms
|
||||
self.add_error(
|
||||
f"Cache performance degraded: {
|
||||
cache_time:.3f}s for read/write operation"
|
||||
)
|
||||
return
|
||||
|
||||
# Clean up test key
|
||||
cache.delete(test_key)
|
||||
|
||||
# Additional Redis-specific checks if using django-redis
|
||||
try:
|
||||
from django_redis import get_redis_connection
|
||||
|
||||
redis_client = get_redis_connection("default")
|
||||
info = redis_client.info()
|
||||
|
||||
# Check memory usage
|
||||
used_memory = info.get("used_memory", 0)
|
||||
max_memory = info.get("maxmemory", 0)
|
||||
|
||||
if max_memory > 0:
|
||||
memory_usage_percent = (used_memory / max_memory) * 100
|
||||
if memory_usage_percent > 90:
|
||||
self.add_error(
|
||||
f"Redis memory usage critical: {
|
||||
memory_usage_percent:.1f}%"
|
||||
)
|
||||
elif memory_usage_percent > 80:
|
||||
logger.warning(
|
||||
f"Redis memory usage high: {
|
||||
memory_usage_percent:.1f}%"
|
||||
)
|
||||
|
||||
except ImportError:
|
||||
# django-redis not available, skip additional checks
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.warning(f"Could not get Redis info: {e}")
|
||||
|
||||
except Exception as e:
|
||||
self.add_error(f"Cache service unavailable: {e}")
|
||||
|
||||
|
||||
class DatabasePerformanceCheck(BaseHealthCheckBackend):
|
||||
"""Check database performance and connectivity"""
|
||||
|
||||
critical_service = False
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
start_time = time.time()
|
||||
|
||||
# Test basic connectivity
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SELECT 1")
|
||||
result = cursor.fetchone()
|
||||
|
||||
if result[0] != 1:
|
||||
self.add_error("Database connectivity test failed")
|
||||
return
|
||||
|
||||
basic_query_time = time.time() - start_time
|
||||
|
||||
# Test a more complex query (if it takes too long, there might be
|
||||
# performance issues)
|
||||
start_time = time.time()
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SELECT COUNT(*) FROM django_content_type")
|
||||
cursor.fetchone()
|
||||
|
||||
complex_query_time = time.time() - start_time
|
||||
|
||||
# Performance thresholds
|
||||
if basic_query_time > 1.0:
|
||||
self.add_error(
|
||||
f"Database responding slowly: basic query took {
|
||||
basic_query_time:.2f}s"
|
||||
)
|
||||
elif basic_query_time > 0.5:
|
||||
logger.warning(
|
||||
f"Database performance degraded: basic query took {
|
||||
basic_query_time:.2f}s"
|
||||
)
|
||||
|
||||
if complex_query_time > 2.0:
|
||||
self.add_error(
|
||||
f"Database performance critical: complex query took {
|
||||
complex_query_time:.2f}s"
|
||||
)
|
||||
elif complex_query_time > 1.0:
|
||||
logger.warning(
|
||||
f"Database performance slow: complex query took {
|
||||
complex_query_time:.2f}s"
|
||||
)
|
||||
|
||||
# Check database version and settings if possible
|
||||
try:
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SELECT version()")
|
||||
version = cursor.fetchone()[0]
|
||||
logger.debug(f"Database version: {version}")
|
||||
except Exception as e:
|
||||
logger.debug(f"Could not get database version: {e}")
|
||||
|
||||
except Exception as e:
|
||||
self.add_error(f"Database performance check failed: {e}")
|
||||
|
||||
|
||||
class ApplicationHealthCheck(BaseHealthCheckBackend):
|
||||
"""Check application-specific health indicators"""
|
||||
|
||||
critical_service = False
|
||||
|
||||
def check_status(self):
|
||||
try:
|
||||
# Check if we can import critical modules
|
||||
critical_modules = [
|
||||
"parks.models",
|
||||
"rides.models",
|
||||
"accounts.models",
|
||||
"core.services",
|
||||
]
|
||||
|
||||
for module_name in critical_modules:
|
||||
try:
|
||||
__import__(module_name)
|
||||
except ImportError as e:
|
||||
self.add_error(
|
||||
f"Critical module import failed: {module_name} - {e}"
|
||||
)
|
||||
|
||||
# Check if we can access critical models
|
||||
try:
|
||||
from parks.models import Park
|
||||
from rides.models import Ride
|
||||
from django.contrib.auth import get_user_model
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
# Test that we can query these models (just count, don't load
|
||||
# data)
|
||||
park_count = Park.objects.count()
|
||||
ride_count = Ride.objects.count()
|
||||
user_count = User.objects.count()
|
||||
|
||||
logger.debug(
|
||||
f"Model counts - Parks: {park_count}, Rides: {ride_count}, Users: {user_count}"
|
||||
)
|
            except Exception as e:
                self.add_error(f"Model access check failed: {e}")

            # Check media and static file configuration
            from django.conf import settings
            import os

            if not os.path.exists(settings.MEDIA_ROOT):
                self.add_error(
                    f"Media directory does not exist: {settings.MEDIA_ROOT}"
                )

            if not os.path.exists(settings.STATIC_ROOT) and not settings.DEBUG:
                self.add_error(
                    f"Static directory does not exist: {settings.STATIC_ROOT}"
                )

        except Exception as e:
            self.add_error(f"Application health check failed: {e}")


class ExternalServiceHealthCheck(BaseHealthCheckBackend):
    """Check external services and dependencies"""

    critical_service = False

    def check_status(self):
        # Check email service if configured
        try:
            from django.core.mail import get_connection
            from django.conf import settings

            if (
                hasattr(settings, "EMAIL_BACKEND")
                and "console" not in settings.EMAIL_BACKEND
            ):
                # Only check if not using console backend
                connection = get_connection()
                if hasattr(connection, "open"):
                    try:
                        connection.open()
                        connection.close()
                    except Exception as e:
                        logger.warning(f"Email service check failed: {e}")
                        # Don't fail the health check for email issues in
                        # development

        except Exception as e:
            logger.debug(f"Email service check error: {e}")

        # Check if Sentry is configured and working
        try:
            import sentry_sdk

            if sentry_sdk.Hub.current.client:
                # Sentry is configured
                try:
                    # Test that we can capture a test message (this won't
                    # actually send to Sentry)
                    with sentry_sdk.push_scope() as scope:
                        scope.set_tag("health_check", True)
                        # Don't actually send a message, just verify the SDK is
                        # working
                        logger.debug("Sentry SDK is operational")
                except Exception as e:
                    logger.warning(f"Sentry SDK check failed: {e}")

        except ImportError:
            logger.debug("Sentry SDK not installed")
        except Exception as e:
            logger.debug(f"Sentry check error: {e}")

        # Check Redis connection if configured
        try:
            from django.core.cache import caches
            from django.conf import settings

            cache_config = settings.CACHES.get("default", {})
            if "redis" in cache_config.get("BACKEND", "").lower():
                # Redis is configured, test basic connectivity
                redis_cache = caches["default"]
                redis_cache.set("health_check_redis", "test", 10)
                value = redis_cache.get("health_check_redis")
                if value != "test":
                    self.add_error("Redis cache connectivity test failed")
                else:
                    redis_cache.delete("health_check_redis")

        except Exception as e:
            logger.warning(f"Redis connectivity check failed: {e}")


class DiskSpaceHealthCheck(BaseHealthCheckBackend):
    """Check available disk space"""

    critical_service = False

    def check_status(self):
        try:
            import shutil
            from django.conf import settings

            # Check disk space for media directory
            media_usage = shutil.disk_usage(settings.MEDIA_ROOT)
            media_free_percent = (media_usage.free / media_usage.total) * 100

            # Check disk space for logs directory if it exists
            logs_dir = getattr(settings, "BASE_DIR", "/tmp") / "logs"
            if logs_dir.exists():
                logs_usage = shutil.disk_usage(logs_dir)
                logs_free_percent = (logs_usage.free / logs_usage.total) * 100
            else:
                logs_free_percent = media_free_percent  # Use same as media

            # Alert thresholds
            if media_free_percent < 10:
                self.add_error(
                    f"Critical disk space: {media_free_percent:.1f}% free in media directory"
                )
            elif media_free_percent < 20:
                logger.warning(
                    f"Low disk space: {media_free_percent:.1f}% free in media directory"
                )

            if logs_free_percent < 10:
                self.add_error(
                    f"Critical disk space: {logs_free_percent:.1f}% free in logs directory"
                )
            elif logs_free_percent < 20:
                logger.warning(
                    f"Low disk space: {logs_free_percent:.1f}% free in logs directory"
                )

        except Exception as e:
            logger.warning(f"Disk space check failed: {e}")
            # Don't fail health check for disk space issues in development
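These backends follow the django-health-check `BaseHealthCheckBackend` interface. A minimal registration sketch, assuming the backends live in a `core.health_checks` module (the module path and app config shown here are assumptions, not part of this diff):

```python
# core/apps.py -- hypothetical wiring for django-health-check
from django.apps import AppConfig


class CoreConfig(AppConfig):
    name = "core"

    def ready(self):
        from health_check.plugins import plugin_dir
        # Module path is assumed; the diff does not name the file.
        from core.health_checks import (
            ExternalServiceHealthCheck,
            DiskSpaceHealthCheck,
        )

        plugin_dir.register(ExternalServiceHealthCheck)
        plugin_dir.register(DiskSpaceHealthCheck)
```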
@@ -5,16 +5,22 @@ from django.conf import settings
from typing import Any, Dict, Optional
from django.db.models import QuerySet


class DiffMixin:
    """Mixin to add diffing capabilities to models"""

    def get_prev_record(self) -> Optional[Any]:
        """Get the previous record for this instance"""
        try:
            return type(self).objects.filter(
                pgh_created_at__lt=self.pgh_created_at,
                pgh_obj_id=self.pgh_obj_id
            ).order_by('-pgh_created_at').first()
            return (
                type(self)
                .objects.filter(
                    pgh_created_at__lt=self.pgh_created_at,
                    pgh_obj_id=self.pgh_obj_id,
                )
                .order_by("-pgh_created_at")
                .first()
            )
        except (AttributeError, TypeError):
            return None

@@ -25,15 +31,20 @@ class DiffMixin:
            return {}

        skip_fields = {
            'pgh_id', 'pgh_created_at', 'pgh_label',
            'pgh_obj_id', 'pgh_context_id', '_state',
            'created_at', 'updated_at'
            "pgh_id",
            "pgh_created_at",
            "pgh_label",
            "pgh_obj_id",
            "pgh_context_id",
            "_state",
            "created_at",
            "updated_at",
        }

        changes = {}
        for field, value in self.__dict__.items():
            # Skip internal fields and those we don't want to track
            if field.startswith('_') or field in skip_fields or field.endswith('_id'):
            if field.startswith("_") or field in skip_fields or field.endswith("_id"):
                continue

            try:
@@ -41,16 +52,18 @@ class DiffMixin:
                new_value = value
                if old_value != new_value:
                    changes[field] = {
                        "old": str(old_value) if old_value is not None else "None",
                        "new": str(new_value) if new_value is not None else "None"
                        "old": (str(old_value) if old_value is not None else "None"),
                        "new": (str(new_value) if new_value is not None else "None"),
                    }
            except AttributeError:
                continue

        return changes


class TrackedModel(models.Model):
    """Abstract base class for models that need history tracking"""

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

@@ -61,16 +74,18 @@ class TrackedModel(models.Model):
        """Get all history records for this instance in chronological order"""
        event_model = self.events.model  # pghistory provides this automatically
        if event_model:
            return event_model.objects.filter(
                pgh_obj_id=self.pk
            ).order_by('-pgh_created_at')
            return event_model.objects.filter(pgh_obj_id=self.pk).order_by(
                "-pgh_created_at"
            )
        return self.__class__.objects.none()


class HistoricalSlug(models.Model):
    """Track historical slugs for models"""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    content_object = GenericForeignKey("content_type", "object_id")
    slug = models.SlugField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(
@@ -78,14 +93,14 @@ class HistoricalSlug(models.Model):
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='historical_slugs'
        related_name="historical_slugs",
    )

    class Meta:
        unique_together = ('content_type', 'slug')
        unique_together = ("content_type", "slug")
        indexes = [
            models.Index(fields=['content_type', 'object_id']),
            models.Index(fields=['slug']),
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["slug"]),
        ]

    def __str__(self) -> str:
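For context, a rough usage sketch of the tracking API above; whether the generated pghistory event models actually mix in `DiffMixin` is not shown in this diff, so treat the `get_prev_record()` call as illustrative only:

```python
# Hypothetical usage -- Park is assumed to inherit TrackedModel.
from parks.models import Park

park = Park.objects.get(pk=1)

# Newest-first event stream provided by TrackedModel.get_full_history()
for event in park.get_full_history():
    prev = event.get_prev_record()  # DiffMixin; None for the oldest event
    print(event.pgh_created_at, "has previous record:", prev is not None)
```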
core/logging.py (new file, 261 lines)
@@ -0,0 +1,261 @@
"""
Centralized logging configuration for ThrillWiki.
Provides structured logging with proper formatting and context.
"""

import logging
import sys
from typing import Dict, Any, Optional
from django.conf import settings
from django.utils import timezone


class ThrillWikiFormatter(logging.Formatter):
    """Custom formatter for ThrillWiki logs with structured output."""

    def format(self, record):
        # Add timestamp if not present
        if not hasattr(record, "timestamp"):
            record.timestamp = timezone.now().isoformat()

        # Add request context if available
        if hasattr(record, "request"):
            record.request_id = getattr(record.request, "id", "unknown")
            record.user_id = (
                getattr(record.request.user, "id", "anonymous")
                if hasattr(record.request, "user")
                else "unknown"
            )
            record.path = getattr(record.request, "path", "unknown")
            record.method = getattr(record.request, "method", "unknown")

        # Structure the log message
        if hasattr(record, "extra_data"):
            record.structured_data = record.extra_data

        return super().format(record)


def get_logger(name: str) -> logging.Logger:
    """
    Get a configured logger for ThrillWiki components.

    Args:
        name: Logger name (usually __name__)

    Returns:
        Configured logger instance
    """
    logger = logging.getLogger(name)

    # Only configure if not already configured
    if not logger.handlers:
        handler = logging.StreamHandler(sys.stdout)
        formatter = ThrillWikiFormatter(
            fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.INFO if settings.DEBUG else logging.WARNING)

    return logger


def log_exception(
    logger: logging.Logger,
    exception: Exception,
    *,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.ERROR,
) -> None:
    """
    Log an exception with structured context.

    Args:
        logger: Logger instance
        exception: Exception to log
        context: Additional context data
        request: Django request object
        level: Log level
    """
    log_data = {
        "exception_type": exception.__class__.__name__,
        "exception_message": str(exception),
        "context": context or {},
    }

    if request:
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
            }
        )

    logger.log(
        level,
        f"Exception occurred: {exception}",
        extra={"extra_data": log_data},
        exc_info=True,
    )


def log_business_event(
    logger: logging.Logger,
    event_type: str,
    *,
    message: str,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.INFO,
) -> None:
    """
    Log a business event with structured context.

    Args:
        logger: Logger instance
        event_type: Type of business event
        message: Event message
        context: Additional context data
        request: Django request object
        level: Log level
    """
    log_data = {"event_type": event_type, "context": context or {}}

    if request:
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
            }
        )

    logger.log(level, message, extra={"extra_data": log_data})


def log_performance_metric(
    logger: logging.Logger,
    operation: str,
    *,
    duration_ms: float,
    context: Optional[Dict[str, Any]] = None,
    level: int = logging.INFO,
) -> None:
    """
    Log a performance metric.

    Args:
        logger: Logger instance
        operation: Operation name
        duration_ms: Duration in milliseconds
        context: Additional context data
        level: Log level
    """
    log_data = {
        "metric_type": "performance",
        "operation": operation,
        "duration_ms": duration_ms,
        "context": context or {},
    }

    message = f"Performance: {operation} took {duration_ms:.2f}ms"
    logger.log(level, message, extra={"extra_data": log_data})


def log_api_request(
    logger: logging.Logger,
    request,
    *,
    response_status: Optional[int] = None,
    duration_ms: Optional[float] = None,
    level: int = logging.INFO,
) -> None:
    """
    Log an API request with context.

    Args:
        logger: Logger instance
        request: Django request object
        response_status: HTTP response status code
        duration_ms: Request duration in milliseconds
        level: Log level
    """
    log_data = {
        "request_type": "api",
        "path": getattr(request, "path", "unknown"),
        "method": getattr(request, "method", "unknown"),
        "user_id": (
            getattr(request.user, "id", "anonymous")
            if hasattr(request, "user")
            else "unknown"
        ),
        "response_status": response_status,
        "duration_ms": duration_ms,
    }

    message = f"API Request: {request.method} {request.path}"
    if response_status:
        message += f" -> {response_status}"
    if duration_ms:
        message += f" ({duration_ms:.2f}ms)"

    logger.log(level, message, extra={"extra_data": log_data})


def log_security_event(
    logger: logging.Logger,
    event_type: str,
    *,
    message: str,
    severity: str = "medium",
    context: Optional[Dict[str, Any]] = None,
    request=None,
) -> None:
    """
    Log a security-related event.

    Args:
        logger: Logger instance
        event_type: Type of security event
        message: Event message
        severity: Event severity (low, medium, high, critical)
        context: Additional context data
        request: Django request object
    """
    log_data = {
        "security_event": True,
        "event_type": event_type,
        "severity": severity,
        "context": context or {},
    }

    if request:
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
                "remote_addr": request.META.get("REMOTE_ADDR", "unknown"),
                "user_agent": request.META.get("HTTP_USER_AGENT", "unknown"),
            }
        )

    # Use WARNING for medium/high, ERROR for critical
    level = logging.ERROR if severity in ["high", "critical"] else logging.WARNING

    logger.log(level, f"SECURITY: {message}", extra={"extra_data": log_data})
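A short usage sketch for these helpers; the view function and the `park.status` field are placeholders, not taken from this diff:

```python
from core.logging import get_logger, log_business_event, log_exception

logger = get_logger(__name__)


def close_park(request, park):
    # Hypothetical service function illustrating the structured helpers.
    try:
        park.status = "CLOSED_TEMP"
        park.save()
        log_business_event(
            logger,
            "park_status_change",
            message=f"{park} marked temporarily closed",
            context={"park_id": park.pk},
            request=request,
        )
    except Exception as exc:
        log_exception(logger, exc, context={"park_id": park.pk}, request=request)
        raise
```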
@@ -2,10 +2,11 @@ from django.core.management.base import BaseCommand
from django.core.cache import cache
from parks.models import Park
from rides.models import Ride
from analytics.models import PageView
from core.analytics import PageView


class Command(BaseCommand):
    help = 'Updates trending parks and rides cache based on views in the last 24 hours'
    help = "Updates trending parks and rides cache based on views in the last 24 hours"

    def handle(self, *args, **kwargs):
        """
@@ -23,12 +24,12 @@ class Command(BaseCommand):
        trending_rides = PageView.get_trending_items(Ride, hours=24, limit=10)

        # Cache the results for 1 hour
        cache.set('trending_parks', trending_parks, 3600)  # 3600 seconds = 1 hour
        cache.set('trending_rides', trending_rides, 3600)
        cache.set("trending_parks", trending_parks, 3600)  # 3600 seconds = 1 hour
        cache.set("trending_rides", trending_rides, 3600)

        self.stdout.write(
            self.style.SUCCESS(
                'Successfully updated trending parks and rides. '
                'Cached 10 items each for parks and rides based on views in the last 24 hours.'
                "Successfully updated trending parks and rides. "
                "Cached 10 items each for parks and rides based on views in the last 24 hours."
            )
        )
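The command writes the `trending_parks` and `trending_rides` cache keys; a minimal sketch of a view that could read them back (the view name and template path are assumptions):

```python
from django.core.cache import cache
from django.shortcuts import render


def trending(request):
    # Falls back to empty lists until the management command has run at least once.
    context = {
        "trending_parks": cache.get("trending_parks", []),
        "trending_rides": cache.get("trending_rides", []),
    }
    return render(request, "core/trending.html", context)
```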
core/managers.py (new file, 273 lines)
@@ -0,0 +1,273 @@
"""
Custom managers and QuerySets for optimized database patterns.
Following Django styleguide best practices for database access.
"""

from typing import Optional, List, Union
from django.db import models
from django.db.models import Q, Count, Avg, Max
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import Distance
from django.utils import timezone
from datetime import timedelta


class BaseQuerySet(models.QuerySet):
    """Base QuerySet with common optimizations and patterns."""

    def active(self):
        """Filter for active/enabled records."""
        if hasattr(self.model, "is_active"):
            return self.filter(is_active=True)
        return self

    def published(self):
        """Filter for published records."""
        if hasattr(self.model, "is_published"):
            return self.filter(is_published=True)
        return self

    def recent(self, *, days: int = 30):
        """Filter for recently created records."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(created_at__gte=cutoff_date)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        """
        Full-text search across specified fields.

        Args:
            query: Search query string
            fields: List of field names to search (defaults to name, description)
        """
        if not query:
            return self

        if fields is None:
            fields = ["name", "description"] if hasattr(self.model, "name") else []

        q_objects = Q()
        for field in fields:
            if hasattr(self.model, field):
                q_objects |= Q(**{f"{field}__icontains": query})

        return self.filter(q_objects) if q_objects else self

    def with_stats(self):
        """Add basic statistics annotations."""
        return self

    def optimized_for_list(self):
        """Optimize queryset for list display."""
        return self.select_related().prefetch_related()

    def optimized_for_detail(self):
        """Optimize queryset for detail display."""
        return self.select_related().prefetch_related()


class BaseManager(models.Manager):
    """Base manager with common patterns."""

    def get_queryset(self):
        return BaseQuerySet(self.model, using=self._db)

    def active(self):
        return self.get_queryset().active()

    def published(self):
        return self.get_queryset().published()

    def recent(self, *, days: int = 30):
        return self.get_queryset().recent(days=days)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        return self.get_queryset().search(query=query, fields=fields)


class LocationQuerySet(BaseQuerySet):
    """QuerySet for location-based models with geographic functionality."""

    def near_point(self, *, point: Point, distance_km: float = 50):
        """Filter locations near a geographic point."""
        if hasattr(self.model, "point"):
            return (
                self.filter(point__distance_lte=(point, Distance(km=distance_km)))
                .distance(point)
                .order_by("distance")
            )
        return self

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        """Filter locations within geographic bounds."""
        if hasattr(self.model, "point"):
            return self.filter(
                point__latitude__gte=south,
                point__latitude__lte=north,
                point__longitude__gte=west,
                point__longitude__lte=east,
            )
        return self

    def by_country(self, *, country: str):
        """Filter by country."""
        if hasattr(self.model, "country"):
            return self.filter(country__iexact=country)
        return self

    def by_region(self, *, state: str):
        """Filter by state/region."""
        if hasattr(self.model, "state"):
            return self.filter(state__iexact=state)
        return self

    def by_city(self, *, city: str):
        """Filter by city."""
        if hasattr(self.model, "city"):
            return self.filter(city__iexact=city)
        return self


class LocationManager(BaseManager):
    """Manager for location-based models."""

    def get_queryset(self):
        return LocationQuerySet(self.model, using=self._db)

    def near_point(self, *, point: Point, distance_km: float = 50):
        return self.get_queryset().near_point(point=point, distance_km=distance_km)

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        return self.get_queryset().within_bounds(
            north=north, south=south, east=east, west=west
        )


class ReviewableQuerySet(BaseQuerySet):
    """QuerySet for models that can be reviewed."""

    def with_review_stats(self):
        """Add review statistics annotations."""
        return self.annotate(
            review_count=Count("reviews", filter=Q(reviews__is_published=True)),
            average_rating=Avg("reviews__rating", filter=Q(reviews__is_published=True)),
            latest_review_date=Max(
                "reviews__created_at", filter=Q(reviews__is_published=True)
            ),
        )

    def highly_rated(self, *, min_rating: float = 8.0):
        """Filter for highly rated items."""
        return self.with_review_stats().filter(average_rating__gte=min_rating)

    def recently_reviewed(self, *, days: int = 30):
        """Filter for items with recent reviews."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(
            reviews__created_at__gte=cutoff_date, reviews__is_published=True
        ).distinct()


class ReviewableManager(BaseManager):
    """Manager for reviewable models."""

    def get_queryset(self):
        return ReviewableQuerySet(self.model, using=self._db)

    def with_review_stats(self):
        return self.get_queryset().with_review_stats()

    def highly_rated(self, *, min_rating: float = 8.0):
        return self.get_queryset().highly_rated(min_rating=min_rating)


class HierarchicalQuerySet(BaseQuerySet):
    """QuerySet for hierarchical models (with parent/child relationships)."""

    def root_level(self):
        """Filter for root-level items (no parent)."""
        if hasattr(self.model, "parent"):
            return self.filter(parent__isnull=True)
        return self

    def children_of(self, *, parent_id: int):
        """Get children of a specific parent."""
        if hasattr(self.model, "parent"):
            return self.filter(parent_id=parent_id)
        return self

    def with_children_count(self):
        """Add count of children."""
        if hasattr(self.model, "children"):
            return self.annotate(children_count=Count("children"))
        return self


class HierarchicalManager(BaseManager):
    """Manager for hierarchical models."""

    def get_queryset(self):
        return HierarchicalQuerySet(self.model, using=self._db)

    def root_level(self):
        return self.get_queryset().root_level()


class TimestampedQuerySet(BaseQuerySet):
    """QuerySet for models with created_at/updated_at timestamps."""

    def created_between(self, *, start_date, end_date):
        """Filter by creation date range."""
        return self.filter(created_at__date__range=[start_date, end_date])

    def updated_since(self, *, since_date):
        """Filter for records updated since a date."""
        return self.filter(updated_at__gte=since_date)

    def by_creation_date(self, *, descending: bool = True):
        """Order by creation date."""
        order = "-created_at" if descending else "created_at"
        return self.order_by(order)


class TimestampedManager(BaseManager):
    """Manager for timestamped models."""

    def get_queryset(self):
        return TimestampedQuerySet(self.model, using=self._db)

    def created_between(self, *, start_date, end_date):
        return self.get_queryset().created_between(
            start_date=start_date, end_date=end_date
        )


class StatusQuerySet(BaseQuerySet):
    """QuerySet for models with status fields."""

    def with_status(self, *, status: Union[str, List[str]]):
        """Filter by status."""
        if isinstance(status, list):
            return self.filter(status__in=status)
        return self.filter(status=status)

    def operating(self):
        """Filter for operating/active status."""
        return self.filter(status="OPERATING")

    def closed(self):
        """Filter for closed status."""
        return self.filter(status__in=["CLOSED_TEMP", "CLOSED_PERM"])


class StatusManager(BaseManager):
    """Manager for status-based models."""

    def get_queryset(self):
        return StatusQuerySet(self.model, using=self._db)

    def operating(self):
        return self.get_queryset().operating()

    def closed(self):
        return self.get_queryset().closed()
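A sketch of how one of these managers might be attached to a model; the `Park` fields below are assumptions used only to make the example self-contained:

```python
from django.db import models

from core.managers import StatusManager


class Park(models.Model):
    name = models.CharField(max_length=255)
    status = models.CharField(max_length=20, default="OPERATING")
    created_at = models.DateTimeField(auto_now_add=True)

    objects = StatusManager()


# Because StatusQuerySet extends BaseQuerySet, calls chain naturally:
# Park.objects.operating().recent(days=7).search(query="cedar")
```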
@@ -1,27 +0,0 @@
import pghistory
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest

class RequestContextProvider(pghistory.context):
    """Custom context provider for pghistory that extracts information from the request."""
    def __call__(self, request: WSGIRequest) -> dict:
        return {
            'user': str(request.user) if request.user and not isinstance(request.user, AnonymousUser) else None,
            'ip': request.META.get('REMOTE_ADDR'),
            'user_agent': request.META.get('HTTP_USER_AGENT'),
            'session_key': request.session.session_key if hasattr(request, 'session') else None
        }

# Initialize the context provider
request_context = RequestContextProvider()

class PgHistoryContextMiddleware:
    """
    Middleware that ensures request object is available to pghistory context.
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        return response
core/middleware/__init__.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# Core middleware modules

# Import middleware classes from the analytics module
from .analytics import PageViewMiddleware, PgHistoryContextMiddleware

# Import middleware classes from the performance_middleware.py module
from .performance_middleware import (
    PerformanceMiddleware,
    QueryCountMiddleware,
    DatabaseConnectionMiddleware,
    CachePerformanceMiddleware,
)

# Make all middleware classes available at the package level
__all__ = [
    "PageViewMiddleware",
    "PgHistoryContextMiddleware",
    "PerformanceMiddleware",
    "QueryCountMiddleware",
    "DatabaseConnectionMiddleware",
    "CachePerformanceMiddleware",
]
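A settings sketch showing how the package-level exports could be referenced; the ordering is illustrative and not taken from this diff:

```python
# settings.py (excerpt, hypothetical ordering)
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    # Re-exported from core/middleware/__init__.py
    "core.middleware.PerformanceMiddleware",
    "core.middleware.QueryCountMiddleware",
    "core.middleware.PgHistoryContextMiddleware",
    "core.middleware.PageViewMiddleware",
]
```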
core/middleware/analytics.py (new file, 84 lines)
@@ -0,0 +1,84 @@
"""
Analytics and tracking middleware for Django application.
"""

import pghistory
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest
from django.utils.deprecation import MiddlewareMixin
from django.contrib.contenttypes.models import ContentType
from django.views.generic.detail import DetailView
from core.analytics import PageView


class RequestContextProvider(pghistory.context):
    """Custom context provider for pghistory that extracts information from the request."""

    def __call__(self, request: WSGIRequest) -> dict:
        return {
            "user": (
                str(request.user)
                if request.user and not isinstance(request.user, AnonymousUser)
                else None
            ),
            "ip": request.META.get("REMOTE_ADDR"),
            "user_agent": request.META.get("HTTP_USER_AGENT"),
            "session_key": (
                request.session.session_key if hasattr(request, "session") else None
            ),
        }


# Initialize the context provider
request_context = RequestContextProvider()


class PgHistoryContextMiddleware:
    """
    Middleware that ensures request object is available to pghistory context.
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        return response


class PageViewMiddleware(MiddlewareMixin):
    """Middleware to track page views for DetailView-based pages."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        # Only track GET requests
        if request.method != "GET":
            return None

        # Get view class if it exists
        view_class = getattr(view_func, "view_class", None)
        if not view_class or not issubclass(view_class, DetailView):
            return None

        # Get the object if it's a detail view
        try:
            view_instance = view_class()
            view_instance.request = request
            view_instance.args = view_args
            view_instance.kwargs = view_kwargs
            obj = view_instance.get_object()
        except (AttributeError, Exception):
            return None

        # Record the page view
        try:
            PageView.objects.create(
                content_type=ContentType.objects.get_for_model(obj.__class__),
                object_id=obj.pk,
                ip_address=request.META.get("REMOTE_ADDR", ""),
                user_agent=request.META.get("HTTP_USER_AGENT", "")[:512],
            )
        except Exception:
            # Fail silently to not interrupt the request
            pass

        return None
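`PgHistoryContextMiddleware` above is currently a pass-through. One way it could actually attach request data to pghistory is sketched below as an assumption; the keys mirror `RequestContextProvider`, and `pghistory.context` used as a context manager is the library's documented pattern:

```python
import pghistory


class PgHistoryRequestContextMiddleware:
    """Hypothetical variant that wraps each request in a pghistory context."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        user = getattr(request, "user", None)
        with pghistory.context(
            user=str(user) if user is not None and user.is_authenticated else None,
            ip=request.META.get("REMOTE_ADDR"),
            user_agent=request.META.get("HTTP_USER_AGENT"),
        ):
            return self.get_response(request)
```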
core/middleware/performance_middleware.py (new file, 317 lines)
@@ -0,0 +1,317 @@
"""
Performance monitoring middleware for tracking request metrics.
"""

import time
import logging
from django.db import connection
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings

performance_logger = logging.getLogger("performance")
logger = logging.getLogger(__name__)


class PerformanceMiddleware(MiddlewareMixin):
    """Middleware to collect performance metrics for each request"""

    def process_request(self, request):
        """Initialize performance tracking for the request"""
        request._performance_start_time = time.time()
        request._performance_initial_queries = (
            len(connection.queries) if hasattr(connection, "queries") else 0
        )
        return None

    def process_response(self, request, response):
        """Log performance metrics after response is ready"""
        # Skip performance tracking for certain paths
        skip_paths = [
            "/health/",
            "/admin/jsi18n/",
            "/static/",
            "/media/",
            "/__debug__/",
        ]
        if any(request.path.startswith(path) for path in skip_paths):
            return response

        # Calculate metrics
        end_time = time.time()
        start_time = getattr(request, "_performance_start_time", end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, "_performance_initial_queries", 0)
        total_queries = (
            len(connection.queries) - initial_queries
            if hasattr(connection, "queries")
            else 0
        )

        # Get content length
        content_length = 0
        if hasattr(response, "content"):
            content_length = len(response.content)
        elif hasattr(response, "streaming_content"):
            # For streaming responses, we can't easily measure content length
            content_length = -1

        # Build performance data
        performance_data = {
            "path": request.path,
            "method": request.method,
            "status_code": response.status_code,
            "duration_ms": round(duration * 1000, 2),
            "duration_seconds": round(duration, 3),
            "query_count": total_queries,
            "content_length_bytes": content_length,
            "user_id": (
                getattr(request.user, "id", None)
                if hasattr(request, "user") and request.user.is_authenticated
                else None
            ),
            "user_agent": request.META.get("HTTP_USER_AGENT", "")[:100],  # Truncate user agent
            "remote_addr": self._get_client_ip(request),
        }

        # Add query details in debug mode
        if settings.DEBUG and hasattr(connection, "queries") and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            performance_data["queries"] = [
                {
                    "sql": (
                        query["sql"][:200] + "..."
                        if len(query["sql"]) > 200
                        else query["sql"]
                    ),
                    "time": float(query["time"]),
                }
                for query in recent_queries[-10:]  # Last 10 queries only
            ]

            # Identify slow queries
            slow_queries = [q for q in recent_queries if float(q["time"]) > 0.1]
            if slow_queries:
                performance_data["slow_query_count"] = len(slow_queries)
                performance_data["slowest_query_time"] = max(
                    float(q["time"]) for q in slow_queries
                )

        # Determine log level based on performance
        log_level = self._get_log_level(duration, total_queries, response.status_code)

        # Log the performance data
        performance_logger.log(
            log_level,
            f"Request performance: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {response.status_code}",
            extra=performance_data,
        )

        # Add performance headers for debugging (only in debug mode)
        if settings.DEBUG:
            response["X-Response-Time"] = f"{duration * 1000:.2f}ms"
            response["X-Query-Count"] = str(total_queries)
            if total_queries > 0 and hasattr(connection, "queries"):
                total_query_time = sum(
                    float(q["time"]) for q in connection.queries[-total_queries:]
                )
                response["X-Query-Time"] = f"{total_query_time * 1000:.2f}ms"

        return response

    def process_exception(self, request, exception):
        """Log performance data even when an exception occurs"""
        end_time = time.time()
        start_time = getattr(request, "_performance_start_time", end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, "_performance_initial_queries", 0)
        total_queries = (
            len(connection.queries) - initial_queries
            if hasattr(connection, "queries")
            else 0
        )

        performance_data = {
            "path": request.path,
            "method": request.method,
            "status_code": 500,  # Exception occurred
            "duration_ms": round(duration * 1000, 2),
            "query_count": total_queries,
            "exception": str(exception),
            "exception_type": type(exception).__name__,
            "user_id": (
                getattr(request.user, "id", None)
                if hasattr(request, "user") and request.user.is_authenticated
                else None
            ),
        }

        performance_logger.error(
            f"Request exception: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {type(exception).__name__}: {exception}",
            extra=performance_data,
        )

        return None  # Don't handle the exception, just log it

    def _get_client_ip(self, request):
        """Extract client IP address from request"""
        x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
        if x_forwarded_for:
            ip = x_forwarded_for.split(",")[0].strip()
        else:
            ip = request.META.get("REMOTE_ADDR", "")
        return ip

    def _get_log_level(self, duration, query_count, status_code):
        """Determine appropriate log level based on performance metrics"""
        # Error responses
        if status_code >= 500:
            return logging.ERROR
        elif status_code >= 400:
            return logging.WARNING

        # Performance-based log levels
        if duration > 5.0:  # Very slow requests
            return logging.ERROR
        elif duration > 2.0 or query_count > 20:  # Slow requests or high query count
            return logging.WARNING
        elif duration > 1.0 or query_count > 10:  # Moderately slow
            return logging.INFO
        else:
            return logging.DEBUG


class QueryCountMiddleware(MiddlewareMixin):
    """Middleware to track and limit query counts per request"""

    def __init__(self, get_response):
        self.get_response = get_response
        self.query_limit = getattr(settings, "MAX_QUERIES_PER_REQUEST", 50)
        super().__init__(get_response)

    def process_request(self, request):
        """Initialize query tracking"""
        request._query_count_start = (
            len(connection.queries) if hasattr(connection, "queries") else 0
        )
        return None

    def process_response(self, request, response):
        """Check query count and warn if excessive"""
        if not hasattr(connection, "queries"):
            return response

        start_count = getattr(request, "_query_count_start", 0)
        current_count = len(connection.queries)
        request_query_count = current_count - start_count

        if request_query_count > self.query_limit:
            logger.warning(
                f"Excessive query count: {request.path} executed {request_query_count} queries "
                f"(limit: {self.query_limit})",
                extra={
                    "path": request.path,
                    "method": request.method,
                    "query_count": request_query_count,
                    "query_limit": self.query_limit,
                    "excessive_queries": True,
                },
            )

        return response


class DatabaseConnectionMiddleware(MiddlewareMixin):
    """Middleware to monitor database connection health"""

    def process_request(self, request):
        """Check database connection at start of request"""
        try:
            # Simple connection test
            from django.db import connection

            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
        except Exception as e:
            logger.error(
                f"Database connection failed at request start: {e}",
                extra={
                    "path": request.path,
                    "method": request.method,
                    "database_error": str(e),
                },
            )
            # Don't block the request, let Django handle the database error

        return None

    def process_response(self, request, response):
        """Close database connections properly"""
        try:
            from django.db import connection

            connection.close()
        except Exception as e:
            logger.warning(f"Error closing database connection: {e}")

        return response


class CachePerformanceMiddleware(MiddlewareMixin):
    """Middleware to monitor cache performance"""

    def process_request(self, request):
        """Initialize cache performance tracking"""
        request._cache_hits = 0
        request._cache_misses = 0
        request._cache_start_time = time.time()
        return None

    def process_response(self, request, response):
        """Log cache performance metrics"""
        cache_duration = time.time() - getattr(
            request, "_cache_start_time", time.time()
        )
        cache_hits = getattr(request, "_cache_hits", 0)
        cache_misses = getattr(request, "_cache_misses", 0)

        if cache_hits + cache_misses > 0:
            hit_rate = (cache_hits / (cache_hits + cache_misses)) * 100

            cache_data = {
                "path": request.path,
                "cache_hits": cache_hits,
                "cache_misses": cache_misses,
                "cache_hit_rate": round(hit_rate, 2),
                "cache_operations": cache_hits + cache_misses,
                # milliseconds
                "cache_duration": round(cache_duration * 1000, 2),
            }

            # Log cache performance
            if hit_rate < 50 and cache_hits + cache_misses > 5:
                logger.warning(
                    f"Low cache hit rate for {request.path}: {hit_rate:.1f}%",
                    extra=cache_data,
                )
            else:
                logger.debug(
                    f"Cache performance for {request.path}: {hit_rate:.1f}% hit rate",
                    extra=cache_data,
                )

        return response
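`QueryCountMiddleware` reads `MAX_QUERIES_PER_REQUEST` and `PerformanceMiddleware` logs to the dedicated `performance` logger, so both need settings support; the values and handler below are illustrative only:

```python
# settings.py (excerpt, hypothetical values)
MAX_QUERIES_PER_REQUEST = 50  # read via getattr() in QueryCountMiddleware

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {"console": {"class": "logging.StreamHandler"}},
    "loggers": {
        "performance": {"handlers": ["console"], "level": "INFO"},
    },
}
```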
@@ -1,4 +1,4 @@
# Generated by Django 5.1.4 on 2025-02-10 01:10
# Generated by Django 5.1.4 on 2025-08-13 21:35

import django.db.models.deletion
from django.db import migrations, models
@@ -45,7 +45,8 @@ class Migration(migrations.Migration):
                    name="core_slughi_content_8bbf56_idx",
                ),
                models.Index(
                    fields=["old_slug"], name="core_slughi_old_slu_aaef7f_idx"
                    fields=["old_slug"],
                    name="core_slughi_old_slu_aaef7f_idx",
                ),
            ],
        },
core/migrations/0002_historicalslug_pageview.py (new file, 102 lines)
@@ -0,0 +1,102 @@
# Generated by Django 5.1.4 on 2025-08-14 14:50

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("core", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="HistoricalSlug",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                ("slug", models.SlugField(max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="historical_slugs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="core_histor_content_b4c470_idx",
                    ),
                    models.Index(fields=["slug"], name="core_histor_slug_8fd7b3_idx"),
                ],
                "unique_together": {("content_type", "slug")},
            },
        ),
        migrations.CreateModel(
            name="PageView",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                (
                    "timestamp",
                    models.DateTimeField(auto_now_add=True, db_index=True),
                ),
                ("ip_address", models.GenericIPAddressField()),
                ("user_agent", models.CharField(blank=True, max_length=512)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="page_views",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["timestamp"],
                        name="core_pagevi_timesta_757ebb_idx",
                    ),
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="core_pagevi_content_eda7ad_idx",
                    ),
                ],
            },
        ),
    ]
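The `PageView` table above is what `PageView.get_trending_items()` presumably aggregates over; that implementation is not shown in this diff, so the following is only an illustration of the kind of query it might run for parks:

```python
from datetime import timedelta

from django.contrib.contenttypes.models import ContentType
from django.db.models import Count
from django.utils import timezone

from core.analytics import PageView
from parks.models import Park


def trending_park_ids(hours=24, limit=10):
    # Uses the timestamp and (content_type, object_id) indexes from the migration.
    cutoff = timezone.now() - timedelta(hours=hours)
    park_ct = ContentType.objects.get_for_model(Park)
    return (
        PageView.objects.filter(content_type=park_ct, timestamp__gte=cutoff)
        .values("object_id")
        .annotate(views=Count("id"))
        .order_by("-views")[:limit]
    )
```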
core/mixins/__init__.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from django.views.generic.list import MultipleObjectMixin


class HTMXFilterableMixin(MultipleObjectMixin):
    """
    A mixin that provides filtering capabilities for HTMX requests.
    """

    filter_class = None

    def get_queryset(self):
        queryset = super().get_queryset()
        self.filterset = self.filter_class(self.request.GET, queryset=queryset)
        return self.filterset.qs

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["filter"] = self.filterset
        return context
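A sketch of the mixin in use; `ParkFilter` is a placeholder for a django-filter `FilterSet` and is not defined in this diff:

```python
from django.views.generic import ListView

from core.mixins import HTMXFilterableMixin
from parks.filters import ParkFilter  # assumed module
from parks.models import Park


class ParkListView(HTMXFilterableMixin, ListView):
    model = Park
    template_name = "parks/park_list.html"
    filter_class = ParkFilter  # consumed by HTMXFilterableMixin.get_queryset()
```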