feat: major project restructure - move Django to backend dir and fix critical imports
- Restructure project: move the Django backend to the backend/ directory
- Add frontend/ directory for a future Next.js application
- Add shared/ directory for common resources
- Fix critical Django import errors:
  - Add missing sys.path modification for the apps directory
  - Fix undefined CATEGORY_CHOICES imports in the rides module
  - Fix undefined references in the media migrations
  - Remove unused imports and f-strings without placeholders
- Install missing django-environ dependency
- Django server now runs without ModuleNotFoundError
- Update .gitignore and README for the new structure
- Add pnpm workspace configuration for the monorepo setup
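The patch hunks for the import fixes are not included in this excerpt. As a rough illustration of the kind of sys.path change the body describes (a sketch only; the file location, the apps/ layout, and the settings module name are assumptions, not the project's actual code), backend/manage.py could prepend the apps directory before Django loads:

    # backend/manage.py (sketch; the real file is not part of this excerpt)
    import os
    import sys
    from pathlib import Path


    def main():
        # Assumption: project apps live under backend/apps/ and are referenced by
        # bare names (e.g. "rides", "parks"), so that directory must be importable.
        base_dir = Path(__file__).resolve().parent
        sys.path.insert(0, str(base_dir / "apps"))

        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "thrillwiki.settings")
        from django.core.management import execute_from_command_line

        execute_from_command_line(sys.argv)


    if __name__ == "__main__":
        main()
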
.gitignore  (442 changes; marked vendored)

@@ -1,198 +1,8 @@
[removed: roughly 190 ad-hoc entries, including /.vscode, /dev.sh, /flake.nix, venv, /venv, ./venv, venv/sour, ssh_tools.jsonc, .pytest_cache.github, static/css/tailwind.css, .venv, .venv/lib/python3.12/site-packages, repeated .DS_Store lines, per-app __pycache__/ directories, dozens of individually listed .cpython-311/.cpython-312 .pyc files, and the stock "Byte-compiled / optimized / DLL files", "C extensions", and "Distribution / packaging" headers.]

The head of the file now reads:

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
@@ -212,189 +22,95 @@ share/python-wheels/

[removed: the rest of the stock Python template sections (PyInstaller, installer logs, unit-test/coverage reports, translations, Flask, Scrapy, Sphinx, PyBuilder, Jupyter/IPython, pyenv/pipenv/poetry/pdm notes, __pypackages__/, Celery, SageMath, virtualenv variants, Spyder/Rope, mkdocs, mypy/Pyre/pytype, Cython, PyCharm, Pixi, .django_tailwind_cli/), the extra macOS entries (.AppleDouble, .LSOverride, Icon, ._*, Spotlight/Trashes/AFP files), and the ThrillWiki CI/CD entries (.thrillwiki-config, ***REMOVED***.unraid, ***REMOVED***.webhook, .github-token, .thrillwiki-github-token, .thrillwiki-template-config, scripts/systemd/*** env files, profiles/, uv.lock).]

The tail of the file now reads:

*.egg
MANIFEST

# Django
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/backend/staticfiles/
/backend/media/

# UV
.uv/
backend/.uv/

# Node.js
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
.pnpm-store/

# Vue.js / Vite
/frontend/dist/
/frontend/dist-ssr/
*.local

# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
backend/.env
frontend/.env

# IDEs
.vscode/
.idea/
*.swp
*.swo
*.sublime-project
*.sublime-workspace

# OS
.DS_Store
Thumbs.db
Desktop.ini

# Logs
logs/
*.log

# Coverage
coverage/
*.lcov
.nyc_output
htmlcov/
.coverage
.coverage.*

# Testing
.pytest_cache/
.cache

# Temporary files
tmp/
temp/
*.tmp
*.temp

# Build outputs
/dist/
/build/

# Backup files
*.bak
*.orig
*.swp

# Archive files
*.tar.gz
*.zip
*.rar

# Security
*.pem
*.key
*.cert

# Local development
/uploads/
/backups/
README.md  (443 changes)

@@ -1,391 +1,150 @@

[The old single-Django-app setup guide is replaced wholesale by a monorepo README. Removed sections are summarized in bracketed notes; the new content follows.]
[removed: the old title "ThrillWiki Development Environment Setup", the HTMX/Alpine/Tailwind project description, the Technology Stack list (Django 5.0+ with GeoDjango/PostGIS, HTMX + Alpine.js + Tailwind CSS, PostgreSQL with PostGIS, UV, django-allauth with Google/Discord OAuth, django-pghistory, Pytest + Playwright), the Prerequisites install steps for Python 3.11+, UV, PostgreSQL/PostGIS, GDAL/GEOS, and Node.js 18+, and the old quick-start steps (clone, `uv sync`, `createdb thrillwiki`, `createuser wiki`, `psql postgres`).]

# ThrillWiki Django + Vue.js Monorepo

A modern monorepo architecture for ThrillWiki, combining a Django REST API backend with a Vue.js frontend.

## 🏗️ Architecture

This project uses a monorepo structure that cleanly separates backend and frontend concerns:

```
thrillwiki-monorepo/
├── backend/    # Django REST API
├── frontend/   # Vue.js SPA
└── shared/     # Shared resources and documentation
```

## 🚀 Quick Start

### Prerequisites

- **Python 3.11+** with [uv](https://docs.astral.sh/uv/) for backend dependencies
- **Node.js 18+** with [pnpm](https://pnpm.io/) for frontend dependencies

### Development Setup

1. **Clone the repository**

   ```bash
   git clone <repository-url>
   cd thrillwiki-monorepo
   ```

2. **Install dependencies**

   ```bash
   # Install frontend dependencies
   pnpm install

   # Install backend dependencies
   cd backend && uv sync
   ```

3. **Start development servers**

   ```bash
   # Start both frontend and backend
   pnpm run dev

   # Or start individually
   pnpm run dev:frontend   # Vue.js on :3000
   pnpm run dev:backend    # Django on :8000
   ```
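The quick start assumes pnpm drives both halves from the repository root; the commit message mentions adding a pnpm workspace configuration, but the workspace file itself is not shown in this diff. A minimal sketch of what it could look like (the package names, and whether `shared/` is a workspace package at all, are assumptions):

```yaml
# pnpm-workspace.yaml - sketch only; the real workspace file is not included in this diff
packages:
  - "frontend"
  - "shared"
```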
[removed: the old PostgreSQL shell setup (`ALTER USER wiki WITH PASSWORD 'thrillwiki'`, `GRANT ALL PRIVILEGES`, `CREATE EXTENSION postgis`), the hard-coded `DATABASES` block from `thrillwiki/settings.py` (PostGIS engine, HOST "192.168.86.3") and the note to switch HOST to "localhost" for local work, the migration and createsuperuser steps, the "CRITICAL" server-start sequence (`lsof -ti :8000 | xargs kill -9; find . -type d -name "__pycache__" -exec rm -r {} +; uv run manage.py tailwind runserver`), and the UV-only package-management rules.]

## 📁 Project Structure

### Backend (`/backend`)

- **Django REST API** with modular app architecture
- **UV package management** for Python dependencies
- **PostgreSQL** database (configurable)
- **Redis** for caching and sessions

### Frontend (`/frontend`)

- **Vue 3** with Composition API
- **TypeScript** for type safety
- **Vite** for fast development and building
- **Tailwind CSS** for styling
- **Pinia** for state management

### Shared (`/shared`)

- Documentation and deployment guides
- Shared TypeScript types
- Build and deployment scripts
- Docker configurations

## 🛠️ Development Workflow

### Available Scripts

```bash
# Development
pnpm run dev              # Start both servers
pnpm run dev:frontend     # Frontend only
pnpm run dev:backend      # Backend only

# Building
pnpm run build            # Build for production
pnpm run build:frontend   # Frontend build only

# Testing
pnpm run test             # Run all tests
pnpm run test:frontend    # Frontend tests
pnpm run test:backend     # Backend tests

# Code Quality
pnpm run lint             # Lint all code
pnpm run format           # Format all code
```
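The script names above imply a root `package.json` that fans out into the workspace. That file is not part of this diff, so the following is only a sketch of how `dev`, `dev:frontend`, and `dev:backend` might be wired; the `concurrently` dependency and the exact backend command are assumptions:

```json
{
  "private": true,
  "scripts": {
    "dev": "concurrently \"pnpm run dev:backend\" \"pnpm run dev:frontend\"",
    "dev:frontend": "pnpm --filter frontend dev",
    "dev:backend": "cd backend && uv run manage.py runserver 0.0.0.0:8000"
  },
  "devDependencies": {
    "concurrently": "^9.0.0"
  }
}
```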
[removed: the old "Django Management Commands" rules (always `uv run manage.py <command>`; never `python manage.py <command>` or `uv run python manage.py <command>`).]

### Backend Commands

```bash
cd backend

# Django management
uv run manage.py migrate
uv run manage.py createsuperuser
uv run manage.py collectstatic

# Testing
uv run manage.py test
```
[removed: the old CSS Development section (Tailwind CSS v4 setup, links to TAILWIND_V4_MIGRATION.md and TAILWIND_V4_QUICK_REFERENCE.md, the v4 changes and custom theme variables), the old single-app Project Structure tree, the Key Features section (authentication, geographic features, content management, modern frontend), and the old Testing instructions (`uv run pytest`, coverage, Playwright E2E).]

## 🔧 Configuration

### Environment Variables

Create `.env` files for local development:

```bash
# Root .env (shared settings)
DATABASE_URL=postgresql://user:pass@localhost/thrillwiki
REDIS_URL=redis://localhost:6379
SECRET_KEY=your-secret-key

# Backend .env
DJANGO_SETTINGS_MODULE=config.django.local
DEBUG=True

# Frontend .env
VITE_API_BASE_URL=http://localhost:8000/api
```
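The commit message notes that `django-environ` was added as a dependency; the README does not show how these variables are consumed. A plausible sketch of reading them in the backend settings module (the module path and defaults are assumptions, not the project's actual code):

```python
# settings sketch only; the real backend settings module is not shown in this diff
import environ

env = environ.Env(DEBUG=(bool, False))
environ.Env.read_env()  # pull values from a .env file if one exists

SECRET_KEY = env("SECRET_KEY")
DEBUG = env("DEBUG")
DATABASES = {"default": env.db("DATABASE_URL")}  # parses the postgresql:// URL
CACHES = {"default": env.cache("REDIS_URL", default="locmemcache://")}
```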
[removed: the old Test Structure notes, the memory-bank documentation system and its key files, the Important Development Rules (critical server-start command, UV-only package management, `uv run manage.py` prefix), the Database Configuration and GeoDjango requirements (Homebrew GDAL/GEOS library paths), the Troubleshooting section (PostGIS extension, GDAL/GEOS lookup, port 8000 in use, Tailwind not compiling), and the Next Steps and closing notes.]

## 📖 Documentation

- [Backend Documentation](./backend/README.md)
- [Frontend Documentation](./frontend/README.md)
- [Deployment Guide](./shared/docs/deployment/)
- [API Documentation](./shared/docs/api/)

## 🚀 Deployment

See [Deployment Guide](./shared/docs/deployment/) for production setup instructions.

## 🤝 Contributing

1. Fork the repository
2. Create a feature branch
3. Make your changes
4. Run tests and linting
5. Submit a pull request

## 📄 License

This project is licensed under the MIT License.
@@ -1,64 +0,0 @@  (deleted file: the accounts app's allauth adapters; the file path is not shown in this excerpt)

from django.conf import settings
from allauth.account.adapter import DefaultAccountAdapter
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.contrib.auth import get_user_model
from django.contrib.sites.shortcuts import get_current_site

User = get_user_model()


class CustomAccountAdapter(DefaultAccountAdapter):
    def is_open_for_signup(self, request):
        """
        Whether to allow sign ups.
        """
        return True

    def get_email_confirmation_url(self, request, emailconfirmation):
        """
        Constructs the email confirmation (activation) url.
        """
        get_current_site(request)
        return f"{settings.LOGIN_REDIRECT_URL}verify-email?key={emailconfirmation.key}"

    def send_confirmation_mail(self, request, emailconfirmation, signup):
        """
        Sends the confirmation email.
        """
        current_site = get_current_site(request)
        activate_url = self.get_email_confirmation_url(request, emailconfirmation)
        ctx = {
            "user": emailconfirmation.email_address.user,
            "activate_url": activate_url,
            "current_site": current_site,
            "key": emailconfirmation.key,
        }
        if signup:
            email_template = "account/email/email_confirmation_signup"
        else:
            email_template = "account/email/email_confirmation"
        self.send_mail(email_template, emailconfirmation.email_address.email, ctx)


class CustomSocialAccountAdapter(DefaultSocialAccountAdapter):
    def is_open_for_signup(self, request, sociallogin):
        """
        Whether to allow social account sign ups.
        """
        return True

    def populate_user(self, request, sociallogin, data):
        """
        Hook that can be used to further populate the user instance.
        """
        user = super().populate_user(request, sociallogin, data)
        if sociallogin.account.provider == "discord":
            user.discord_id = sociallogin.account.uid
        return user

    def save_user(self, request, sociallogin, form=None):
        """
        Save the newly signed up social login.
        """
        user = super().save_user(request, sociallogin, form)
        return user
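The commit body mentions removing unused imports and no-op statements; in the adapter above, get_current_site(request) is called without using its result in get_email_confirmation_url. A cleaned-up version of that method could simply be the following (illustrative sketch; the relocated file under backend/ is not part of this excerpt):

    def get_email_confirmation_url(self, request, emailconfirmation):
        """Construct the SPA email-confirmation URL (no unused site lookup)."""
        return f"{settings.LOGIN_REDIRECT_URL}verify-email?key={emailconfirmation.key}"
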
@@ -1,282 +0,0 @@  (deleted file: Django admin configuration for the accounts app)

from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.utils.html import format_html
from django.contrib.auth.models import Group
from .models import User, UserProfile, EmailVerification, TopList, TopListItem


class UserProfileInline(admin.StackedInline):
    model = UserProfile
    can_delete = False
    verbose_name_plural = "Profile"
    fieldsets = (
        ("Personal Info", {"fields": ("display_name", "avatar", "pronouns", "bio")}),
        ("Social Media", {"fields": ("twitter", "instagram", "youtube", "discord")}),
        ("Ride Credits", {"fields": ("coaster_credits", "dark_ride_credits", "flat_ride_credits", "water_ride_credits")}),
    )


class TopListItemInline(admin.TabularInline):
    model = TopListItem
    extra = 1
    fields = ("content_type", "object_id", "rank", "notes")
    ordering = ("rank",)


@admin.register(User)
class CustomUserAdmin(UserAdmin):
    list_display = ("username", "email", "get_avatar", "get_status", "role", "date_joined", "last_login", "get_credits")
    list_filter = ("is_active", "is_staff", "role", "is_banned", "groups", "date_joined")
    search_fields = ("username", "email")
    ordering = ("-date_joined",)
    actions = ["activate_users", "deactivate_users", "ban_users", "unban_users"]
    inlines = [UserProfileInline]

    fieldsets = (
        (None, {"fields": ("username", "password")}),
        ("Personal info", {"fields": ("email", "pending_email")}),
        (
            "Roles and Permissions",
            {
                "fields": ("role", "groups", "user_permissions"),
                "description": "Role determines group membership. Groups determine permissions.",
            },
        ),
        (
            "Status",
            {
                "fields": ("is_active", "is_staff", "is_superuser"),
                "description": "These are automatically managed based on role.",
            },
        ),
        ("Ban Status", {"fields": ("is_banned", "ban_reason", "ban_date")}),
        ("Preferences", {"fields": ("theme_preference",)}),
        ("Important dates", {"fields": ("last_login", "date_joined")}),
    )
    add_fieldsets = (
        (None, {"classes": ("wide",), "fields": ("username", "email", "password1", "password2", "role")}),
    )

    @admin.display(description="Avatar")
    def get_avatar(self, obj):
        if obj.profile.avatar:
            return format_html(
                '<img src="{}" width="30" height="30" style="border-radius:50%;" />',
                obj.profile.avatar.url,
            )
        return format_html(
            '<div style="width:30px; height:30px; border-radius:50%; '
            "background-color:#007bff; color:white; display:flex; "
            'align-items:center; justify-content:center;">{}</div>',
            obj.username[0].upper(),
        )

    @admin.display(description="Status")
    def get_status(self, obj):
        if obj.is_banned:
            return format_html('<span style="color: red;">Banned</span>')
        if not obj.is_active:
            return format_html('<span style="color: orange;">Inactive</span>')
        if obj.is_superuser:
            return format_html('<span style="color: purple;">Superuser</span>')
        if obj.is_staff:
            return format_html('<span style="color: blue;">Staff</span>')
        return format_html('<span style="color: green;">Active</span>')

    @admin.display(description="Ride Credits")
    def get_credits(self, obj):
        try:
            profile = obj.profile
            return format_html(
                "RC: {}<br>DR: {}<br>FR: {}<br>WR: {}",
                profile.coaster_credits,
                profile.dark_ride_credits,
                profile.flat_ride_credits,
                profile.water_ride_credits,
            )
        except UserProfile.DoesNotExist:
            return "-"

    @admin.action(description="Activate selected users")
    def activate_users(self, request, queryset):
        queryset.update(is_active=True)

    @admin.action(description="Deactivate selected users")
    def deactivate_users(self, request, queryset):
        queryset.update(is_active=False)

    @admin.action(description="Ban selected users")
    def ban_users(self, request, queryset):
        from django.utils import timezone

        queryset.update(is_banned=True, ban_date=timezone.now())

    @admin.action(description="Unban selected users")
    def unban_users(self, request, queryset):
        queryset.update(is_banned=False, ban_date=None, ban_reason="")

    def save_model(self, request, obj, form, change):
        creating = not obj.pk
        super().save_model(request, obj, form, change)
        if creating and obj.role != User.Roles.USER:
            # Ensure new user with role gets added to appropriate group
            group = Group.objects.filter(name=obj.role).first()
            if group:
                obj.groups.add(group)


@admin.register(UserProfile)
class UserProfileAdmin(admin.ModelAdmin):
    list_display = ("user", "display_name", "coaster_credits", "dark_ride_credits", "flat_ride_credits", "water_ride_credits")
    list_filter = ("coaster_credits", "dark_ride_credits", "flat_ride_credits", "water_ride_credits")
    search_fields = ("user__username", "user__email", "display_name", "bio")

    fieldsets = (
        ("User Information", {"fields": ("user", "display_name", "avatar", "pronouns", "bio")}),
        ("Social Media", {"fields": ("twitter", "instagram", "youtube", "discord")}),
        ("Ride Credits", {"fields": ("coaster_credits", "dark_ride_credits", "flat_ride_credits", "water_ride_credits")}),
    )


@admin.register(EmailVerification)
class EmailVerificationAdmin(admin.ModelAdmin):
    list_display = ("user", "created_at", "last_sent", "is_expired")
    list_filter = ("created_at", "last_sent")
    search_fields = ("user__username", "user__email", "token")
    readonly_fields = ("created_at", "last_sent")

    fieldsets = (
        ("Verification Details", {"fields": ("user", "token")}),
        ("Timing", {"fields": ("created_at", "last_sent")}),
    )

    @admin.display(description="Status")
    def is_expired(self, obj):
        from django.utils import timezone
        from datetime import timedelta

        if timezone.now() - obj.last_sent > timedelta(days=1):
            return format_html('<span style="color: red;">Expired</span>')
        return format_html('<span style="color: green;">Valid</span>')


@admin.register(TopList)
class TopListAdmin(admin.ModelAdmin):
    list_display = ("title", "user", "category", "created_at", "updated_at")
    list_filter = ("category", "created_at", "updated_at")
    search_fields = ("title", "user__username", "description")
    inlines = [TopListItemInline]

    fieldsets = (
        ("Basic Information", {"fields": ("user", "title", "category", "description")}),
        ("Timestamps", {"fields": ("created_at", "updated_at"), "classes": ("collapse",)}),
    )
    readonly_fields = ("created_at", "updated_at")


@admin.register(TopListItem)
class TopListItemAdmin(admin.ModelAdmin):
    list_display = ("top_list", "content_type", "object_id", "rank")
    list_filter = ("top_list__category", "rank")
    search_fields = ("top_list__title", "notes")
    ordering = ("top_list", "rank")

    fieldsets = (
        ("List Information", {"fields": ("top_list", "rank")}),
        ("Item Details", {"fields": ("content_type", "object_id", "notes")}),
    )
@@ -1,9 +0,0 @@  (deleted file: the accounts AppConfig)

from django.apps import AppConfig


class AccountsConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "accounts"

    def ready(self):
        import accounts.signals  # noqa
@@ -1,46 +0,0 @@  (deleted file: management command that dumps the allauth social-auth tables)

from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp, SocialAccount, SocialToken
from django.contrib.sites.models import Site


class Command(BaseCommand):
    help = "Check all social auth related tables"

    def handle(self, *args, **options):
        # Check SocialApp
        self.stdout.write("\nChecking SocialApp table:")
        for app in SocialApp.objects.all():
            self.stdout.write(
                f"ID: {app.pk}, Provider: {app.provider}, Name: {app.name}, Client ID: {app.client_id}"
            )
            self.stdout.write("Sites:")
            for site in app.sites.all():
                self.stdout.write(f" - {site.domain}")

        # Check SocialAccount
        self.stdout.write("\nChecking SocialAccount table:")
        for account in SocialAccount.objects.all():
            self.stdout.write(
                f"ID: {account.pk}, Provider: {account.provider}, UID: {account.uid}"
            )

        # Check SocialToken
        self.stdout.write("\nChecking SocialToken table:")
        for token in SocialToken.objects.all():
            self.stdout.write(
                f"ID: {token.pk}, Account: {token.account}, App: {token.app}"
            )

        # Check Site
        self.stdout.write("\nChecking Site table:")
        for site in Site.objects.all():
            self.stdout.write(
                f"ID: {site.pk}, Domain: {site.domain}, Name: {site.name}"
            )
@@ -1,27 +0,0 @@  (deleted file: management command that prints the configured SocialApp entries)

from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp


class Command(BaseCommand):
    help = "Check social app configurations"

    def handle(self, *args, **options):
        social_apps = SocialApp.objects.all()

        if not social_apps:
            self.stdout.write(self.style.ERROR("No social apps found"))
            return

        for app in social_apps:
            self.stdout.write(self.style.SUCCESS(f"\nProvider: {app.provider}"))
            self.stdout.write(f"Name: {app.name}")
            self.stdout.write(f"Client ID: {app.client_id}")
            self.stdout.write(f"Secret: {app.secret}")
            self.stdout.write(
                f'Sites: {", ".join(str(site.domain) for site in app.sites.all())}'
            )
@@ -1,28 +0,0 @@  (deleted file: management command that drops the social-auth tables and migration records)

from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    help = "Clean up social auth tables and migrations"

    def handle(self, *args, **options):
        with connection.cursor() as cursor:
            # Drop social auth tables
            cursor.execute("DROP TABLE IF EXISTS socialaccount_socialapp")
            cursor.execute("DROP TABLE IF EXISTS socialaccount_socialapp_sites")
            cursor.execute("DROP TABLE IF EXISTS socialaccount_socialaccount")
            cursor.execute("DROP TABLE IF EXISTS socialaccount_socialtoken")

            # Remove migration records
            cursor.execute("DELETE FROM django_migrations WHERE app='socialaccount'")
            cursor.execute(
                "DELETE FROM django_migrations WHERE app='accounts' "
                "AND name LIKE '%social%'"
            )

            # Reset sequences
            cursor.execute("DELETE FROM sqlite_sequence WHERE name LIKE '%social%'")

        self.stdout.write(
            self.style.SUCCESS("Successfully cleaned up social auth configuration")
        )
@@ -1,67 +0,0 @@  (deleted file: management command that removes e2e test users, content, and uploads)

from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from parks.models import ParkReview, Park
from rides.models import Ride
from media.models import Photo

User = get_user_model()


class Command(BaseCommand):
    help = "Cleans up test users and data created during e2e testing"

    def handle(self, *args, **kwargs):
        # Delete test users
        test_users = User.objects.filter(username__in=["testuser", "moderator"])
        count = test_users.count()
        test_users.delete()
        self.stdout.write(self.style.SUCCESS(f"Deleted {count} test users"))

        # Delete test reviews
        reviews = ParkReview.objects.filter(
            user__username__in=["testuser", "moderator"]
        )
        count = reviews.count()
        reviews.delete()
        self.stdout.write(self.style.SUCCESS(f"Deleted {count} test reviews"))

        # Delete test photos
        photos = Photo.objects.filter(uploader__username__in=["testuser", "moderator"])
        count = photos.count()
        photos.delete()
        self.stdout.write(self.style.SUCCESS(f"Deleted {count} test photos"))

        # Delete test parks
        parks = Park.objects.filter(name__startswith="Test Park")
        count = parks.count()
        parks.delete()
        self.stdout.write(self.style.SUCCESS(f"Deleted {count} test parks"))

        # Delete test rides
        rides = Ride.objects.filter(name__startswith="Test Ride")
        count = rides.count()
        rides.delete()
        self.stdout.write(self.style.SUCCESS(f"Deleted {count} test rides"))

        # Clean up test files
        import os
        import glob

        # Clean up test uploads
        media_patterns = [
            "media/uploads/test_*",
            "media/avatars/test_*",
            "media/park/test_*",
            "media/rides/test_*",
        ]

        for pattern in media_patterns:
            files = glob.glob(pattern)
            for f in files:
                try:
                    os.remove(f)
                    self.stdout.write(self.style.SUCCESS(f"Deleted {f}"))
                except OSError as e:
                    self.stdout.write(self.style.WARNING(f"Error deleting {f}: {e}"))

        self.stdout.write(self.style.SUCCESS("Test data cleanup complete"))
@@ -1,55 +0,0 @@  (deleted file: management command that seeds the Discord and Google SocialApp rows)

from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from allauth.socialaccount.models import SocialApp


class Command(BaseCommand):
    help = "Create social apps for authentication"

    def handle(self, *args, **options):
        # Get the default site
        site = Site.objects.get_or_create(
            id=1,
            defaults={
                "domain": "localhost:8000",
                "name": "ThrillWiki Development",
            },
        )[0]

        # [body condensed: two get_or_create/update blocks that write hard-coded Discord and
        #  Google OAuth client IDs and secrets onto SocialApp rows, attach each app to the
        #  default site, and report "Created"/"Updated" for each provider]
@@ -1,58 +0,0 @@
from django.core.management.base import BaseCommand
from django.contrib.auth.models import Group, Permission, User


class Command(BaseCommand):
    help = "Creates test users for e2e testing"

    def handle(self, *args, **kwargs):
        # Create regular test user
        if not User.objects.filter(username="testuser").exists():
            user = User.objects.create(
                username="testuser",
                email="testuser@example.com",
            )
            user.set_password("testpass123")
            user.save()
            self.stdout.write(
                self.style.SUCCESS(f"Created test user: {user.get_username()}")
            )
        else:
            self.stdout.write(self.style.WARNING("Test user already exists"))

        if not User.objects.filter(username="moderator").exists():
            moderator = User.objects.create(
                username="moderator",
                email="moderator@example.com",
            )
            moderator.set_password("modpass123")
            moderator.save()

            # Create moderator group if it doesn't exist
            moderator_group, created = Group.objects.get_or_create(name="Moderators")

            # Add relevant permissions
            permissions = Permission.objects.filter(
                codename__in=[
                    "change_review",
                    "delete_review",
                    "change_park",
                    "change_ride",
                    "moderate_photos",
                    "moderate_comments",
                ]
            )
            moderator_group.permissions.add(*permissions)

            # Add user to moderator group
            moderator.groups.add(moderator_group)

            self.stdout.write(
                self.style.SUCCESS(
                    f"Created moderator user: {moderator.get_username()}"
                )
            )
        else:
            self.stdout.write(self.style.WARNING("Moderator user already exists"))

        self.stdout.write(self.style.SUCCESS("Test users setup complete"))
@@ -1,18 +0,0 @@
from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    help = "Fix migration history by removing rides.0001_initial"

    def handle(self, *args, **kwargs):
        with connection.cursor() as cursor:
            cursor.execute(
                "DELETE FROM django_migrations WHERE app='rides' "
                "AND name='0001_initial';"
            )
        self.stdout.write(
            self.style.SUCCESS(
                "Successfully removed rides.0001_initial from migration history"
            )
        )
@@ -1,41 +0,0 @@
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp
from django.contrib.sites.models import Site
import os


class Command(BaseCommand):
    help = "Fix social app configurations"

    def handle(self, *args, **options):
        # Delete all existing social apps
        SocialApp.objects.all().delete()
        self.stdout.write("Deleted all existing social apps")

        # Get the default site
        site = Site.objects.get(id=1)

        # Create Google provider
        google_app = SocialApp.objects.create(
            provider="google",
            name="Google",
            client_id=os.getenv("GOOGLE_CLIENT_ID"),
            secret=os.getenv("GOOGLE_CLIENT_SECRET"),
        )
        google_app.sites.add(site)
        self.stdout.write(
            f"Created Google app with client_id: {google_app.client_id}"
        )

        # Create Discord provider
        discord_app = SocialApp.objects.create(
            provider="discord",
            name="Discord",
            client_id=os.getenv("DISCORD_CLIENT_ID"),
            secret=os.getenv("DISCORD_CLIENT_SECRET"),
        )
        discord_app.sites.add(site)
        self.stdout.write(
            f"Created Discord app with client_id: {discord_app.client_id}"
        )
@@ -1,54 +0,0 @@
from django.core.management.base import BaseCommand
from PIL import Image, ImageDraw, ImageFont
import os


def generate_avatar(letter):
    """Generate an avatar for a given letter or number"""
    avatar_size = (100, 100)
    background_color = (0, 123, 255)  # Blue background
    text_color = (255, 255, 255)  # White text
    font_size = 100

    # Create a blank image with background color
    image = Image.new("RGB", avatar_size, background_color)
    draw = ImageDraw.Draw(image)

    # Load a font
    font_path = "[AWS-SECRET-REMOVED]ans-Bold.ttf"
    font = ImageFont.truetype(font_path, font_size)

    # Calculate text size and position using textbbox
    text_bbox = draw.textbbox((0, 0), letter, font=font)
    text_width, text_height = (
        text_bbox[2] - text_bbox[0],
        text_bbox[3] - text_bbox[1],
    )
    text_position = (
        (avatar_size[0] - text_width) / 2,
        (avatar_size[1] - text_height) / 2,
    )

    # Draw the text on the image
    draw.text(text_position, letter, font=font, fill=text_color)

    # Ensure the avatars directory exists
    avatar_dir = "avatars/letters"
    if not os.path.exists(avatar_dir):
        os.makedirs(avatar_dir)

    # Save the image to the avatars directory
    avatar_path = os.path.join(avatar_dir, f"{letter}_avatar.png")
    image.save(avatar_path)


class Command(BaseCommand):
    help = "Generate avatars for letters A-Z and numbers 0-9"

    def handle(self, *args, **kwargs):
        characters = [chr(i) for i in range(65, 91)] + [
            str(i) for i in range(10)
        ]  # A-Z and 0-9
        for char in characters:
            generate_avatar(char)
            self.stdout.write(self.style.SUCCESS(f"Generated avatar for {char}"))
@@ -1,18 +0,0 @@
from django.core.management.base import BaseCommand
from accounts.models import UserProfile


class Command(BaseCommand):
    help = "Regenerate default avatars for users without an uploaded avatar"

    def handle(self, *args, **kwargs):
        profiles = UserProfile.objects.filter(avatar="")
        for profile in profiles:
            # This will trigger the avatar generation logic in the save method
            profile.save()
            self.stdout.write(
                self.style.SUCCESS(
                    f"Regenerated avatar for {profile.user.username}"
                )
            )
@@ -1,113 +0,0 @@
from django.core.management.base import BaseCommand
from django.db import connection
from django.contrib.auth.hashers import make_password
import uuid


class Command(BaseCommand):
    help = "Reset database and create admin user"

    def handle(self, *args, **options):
        self.stdout.write("Resetting database...")

        # Drop all tables
        with connection.cursor() as cursor:
            cursor.execute(
                """
                DO $$ DECLARE
                    r RECORD;
                BEGIN
                    FOR r IN (
                        SELECT tablename FROM pg_tables
                        WHERE schemaname = current_schema()
                    ) LOOP
                        EXECUTE 'DROP TABLE IF EXISTS ' ||
                            quote_ident(r.tablename) || ' CASCADE';
                    END LOOP;
                END $$;
                """
            )

            # Reset sequences
            cursor.execute(
                """
                DO $$ DECLARE
                    r RECORD;
                BEGIN
                    FOR r IN (
                        SELECT sequencename FROM pg_sequences
                        WHERE schemaname = current_schema()
                    ) LOOP
                        EXECUTE 'ALTER SEQUENCE ' ||
                            quote_ident(r.sequencename) || ' RESTART WITH 1';
                    END LOOP;
                END $$;
                """
            )

        self.stdout.write("All tables dropped and sequences reset.")

        # Run migrations
        from django.core.management import call_command

        call_command("migrate")

        self.stdout.write("Migrations applied.")

        # Create superuser using raw SQL
        try:
            with connection.cursor() as cursor:
                # Create user
                user_id = str(uuid.uuid4())[:10]
                cursor.execute(
                    """
                    INSERT INTO accounts_user (
                        username, password, email, is_superuser, is_staff,
                        is_active, date_joined, user_id, first_name,
                        last_name, role, is_banned, ban_reason,
                        theme_preference
                    ) VALUES (
                        'admin', %s, 'admin@thrillwiki.com', true, true,
                        true, NOW(), %s, '', '', 'SUPERUSER', false, '',
                        'light'
                    ) RETURNING id;
                    """,
                    [make_password("admin"), user_id],
                )

                result = cursor.fetchone()
                if result is None:
                    raise Exception("Failed to create user - no ID returned")
                user_db_id = result[0]

                # Create profile
                profile_id = str(uuid.uuid4())[:10]
                cursor.execute(
                    """
                    INSERT INTO accounts_userprofile (
                        profile_id, display_name, pronouns, bio,
                        twitter, instagram, youtube, discord,
                        coaster_credits, dark_ride_credits,
                        flat_ride_credits, water_ride_credits,
                        user_id, avatar
                    ) VALUES (
                        %s, 'Admin', 'they/them', 'ThrillWiki Administrator',
                        '', '', '', '',
                        0, 0, 0, 0,
                        %s, ''
                    );
                    """,
                    [profile_id, user_db_id],
                )

            self.stdout.write("Superuser created.")
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"Error creating superuser: {str(e)}")
            )
            raise

        self.stdout.write(self.style.SUCCESS("Database reset complete."))
@@ -1,39 +0,0 @@
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp
from django.contrib.sites.models import Site
from django.db import connection


class Command(BaseCommand):
    help = "Reset social apps configuration"

    def handle(self, *args, **options):
        # Delete all social apps using raw SQL to bypass Django's ORM
        with connection.cursor() as cursor:
            cursor.execute("DELETE FROM socialaccount_socialapp_sites")
            cursor.execute("DELETE FROM socialaccount_socialapp")

        # Get the default site
        site = Site.objects.get(id=1)

        # Create Discord app
        discord_app = SocialApp.objects.create(
            provider="discord",
            name="Discord",
            client_id="1299112802274902047",
            secret="ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11",
        )
        discord_app.sites.add(site)
        self.stdout.write(f"Created Discord app with ID: {discord_app.pk}")

        # Create Google app
        google_app = SocialApp.objects.create(
            provider="google",
            name="Google",
            client_id=(
                "135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com"
            ),
            secret="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",
        )
        google_app.sites.add(site)
        self.stdout.write(f"Created Google app with ID: {google_app.pk}")
@@ -1,24 +0,0 @@
from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    help = "Reset social auth configuration"

    def handle(self, *args, **options):
        with connection.cursor() as cursor:
            # Delete all social apps
            cursor.execute("DELETE FROM socialaccount_socialapp")
            cursor.execute("DELETE FROM socialaccount_socialapp_sites")

            # Reset sequences
            cursor.execute(
                "DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp'"
            )
            cursor.execute(
                "DELETE FROM sqlite_sequence WHERE name='socialaccount_socialapp_sites'"
            )

        self.stdout.write(
            self.style.SUCCESS("Successfully reset social auth configuration")
        )
@@ -1,49 +0,0 @@
from django.core.management.base import BaseCommand
from django.contrib.auth.models import Group
from accounts.models import User
from accounts.signals import create_default_groups


class Command(BaseCommand):
    help = "Set up default groups and permissions for user roles"

    def handle(self, *args, **options):
        self.stdout.write("Creating default groups and permissions...")

        try:
            # Create default groups with permissions
            create_default_groups()

            # Sync existing users with groups based on their roles
            users = User.objects.exclude(role=User.Roles.USER)
            for user in users:
                group = Group.objects.filter(name=user.role).first()
                if group:
                    user.groups.add(group)

                # Update staff/superuser status based on role
                if user.role == User.Roles.SUPERUSER:
                    user.is_superuser = True
                    user.is_staff = True
                elif user.role in [User.Roles.ADMIN, User.Roles.MODERATOR]:
                    user.is_staff = True
                user.save()

            self.stdout.write(
                self.style.SUCCESS("Successfully set up groups and permissions")
            )

            # Print summary
            for group in Group.objects.all():
                self.stdout.write(f"\nGroup: {group.name}")
                self.stdout.write("Permissions:")
                for perm in group.permissions.all():
                    self.stdout.write(f"  - {perm.codename}")

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"Error setting up groups: {str(e)}")
            )
@@ -1,16 +0,0 @@
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site


class Command(BaseCommand):
    help = "Set up default site"

    def handle(self, *args, **options):
        # Delete any existing sites
        Site.objects.all().delete()

        # Create default site
        site = Site.objects.create(
            id=1, domain="localhost:8000", name="ThrillWiki Development"
        )
        self.stdout.write(self.style.SUCCESS(f"Created site: {site.domain}"))
@@ -1,126 +0,0 @@
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from allauth.socialaccount.models import SocialApp
from dotenv import load_dotenv
import os


class Command(BaseCommand):
    help = "Sets up social authentication apps"

    def handle(self, *args, **kwargs):
        # Load environment variables
        load_dotenv()

        # Get environment variables
        google_client_id = os.getenv("GOOGLE_CLIENT_ID")
        google_client_secret = os.getenv("GOOGLE_CLIENT_SECRET")
        discord_client_id = os.getenv("DISCORD_CLIENT_ID")
        discord_client_secret = os.getenv("DISCORD_CLIENT_SECRET")

        # DEBUG: Log environment variable values
        self.stdout.write(
            f"DEBUG: google_client_id type: {type(google_client_id)}, value: {google_client_id}"
        )
        self.stdout.write(
            f"DEBUG: google_client_secret type: {type(google_client_secret)}, value: {google_client_secret}"
        )
        self.stdout.write(
            f"DEBUG: discord_client_id type: {type(discord_client_id)}, value: {discord_client_id}"
        )
        self.stdout.write(
            f"DEBUG: discord_client_secret type: {type(discord_client_secret)}, value: {discord_client_secret}"
        )

        if not all(
            [
                google_client_id,
                google_client_secret,
                discord_client_id,
                discord_client_secret,
            ]
        ):
            self.stdout.write(
                self.style.ERROR("Missing required environment variables")
            )
            self.stdout.write(
                f"DEBUG: google_client_id is None: {google_client_id is None}"
            )
            self.stdout.write(
                f"DEBUG: google_client_secret is None: {google_client_secret is None}"
            )
            self.stdout.write(
                f"DEBUG: discord_client_id is None: {discord_client_id is None}"
            )
            self.stdout.write(
                f"DEBUG: discord_client_secret is None: {discord_client_secret is None}"
            )
            return

        # Get or create the default site
        site, _ = Site.objects.get_or_create(
            id=1, defaults={"domain": "localhost:8000", "name": "localhost"}
        )

        # Set up Google
        google_app, created = SocialApp.objects.get_or_create(
            provider="google",
            defaults={
                "name": "Google",
                "client_id": google_client_id,
                "secret": google_client_secret,
            },
        )
        if not created:
            self.stdout.write(
                f"DEBUG: About to assign google_client_id: {google_client_id} (type: {type(google_client_id)})"
            )
            if google_client_id is not None and google_client_secret is not None:
                google_app.client_id = google_client_id
                google_app.secret = google_client_secret
                google_app.save()
                self.stdout.write("DEBUG: Successfully updated Google app")
            else:
                self.stdout.write(
                    self.style.ERROR(
                        "Google client_id or secret is None, skipping update."
                    )
                )
        google_app.sites.add(site)

        # Set up Discord
        discord_app, created = SocialApp.objects.get_or_create(
            provider="discord",
            defaults={
                "name": "Discord",
                "client_id": discord_client_id,
                "secret": discord_client_secret,
            },
        )
        if not created:
            self.stdout.write(
                f"DEBUG: About to assign discord_client_id: {discord_client_id} (type: {type(discord_client_id)})"
            )
            if discord_client_id is not None and discord_client_secret is not None:
                discord_app.client_id = discord_client_id
                discord_app.secret = discord_client_secret
                discord_app.save()
                self.stdout.write("DEBUG: Successfully updated Discord app")
            else:
                self.stdout.write(
                    self.style.ERROR(
                        "Discord client_id or secret is None, skipping update."
                    )
                )
        discord_app.sites.add(site)

        self.stdout.write(self.style.SUCCESS("Successfully set up social auth apps"))
@@ -1,70 +0,0 @@
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model

User = get_user_model()


class Command(BaseCommand):
    help = "Set up social authentication through admin interface"

    def handle(self, *args, **options):
        # Get or create the default site
        site, _ = Site.objects.get_or_create(
            id=1,
            defaults={
                "domain": "localhost:8000",
                "name": "ThrillWiki Development",
            },
        )
        if not _:
            site.domain = "localhost:8000"
            site.name = "ThrillWiki Development"
            site.save()
        self.stdout.write(f'{"Created" if _ else "Updated"} site: {site.domain}')

        # Create superuser if it doesn't exist
        if not User.objects.filter(username="admin").exists():
            admin_user = User.objects.create(
                username="admin",
                email="admin@example.com",
                is_staff=True,
                is_superuser=True,
            )
            admin_user.set_password("admin")
            admin_user.save()
            self.stdout.write("Created superuser: admin/admin")

        self.stdout.write(
            self.style.SUCCESS(
                """
Social auth setup instructions:

1. Run the development server:
   python manage.py runserver

2. Go to the admin interface:
   http://localhost:8000/admin/

3. Log in with:
   Username: admin
   Password: admin

4. Add social applications:
   - Go to "Social applications" under "Social Accounts"
   - Add Discord app:
     Provider: discord
     Name: Discord
     Client id: 1299112802274902047
     Secret key: ece7Pe_M4mD4mYzAgcINjTEKL_3ftL11
     Sites: Add "localhost:8000"

   - Add Google app:
     Provider: google
     Name: Google
     Client id: 135166769591-nopcgmo0fkqfqfs9qe783a137mtmcrt2.apps.googleusercontent.com
     Secret key: GOCSPX-Wd_0Ue0Ue0Ue0Ue0Ue0Ue0Ue0Ue
     Sites: Add "localhost:8000"
                """
            )
        )
@@ -1,61 +0,0 @@
from django.core.management.base import BaseCommand
from django.test import Client
from allauth.socialaccount.models import SocialApp


class Command(BaseCommand):
    help = "Test Discord OAuth2 authentication flow"

    def handle(self, *args, **options):
        client = Client(HTTP_HOST="localhost:8000")

        # Get Discord app
        try:
            discord_app = SocialApp.objects.get(provider="discord")
            self.stdout.write("Found Discord app configuration:")
            self.stdout.write(f"Client ID: {discord_app.client_id}")

            # Test login URL
            login_url = "/accounts/discord/login/"
            response = client.get(login_url, HTTP_HOST="localhost:8000")
            self.stdout.write(f"\nTesting login URL: {login_url}")
            self.stdout.write(f"Status code: {response.status_code}")

            if response.status_code == 302:
                redirect_url = response["Location"]
                self.stdout.write(f"Redirects to: {redirect_url}")

                # Parse OAuth2 parameters
                self.stdout.write("\nOAuth2 Parameters:")
                if "client_id=" in redirect_url:
                    self.stdout.write("✓ client_id parameter present")
                if "redirect_uri=" in redirect_url:
                    self.stdout.write("✓ redirect_uri parameter present")
                if "scope=" in redirect_url:
                    self.stdout.write("✓ scope parameter present")
                if "response_type=" in redirect_url:
                    self.stdout.write("✓ response_type parameter present")
                if "code_challenge=" in redirect_url:
                    self.stdout.write("✓ PKCE enabled (code_challenge present)")

            # Show callback URL
            callback_url = "http://localhost:8000/accounts/discord/login/callback/"
            self.stdout.write(
                "\nCallback URL to configure in Discord Developer Portal:"
            )
            self.stdout.write(callback_url)

            # Show frontend login URL
            frontend_url = "http://localhost:5173"
            self.stdout.write("\nFrontend configuration:")
            self.stdout.write(f"Frontend URL: {frontend_url}")
            self.stdout.write("Discord login button should use:")
            self.stdout.write("/accounts/discord/login/?process=login")

            # Show allauth URLs
            self.stdout.write("\nAllauth URLs:")
            self.stdout.write("Login URL: /accounts/discord/login/?process=login")
            self.stdout.write("Callback URL: /accounts/discord/login/callback/")

        except SocialApp.DoesNotExist:
            self.stdout.write(self.style.ERROR("Discord app not found"))
@@ -1,23 +0,0 @@
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp
from django.contrib.sites.models import Site


class Command(BaseCommand):
    help = "Update social apps to be associated with all sites"

    def handle(self, *args, **options):
        # Get all sites
        sites = Site.objects.all()

        # Update each social app
        for app in SocialApp.objects.all():
            self.stdout.write(f"Updating {app.provider} app...")
            # Clear existing sites
            app.sites.clear()
            # Add all sites
            for site in sites:
                app.sites.add(site)
            self.stdout.write(
                f'Added sites: {", ".join(site.domain for site in sites)}'
            )
@@ -1,42 +0,0 @@
from django.core.management.base import BaseCommand
from allauth.socialaccount.models import SocialApp
from django.conf import settings


class Command(BaseCommand):
    help = "Verify Discord OAuth2 settings"

    def handle(self, *args, **options):
        # Get Discord app
        try:
            discord_app = SocialApp.objects.get(provider="discord")
            self.stdout.write("Found Discord app configuration:")
            self.stdout.write(f"Client ID: {discord_app.client_id}")
            self.stdout.write(f"Secret: {discord_app.secret}")

            # Get sites
            sites = discord_app.sites.all()
            self.stdout.write("\nAssociated sites:")
            for site in sites:
                self.stdout.write(f"- {site.domain} ({site.name})")

            # Show callback URL
            callback_url = "http://localhost:8000/accounts/discord/login/callback/"
            self.stdout.write(
                "\nCallback URL to configure in Discord Developer Portal:"
            )
            self.stdout.write(callback_url)

            # Show OAuth2 settings
            self.stdout.write("\nOAuth2 settings in settings.py:")
            discord_settings = settings.SOCIALACCOUNT_PROVIDERS.get("discord", {})
            self.stdout.write(
                f'PKCE Enabled: {discord_settings.get("OAUTH_PKCE_ENABLED", False)}'
            )
            self.stdout.write(f'Scopes: {discord_settings.get("SCOPE", [])}')

        except SocialApp.DoesNotExist:
            self.stdout.write(self.style.ERROR("Discord app not found"))
@@ -1,552 +0,0 @@
# Generated by Django 5.1.4 on 2025-08-13 21:35

import django.contrib.auth.models
import django.contrib.auth.validators
import django.db.models.deletion
import django.utils.timezone
import pgtrigger.compiler
import pgtrigger.migrations
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("auth", "0012_alter_user_first_name_max_length"),
        ("contenttypes", "0002_remove_content_type_name"),
        ("pghistory", "0006_delete_aggregateevent"),
    ]

    operations = [
        migrations.CreateModel(
            name="User",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("password", models.CharField(max_length=128, verbose_name="password")),
                ("last_login", models.DateTimeField(blank=True, null=True, verbose_name="last login")),
                (
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
                (
                    "username",
                    models.CharField(
                        error_messages={"unique": "A user with that username already exists."},
                        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
                        max_length=150,
                        unique=True,
                        validators=[django.contrib.auth.validators.UnicodeUsernameValidator()],
                        verbose_name="username",
                    ),
                ),
                ("first_name", models.CharField(blank=True, max_length=150, verbose_name="first name")),
                ("last_name", models.CharField(blank=True, max_length=150, verbose_name="last name")),
                ("email", models.EmailField(blank=True, max_length=254, verbose_name="email address")),
                (
                    "is_staff",
                    models.BooleanField(
                        default=False,
                        help_text="Designates whether the user can log into this admin site.",
                        verbose_name="staff status",
                    ),
                ),
                (
                    "is_active",
                    models.BooleanField(
                        default=True,
                        help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
                        verbose_name="active",
                    ),
                ),
                ("date_joined", models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined")),
                (
                    "user_id",
                    models.CharField(
                        editable=False,
                        help_text="Unique identifier for this user that remains constant even if the username changes",
                        max_length=10,
                        unique=True,
                    ),
                ),
                (
                    "role",
                    models.CharField(
                        choices=[
                            ("USER", "User"),
                            ("MODERATOR", "Moderator"),
                            ("ADMIN", "Admin"),
                            ("SUPERUSER", "Superuser"),
                        ],
                        default="USER",
                        max_length=10,
                    ),
                ),
                ("is_banned", models.BooleanField(default=False)),
                ("ban_reason", models.TextField(blank=True)),
                ("ban_date", models.DateTimeField(blank=True, null=True)),
                ("pending_email", models.EmailField(blank=True, max_length=254, null=True)),
                (
                    "theme_preference",
                    models.CharField(
                        choices=[("light", "Light"), ("dark", "Dark")],
                        default="light",
                        max_length=5,
                    ),
                ),
                (
                    "groups",
                    models.ManyToManyField(
                        blank=True,
                        help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.group",
                        verbose_name="groups",
                    ),
                ),
                (
                    "user_permissions",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Specific permissions for this user.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.permission",
                        verbose_name="user permissions",
                    ),
                ),
            ],
            options={
                "verbose_name": "user",
                "verbose_name_plural": "users",
                "abstract": False,
            },
            managers=[
                ("objects", django.contrib.auth.models.UserManager()),
            ],
        ),
        migrations.CreateModel(
            name="EmailVerification",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("token", models.CharField(max_length=64, unique=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("last_sent", models.DateTimeField(auto_now_add=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Email Verification",
                "verbose_name_plural": "Email Verifications",
            },
        ),
        migrations.CreateModel(
            name="PasswordReset",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("token", models.CharField(max_length=64)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("expires_at", models.DateTimeField()),
                ("used", models.BooleanField(default=False)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Password Reset",
                "verbose_name_plural": "Password Resets",
            },
        ),
        migrations.CreateModel(
            name="TopList",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("title", models.CharField(max_length=100)),
                (
                    "category",
                    models.CharField(
                        choices=[
                            ("RC", "Roller Coaster"),
                            ("DR", "Dark Ride"),
                            ("FR", "Flat Ride"),
                            ("WR", "Water Ride"),
                            ("PK", "Park"),
                        ],
                        max_length=2,
                    ),
                ),
                ("description", models.TextField(blank=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="top_lists",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "ordering": ["-updated_at"],
            },
        ),
        migrations.CreateModel(
            name="TopListEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                ("title", models.CharField(max_length=100)),
                (
                    "category",
                    models.CharField(
                        choices=[
                            ("RC", "Roller Coaster"),
                            ("DR", "Dark Ride"),
                            ("FR", "Flat Ride"),
                            ("WR", "Water Ride"),
                            ("PK", "Park"),
                        ],
                        max_length=2,
                    ),
                ),
                ("description", models.TextField(blank=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "pgh_context",
                    models.ForeignKey(
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="pghistory.context",
                    ),
                ),
                (
                    "pgh_obj",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="events",
                        to="accounts.toplist",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        related_query_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="TopListItem",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("object_id", models.PositiveIntegerField()),
                ("rank", models.PositiveIntegerField()),
                ("notes", models.TextField(blank=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "top_list",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="items",
                        to="accounts.toplist",
                    ),
                ),
            ],
            options={
                "ordering": ["rank"],
            },
        ),
        migrations.CreateModel(
            name="TopListItemEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("object_id", models.PositiveIntegerField()),
                ("rank", models.PositiveIntegerField()),
                ("notes", models.TextField(blank=True)),
                (
                    "content_type",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        related_query_name="+",
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "pgh_context",
                    models.ForeignKey(
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="pghistory.context",
                    ),
                ),
                (
                    "pgh_obj",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="events",
                        to="accounts.toplistitem",
                    ),
                ),
                (
                    "top_list",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        related_query_name="+",
                        to="accounts.toplist",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="UserProfile",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                (
                    "profile_id",
                    models.CharField(
                        editable=False,
                        help_text="Unique identifier for this profile that remains constant",
                        max_length=10,
                        unique=True,
                    ),
                ),
                (
                    "display_name",
                    models.CharField(
                        help_text="This is the name that will be displayed on the site",
                        max_length=50,
                        unique=True,
                    ),
                ),
                ("avatar", models.ImageField(blank=True, upload_to="avatars/")),
                ("pronouns", models.CharField(blank=True, max_length=50)),
                ("bio", models.TextField(blank=True, max_length=500)),
                ("twitter", models.URLField(blank=True)),
                ("instagram", models.URLField(blank=True)),
                ("youtube", models.URLField(blank=True)),
                ("discord", models.CharField(blank=True, max_length=100)),
                ("coaster_credits", models.IntegerField(default=0)),
                ("dark_ride_credits", models.IntegerField(default=0)),
                ("flat_ride_credits", models.IntegerField(default=0)),
                ("water_ride_credits", models.IntegerField(default=0)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="profile",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="toplist",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "accounts_toplistevent" ("category", "created_at", "description", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "title", "updated_at", "user_id") VALUES (NEW."category", NEW."created_at", NEW."description", NEW."id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."title", NEW."updated_at", NEW."user_id"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_26546",
                    table="accounts_toplist",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="toplist",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "accounts_toplistevent" ("category", "created_at", "description", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "title", "updated_at", "user_id") VALUES (NEW."category", NEW."created_at", NEW."description", NEW."id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."title", NEW."updated_at", NEW."user_id"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_84849",
                    table="accounts_toplist",
                    when="AFTER",
                ),
            ),
        ),
        migrations.AlterUniqueTogether(
            name="toplistitem",
            unique_together={("top_list", "rank")},
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="toplistitem",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_56dfc",
                    table="accounts_toplistitem",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="toplistitem",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "accounts_toplistitemevent" ("content_type_id", "created_at", "id", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rank", "top_list_id", "updated_at") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rank", NEW."top_list_id", NEW."updated_at"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_2b6e3",
                    table="accounts_toplistitem",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,35 +0,0 @@
import requests
from django.conf import settings
from django.core.exceptions import ValidationError


class TurnstileMixin:
    """
    Mixin to handle Cloudflare Turnstile validation.
    Bypasses validation when DEBUG is True.
    """

    def validate_turnstile(self, request):
        """
        Validate the Turnstile response token.
        Skips validation when DEBUG is True.
        """
        if settings.DEBUG:
            return

        token = request.POST.get("cf-turnstile-response")
        if not token:
            raise ValidationError("Please complete the Turnstile challenge.")

        # Verify the token with Cloudflare
        data = {
            "secret": settings.TURNSTILE_SECRET_KEY,
            "response": token,
            "remoteip": request.META.get("REMOTE_ADDR"),
        }

        response = requests.post(settings.TURNSTILE_VERIFY_URL, data=data, timeout=60)
        result = response.json()

        if not result.get("success"):
            raise ValidationError("Turnstile validation failed. Please try again.")
@@ -1,219 +0,0 @@
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
import os
import secrets
from core.history import TrackedModel

# import pghistory


def generate_random_id(model_class, id_field):
    """Generate a random ID starting at 4 digits, expanding to 5 if needed"""
    while True:
        # Try to get a 4-digit number first
        new_id = str(secrets.SystemRandom().randint(1000, 9999))
        if not model_class.objects.filter(**{id_field: new_id}).exists():
            return new_id

        # If all 4-digit numbers are taken, try 5 digits
        new_id = str(secrets.SystemRandom().randint(10000, 99999))
        if not model_class.objects.filter(**{id_field: new_id}).exists():
            return new_id


class User(AbstractUser):
    class Roles(models.TextChoices):
        USER = "USER", _("User")
        MODERATOR = "MODERATOR", _("Moderator")
        ADMIN = "ADMIN", _("Admin")
        SUPERUSER = "SUPERUSER", _("Superuser")

    class ThemePreference(models.TextChoices):
        LIGHT = "light", _("Light")
        DARK = "dark", _("Dark")

    # Read-only ID
    user_id = models.CharField(
        max_length=10,
        unique=True,
        editable=False,
        help_text=(
            "Unique identifier for this user that remains constant even if the "
            "username changes"
        ),
    )

    role = models.CharField(
        max_length=10,
        choices=Roles.choices,
        default=Roles.USER,
    )
    is_banned = models.BooleanField(default=False)
    ban_reason = models.TextField(blank=True)
    ban_date = models.DateTimeField(null=True, blank=True)
    pending_email = models.EmailField(blank=True, null=True)
    theme_preference = models.CharField(
        max_length=5,
        choices=ThemePreference.choices,
        default=ThemePreference.LIGHT,
    )

    def __str__(self):
        return self.get_display_name()

    def get_absolute_url(self):
        return reverse("profile", kwargs={"username": self.username})

    def get_display_name(self):
        """Get the user's display name, falling back to username if not set"""
        profile = getattr(self, "profile", None)
        if profile and profile.display_name:
            return profile.display_name
        return self.username

    def save(self, *args, **kwargs):
        if not self.user_id:
            self.user_id = generate_random_id(User, "user_id")
        super().save(*args, **kwargs)


class UserProfile(models.Model):
    # Read-only ID
    profile_id = models.CharField(
        max_length=10,
        unique=True,
        editable=False,
        help_text="Unique identifier for this profile that remains constant",
    )

    user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="profile")
    display_name = models.CharField(
        max_length=50,
        unique=True,
        help_text="This is the name that will be displayed on the site",
    )
    avatar = models.ImageField(upload_to="avatars/", blank=True)
    pronouns = models.CharField(max_length=50, blank=True)

    bio = models.TextField(max_length=500, blank=True)

    # Social media links
    twitter = models.URLField(blank=True)
    instagram = models.URLField(blank=True)
    youtube = models.URLField(blank=True)
    discord = models.CharField(max_length=100, blank=True)

    # Ride statistics
    coaster_credits = models.IntegerField(default=0)
    dark_ride_credits = models.IntegerField(default=0)
    flat_ride_credits = models.IntegerField(default=0)
    water_ride_credits = models.IntegerField(default=0)

    def get_avatar(self):
        """
        Return the avatar URL or serve a pre-generated avatar based on the
        first letter of the username
        """
        if self.avatar:
            return self.avatar.url
        first_letter = self.user.username.upper()
        avatar_path = f"avatars/letters/{first_letter}_avatar.png"
        if os.path.exists(avatar_path):
            return f"/{avatar_path}"
        return "/static/images/default-avatar.png"

    def save(self, *args, **kwargs):
        # If no display name is set, use the username
        if not self.display_name:
            self.display_name = self.user.username

        if not self.profile_id:
            self.profile_id = generate_random_id(UserProfile, "profile_id")
        super().save(*args, **kwargs)

    def __str__(self):
        return self.display_name


class EmailVerification(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    token = models.CharField(max_length=64, unique=True)
    created_at = models.DateTimeField(auto_now_add=True)
    last_sent = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return f"Email verification for {self.user.username}"

    class Meta:
        verbose_name = "Email Verification"
        verbose_name_plural = "Email Verifications"


class PasswordReset(models.Model):
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    token = models.CharField(max_length=64)
    created_at = models.DateTimeField(auto_now_add=True)
    expires_at = models.DateTimeField()
    used = models.BooleanField(default=False)

    def __str__(self):
        return f"Password reset for {self.user.username}"

    class Meta:
        verbose_name = "Password Reset"
        verbose_name_plural = "Password Resets"


# @pghistory.track()


class TopList(TrackedModel):
    class Categories(models.TextChoices):
        ROLLER_COASTER = "RC", _("Roller Coaster")
        DARK_RIDE = "DR", _("Dark Ride")
        FLAT_RIDE = "FR", _("Flat Ride")
        WATER_RIDE = "WR", _("Water Ride")
        PARK = "PK", _("Park")

    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name="top_lists",  # Added related_name for User model access
    )
    title = models.CharField(max_length=100)
    category = models.CharField(max_length=2, choices=Categories.choices)
    description = models.TextField(blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta(TrackedModel.Meta):
        ordering = ["-updated_at"]

    def __str__(self):
        return (
            f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"
        )


# @pghistory.track()


class TopListItem(TrackedModel):
    top_list = models.ForeignKey(
        TopList, on_delete=models.CASCADE, related_name="items"
    )
    content_type = models.ForeignKey(
        "contenttypes.ContentType", on_delete=models.CASCADE
    )
    object_id = models.PositiveIntegerField()
    rank = models.PositiveIntegerField()
    notes = models.TextField(blank=True)

    class Meta(TrackedModel.Meta):
        ordering = ["rank"]
        unique_together = [["top_list", "rank"]]

    def __str__(self):
        return f"#{self.rank} in {self.top_list.title}"
@@ -1,208 +0,0 @@
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
import os
import secrets
from core.history import TrackedModel
import pghistory


def generate_random_id(model_class, id_field):
    """Generate a random ID starting at 4 digits, expanding to 5 if needed"""
    while True:
        # Try to get a 4-digit number first
        new_id = str(secrets.SystemRandom().randint(1000, 9999))
        if not model_class.objects.filter(**{id_field: new_id}).exists():
            return new_id

        # If all 4-digit numbers are taken, try 5 digits
        new_id = str(secrets.SystemRandom().randint(10000, 99999))
        if not model_class.objects.filter(**{id_field: new_id}).exists():
            return new_id

class User(AbstractUser):
|
|
||||||
class Roles(models.TextChoices):
|
|
||||||
USER = "USER", _("User")
|
|
||||||
MODERATOR = "MODERATOR", _("Moderator")
|
|
||||||
ADMIN = "ADMIN", _("Admin")
|
|
||||||
SUPERUSER = "SUPERUSER", _("Superuser")
|
|
||||||
|
|
||||||
class ThemePreference(models.TextChoices):
|
|
||||||
LIGHT = "light", _("Light")
|
|
||||||
DARK = "dark", _("Dark")
|
|
||||||
|
|
||||||
# Read-only ID
|
|
||||||
user_id = models.CharField(
|
|
||||||
max_length=10,
|
|
||||||
unique=True,
|
|
||||||
editable=False,
|
|
||||||
help_text="Unique identifier for this user that remains constant even if the username changes",
|
|
||||||
)
|
|
||||||
|
|
||||||
role = models.CharField(
|
|
||||||
max_length=10,
|
|
||||||
choices=Roles.choices,
|
|
||||||
default=Roles.USER,
|
|
||||||
)
|
|
||||||
is_banned = models.BooleanField(default=False)
|
|
||||||
ban_reason = models.TextField(blank=True)
|
|
||||||
ban_date = models.DateTimeField(null=True, blank=True)
|
|
||||||
pending_email = models.EmailField(blank=True, null=True)
|
|
||||||
theme_preference = models.CharField(
|
|
||||||
max_length=5,
|
|
||||||
choices=ThemePreference.choices,
|
|
||||||
default=ThemePreference.LIGHT,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.get_display_name()
|
|
||||||
|
|
||||||
def get_absolute_url(self):
|
|
||||||
return reverse("profile", kwargs={"username": self.username})
|
|
||||||
|
|
||||||
def get_display_name(self):
|
|
||||||
"""Get the user's display name, falling back to username if not set"""
|
|
||||||
profile = getattr(self, "profile", None)
|
|
||||||
if profile and profile.display_name:
|
|
||||||
return profile.display_name
|
|
||||||
return self.username
|
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
|
||||||
if not self.user_id:
|
|
||||||
self.user_id = generate_random_id(User, "user_id")
|
|
||||||
super().save(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class UserProfile(models.Model):
|
|
||||||
# Read-only ID
|
|
||||||
profile_id = models.CharField(
|
|
||||||
max_length=10,
|
|
||||||
unique=True,
|
|
||||||
editable=False,
|
|
||||||
help_text="Unique identifier for this profile that remains constant",
|
|
||||||
)
|
|
||||||
|
|
||||||
user = models.OneToOneField(User, on_delete=models.CASCADE, related_name="profile")
|
|
||||||
display_name = models.CharField(
|
|
||||||
max_length=50,
|
|
||||||
unique=True,
|
|
||||||
help_text="This is the name that will be displayed on the site",
|
|
||||||
)
|
|
||||||
avatar = models.ImageField(upload_to="avatars/", blank=True)
|
|
||||||
pronouns = models.CharField(max_length=50, blank=True)
|
|
||||||
|
|
||||||
bio = models.TextField(max_length=500, blank=True)
|
|
||||||
|
|
||||||
# Social media links
|
|
||||||
twitter = models.URLField(blank=True)
|
|
||||||
instagram = models.URLField(blank=True)
|
|
||||||
youtube = models.URLField(blank=True)
|
|
||||||
discord = models.CharField(max_length=100, blank=True)
|
|
||||||
|
|
||||||
# Ride statistics
|
|
||||||
coaster_credits = models.IntegerField(default=0)
|
|
||||||
dark_ride_credits = models.IntegerField(default=0)
|
|
||||||
flat_ride_credits = models.IntegerField(default=0)
|
|
||||||
water_ride_credits = models.IntegerField(default=0)
|
|
||||||
|
|
||||||
def get_avatar(self):
|
|
||||||
"""Return the avatar URL or serve a pre-generated avatar based on the first letter of the username"""
|
|
||||||
if self.avatar:
|
|
||||||
return self.avatar.url
|
|
||||||
first_letter = self.user.username[0].upper()
|
|
||||||
avatar_path = f"avatars/letters/{first_letter}_avatar.png"
|
|
||||||
if os.path.exists(avatar_path):
|
|
||||||
return f"/{avatar_path}"
|
|
||||||
return "/static/images/default-avatar.png"
|
|
||||||
|
|
||||||
def save(self, *args, **kwargs):
|
|
||||||
# If no display name is set, use the username
|
|
||||||
if not self.display_name:
|
|
||||||
self.display_name = self.user.username
|
|
||||||
|
|
||||||
if not self.profile_id:
|
|
||||||
self.profile_id = generate_random_id(UserProfile, "profile_id")
|
|
||||||
super().save(*args, **kwargs)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return self.display_name
|
|
||||||
|
|
||||||
|
|
||||||
class EmailVerification(models.Model):
|
|
||||||
user = models.OneToOneField(User, on_delete=models.CASCADE)
|
|
||||||
token = models.CharField(max_length=64, unique=True)
|
|
||||||
created_at = models.DateTimeField(auto_now_add=True)
|
|
||||||
last_sent = models.DateTimeField(auto_now_add=True)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return f"Email verification for {self.user.username}"
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
verbose_name = "Email Verification"
|
|
||||||
verbose_name_plural = "Email Verifications"
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordReset(models.Model):
|
|
||||||
user = models.ForeignKey(User, on_delete=models.CASCADE)
|
|
||||||
token = models.CharField(max_length=64)
|
|
||||||
created_at = models.DateTimeField(auto_now_add=True)
|
|
||||||
expires_at = models.DateTimeField()
|
|
||||||
used = models.BooleanField(default=False)
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return f"Password reset for {self.user.username}"
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
verbose_name = "Password Reset"
|
|
||||||
verbose_name_plural = "Password Resets"
|
|
||||||
|
|
||||||
|
|
||||||
@pghistory.track()
|
|
||||||
class TopList(TrackedModel):
|
|
||||||
class Categories(models.TextChoices):
|
|
||||||
ROLLER_COASTER = "RC", _("Roller Coaster")
|
|
||||||
DARK_RIDE = "DR", _("Dark Ride")
|
|
||||||
FLAT_RIDE = "FR", _("Flat Ride")
|
|
||||||
WATER_RIDE = "WR", _("Water Ride")
|
|
||||||
PARK = "PK", _("Park")
|
|
||||||
|
|
||||||
user = models.ForeignKey(
|
|
||||||
User,
|
|
||||||
on_delete=models.CASCADE,
|
|
||||||
related_name="top_lists", # Added related_name for User model access
|
|
||||||
)
|
|
||||||
title = models.CharField(max_length=100)
|
|
||||||
category = models.CharField(max_length=2, choices=Categories.choices)
|
|
||||||
description = models.TextField(blank=True)
|
|
||||||
created_at = models.DateTimeField(auto_now_add=True)
|
|
||||||
updated_at = models.DateTimeField(auto_now=True)
|
|
||||||
|
|
||||||
class Meta(TrackedModel.Meta):
|
|
||||||
ordering = ["-updated_at"]
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return (
|
|
||||||
f"{self.user.get_display_name()}'s {self.category} Top List: {self.title}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pghistory.track()
|
|
||||||
class TopListItem(TrackedModel):
|
|
||||||
top_list = models.ForeignKey(
|
|
||||||
TopList, on_delete=models.CASCADE, related_name="items"
|
|
||||||
)
|
|
||||||
content_type = models.ForeignKey(
|
|
||||||
"contenttypes.ContentType", on_delete=models.CASCADE
|
|
||||||
)
|
|
||||||
object_id = models.PositiveIntegerField()
|
|
||||||
rank = models.PositiveIntegerField()
|
|
||||||
notes = models.TextField(blank=True)
|
|
||||||
|
|
||||||
class Meta(TrackedModel.Meta):
|
|
||||||
ordering = ["rank"]
|
|
||||||
unique_together = [["top_list", "rank"]]
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
return f"#{self.rank} in {self.top_list.title}"
|
|
||||||
@@ -1,273 +0,0 @@
"""
Selectors for user and account-related data retrieval.
Following Django styleguide pattern for separating data access from business logic.
"""

from typing import Dict, Any
from django.db.models import QuerySet, Q, F, Count
from django.contrib.auth import get_user_model
from django.utils import timezone
from datetime import timedelta

User = get_user_model()
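To illustrate the selector pattern described in the module docstring, a view keeps its data access behind these functions rather than building querysets inline. The sketch below is a hypothetical caller (the view name, template, and import path are assumptions, not part of this diff) and only calls selectors defined in this module.

# Hypothetical usage sketch of the selectors above; not part of the original file.
from django.shortcuts import render

from accounts.selectors import top_reviewers, user_profile_optimized  # assumed import path


def profile_dashboard(request, user_id: int):
    # Data access stays in the selector layer; the view only assembles context.
    profile_user = user_profile_optimized(user_id=user_id)
    leaders = top_reviewers(limit=5)
    return render(
        request,
        "accounts/dashboard.html",  # assumed template
        {"profile_user": profile_user, "top_reviewers": leaders},
    )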
|
|
||||||
|
|
||||||
def user_profile_optimized(*, user_id: int) -> Any:
|
|
||||||
"""
|
|
||||||
Get a user with optimized queries for profile display.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
user_id: User ID
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
User instance with prefetched related data
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
User.DoesNotExist: If user doesn't exist
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.prefetch_related(
|
|
||||||
"park_reviews", "ride_reviews", "socialaccount_set"
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
park_review_count=Count(
|
|
||||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
ride_review_count=Count(
|
|
||||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
|
||||||
)
|
|
||||||
.get(id=user_id)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def active_users_with_stats() -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get active users with review statistics.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of active users with review counts
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.filter(is_active=True)
|
|
||||||
.annotate(
|
|
||||||
park_review_count=Count(
|
|
||||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
ride_review_count=Count(
|
|
||||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
|
||||||
)
|
|
||||||
.order_by("-total_review_count")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def users_with_recent_activity(*, days: int = 30) -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users who have been active in the last N days.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
days: Number of days to look back for activity
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of recently active users
|
|
||||||
"""
|
|
||||||
cutoff_date = timezone.now() - timedelta(days=days)
|
|
||||||
|
|
||||||
return (
|
|
||||||
User.objects.filter(
|
|
||||||
Q(last_login__gte=cutoff_date)
|
|
||||||
| Q(park_reviews__created_at__gte=cutoff_date)
|
|
||||||
| Q(ride_reviews__created_at__gte=cutoff_date)
|
|
||||||
)
|
|
||||||
.annotate(
|
|
||||||
recent_park_reviews=Count(
|
|
||||||
"park_reviews",
|
|
||||||
filter=Q(park_reviews__created_at__gte=cutoff_date),
|
|
||||||
),
|
|
||||||
recent_ride_reviews=Count(
|
|
||||||
"ride_reviews",
|
|
||||||
filter=Q(ride_reviews__created_at__gte=cutoff_date),
|
|
||||||
),
|
|
||||||
recent_total_reviews=F("recent_park_reviews") + F("recent_ride_reviews"),
|
|
||||||
)
|
|
||||||
.order_by("-last_login")
|
|
||||||
.distinct()
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def top_reviewers(*, limit: int = 10) -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get top users by review count.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
limit: Maximum number of users to return
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of top reviewers
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.filter(is_active=True)
|
|
||||||
.annotate(
|
|
||||||
park_review_count=Count(
|
|
||||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
ride_review_count=Count(
|
|
||||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
|
||||||
)
|
|
||||||
.filter(total_review_count__gt=0)
|
|
||||||
.order_by("-total_review_count")[:limit]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def moderator_users() -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users with moderation permissions.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of users who can moderate content
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.filter(
|
|
||||||
Q(is_staff=True)
|
|
||||||
| Q(groups__name="Moderators")
|
|
||||||
| Q(
|
|
||||||
user_permissions__codename__in=[
|
|
||||||
"change_parkreview",
|
|
||||||
"change_ridereview",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.distinct()
|
|
||||||
.order_by("username")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def users_by_registration_date(*, start_date, end_date) -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users who registered within a date range.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
start_date: Start of date range
|
|
||||||
end_date: End of date range
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of users registered in the date range
|
|
||||||
"""
|
|
||||||
return User.objects.filter(
|
|
||||||
date_joined__date__gte=start_date, date_joined__date__lte=end_date
|
|
||||||
).order_by("-date_joined")
|
|
||||||
|
|
||||||
|
|
||||||
def user_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users matching a search query for autocomplete functionality.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
query: Search string
|
|
||||||
limit: Maximum number of results
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of matching users for autocomplete
|
|
||||||
"""
|
|
||||||
return User.objects.filter(
|
|
||||||
Q(username__icontains=query)
|
|
||||||
| Q(first_name__icontains=query)
|
|
||||||
| Q(last_name__icontains=query),
|
|
||||||
is_active=True,
|
|
||||||
).order_by("username")[:limit]
|
|
||||||
|
|
||||||
|
|
||||||
def users_with_social_accounts() -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users who have connected social accounts.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of users with social account connections
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.filter(socialaccount__isnull=False)
|
|
||||||
.prefetch_related("socialaccount_set")
|
|
||||||
.distinct()
|
|
||||||
.order_by("username")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def user_statistics_summary() -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Get overall user statistics for dashboard/analytics.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Dictionary containing user statistics
|
|
||||||
"""
|
|
||||||
total_users = User.objects.count()
|
|
||||||
active_users = User.objects.filter(is_active=True).count()
|
|
||||||
staff_users = User.objects.filter(is_staff=True).count()
|
|
||||||
|
|
||||||
# Users with reviews
|
|
||||||
users_with_reviews = (
|
|
||||||
User.objects.filter(
|
|
||||||
Q(park_reviews__isnull=False) | Q(ride_reviews__isnull=False)
|
|
||||||
)
|
|
||||||
.distinct()
|
|
||||||
.count()
|
|
||||||
)
|
|
||||||
|
|
||||||
# Recent registrations (last 30 days)
|
|
||||||
cutoff_date = timezone.now() - timedelta(days=30)
|
|
||||||
recent_registrations = User.objects.filter(date_joined__gte=cutoff_date).count()
|
|
||||||
|
|
||||||
return {
|
|
||||||
"total_users": total_users,
|
|
||||||
"active_users": active_users,
|
|
||||||
"inactive_users": total_users - active_users,
|
|
||||||
"staff_users": staff_users,
|
|
||||||
"users_with_reviews": users_with_reviews,
|
|
||||||
"recent_registrations": recent_registrations,
|
|
||||||
"review_participation_rate": (
|
|
||||||
(users_with_reviews / total_users * 100) if total_users > 0 else 0
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def users_needing_email_verification() -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users who haven't verified their email addresses.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of users with unverified emails
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.filter(is_active=True, emailaddress__verified=False)
|
|
||||||
.distinct()
|
|
||||||
.order_by("date_joined")
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def users_by_review_activity(*, min_reviews: int = 1) -> QuerySet:
|
|
||||||
"""
|
|
||||||
Get users who have written at least a minimum number of reviews.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
min_reviews: Minimum number of reviews required
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet of users with sufficient review activity
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
User.objects.annotate(
|
|
||||||
park_review_count=Count(
|
|
||||||
"park_reviews", filter=Q(park_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
ride_review_count=Count(
|
|
||||||
"ride_reviews", filter=Q(ride_reviews__is_published=True)
|
|
||||||
),
|
|
||||||
total_review_count=F("park_review_count") + F("ride_review_count"),
|
|
||||||
)
|
|
||||||
.filter(total_review_count__gte=min_reviews)
|
|
||||||
.order_by("-total_review_count")
|
|
||||||
)
|
|
||||||
@@ -1,189 +0,0 @@
from django.db.models.signals import post_save, pre_save
from django.dispatch import receiver
from django.contrib.auth.models import Group
from django.db import transaction
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
import requests
from .models import User, UserProfile


@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
    """Create UserProfile for new users"""
    try:
        if created:
            # Create profile
            profile = UserProfile.objects.create(user=instance)

            # If user has a social account with avatar, download it
            social_account = instance.socialaccount_set.first()
            if social_account:
                extra_data = social_account.extra_data
                avatar_url = None

                if social_account.provider == "google":
                    avatar_url = extra_data.get("picture")
                elif social_account.provider == "discord":
                    avatar = extra_data.get("avatar")
                    discord_id = extra_data.get("id")
                    if avatar:
                        avatar_url = f"https://cdn.discordapp.com/avatars/{discord_id}/{avatar}.png"

                if avatar_url:
                    try:
                        response = requests.get(avatar_url, timeout=60)
                        if response.status_code == 200:
                            img_temp = NamedTemporaryFile(delete=True)
                            img_temp.write(response.content)
                            img_temp.flush()

                            file_name = f"avatar_{instance.username}.png"
                            profile.avatar.save(file_name, File(img_temp), save=True)
                    except Exception as e:
                        print(
                            f"Error downloading avatar for user {instance.username}: {str(e)}"
                        )
    except Exception as e:
        print(f"Error creating profile for user {instance.username}: {str(e)}")
|
|
||||||
|
|
||||||
@receiver(post_save, sender=User)
|
|
||||||
def save_user_profile(sender, instance, **kwargs):
|
|
||||||
"""Ensure UserProfile exists and is saved"""
|
|
||||||
try:
|
|
||||||
# Try to get existing profile first
|
|
||||||
try:
|
|
||||||
profile = instance.profile
|
|
||||||
profile.save()
|
|
||||||
except UserProfile.DoesNotExist:
|
|
||||||
# Profile doesn't exist, create it
|
|
||||||
UserProfile.objects.create(user=instance)
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Error saving profile for user {instance.username}: {str(e)}")
|
|
||||||
|
|
||||||
|
|
||||||
@receiver(pre_save, sender=User)
|
|
||||||
def sync_user_role_with_groups(sender, instance, **kwargs):
|
|
||||||
"""Sync user role with Django groups"""
|
|
||||||
if instance.pk: # Only for existing users
|
|
||||||
try:
|
|
||||||
old_instance = User.objects.get(pk=instance.pk)
|
|
||||||
if old_instance.role != instance.role:
|
|
||||||
# Role has changed, update groups
|
|
||||||
with transaction.atomic():
|
|
||||||
# Remove from old role group if exists
|
|
||||||
if old_instance.role != User.Roles.USER:
|
|
||||||
old_group = Group.objects.filter(name=old_instance.role).first()
|
|
||||||
if old_group:
|
|
||||||
instance.groups.remove(old_group)
|
|
||||||
|
|
||||||
# Add to new role group
|
|
||||||
if instance.role != User.Roles.USER:
|
|
||||||
new_group, _ = Group.objects.get_or_create(name=instance.role)
|
|
||||||
instance.groups.add(new_group)
|
|
||||||
|
|
||||||
# Special handling for superuser role
|
|
||||||
if instance.role == User.Roles.SUPERUSER:
|
|
||||||
instance.is_superuser = True
|
|
||||||
instance.is_staff = True
|
|
||||||
elif old_instance.role == User.Roles.SUPERUSER:
|
|
||||||
# If removing superuser role, remove superuser
|
|
||||||
# status
|
|
||||||
instance.is_superuser = False
|
|
||||||
if instance.role not in [
|
|
||||||
User.Roles.ADMIN,
|
|
||||||
User.Roles.MODERATOR,
|
|
||||||
]:
|
|
||||||
instance.is_staff = False
|
|
||||||
|
|
||||||
# Handle staff status for admin and moderator roles
|
|
||||||
if instance.role in [
|
|
||||||
User.Roles.ADMIN,
|
|
||||||
User.Roles.MODERATOR,
|
|
||||||
]:
|
|
||||||
instance.is_staff = True
|
|
||||||
elif old_instance.role in [
|
|
||||||
User.Roles.ADMIN,
|
|
||||||
User.Roles.MODERATOR,
|
|
||||||
]:
|
|
||||||
# If removing admin/moderator role, remove staff
|
|
||||||
# status
|
|
||||||
if instance.role not in [User.Roles.SUPERUSER]:
|
|
||||||
instance.is_staff = False
|
|
||||||
except User.DoesNotExist:
|
|
||||||
pass
|
|
||||||
except Exception as e:
print(f"Error syncing role with groups for user {instance.username}: {str(e)}")
|
|
||||||
|
|
||||||
|
|
||||||
def create_default_groups():
|
|
||||||
"""
|
|
||||||
Create default groups with appropriate permissions.
|
|
||||||
Call this in a migration or management command.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
from django.contrib.auth.models import Permission
|
|
||||||
|
|
||||||
# Create Moderator group
|
|
||||||
moderator_group, _ = Group.objects.get_or_create(name=User.Roles.MODERATOR)
|
|
||||||
moderator_permissions = [
|
|
||||||
# Review moderation permissions
|
|
||||||
"change_review",
|
|
||||||
"delete_review",
|
|
||||||
"change_reviewreport",
|
|
||||||
"delete_reviewreport",
|
|
||||||
# Edit moderation permissions
|
|
||||||
"change_parkedit",
|
|
||||||
"delete_parkedit",
|
|
||||||
"change_rideedit",
|
|
||||||
"delete_rideedit",
|
|
||||||
"change_companyedit",
|
|
||||||
"delete_companyedit",
|
|
||||||
"change_manufactureredit",
|
|
||||||
"delete_manufactureredit",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Create Admin group
|
|
||||||
admin_group, _ = Group.objects.get_or_create(name=User.Roles.ADMIN)
|
|
||||||
admin_permissions = moderator_permissions + [
|
|
||||||
# User management permissions
|
|
||||||
"change_user",
|
|
||||||
"delete_user",
|
|
||||||
# Content management permissions
|
|
||||||
"add_park",
|
|
||||||
"change_park",
|
|
||||||
"delete_park",
|
|
||||||
"add_ride",
|
|
||||||
"change_ride",
|
|
||||||
"delete_ride",
|
|
||||||
"add_company",
|
|
||||||
"change_company",
|
|
||||||
"delete_company",
|
|
||||||
"add_manufacturer",
|
|
||||||
"change_manufacturer",
|
|
||||||
"delete_manufacturer",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Assign permissions to groups
|
|
||||||
for codename in moderator_permissions:
|
|
||||||
try:
|
|
||||||
perm = Permission.objects.get(codename=codename)
|
|
||||||
moderator_group.permissions.add(perm)
|
|
||||||
except Permission.DoesNotExist:
|
|
||||||
print(f"Permission not found: {codename}")
|
|
||||||
|
|
||||||
for codename in admin_permissions:
|
|
||||||
try:
|
|
||||||
perm = Permission.objects.get(codename=codename)
|
|
||||||
admin_group.permissions.add(perm)
|
|
||||||
except Permission.DoesNotExist:
|
|
||||||
print(f"Permission not found: {codename}")
|
|
||||||
except Exception as e:
|
|
||||||
print(f"Error creating default groups: {str(e)}")
|
|
||||||
@@ -1,23 +0,0 @@
from django import template
from django.conf import settings
from django.template.loader import render_to_string

register = template.Library()


@register.simple_tag
def turnstile_widget():
    """
    Template tag to render the Cloudflare Turnstile widget.
    When DEBUG is True, renders an empty template.
    When DEBUG is False, renders the normal widget.
    Usage: {% load turnstile_tags %}{% turnstile_widget %}
    """
    if settings.DEBUG:
        template_name = "accounts/turnstile_widget_empty.html"
        context = {}
    else:
        template_name = "accounts/turnstile_widget.html"
        context = {"site_key": settings.TURNSTILE_SITE_KEY}

    return render_to_string(template_name, context)
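A quick way to exercise the tag outside a full page (a sketch, assuming the two templates referenced above exist and the app's templatetags are loadable) is to render it through Django's template engine directly:

# Illustrative only: with DEBUG=True the tag falls back to the empty placeholder template.
from django.template import Context, Template
from django.test import override_settings


def render_turnstile_placeholder() -> str:
    with override_settings(DEBUG=True):
        tpl = Template("{% load turnstile_tags %}{% turnstile_widget %}")
        return tpl.render(Context({}))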
@@ -1,126 +0,0 @@
|
|||||||
from django.test import TestCase
|
|
||||||
from django.contrib.auth.models import Group, Permission
|
|
||||||
from django.contrib.contenttypes.models import ContentType
|
|
||||||
from unittest.mock import patch, MagicMock
|
|
||||||
from .models import User, UserProfile
|
|
||||||
from .signals import create_default_groups
|
|
||||||
|
|
||||||
|
|
||||||
class SignalsTestCase(TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.user = User.objects.create_user(
|
|
||||||
username="testuser",
|
|
||||||
email="testuser@example.com",
|
|
||||||
password="password",
|
|
||||||
)
|
|
||||||
|
|
||||||
def test_create_user_profile(self):
|
|
||||||
# Refresh user from database to ensure signals have been processed
|
|
||||||
self.user.refresh_from_db()
|
|
||||||
|
|
||||||
# Check if profile exists in database first
|
|
||||||
profile_exists = UserProfile.objects.filter(user=self.user).exists()
|
|
||||||
self.assertTrue(profile_exists, "UserProfile should be created by signals")
|
|
||||||
|
|
||||||
# Now safely access the profile
|
|
||||||
profile = UserProfile.objects.get(user=self.user)
|
|
||||||
self.assertIsInstance(profile, UserProfile)
|
|
||||||
|
|
||||||
# Test the reverse relationship
|
|
||||||
self.assertTrue(hasattr(self.user, "profile"))
|
|
||||||
# Test that we can access the profile through the user relationship
|
|
||||||
user_profile = getattr(self.user, "profile", None)
|
|
||||||
self.assertEqual(user_profile, profile)
|
|
||||||
|
|
||||||
@patch("accounts.signals.requests.get")
|
|
||||||
def test_create_user_profile_with_social_avatar(self, mock_get):
|
|
||||||
# Mock the response from requests.get
|
|
||||||
mock_response = MagicMock()
|
|
||||||
mock_response.status_code = 200
|
|
||||||
mock_response.content = b"fake-image-content"
|
|
||||||
mock_get.return_value = mock_response
|
|
||||||
|
|
||||||
# Create a social account for the user (we'll skip this test since socialaccount_set requires allauth setup)
|
|
||||||
# This test would need proper allauth configuration to work
|
|
||||||
self.skipTest("Requires proper allauth socialaccount setup")
|
|
||||||
|
|
||||||
def test_save_user_profile(self):
|
|
||||||
# Get the profile safely first
|
|
||||||
profile = UserProfile.objects.get(user=self.user)
|
|
||||||
profile.delete()
|
|
||||||
|
|
||||||
# Refresh user to clear cached profile relationship
|
|
||||||
self.user.refresh_from_db()
|
|
||||||
|
|
||||||
# Check that profile no longer exists
|
|
||||||
self.assertFalse(UserProfile.objects.filter(user=self.user).exists())
|
|
||||||
|
|
||||||
# Trigger save to recreate profile via signal
|
|
||||||
self.user.save()
|
|
||||||
|
|
||||||
# Verify profile was recreated
|
|
||||||
self.assertTrue(UserProfile.objects.filter(user=self.user).exists())
|
|
||||||
new_profile = UserProfile.objects.get(user=self.user)
|
|
||||||
self.assertIsInstance(new_profile, UserProfile)
|
|
||||||
|
|
||||||
def test_sync_user_role_with_groups(self):
|
|
||||||
self.user.role = User.Roles.MODERATOR
|
|
||||||
self.user.save()
|
|
||||||
self.assertTrue(self.user.groups.filter(name=User.Roles.MODERATOR).exists())
|
|
||||||
self.assertTrue(self.user.is_staff)
|
|
||||||
|
|
||||||
self.user.role = User.Roles.ADMIN
|
|
||||||
self.user.save()
|
|
||||||
self.assertFalse(self.user.groups.filter(name=User.Roles.MODERATOR).exists())
|
|
||||||
self.assertTrue(self.user.groups.filter(name=User.Roles.ADMIN).exists())
|
|
||||||
self.assertTrue(self.user.is_staff)
|
|
||||||
|
|
||||||
self.user.role = User.Roles.SUPERUSER
|
|
||||||
self.user.save()
|
|
||||||
self.assertFalse(self.user.groups.filter(name=User.Roles.ADMIN).exists())
|
|
||||||
self.assertTrue(self.user.groups.filter(name=User.Roles.SUPERUSER).exists())
|
|
||||||
self.assertTrue(self.user.is_superuser)
|
|
||||||
self.assertTrue(self.user.is_staff)
|
|
||||||
|
|
||||||
self.user.role = User.Roles.USER
|
|
||||||
self.user.save()
|
|
||||||
self.assertFalse(self.user.groups.exists())
|
|
||||||
self.assertFalse(self.user.is_superuser)
|
|
||||||
self.assertFalse(self.user.is_staff)
|
|
||||||
|
|
||||||
def test_create_default_groups(self):
|
|
||||||
# Create some permissions for testing
|
|
||||||
content_type = ContentType.objects.get_for_model(User)
|
|
||||||
Permission.objects.create(
|
|
||||||
codename="change_review",
|
|
||||||
name="Can change review",
|
|
||||||
content_type=content_type,
|
|
||||||
)
|
|
||||||
Permission.objects.create(
|
|
||||||
codename="delete_review",
|
|
||||||
name="Can delete review",
|
|
||||||
content_type=content_type,
|
|
||||||
)
|
|
||||||
Permission.objects.create(
|
|
||||||
codename="change_user",
|
|
||||||
name="Can change user",
|
|
||||||
content_type=content_type,
|
|
||||||
)
|
|
||||||
|
|
||||||
create_default_groups()
|
|
||||||
|
|
||||||
moderator_group = Group.objects.get(name=User.Roles.MODERATOR)
|
|
||||||
self.assertIsNotNone(moderator_group)
|
|
||||||
self.assertTrue(
|
|
||||||
moderator_group.permissions.filter(codename="change_review").exists()
|
|
||||||
)
|
|
||||||
self.assertFalse(
|
|
||||||
moderator_group.permissions.filter(codename="change_user").exists()
|
|
||||||
)
|
|
||||||
|
|
||||||
admin_group = Group.objects.get(name=User.Roles.ADMIN)
|
|
||||||
self.assertIsNotNone(admin_group)
|
|
||||||
self.assertTrue(
|
|
||||||
admin_group.permissions.filter(codename="change_review").exists()
|
|
||||||
)
|
|
||||||
self.assertTrue(admin_group.permissions.filter(codename="change_user").exists())
|
|
||||||
@@ -1,48 +0,0 @@
from django.urls import path
from django.contrib.auth import views as auth_views
from allauth.account.views import LogoutView
from . import views

app_name = "accounts"

urlpatterns = [
    # Override allauth's login and signup views with our Turnstile-enabled
    # versions
    path("login/", views.CustomLoginView.as_view(), name="account_login"),
    path("signup/", views.CustomSignupView.as_view(), name="account_signup"),
    # Authentication views
    path("logout/", LogoutView.as_view(), name="logout"),
    path(
        "password_change/",
        auth_views.PasswordChangeView.as_view(),
        name="password_change",
    ),
    path(
        "password_change/done/",
        auth_views.PasswordChangeDoneView.as_view(),
        name="password_change_done",
    ),
    path(
        "password_reset/",
        auth_views.PasswordResetView.as_view(),
        name="password_reset",
    ),
    path(
        "password_reset/done/",
        auth_views.PasswordResetDoneView.as_view(),
        name="password_reset_done",
    ),
    path(
        "reset/<uidb64>/<token>/",
        auth_views.PasswordResetConfirmView.as_view(),
        name="password_reset_confirm",
    ),
    path(
        "reset/done/",
        auth_views.PasswordResetCompleteView.as_view(),
        name="password_reset_complete",
    ),
    # Profile views
    path("profile/", views.user_redirect_view, name="profile_redirect"),
    path("settings/", views.SettingsView.as_view(), name="settings"),
]
@@ -1,426 +0,0 @@
|
|||||||
from django.views.generic import DetailView, TemplateView
|
|
||||||
from django.contrib.auth import get_user_model
|
|
||||||
from django.shortcuts import get_object_or_404, redirect, render
|
|
||||||
from django.contrib.auth.decorators import login_required
|
|
||||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
|
||||||
from django.contrib import messages
|
|
||||||
from django.core.exceptions import ValidationError
|
|
||||||
from django.template.loader import render_to_string
|
|
||||||
from django.utils.crypto import get_random_string
|
|
||||||
from django.utils import timezone
|
|
||||||
from datetime import timedelta
|
|
||||||
from django.contrib.sites.shortcuts import get_current_site
|
|
||||||
from django.contrib.sites.models import Site
|
|
||||||
from django.contrib.sites.requests import RequestSite
|
|
||||||
from django.db.models import QuerySet
|
|
||||||
from django.http import HttpResponseRedirect, HttpResponse, HttpRequest
|
|
||||||
from django.urls import reverse
|
|
||||||
from django.contrib.auth import login
|
|
||||||
from django.core.files.uploadedfile import UploadedFile
|
|
||||||
from accounts.models import (
|
|
||||||
User,
|
|
||||||
PasswordReset,
|
|
||||||
TopList,
|
|
||||||
EmailVerification,
|
|
||||||
UserProfile,
|
|
||||||
)
|
|
||||||
from email_service.services import EmailService
|
|
||||||
from parks.models import ParkReview
|
|
||||||
from rides.models import RideReview
|
|
||||||
from allauth.account.views import LoginView, SignupView
|
|
||||||
from .mixins import TurnstileMixin
|
|
||||||
from typing import Dict, Any, Optional, Union, cast, TYPE_CHECKING
|
|
||||||
from django_htmx.http import HttpResponseClientRefresh
|
|
||||||
from contextlib import suppress
|
|
||||||
import re
|
|
||||||
|
|
||||||
UserModel = get_user_model()
|
|
||||||
|
|
||||||
|
|
||||||
class CustomLoginView(TurnstileMixin, LoginView):
|
|
||||||
def form_valid(self, form):
|
|
||||||
try:
|
|
||||||
self.validate_turnstile(self.request)
|
|
||||||
except ValidationError as e:
|
|
||||||
form.add_error(None, str(e))
|
|
||||||
return self.form_invalid(form)
|
|
||||||
|
|
||||||
response = super().form_valid(form)
|
|
||||||
return (
|
|
||||||
HttpResponseClientRefresh()
|
|
||||||
if getattr(self.request, "htmx", False)
|
|
||||||
else response
|
|
||||||
)
|
|
||||||
|
|
||||||
def form_invalid(self, form):
|
|
||||||
if getattr(self.request, "htmx", False):
|
|
||||||
return render(
|
|
||||||
self.request,
|
|
||||||
"account/partials/login_form.html",
|
|
||||||
self.get_context_data(form=form),
|
|
||||||
)
|
|
||||||
return super().form_invalid(form)
|
|
||||||
|
|
||||||
def get(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
|
|
||||||
if getattr(request, "htmx", False):
|
|
||||||
return render(
|
|
||||||
request,
|
|
||||||
"account/partials/login_modal.html",
|
|
||||||
self.get_context_data(),
|
|
||||||
)
|
|
||||||
return super().get(request, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class CustomSignupView(TurnstileMixin, SignupView):
|
|
||||||
def form_valid(self, form):
|
|
||||||
try:
|
|
||||||
self.validate_turnstile(self.request)
|
|
||||||
except ValidationError as e:
|
|
||||||
form.add_error(None, str(e))
|
|
||||||
return self.form_invalid(form)
|
|
||||||
|
|
||||||
response = super().form_valid(form)
|
|
||||||
return (
|
|
||||||
HttpResponseClientRefresh()
|
|
||||||
if getattr(self.request, "htmx", False)
|
|
||||||
else response
|
|
||||||
)
|
|
||||||
|
|
||||||
def form_invalid(self, form):
|
|
||||||
if getattr(self.request, "htmx", False):
|
|
||||||
return render(
|
|
||||||
self.request,
|
|
||||||
"account/partials/signup_modal.html",
|
|
||||||
self.get_context_data(form=form),
|
|
||||||
)
|
|
||||||
return super().form_invalid(form)
|
|
||||||
|
|
||||||
def get(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
|
|
||||||
if getattr(request, "htmx", False):
|
|
||||||
return render(
|
|
||||||
request,
|
|
||||||
"account/partials/signup_modal.html",
|
|
||||||
self.get_context_data(),
|
|
||||||
)
|
|
||||||
return super().get(request, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
@login_required
|
|
||||||
def user_redirect_view(request: HttpRequest) -> HttpResponse:
|
|
||||||
user = cast(User, request.user)
|
|
||||||
return redirect("profile", username=user.username)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_social_login(request: HttpRequest, email: str) -> HttpResponse:
|
|
||||||
if sociallogin := request.session.get("socialaccount_sociallogin"):
|
|
||||||
sociallogin.user.email = email
|
|
||||||
sociallogin.save()
|
|
||||||
login(request, sociallogin.user)
|
|
||||||
del request.session["socialaccount_sociallogin"]
|
|
||||||
messages.success(request, "Successfully logged in")
|
|
||||||
return redirect("/")
|
|
||||||
|
|
||||||
|
|
||||||
def email_required(request: HttpRequest) -> HttpResponse:
|
|
||||||
if not request.session.get("socialaccount_sociallogin"):
|
|
||||||
messages.error(request, "No social login in progress")
|
|
||||||
return redirect("/")
|
|
||||||
|
|
||||||
if request.method == "POST":
|
|
||||||
if email := request.POST.get("email"):
|
|
||||||
return handle_social_login(request, email)
|
|
||||||
messages.error(request, "Email is required")
|
|
||||||
return render(
|
|
||||||
request,
|
|
||||||
"accounts/email_required.html",
|
|
||||||
{"error": "Email is required"},
|
|
||||||
)
|
|
||||||
|
|
||||||
return render(request, "accounts/email_required.html")
|
|
||||||
|
|
||||||
|
|
||||||
class ProfileView(DetailView):
|
|
||||||
model = User
|
|
||||||
template_name = "accounts/profile.html"
|
|
||||||
context_object_name = "profile_user"
|
|
||||||
slug_field = "username"
|
|
||||||
slug_url_kwarg = "username"
|
|
||||||
|
|
||||||
def get_queryset(self) -> QuerySet[User]:
|
|
||||||
return User.objects.select_related("profile")
|
|
||||||
|
|
||||||
def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
|
|
||||||
context = super().get_context_data(**kwargs)
|
|
||||||
user = cast(User, self.get_object())
|
|
||||||
|
|
||||||
context["park_reviews"] = self._get_user_park_reviews(user)
|
|
||||||
context["ride_reviews"] = self._get_user_ride_reviews(user)
|
|
||||||
context["top_lists"] = self._get_user_top_lists(user)
|
|
||||||
|
|
||||||
return context
|
|
||||||
|
|
||||||
def _get_user_park_reviews(self, user: User) -> QuerySet[ParkReview]:
|
|
||||||
return (
|
|
||||||
ParkReview.objects.filter(user=user, is_published=True)
|
|
||||||
.select_related("user", "user__profile", "park")
|
|
||||||
.order_by("-created_at")[:5]
|
|
||||||
)
|
|
||||||
|
|
||||||
def _get_user_ride_reviews(self, user: User) -> QuerySet[RideReview]:
|
|
||||||
return (
|
|
||||||
RideReview.objects.filter(user=user, is_published=True)
|
|
||||||
.select_related("user", "user__profile", "ride")
|
|
||||||
.order_by("-created_at")[:5]
|
|
||||||
)
|
|
||||||
|
|
||||||
def _get_user_top_lists(self, user: User) -> QuerySet[TopList]:
|
|
||||||
return (
|
|
||||||
TopList.objects.filter(user=user)
|
|
||||||
.select_related("user", "user__profile")
|
|
||||||
.prefetch_related("items")
|
|
||||||
.order_by("-created_at")[:5]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class SettingsView(LoginRequiredMixin, TemplateView):
|
|
||||||
template_name = "accounts/settings.html"
|
|
||||||
|
|
||||||
def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
|
|
||||||
context = super().get_context_data(**kwargs)
|
|
||||||
context["user"] = self.request.user
|
|
||||||
return context
|
|
||||||
|
|
||||||
def _handle_profile_update(self, request: HttpRequest) -> None:
|
|
||||||
user = cast(User, request.user)
|
|
||||||
profile = get_object_or_404(UserProfile, user=user)
|
|
||||||
|
|
||||||
if display_name := request.POST.get("display_name"):
|
|
||||||
profile.display_name = display_name
|
|
||||||
|
|
||||||
if "avatar" in request.FILES:
|
|
||||||
avatar_file = cast(UploadedFile, request.FILES["avatar"])
|
|
||||||
profile.avatar.save(avatar_file.name, avatar_file, save=False)
|
|
||||||
profile.save()
|
|
||||||
|
|
||||||
user.save()
|
|
||||||
messages.success(request, "Profile updated successfully")
|
|
||||||
|
|
||||||
def _validate_password(self, password: str) -> bool:
|
|
||||||
"""Validate password meets requirements."""
|
|
||||||
return (
|
|
||||||
len(password) >= 8
|
|
||||||
and bool(re.search(r"[A-Z]", password))
|
|
||||||
and bool(re.search(r"[a-z]", password))
|
|
||||||
and bool(re.search(r"[0-9]", password))
|
|
||||||
)
|
|
||||||
|
|
||||||
def _send_password_change_confirmation(
|
|
||||||
self, request: HttpRequest, user: User
|
|
||||||
) -> None:
|
|
||||||
"""Send password change confirmation email."""
|
|
||||||
site = get_current_site(request)
|
|
||||||
context = {
|
|
||||||
"user": user,
|
|
||||||
"site_name": site.name,
|
|
||||||
}
|
|
||||||
|
|
||||||
email_html = render_to_string(
|
|
||||||
"accounts/email/password_change_confirmation.html", context
|
|
||||||
)
|
|
||||||
|
|
||||||
EmailService.send_email(
|
|
||||||
to=user.email,
|
|
||||||
subject="Password Changed Successfully",
|
|
||||||
text="Your password has been changed successfully.",
|
|
||||||
site=site,
|
|
||||||
html=email_html,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _handle_password_change(
|
|
||||||
self, request: HttpRequest
|
|
||||||
) -> Optional[HttpResponseRedirect]:
|
|
||||||
user = cast(User, request.user)
|
|
||||||
old_password = request.POST.get("old_password", "")
|
|
||||||
new_password = request.POST.get("new_password", "")
|
|
||||||
confirm_password = request.POST.get("confirm_password", "")
|
|
||||||
|
|
||||||
if not user.check_password(old_password):
|
|
||||||
messages.error(request, "Current password is incorrect")
|
|
||||||
return None
|
|
||||||
|
|
||||||
if new_password != confirm_password:
|
|
||||||
messages.error(request, "New passwords do not match")
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not self._validate_password(new_password):
|
|
||||||
messages.error(
|
|
||||||
request,
|
|
||||||
"Password must be at least 8 characters and contain uppercase, lowercase, and numbers",
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
|
|
||||||
user.set_password(new_password)
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
self._send_password_change_confirmation(request, user)
|
|
||||||
messages.success(
|
|
||||||
request,
|
|
||||||
"Password changed successfully. Please check your email for confirmation.",
|
|
||||||
)
|
|
||||||
return HttpResponseRedirect(reverse("account_login"))
|
|
||||||
|
|
||||||
def _handle_email_change(self, request: HttpRequest) -> None:
|
|
||||||
if new_email := request.POST.get("new_email"):
|
|
||||||
self._send_email_verification(request, new_email)
|
|
||||||
messages.success(
|
|
||||||
request, "Verification email sent to your new email address"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
messages.error(request, "New email is required")
|
|
||||||
|
|
||||||
def _send_email_verification(self, request: HttpRequest, new_email: str) -> None:
|
|
||||||
user = cast(User, request.user)
|
|
||||||
token = get_random_string(64)
|
|
||||||
EmailVerification.objects.update_or_create(user=user, defaults={"token": token})
|
|
||||||
|
|
||||||
site = cast(Site, get_current_site(request))
|
|
||||||
verification_url = reverse("verify_email", kwargs={"token": token})
|
|
||||||
|
|
||||||
context = {
|
|
||||||
"user": user,
|
|
||||||
"verification_url": verification_url,
|
|
||||||
"site_name": site.name,
|
|
||||||
}
|
|
||||||
|
|
||||||
email_html = render_to_string("accounts/email/verify_email.html", context)
|
|
||||||
EmailService.send_email(
|
|
||||||
to=new_email,
|
|
||||||
subject="Verify your new email address",
|
|
||||||
text="Click the link to verify your new email address",
|
|
||||||
site=site,
|
|
||||||
html=email_html,
|
|
||||||
)
|
|
||||||
|
|
||||||
user.pending_email = new_email
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
|
|
||||||
action = request.POST.get("action")
|
|
||||||
|
|
||||||
if action == "update_profile":
|
|
||||||
self._handle_profile_update(request)
|
|
||||||
elif action == "change_password":
|
|
||||||
if response := self._handle_password_change(request):
|
|
||||||
return response
|
|
||||||
elif action == "change_email":
|
|
||||||
self._handle_email_change(request)
|
|
||||||
|
|
||||||
return self.get(request, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def create_password_reset_token(user: User) -> str:
|
|
||||||
token = get_random_string(64)
|
|
||||||
PasswordReset.objects.update_or_create(
|
|
||||||
user=user,
|
|
||||||
defaults={
|
|
||||||
"token": token,
|
|
||||||
"expires_at": timezone.now() + timedelta(hours=24),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return token
|
|
||||||
|
|
||||||
|
|
||||||
def send_password_reset_email(
|
|
||||||
user: User, site: Union[Site, RequestSite], token: str
|
|
||||||
) -> None:
|
|
||||||
reset_url = reverse("password_reset_confirm", kwargs={"token": token})
|
|
||||||
context = {
|
|
||||||
"user": user,
|
|
||||||
"reset_url": reset_url,
|
|
||||||
"site_name": site.name,
|
|
||||||
}
|
|
||||||
email_html = render_to_string("accounts/email/password_reset.html", context)
|
|
||||||
|
|
||||||
EmailService.send_email(
|
|
||||||
to=user.email,
|
|
||||||
subject="Reset your password",
|
|
||||||
text="Click the link to reset your password",
|
|
||||||
site=site,
|
|
||||||
html=email_html,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def request_password_reset(request: HttpRequest) -> HttpResponse:
|
|
||||||
if request.method != "POST":
|
|
||||||
return render(request, "accounts/password_reset.html")
|
|
||||||
|
|
||||||
if not (email := request.POST.get("email")):
|
|
||||||
messages.error(request, "Email is required")
|
|
||||||
return redirect("account_reset_password")
|
|
||||||
|
|
||||||
with suppress(User.DoesNotExist):
|
|
||||||
user = User.objects.get(email=email)
|
|
||||||
token = create_password_reset_token(user)
|
|
||||||
site = get_current_site(request)
|
|
||||||
send_password_reset_email(user, site, token)
|
|
||||||
|
|
||||||
messages.success(request, "Password reset email sent")
|
|
||||||
return redirect("account_login")
|
|
||||||
|
|
||||||
|
|
||||||
def handle_password_reset(
|
|
||||||
request: HttpRequest,
|
|
||||||
user: User,
|
|
||||||
new_password: str,
|
|
||||||
reset: PasswordReset,
|
|
||||||
site: Union[Site, RequestSite],
|
|
||||||
) -> None:
|
|
||||||
user.set_password(new_password)
|
|
||||||
user.save()
|
|
||||||
|
|
||||||
reset.used = True
|
|
||||||
reset.save()
|
|
||||||
|
|
||||||
send_password_reset_confirmation(user, site)
|
|
||||||
messages.success(request, "Password reset successfully")
|
|
||||||
|
|
||||||
|
|
||||||
def send_password_reset_confirmation(
|
|
||||||
user: User, site: Union[Site, RequestSite]
|
|
||||||
) -> None:
|
|
||||||
context = {
|
|
||||||
"user": user,
|
|
||||||
"site_name": site.name,
|
|
||||||
}
|
|
||||||
email_html = render_to_string(
|
|
||||||
"accounts/email/password_reset_complete.html", context
|
|
||||||
)
|
|
||||||
|
|
||||||
EmailService.send_email(
|
|
||||||
to=user.email,
|
|
||||||
subject="Password Reset Complete",
|
|
||||||
text="Your password has been reset successfully.",
|
|
||||||
site=site,
|
|
||||||
html=email_html,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def reset_password(request: HttpRequest, token: str) -> HttpResponse:
|
|
||||||
try:
|
|
||||||
reset = PasswordReset.objects.select_related("user").get(
|
|
||||||
token=token, expires_at__gt=timezone.now(), used=False
|
|
||||||
)
|
|
||||||
|
|
||||||
if request.method == "POST":
|
|
||||||
if new_password := request.POST.get("new_password"):
|
|
||||||
site = get_current_site(request)
|
|
||||||
handle_password_reset(request, reset.user, new_password, reset, site)
|
|
||||||
return redirect("account_login")
|
|
||||||
|
|
||||||
messages.error(request, "New password is required")
|
|
||||||
|
|
||||||
return render(request, "accounts/password_reset_confirm.html", {"token": token})
|
|
||||||
|
|
||||||
except PasswordReset.DoesNotExist:
|
|
||||||
messages.error(request, "Invalid or expired reset token")
|
|
||||||
return redirect("account_reset_password")
|
|
||||||
[Deleted binary image files: only the Before Width / Height / Size metadata was recoverable, omitted here]
@@ -1 +0,0 @@
[GITHUB-TOKEN-REMOVED]
@@ -1,203 +0,0 @@
# ThrillWiki Automation Service Environment Configuration
|
|
||||||
# Copy this file to thrillwiki-automation***REMOVED*** and customize for your environment
|
|
||||||
#
|
|
||||||
# Security Note: This file should have restricted permissions (600) as it may contain
|
|
||||||
# sensitive information like GitHub Personal Access Tokens
|
|
||||||
|
|
||||||
# [AWS-SECRET-REMOVED]====================================
|
|
||||||
# PROJECT CONFIGURATION
|
|
||||||
# [AWS-SECRET-REMOVED]====================================
|
|
||||||
|
|
||||||
# Base project directory (usually auto-detected)
|
|
||||||
# PROJECT_DIR=/home/ubuntu/thrillwiki
|
|
||||||
|
|
||||||
# Service name for systemd integration
|
|
||||||
# SERVICE_NAME=thrillwiki
|
|
||||||
|
|
||||||
# [AWS-SECRET-REMOVED]====================================
|
|
||||||
# GITHUB REPOSITORY CONFIGURATION
|
|
||||||
# [AWS-SECRET-REMOVED]====================================
|
|
||||||
|
|
||||||
# GitHub repository remote name
|
|
||||||
# GITHUB_REPO=origin
|
|
||||||
|
|
||||||
# Branch to pull from
|
|
||||||
# GITHUB_BRANCH=main
|
|
||||||
|
|
||||||
# GitHub Personal Access Token (PAT) - Required for private repositories
|
|
||||||
# Generate at: https://github.com/settings/tokens
|
|
||||||
# Required permissions: repo (Full control of private repositories)
|
|
||||||
# GITHUB_TOKEN=ghp_your_personal_access_token_here
|
|
||||||
|
|
||||||
# GitHub token file location (alternative to GITHUB_TOKEN)
|
|
||||||
# GITHUB_TOKEN_FILE=/home/ubuntu/thrillwiki/.github-pat
|
|
||||||
|
|
||||||
# [AWS-SECRET-REMOVED]====================================
|
|
||||||
# AUTOMATION TIMING CONFIGURATION
|
|
||||||
# [AWS-SECRET-REMOVED]====================================
|
|
||||||
|
|
||||||
# Repository pull interval in seconds (default: 300 = 5 minutes)
|
|
||||||
# PULL_INTERVAL=300
|
|
||||||
|
|
||||||
# Health check interval in seconds (default: 60 = 1 minute)
|
|
||||||
# HEALTH_CHECK_INTERVAL=60
|
|
||||||
|
|
# Server startup timeout in seconds (default: 120 = 2 minutes)
# STARTUP_TIMEOUT=120

# Restart delay after failure in seconds (default: 10)
# RESTART_DELAY=10

# [AWS-SECRET-REMOVED]====================================
# LOGGING CONFIGURATION
# [AWS-SECRET-REMOVED]====================================

# Log directory (default: project_dir/logs)
# LOG_DIR=/home/ubuntu/thrillwiki/logs

# Log file path
# LOG_[AWS-SECRET-REMOVED]proof-automation.log

# Maximum log file size in bytes (default: 10485760 = 10MB)
# MAX_LOG_SIZE=10485760

# Lock file location to prevent multiple instances
# LOCK_FILE=/tmp/thrillwiki-bulletproof.lock

# [AWS-SECRET-REMOVED]====================================
# DEVELOPMENT SERVER CONFIGURATION
# [AWS-SECRET-REMOVED]====================================

# Server host address (default: 0.0.0.0 for all interfaces)
# SERVER_HOST=0.0.0.0

# Server port (default: 8000)
# SERVER_PORT=8000

# [AWS-SECRET-REMOVED]====================================
# DJANGO CONFIGURATION
# [AWS-SECRET-REMOVED]====================================

# Django settings module
# DJANGO_SETTINGS_MODULE=thrillwiki.settings

# Python path
# PYTHONPATH=/home/ubuntu/thrillwiki

# [AWS-SECRET-REMOVED]====================================
# ADVANCED CONFIGURATION
# [AWS-SECRET-REMOVED]====================================

# GitHub authentication script location
# GITHUB_AUTH_[AWS-SECRET-REMOVED]ithub-auth.py

# Enable verbose logging (true/false)
# VERBOSE_LOGGING=false

# Enable debug mode for troubleshooting (true/false)
# DEBUG_MODE=false

# Custom git remote URL (overrides GITHUB_REPO if set)
# CUSTOM_GIT_REMOTE=https://github.com/username/repository.git

# Email notifications for critical failures (requires email configuration)
# NOTIFICATION_EMAIL=admin@example.com

# Maximum consecutive failures before alerting (default: 5)
# MAX_CONSECUTIVE_FAILURES=5

# Enable automatic dependency updates (true/false, default: true)
# AUTO_UPDATE_DEPENDENCIES=true

# Enable automatic migrations on code changes (true/false, default: true)
# AUTO_MIGRATE=true

# Enable automatic static file collection (true/false, default: true)
# AUTO_COLLECTSTATIC=true

# [AWS-SECRET-REMOVED]====================================
# SECURITY CONFIGURATION
# [AWS-SECRET-REMOVED]====================================

# GitHub authentication method (token|ssh|https)
# Default: token (uses GITHUB_TOKEN or GITHUB_TOKEN_FILE)
# GITHUB_AUTH_METHOD=token

# SSH key path for git operations (when using ssh auth method)
# SSH_KEY_PATH=/home/ubuntu/.ssh/***REMOVED***

# Git user configuration for commits
# GIT_USER_NAME="ThrillWiki Automation"
# GIT_USER_EMAIL="automation@thrillwiki.local"

# [AWS-SECRET-REMOVED]====================================
# MONITORING AND HEALTH CHECKS
# [AWS-SECRET-REMOVED]====================================

# Health check URL to verify server is running
# HEALTH_CHECK_URL=http://localhost:8000/health/

# Health check timeout in seconds
# HEALTH_CHECK_TIMEOUT=30

# Enable system resource monitoring (true/false)
# MONITOR_RESOURCES=true

# Memory usage threshold for warnings (in MB)
# MEMORY_WARNING_THRESHOLD=1024

# CPU usage threshold for warnings (percentage)
# CPU_WARNING_THRESHOLD=80

# Disk usage threshold for warnings (percentage)
# DISK_WARNING_THRESHOLD=90

# [AWS-SECRET-REMOVED]====================================
# INTEGRATION SETTINGS
# [AWS-SECRET-REMOVED]====================================

# Webhook integration (if using thrillwiki-webhook service)
# WEBHOOK_INTEGRATION=true

# Slack webhook URL for notifications (optional)
# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook/url

# Discord webhook URL for notifications (optional)
# DISCORD_WEBHOOK_URL=https://discord.com/api/webhooks/your/webhook/url

# [AWS-SECRET-REMOVED]====================================
# USAGE EXAMPLES
# [AWS-SECRET-REMOVED]====================================

# Example 1: Basic setup with GitHub PAT
# GITHUB_TOKEN=ghp_your_token_here
# PULL_INTERVAL=300
# AUTO_MIGRATE=true

# Example 2: Enhanced monitoring setup
# HEALTH_CHECK_INTERVAL=30
# MONITOR_RESOURCES=true
# NOTIFICATION_EMAIL=admin@thrillwiki.com
# SLACK_WEBHOOK_URL=https://hooks.slack.com/services/your/webhook

# Example 3: Development environment with frequent pulls
# PULL_INTERVAL=60
# DEBUG_MODE=true
# VERBOSE_LOGGING=true
# AUTO_UPDATE_DEPENDENCIES=true

# [AWS-SECRET-REMOVED]====================================
# INSTALLATION NOTES
# [AWS-SECRET-REMOVED]====================================

# 1. Copy this file: cp thrillwiki-automation***REMOVED***.example thrillwiki-automation***REMOVED***
# 2. Set secure permissions: chmod 600 thrillwiki-automation***REMOVED***
# 3. Customize the settings above for your environment
# 4. Enable the service: sudo systemctl enable thrillwiki-automation
# 5. Start the service: sudo systemctl start thrillwiki-automation
# 6. Check status: sudo systemctl status thrillwiki-automation
# 7. View logs: sudo journalctl -u thrillwiki-automation -f

# For security, ensure only the ubuntu user can read this file:
# sudo chown ubuntu:ubuntu thrillwiki-automation***REMOVED***
# sudo chmod 600 thrillwiki-automation***REMOVED***
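The automation script that consumes these variables is not part of this diff; the following is a minimal sketch of reading a few of the knobs above with the standard library only (the loader shown here is an assumption, not the project's code):

import os

# Defaults mirror the documented defaults above; the real script may read them differently.
startup_timeout = int(os.environ.get("STARTUP_TIMEOUT", "120"))
restart_delay = int(os.environ.get("RESTART_DELAY", "10"))
verbose_logging = os.environ.get("VERBOSE_LOGGING", "false").lower() == "true"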
@@ -1 +0,0 @@
# Configuration package for thrillwiki project

@@ -1 +0,0 @@
# Django settings package
@@ -1,377 +0,0 @@
|
|||||||
"""
|
|
||||||
Base Django settings for thrillwiki project.
|
|
||||||
Common settings shared across all environments.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import environ # type: ignore[import]
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
# Initialize environment variables
|
|
||||||
env = environ.Env(
|
|
||||||
DEBUG=(bool, False),
|
|
||||||
SECRET_KEY=(str, ""),
|
|
||||||
ALLOWED_HOSTS=(list, []),
|
|
||||||
DATABASE_URL=(str, ""),
|
|
||||||
CACHE_URL=(str, "locmem://"),
|
|
||||||
EMAIL_URL=(str, ""),
|
|
||||||
REDIS_URL=(str, ""),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Build paths inside the project like this: BASE_DIR / 'subdir'.
|
|
||||||
BASE_DIR = Path(__file__).resolve().parent.parent.parent
|
|
||||||
|
|
||||||
# Read environment file if it exists
|
|
||||||
environ.Env.read_env(BASE_DIR / ".env")
|
|
||||||
|
|
||||||
# SECURITY WARNING: keep the secret key used in production secret!
|
|
||||||
SECRET_KEY = env("SECRET_KEY")
|
|
||||||
|
|
||||||
# SECURITY WARNING: don't run with debug turned on in production!
|
|
||||||
DEBUG = env("DEBUG")
|
|
||||||
|
|
||||||
# Allowed hosts
|
|
||||||
ALLOWED_HOSTS = env("ALLOWED_HOSTS")
|
|
||||||
|
|
||||||
# CSRF trusted origins
|
|
||||||
CSRF_TRUSTED_ORIGINS = env("CSRF_TRUSTED_ORIGINS", default=[]) # type: ignore[arg-type]
|
|
||||||
|
|
||||||
# Application definition
|
|
||||||
DJANGO_APPS = [
|
|
||||||
"django.contrib.admin",
|
|
||||||
"django.contrib.auth",
|
|
||||||
"django.contrib.contenttypes",
|
|
||||||
"django.contrib.sessions",
|
|
||||||
"django.contrib.messages",
|
|
||||||
"django.contrib.staticfiles",
|
|
||||||
"django.contrib.sites",
|
|
||||||
"django.contrib.gis", # GeoDjango
|
|
||||||
]
|
|
||||||
|
|
||||||
THIRD_PARTY_APPS = [
|
|
||||||
"rest_framework", # Django REST Framework
|
|
||||||
"drf_spectacular", # OpenAPI 3.0 documentation
|
|
||||||
"corsheaders", # CORS headers for API
|
|
||||||
"pghistory", # django-pghistory
|
|
||||||
"pgtrigger", # Required by django-pghistory
|
|
||||||
"allauth",
|
|
||||||
"allauth.account",
|
|
||||||
"allauth.socialaccount",
|
|
||||||
"allauth.socialaccount.providers.google",
|
|
||||||
"allauth.socialaccount.providers.discord",
|
|
||||||
"django_cleanup",
|
|
||||||
"django_filters",
|
|
||||||
"django_htmx",
|
|
||||||
"whitenoise",
|
|
||||||
"django_tailwind_cli",
|
|
||||||
"autocomplete", # Django HTMX Autocomplete
|
|
||||||
"health_check", # Health checks
|
|
||||||
"health_check.db",
|
|
||||||
"health_check.cache",
|
|
||||||
"health_check.storage",
|
|
||||||
"health_check.contrib.migrations",
|
|
||||||
"health_check.contrib.redis",
|
|
||||||
]
|
|
||||||
|
|
||||||
LOCAL_APPS = [
|
|
||||||
"core",
|
|
||||||
"accounts",
|
|
||||||
"parks",
|
|
||||||
"rides",
|
|
||||||
"email_service",
|
|
||||||
"media.apps.MediaConfig",
|
|
||||||
"moderation",
|
|
||||||
"location",
|
|
||||||
]
|
|
||||||
|
|
||||||
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
|
|
||||||
|
|
||||||
MIDDLEWARE = [
|
|
||||||
"django.middleware.cache.UpdateCacheMiddleware",
|
|
||||||
"corsheaders.middleware.CorsMiddleware", # CORS middleware for API
|
|
||||||
"django.middleware.security.SecurityMiddleware",
|
|
||||||
"whitenoise.middleware.WhiteNoiseMiddleware",
|
|
||||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
|
||||||
"django.middleware.common.CommonMiddleware",
|
|
||||||
"django.middleware.csrf.CsrfViewMiddleware",
|
|
||||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
|
||||||
"django.contrib.messages.middleware.MessageMiddleware",
|
|
||||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
|
||||||
"core.middleware.PgHistoryContextMiddleware", # Add history context tracking
|
|
||||||
"allauth.account.middleware.AccountMiddleware",
|
|
||||||
"django.middleware.cache.FetchFromCacheMiddleware",
|
|
||||||
"django_htmx.middleware.HtmxMiddleware",
|
|
||||||
"core.middleware.PageViewMiddleware", # Add our page view tracking
|
|
||||||
]
|
|
||||||
|
|
||||||
ROOT_URLCONF = "thrillwiki.urls"
|
|
||||||
|
|
||||||
TEMPLATES = [
|
|
||||||
{
|
|
||||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
|
||||||
"DIRS": [BASE_DIR / "templates"],
|
|
||||||
"APP_DIRS": True,
|
|
||||||
"OPTIONS": {
|
|
||||||
"context_processors": [
|
|
||||||
"django.template.context_processors.debug",
|
|
||||||
"django.template.context_processors.request",
|
|
||||||
"django.contrib.auth.context_processors.auth",
|
|
||||||
"django.contrib.messages.context_processors.messages",
|
|
||||||
"moderation.context_processors.moderation_access",
|
|
||||||
]
|
|
||||||
},
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
WSGI_APPLICATION = "thrillwiki.wsgi.application"
|
|
||||||
|
|
||||||
# Password validation
|
|
||||||
AUTH_PASSWORD_VALIDATORS = [
|
|
||||||
{
|
|
||||||
"NAME": (
|
|
||||||
"django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
# Internationalization
|
|
||||||
LANGUAGE_CODE = "en-us"
|
|
||||||
TIME_ZONE = "America/New_York"
|
|
||||||
USE_I18N = True
|
|
||||||
USE_TZ = True
|
|
||||||
|
|
||||||
# Static files (CSS, JavaScript, Images)
|
|
||||||
STATIC_URL = "static/"
|
|
||||||
STATICFILES_DIRS = [BASE_DIR / "static"]
|
|
||||||
STATIC_ROOT = BASE_DIR / "staticfiles"
|
|
||||||
|
|
||||||
# Media files
|
|
||||||
MEDIA_URL = "/media/"
|
|
||||||
MEDIA_ROOT = BASE_DIR / "media"
|
|
||||||
|
|
||||||
# Default primary key field type
|
|
||||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
|
||||||
|
|
||||||
# Authentication settings
|
|
||||||
AUTHENTICATION_BACKENDS = [
|
|
||||||
"django.contrib.auth.backends.ModelBackend",
|
|
||||||
"allauth.account.auth_backends.AuthenticationBackend",
|
|
||||||
]
|
|
||||||
|
|
||||||
# django-allauth settings
|
|
||||||
SITE_ID = 1
|
|
||||||
ACCOUNT_SIGNUP_FIELDS = ["email*", "username*", "password1*", "password2*"]
|
|
||||||
ACCOUNT_LOGIN_METHODS = {"email", "username"}
|
|
||||||
ACCOUNT_EMAIL_VERIFICATION = "optional"
|
|
||||||
LOGIN_REDIRECT_URL = "/"
|
|
||||||
ACCOUNT_LOGOUT_REDIRECT_URL = "/"
|
|
||||||
|
|
||||||
# Custom adapters
|
|
||||||
ACCOUNT_ADAPTER = "accounts.adapters.CustomAccountAdapter"
|
|
||||||
SOCIALACCOUNT_ADAPTER = "accounts.adapters.CustomSocialAccountAdapter"
|
|
||||||
|
|
||||||
# Social account settings
|
|
||||||
SOCIALACCOUNT_PROVIDERS = {
|
|
||||||
"google": {
|
|
||||||
"SCOPE": [
|
|
||||||
"profile",
|
|
||||||
"email",
|
|
||||||
],
|
|
||||||
"AUTH_PARAMS": {"access_type": "online"},
|
|
||||||
},
|
|
||||||
"discord": {
|
|
||||||
"SCOPE": ["identify", "email"],
|
|
||||||
"OAUTH_PKCE_ENABLED": True,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Additional social account settings
|
|
||||||
SOCIALACCOUNT_LOGIN_ON_GET = True
|
|
||||||
SOCIALACCOUNT_AUTO_SIGNUP = False
|
|
||||||
SOCIALACCOUNT_STORE_TOKENS = True
|
|
||||||
|
|
||||||
# Custom User Model
|
|
||||||
AUTH_USER_MODEL = "accounts.User"
|
|
||||||
|
|
||||||
# Autocomplete configuration
|
|
||||||
AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = False
|
|
||||||
|
|
||||||
# Tailwind configuration
|
|
||||||
TAILWIND_CLI_CONFIG_FILE = BASE_DIR / "tailwind.config.js"
|
|
||||||
TAILWIND_CLI_SRC_CSS = BASE_DIR / "static/css/src/input.css"
|
|
||||||
TAILWIND_CLI_DIST_CSS = BASE_DIR / "static/css/tailwind.css"
|
|
||||||
|
|
||||||
# Test runner
|
|
||||||
TEST_RUNNER = "django.test.runner.DiscoverRunner"
|
|
||||||
|
|
||||||
# Road Trip Service Settings
|
|
||||||
ROADTRIP_CACHE_TIMEOUT = 3600 * 24 # 24 hours for geocoding
|
|
||||||
ROADTRIP_ROUTE_CACHE_TIMEOUT = 3600 * 6 # 6 hours for routes
|
|
||||||
ROADTRIP_MAX_REQUESTS_PER_SECOND = 1 # Respect OSM rate limits
|
|
||||||
ROADTRIP_USER_AGENT = "ThrillWiki Road Trip Planner (https://thrillwiki.com)"
|
|
||||||
ROADTRIP_REQUEST_TIMEOUT = 10 # seconds
|
|
||||||
ROADTRIP_MAX_RETRIES = 3
|
|
||||||
ROADTRIP_BACKOFF_FACTOR = 2
|
|
||||||
|
|
||||||
# Django REST Framework Settings
|
|
||||||
REST_FRAMEWORK = {
|
|
||||||
"DEFAULT_AUTHENTICATION_CLASSES": [
|
|
||||||
"rest_framework.authentication.SessionAuthentication",
|
|
||||||
"rest_framework.authentication.TokenAuthentication",
|
|
||||||
],
|
|
||||||
"DEFAULT_PERMISSION_CLASSES": [
|
|
||||||
"rest_framework.permissions.IsAuthenticated",
|
|
||||||
],
|
|
||||||
"DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination",
|
|
||||||
"PAGE_SIZE": 20,
|
|
||||||
"DEFAULT_VERSIONING_CLASS": "rest_framework.versioning.AcceptHeaderVersioning",
|
|
||||||
"DEFAULT_VERSION": "v1",
|
|
||||||
"ALLOWED_VERSIONS": ["v1"],
|
|
||||||
"DEFAULT_RENDERER_CLASSES": [
|
|
||||||
"rest_framework.renderers.JSONRenderer",
|
|
||||||
"rest_framework.renderers.BrowsableAPIRenderer",
|
|
||||||
],
|
|
||||||
"DEFAULT_PARSER_CLASSES": [
|
|
||||||
"rest_framework.parsers.JSONParser",
|
|
||||||
"rest_framework.parsers.FormParser",
|
|
||||||
"rest_framework.parsers.MultiPartParser",
|
|
||||||
],
|
|
||||||
"EXCEPTION_HANDLER": "core.api.exceptions.custom_exception_handler",
|
|
||||||
"DEFAULT_FILTER_BACKENDS": [
|
|
||||||
"django_filters.rest_framework.DjangoFilterBackend",
|
|
||||||
"rest_framework.filters.SearchFilter",
|
|
||||||
"rest_framework.filters.OrderingFilter",
|
|
||||||
],
|
|
||||||
"TEST_REQUEST_DEFAULT_FORMAT": "json",
|
|
||||||
"NON_FIELD_ERRORS_KEY": "non_field_errors",
|
|
||||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
|
|
||||||
}
|
|
||||||
|
|
||||||
# CORS Settings for API
|
|
||||||
CORS_ALLOWED_ORIGINS = env("CORS_ALLOWED_ORIGINS", default=[]) # type: ignore[arg-type]
|
|
||||||
CORS_ALLOW_CREDENTIALS = True
|
|
||||||
CORS_ALLOW_ALL_ORIGINS = env(
|
|
||||||
"CORS_ALLOW_ALL_ORIGINS", default=False
|
|
||||||
) # type: ignore[arg-type]
|
|
||||||
|
|
||||||
# API-specific settings
|
|
||||||
API_RATE_LIMIT_PER_MINUTE = env.int(
|
|
||||||
"API_RATE_LIMIT_PER_MINUTE", default=60
|
|
||||||
) # type: ignore[arg-type]
|
|
||||||
API_RATE_LIMIT_PER_HOUR = env.int(
|
|
||||||
"API_RATE_LIMIT_PER_HOUR", default=1000
|
|
||||||
) # type: ignore[arg-type]
|
|
||||||
|
|
||||||
# drf-spectacular settings
|
|
||||||
SPECTACULAR_SETTINGS = {
|
|
||||||
"TITLE": "ThrillWiki API",
|
|
||||||
"DESCRIPTION": "Comprehensive theme park and ride information API",
|
|
||||||
"VERSION": "1.0.0",
|
|
||||||
"SERVE_INCLUDE_SCHEMA": False,
|
|
||||||
"COMPONENT_SPLIT_REQUEST": True,
|
|
||||||
"TAGS": [
|
|
||||||
{"name": "parks", "description": "Theme park operations"},
|
|
||||||
{"name": "rides", "description": "Ride information and management"},
|
|
||||||
{"name": "locations", "description": "Geographic location services"},
|
|
||||||
{"name": "accounts", "description": "User account management"},
|
|
||||||
{"name": "media", "description": "Media and image management"},
|
|
||||||
{"name": "moderation", "description": "Content moderation"},
|
|
||||||
],
|
|
||||||
"SCHEMA_PATH_PREFIX": "/api/",
|
|
||||||
"DEFAULT_GENERATOR_CLASS": "drf_spectacular.generators.SchemaGenerator",
|
|
||||||
"SERVE_PERMISSIONS": ["rest_framework.permissions.AllowAny"],
|
|
||||||
"SWAGGER_UI_SETTINGS": {
|
|
||||||
"deepLinking": True,
|
|
||||||
"persistAuthorization": True,
|
|
||||||
"displayOperationId": False,
|
|
||||||
"displayRequestDuration": True,
|
|
||||||
},
|
|
||||||
"REDOC_UI_SETTINGS": {
|
|
||||||
"hideDownloadButton": False,
|
|
||||||
"hideHostname": False,
|
|
||||||
"hideLoading": False,
|
|
||||||
"hideSchemaPattern": True,
|
|
||||||
"scrollYOffset": 0,
|
|
||||||
"theme": {"colors": {"primary": {"main": "#1976d2"}}},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Health Check Configuration
|
|
||||||
HEALTH_CHECK = {
|
|
||||||
"DISK_USAGE_MAX": 90, # Fail if disk usage is over 90%
|
|
||||||
"MEMORY_MIN": 100, # Fail if less than 100MB available memory
|
|
||||||
}
|
|
||||||
|
|
||||||
# Custom health check backends
|
|
||||||
HEALTH_CHECK_BACKENDS = [
|
|
||||||
"health_check.db",
|
|
||||||
"health_check.cache",
|
|
||||||
"health_check.storage",
|
|
||||||
"core.health_checks.custom_checks.CacheHealthCheck",
|
|
||||||
"core.health_checks.custom_checks.DatabasePerformanceCheck",
|
|
||||||
"core.health_checks.custom_checks.ApplicationHealthCheck",
|
|
||||||
"core.health_checks.custom_checks.ExternalServiceHealthCheck",
|
|
||||||
"core.health_checks.custom_checks.DiskSpaceHealthCheck",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Enhanced Cache Configuration
|
|
||||||
DJANGO_REDIS_CACHE_BACKEND = "django_redis.cache.RedisCache"
|
|
||||||
DJANGO_REDIS_CLIENT_CLASS = "django_redis.client.DefaultClient"
|
|
||||||
|
|
||||||
CACHES = {
|
|
||||||
"default": {
|
|
||||||
"BACKEND": DJANGO_REDIS_CACHE_BACKEND,
|
|
||||||
# type: ignore[arg-type]
|
|
||||||
# pyright: ignore[reportArgumentType]
|
|
||||||
# pyright: ignore[reportArgumentType]
|
|
||||||
# type: ignore
|
|
||||||
"LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/1"),
|
|
||||||
"OPTIONS": {
|
|
||||||
"CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS,
|
|
||||||
"PARSER_CLASS": "redis.connection.HiredisParser",
|
|
||||||
"CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
|
|
||||||
"CONNECTION_POOL_CLASS_KWARGS": {
|
|
||||||
"max_connections": 50,
|
|
||||||
"timeout": 20,
|
|
||||||
},
|
|
||||||
"COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor",
|
|
||||||
"IGNORE_EXCEPTIONS": True,
|
|
||||||
},
|
|
||||||
"KEY_PREFIX": "thrillwiki",
|
|
||||||
"VERSION": 1,
|
|
||||||
},
|
|
||||||
"sessions": {
|
|
||||||
"BACKEND": DJANGO_REDIS_CACHE_BACKEND,
|
|
||||||
# type: ignore[arg-type]
|
|
||||||
# type: ignore
|
|
||||||
"LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/2"),
|
|
||||||
"OPTIONS": {
|
|
||||||
"CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"api": {
|
|
||||||
"BACKEND": DJANGO_REDIS_CACHE_BACKEND,
|
|
||||||
# type: ignore[arg-type]
|
|
||||||
"LOCATION": env("REDIS_URL", default="redis://127.0.0.1:6379/3"),
|
|
||||||
"OPTIONS": {
|
|
||||||
"CLIENT_CLASS": DJANGO_REDIS_CLIENT_CLASS,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Use Redis for sessions
|
|
||||||
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
|
|
||||||
SESSION_CACHE_ALIAS = "sessions"
|
|
||||||
SESSION_COOKIE_AGE = 86400 # 24 hours
|
|
||||||
|
|
||||||
# Cache middleware settings
|
|
||||||
CACHE_MIDDLEWARE_SECONDS = 300 # 5 minutes
|
|
||||||
CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki"
|
|
||||||
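The base settings above register three Redis-backed cache aliases ("default", "sessions", "api"). A minimal sketch of how a non-default alias is reached from application code (the key and value are illustrative):

from django.core.cache import caches

api_cache = caches["api"]  # the alias defined in CACHES above
api_cache.set("trending:parks", [1, 2, 3], timeout=300)  # 300s matches CACHE_MIDDLEWARE_SECONDS
trending_ids = api_cache.get("trending:parks", [])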
@@ -1,189 +0,0 @@
|
|||||||
"""
|
|
||||||
Local development settings for thrillwiki project.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
from .base import *
|
|
||||||
from ..settings import database
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., email.EMAIL_HOST
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
|
|
||||||
|
|
||||||
# Import database configuration
|
|
||||||
DATABASES = database.DATABASES
|
|
||||||
|
|
||||||
# Development-specific settings
|
|
||||||
DEBUG = True
|
|
||||||
|
|
||||||
# For local development, allow all hosts
|
|
||||||
ALLOWED_HOSTS = ["*"]
|
|
||||||
|
|
||||||
# CSRF trusted origins for local development
|
|
||||||
CSRF_TRUSTED_ORIGINS = [
|
|
||||||
"http://localhost:8000",
|
|
||||||
"http://127.0.0.1:8000",
|
|
||||||
"https://beta.thrillwiki.com",
|
|
||||||
]
|
|
||||||
|
|
||||||
GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib"
|
|
||||||
GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib"
|
|
||||||
|
|
||||||
# Local cache configuration
|
|
||||||
LOC_MEM_CACHE_BACKEND = "django.core.cache.backends.locmem.LocMemCache"
|
|
||||||
|
|
||||||
CACHES = {
|
|
||||||
"default": {
|
|
||||||
"BACKEND": LOC_MEM_CACHE_BACKEND,
|
|
||||||
"LOCATION": "unique-snowflake",
|
|
||||||
"TIMEOUT": 300, # 5 minutes
|
|
||||||
"OPTIONS": {"MAX_ENTRIES": 1000},
|
|
||||||
},
|
|
||||||
"sessions": {
|
|
||||||
"BACKEND": LOC_MEM_CACHE_BACKEND,
|
|
||||||
"LOCATION": "sessions-cache",
|
|
||||||
"TIMEOUT": 86400, # 24 hours (same as SESSION_COOKIE_AGE)
|
|
||||||
"OPTIONS": {"MAX_ENTRIES": 5000},
|
|
||||||
},
|
|
||||||
"api": {
|
|
||||||
"BACKEND": LOC_MEM_CACHE_BACKEND,
|
|
||||||
"LOCATION": "api-cache",
|
|
||||||
"TIMEOUT": 300, # 5 minutes
|
|
||||||
"OPTIONS": {"MAX_ENTRIES": 2000},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Development-friendly cache settings
|
|
||||||
CACHE_MIDDLEWARE_SECONDS = 1 # Very short cache for development
|
|
||||||
CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki_dev"
|
|
||||||
|
|
||||||
# Development email backend
|
|
||||||
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
|
|
||||||
|
|
||||||
# Security settings for development
|
|
||||||
SECURE_SSL_REDIRECT = False
|
|
||||||
SESSION_COOKIE_SECURE = False
|
|
||||||
CSRF_COOKIE_SECURE = False
|
|
||||||
|
|
||||||
# Development monitoring tools
|
|
||||||
DEVELOPMENT_APPS = [
|
|
||||||
"silk",
|
|
||||||
"debug_toolbar",
|
|
||||||
"nplusone.ext.django",
|
|
||||||
"django_extensions",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Add development apps if available
|
|
||||||
for app in DEVELOPMENT_APPS:
|
|
||||||
if app not in INSTALLED_APPS:
|
|
||||||
INSTALLED_APPS.append(app)
|
|
||||||
|
|
||||||
# Development middleware
|
|
||||||
DEVELOPMENT_MIDDLEWARE = [
|
|
||||||
"silk.middleware.SilkyMiddleware",
|
|
||||||
"debug_toolbar.middleware.DebugToolbarMiddleware",
|
|
||||||
"nplusone.ext.django.NPlusOneMiddleware",
|
|
||||||
"core.middleware.performance_middleware.PerformanceMiddleware",
|
|
||||||
"core.middleware.performance_middleware.QueryCountMiddleware",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Add development middleware
|
|
||||||
for middleware in DEVELOPMENT_MIDDLEWARE:
|
|
||||||
if middleware not in MIDDLEWARE:
|
|
||||||
MIDDLEWARE.insert(1, middleware)  # Insert near the top, right after UpdateCacheMiddleware
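Because each development middleware is inserted at index 1, the entries end up in reverse list order near the top of the stack. Roughly, the head of MIDDLEWARE after this loop is:

# 0: django.middleware.cache.UpdateCacheMiddleware
# 1: core.middleware.performance_middleware.QueryCountMiddleware
# 2: core.middleware.performance_middleware.PerformanceMiddleware
# 3: nplusone.ext.django.NPlusOneMiddleware
# 4: debug_toolbar.middleware.DebugToolbarMiddleware
# 5: silk.middleware.SilkyMiddleware
# 6: corsheaders.middleware.CorsMiddleware
# ... (remaining base middleware unchanged)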
|
|
||||||
|
|
||||||
# Debug toolbar configuration
|
|
||||||
INTERNAL_IPS = ["127.0.0.1", "::1"]
|
|
||||||
|
|
||||||
# Silk configuration for development
|
|
||||||
# Disable profiler to avoid silk_profile installation issues
|
|
||||||
SILKY_PYTHON_PROFILER = False
|
|
||||||
SILKY_PYTHON_PROFILER_BINARY = False # Disable binary profiler
|
|
||||||
SILKY_PYTHON_PROFILER_RESULT_PATH = (
|
|
||||||
BASE_DIR / "profiles"
|
|
||||||
) # Not needed when profiler is disabled
|
|
||||||
SILKY_AUTHENTICATION = True # Require login to access Silk
|
|
||||||
SILKY_AUTHORISATION = True # Enable authorization
|
|
||||||
SILKY_MAX_REQUEST_BODY_SIZE = -1 # Don't limit request body size
|
|
||||||
# Limit response body size to 1KB for performance
|
|
||||||
SILKY_MAX_RESPONSE_BODY_SIZE = 1024
|
|
||||||
SILKY_META = True # Record metadata about requests
|
|
||||||
|
|
||||||
# NPlusOne configuration
|
|
||||||
NPLUSONE_LOGGER = logging.getLogger("nplusone")
|
|
||||||
NPLUSONE_LOG_LEVEL = logging.WARN
|
|
||||||
|
|
||||||
# Enhanced development logging
|
|
||||||
LOGGING = {
|
|
||||||
"version": 1,
|
|
||||||
"disable_existing_loggers": False,
|
|
||||||
"formatters": {
|
|
||||||
"verbose": {
|
|
||||||
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
|
|
||||||
"style": "{",
|
|
||||||
},
|
|
||||||
"json": {
|
|
||||||
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
|
|
||||||
"format": (
|
|
||||||
"%(levelname)s %(asctime)s %(module)s %(process)d "
|
|
||||||
"%(thread)d %(message)s"
|
|
||||||
),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"handlers": {
|
|
||||||
"console": {
|
|
||||||
"class": "logging.StreamHandler",
|
|
||||||
"formatter": "verbose",
|
|
||||||
},
|
|
||||||
"file": {
|
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
|
||||||
"filename": BASE_DIR / "logs" / "thrillwiki.log",
|
|
||||||
"maxBytes": 1024 * 1024 * 10, # 10MB
|
|
||||||
"backupCount": 5,
|
|
||||||
"formatter": "json",
|
|
||||||
},
|
|
||||||
"performance": {
|
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
|
||||||
"filename": BASE_DIR / "logs" / "performance.log",
|
|
||||||
"maxBytes": 1024 * 1024 * 10, # 10MB
|
|
||||||
"backupCount": 5,
|
|
||||||
"formatter": "json",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"root": {
|
|
||||||
"level": "INFO",
|
|
||||||
"handlers": ["console"],
|
|
||||||
},
|
|
||||||
"loggers": {
|
|
||||||
"django": {
|
|
||||||
"handlers": ["file"],
|
|
||||||
"level": "INFO",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
"django.db.backends": {
|
|
||||||
"handlers": ["console"],
|
|
||||||
"level": "DEBUG",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
"thrillwiki": {
|
|
||||||
"handlers": ["console", "file"],
|
|
||||||
"level": "DEBUG",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
"performance": {
|
|
||||||
"handlers": ["performance"],
|
|
||||||
"level": "INFO",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
"query_optimization": {
|
|
||||||
"handlers": ["console", "file"],
|
|
||||||
"level": "WARNING",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
"nplusone": {
|
|
||||||
"handlers": ["console"],
|
|
||||||
"level": "WARNING",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,103 +0,0 @@
|
|||||||
"""
|
|
||||||
Production settings for thrillwiki project.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., base.BASE_DIR, base***REMOVED***
|
|
||||||
from . import base
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., database.DATABASES
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., email.EMAIL_HOST
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., email.EMAIL_HOST
|
|
||||||
|
|
||||||
# Import the module and use its members, e.g., security.SECURE_HSTS_SECONDS
|
|
||||||
|
|
||||||
# Production settings
|
|
||||||
DEBUG = False
|
|
||||||
|
|
||||||
# Allowed hosts must be explicitly set in production
|
|
||||||
ALLOWED_HOSTS = base.env.list("ALLOWED_HOSTS")
|
|
||||||
|
|
||||||
# CSRF trusted origins for production
|
|
||||||
CSRF_TRUSTED_ORIGINS = base.env.list("CSRF_TRUSTED_ORIGINS")
|
|
||||||
|
|
||||||
# Security settings for production
|
|
||||||
SECURE_SSL_REDIRECT = True
|
|
||||||
SESSION_COOKIE_SECURE = True
|
|
||||||
CSRF_COOKIE_SECURE = True
|
|
||||||
SECURE_HSTS_SECONDS = 31536000 # 1 year
|
|
||||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
|
|
||||||
SECURE_HSTS_PRELOAD = True
|
|
||||||
|
|
||||||
# Production logging
|
|
||||||
LOGGING = {
|
|
||||||
"version": 1,
|
|
||||||
"disable_existing_loggers": False,
|
|
||||||
"formatters": {
|
|
||||||
"verbose": {
|
|
||||||
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
|
|
||||||
"style": "{",
|
|
||||||
},
|
|
||||||
"simple": {
|
|
||||||
"format": "{levelname} {message}",
|
|
||||||
"style": "{",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"handlers": {
|
|
||||||
"file": {
|
|
||||||
"level": "INFO",
|
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
|
||||||
"filename": base.BASE_DIR / "logs" / "django.log",
|
|
||||||
"maxBytes": 1024 * 1024 * 15, # 15MB
|
|
||||||
"backupCount": 10,
|
|
||||||
"formatter": "verbose",
|
|
||||||
},
|
|
||||||
"error_file": {
|
|
||||||
"level": "ERROR",
|
|
||||||
"class": "logging.handlers.RotatingFileHandler",
|
|
||||||
"filename": base.BASE_DIR / "logs" / "django_error.log",
|
|
||||||
"maxBytes": 1024 * 1024 * 15, # 15MB
|
|
||||||
"backupCount": 10,
|
|
||||||
"formatter": "verbose",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"root": {
|
|
||||||
"handlers": ["file"],
|
|
||||||
"level": "INFO",
|
|
||||||
},
|
|
||||||
"loggers": {
|
|
||||||
"django": {
|
|
||||||
"handlers": ["file", "error_file"],
|
|
||||||
"level": "INFO",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
"thrillwiki": {
|
|
||||||
"handlers": ["file", "error_file"],
|
|
||||||
"level": "INFO",
|
|
||||||
"propagate": False,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
# Static files collection for production
|
|
||||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
|
||||||
|
|
||||||
# Cache settings for production (Redis recommended)
|
|
||||||
redis_url = base.env.str("REDIS_URL", default=None)
|
|
||||||
if redis_url:
|
|
||||||
CACHES = {
|
|
||||||
"default": {
|
|
||||||
"BACKEND": "django_redis.cache.RedisCache",
|
|
||||||
"LOCATION": redis_url,
|
|
||||||
"OPTIONS": {
|
|
||||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Use Redis for sessions in production
|
|
||||||
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
|
|
||||||
SESSION_CACHE_ALIAS = "default"
|
|
||||||
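In production the host and origin lists are read with env.list, which splits a comma-separated environment value. A minimal sketch (host names are placeholders):

import environ

env = environ.Env()
# e.g. ALLOWED_HOSTS=thrillwiki.com,www.thrillwiki.com in the environment
hosts = env.list("ALLOWED_HOSTS", default=["thrillwiki.com", "www.thrillwiki.com"])
# -> ["thrillwiki.com", "www.thrillwiki.com"]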
@@ -1,65 +0,0 @@
|
|||||||
"""
|
|
||||||
Test settings for thrillwiki project.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from .base import *
|
|
||||||
|
|
||||||
# Test-specific settings
|
|
||||||
DEBUG = False
|
|
||||||
|
|
||||||
# Use in-memory database for faster tests
|
|
||||||
DATABASES = {
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.contrib.gis.db.backends.spatialite",
|
|
||||||
"NAME": ":memory:",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Use in-memory cache for tests
|
|
||||||
CACHES = {
|
|
||||||
"default": {
|
|
||||||
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
|
||||||
"LOCATION": "test-cache",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Disable migrations for faster tests
|
|
||||||
|
|
||||||
|
|
||||||
class DisableMigrations:
|
|
||||||
def __contains__(self, item):
|
|
||||||
return True
|
|
||||||
|
|
||||||
def __getitem__(self, item):
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
MIGRATION_MODULES = DisableMigrations()
|
|
||||||
|
|
||||||
# Email backend for tests
|
|
||||||
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
|
|
||||||
|
|
||||||
# Password hashers for faster tests
|
|
||||||
PASSWORD_HASHERS = [
|
|
||||||
"django.contrib.auth.hashers.MD5PasswordHasher",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Disable logging during tests
|
|
||||||
LOGGING_CONFIG = None
|
|
||||||
|
|
||||||
# Media files for tests
|
|
||||||
MEDIA_ROOT = BASE_DIR / "test_media"
|
|
||||||
|
|
||||||
# Static files for tests
|
|
||||||
STATIC_ROOT = BASE_DIR / "test_static"
|
|
||||||
|
|
||||||
# Disable Turnstile for tests
|
|
||||||
TURNSTILE_SITE_KEY = "test-key"
|
|
||||||
TURNSTILE_SECRET_KEY = "test-secret"
|
|
||||||
|
|
||||||
# Test-specific middleware (remove caching middleware)
|
|
||||||
MIDDLEWARE = [m for m in MIDDLEWARE if "cache" not in m.lower()]
|
|
||||||
|
|
||||||
# Celery settings for tests (if Celery is used)
|
|
||||||
CELERY_TASK_ALWAYS_EAGER = True
|
|
||||||
CELERY_TASK_EAGER_PROPAGATES = True
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
"""
|
|
||||||
Test Django settings for thrillwiki accounts app.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Use in-memory database for tests
|
|
||||||
DATABASES = {
|
|
||||||
"default": {
|
|
||||||
"ENGINE": "django.contrib.gis.db.backends.postgis",
|
|
||||||
"NAME": "test_db",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Use a faster password hasher for tests
|
|
||||||
PASSWORD_HASHERS = [
|
|
||||||
"django.contrib.auth.hashers.MD5PasswordHasher",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Disable whitenoise for tests
|
|
||||||
WHITENOISE_AUTOREFRESH = True
|
|
||||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
|
||||||
|
|
||||||
INSTALLED_APPS = [
|
|
||||||
"django.contrib.admin",
|
|
||||||
"django.contrib.auth",
|
|
||||||
"django.contrib.contenttypes",
|
|
||||||
"django.contrib.sessions",
|
|
||||||
"django.contrib.messages",
|
|
||||||
"django.contrib.staticfiles",
|
|
||||||
"django.contrib.sites",
|
|
||||||
"allauth",
|
|
||||||
"allauth.account",
|
|
||||||
"allauth.socialaccount",
|
|
||||||
"accounts",
|
|
||||||
"core",
|
|
||||||
"pghistory",
|
|
||||||
"pgtrigger",
|
|
||||||
"email_service",
|
|
||||||
"parks",
|
|
||||||
"rides",
|
|
||||||
"media.apps.MediaConfig",
|
|
||||||
]
|
|
||||||
|
|
||||||
GDAL_LIBRARY_PATH = "/opt/homebrew/lib/libgdal.dylib"
|
|
||||||
GEOS_LIBRARY_PATH = "/opt/homebrew/lib/libgeos_c.dylib"
|
|
||||||
@@ -1 +0,0 @@
# Settings modules package
@@ -1,28 +0,0 @@
"""
Database configuration for thrillwiki project.
"""

import environ

env = environ.Env()

# Database configuration
db_config = env.db("DATABASE_URL")

# Force PostGIS backend for spatial data support
db_config["ENGINE"] = "django.contrib.gis.db.backends.postgis"

DATABASES = {
    "default": db_config,
}

# GeoDjango Settings - Environment specific
GDAL_LIBRARY_PATH = env("GDAL_LIBRARY_PATH", default=None)
GEOS_LIBRARY_PATH = env("GEOS_LIBRARY_PATH", default=None)

# Cache settings
CACHES = {"default": env.cache("CACHE_URL", default="locmemcache://")}

CACHE_MIDDLEWARE_SECONDS = env.int("CACHE_MIDDLEWARE_SECONDS", default=300)  # 5 minutes
CACHE_MIDDLEWARE_KEY_PREFIX = env("CACHE_MIDDLEWARE_KEY_PREFIX", default="thrillwiki")
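For reference, env.db() parses the DATABASE_URL connection string into the usual Django keys before the PostGIS override above is applied. A minimal sketch (credentials are placeholders; the exact ENGINE string depends on the django-environ version):

import environ

env = environ.Env()
# With DATABASE_URL=postgres://thrillwiki:secret@localhost:5432/thrillwiki
config = env.db("DATABASE_URL")
# config holds roughly:
#   NAME="thrillwiki", USER="thrillwiki", PASSWORD="secret",
#   HOST="localhost", PORT=5432, ENGINE=<a PostgreSQL backend>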
@@ -1,24 +0,0 @@
"""
Email configuration for thrillwiki project.
"""

import environ

env = environ.Env()

# Email settings
EMAIL_BACKEND = env(
    "EMAIL_BACKEND", default="email_service.backends.ForwardEmailBackend"
)
FORWARD_EMAIL_BASE_URL = env(
    "FORWARD_EMAIL_BASE_URL", default="https://api.forwardemail.net"
)
SERVER_EMAIL = env("SERVER_EMAIL", default="django_webmaster@thrillwiki.com")

# Email URLs can be configured using EMAIL_URL environment variable
# Example: EMAIL_URL=smtp://user:pass@localhost:587
EMAIL_URL = env("EMAIL_URL", default=None)

if EMAIL_URL:
    # env.email_url() expects the variable *name* and parses the URL it contains
    email_config = env.email_url("EMAIL_URL")
    vars().update(email_config)
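When EMAIL_URL is set, env.email_url() expands it into Django's individual mail settings. A rough sketch of the mapping for the documented example URL (key names come from django-environ and may vary slightly by version):

import environ

env = environ.Env()
# With EMAIL_URL=smtp://user:pass@localhost:587 in the environment,
# env.email_url("EMAIL_URL") returns roughly:
#   {"EMAIL_BACKEND": "django.core.mail.backends.smtp.EmailBackend",
#    "EMAIL_HOST": "localhost", "EMAIL_PORT": 587,
#    "EMAIL_HOST_USER": "user", "EMAIL_HOST_PASSWORD": "pass"}
email_config = env.email_url("EMAIL_URL", default="smtp://user:pass@localhost:587")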
@@ -1,36 +0,0 @@
"""
Security configuration for thrillwiki project.
"""

import environ

env = environ.Env()

# Cloudflare Turnstile settings
TURNSTILE_SITE_KEY = env("TURNSTILE_SITE_KEY", default="")
TURNSTILE_SECRET_KEY = env("TURNSTILE_SECRET_KEY", default="")
TURNSTILE_VERIFY_URL = env(
    "TURNSTILE_VERIFY_URL",
    default="https://challenges.cloudflare.com/turnstile/v0/siteverify",
)

# Security headers and settings (for production)
SECURE_BROWSER_XSS_FILTER = env.bool("SECURE_BROWSER_XSS_FILTER", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
    "SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True
)
SECURE_HSTS_SECONDS = env.int("SECURE_HSTS_SECONDS", default=31536000)  # 1 year
SECURE_REDIRECT_EXEMPT = env.list("SECURE_REDIRECT_EXEMPT", default=[])
SECURE_SSL_REDIRECT = env.bool("SECURE_SSL_REDIRECT", default=False)
SECURE_PROXY_SSL_HEADER = env.tuple("SECURE_PROXY_SSL_HEADER", default=None)

# Session security
SESSION_COOKIE_SECURE = env.bool("SESSION_COOKIE_SECURE", default=False)
SESSION_COOKIE_HTTPONLY = env.bool("SESSION_COOKIE_HTTPONLY", default=True)
SESSION_COOKIE_SAMESITE = env("SESSION_COOKIE_SAMESITE", default="Lax")

# CSRF security
CSRF_COOKIE_SECURE = env.bool("CSRF_COOKIE_SECURE", default=False)
CSRF_COOKIE_HTTPONLY = env.bool("CSRF_COOKIE_HTTPONLY", default=True)
CSRF_COOKIE_SAMESITE = env("CSRF_COOKIE_SAMESITE", default="Lax")
@@ -1,31 +0,0 @@
from django.contrib import admin
from django.utils.html import format_html
from .models import SlugHistory


@admin.register(SlugHistory)
class SlugHistoryAdmin(admin.ModelAdmin):
    list_display = ["content_object_link", "old_slug", "created_at"]
    list_filter = ["content_type", "created_at"]
    search_fields = ["old_slug", "object_id"]
    readonly_fields = ["content_type", "object_id", "old_slug", "created_at"]
    date_hierarchy = "created_at"
    ordering = ["-created_at"]

    def content_object_link(self, obj):
        """Create a link to the related object's admin page"""
        try:
            url = obj.content_object.get_absolute_url()
            return format_html('<a href="{}">{}</a>', url, str(obj.content_object))
        except (AttributeError, ValueError):
            return str(obj.content_object)

    content_object_link.short_description = "Object"

    def has_add_permission(self, request):
        """Disable manual creation of slug history records"""
        return False

    def has_change_permission(self, request, obj=None):
        """Disable editing of slug history records"""
        return False
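The SlugHistory model itself is not part of this hunk; based on the fields the admin references, it presumably looks roughly like the following (a sketch inferred from the admin, not the actual definition):

from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models


class SlugHistory(models.Model):
    # Fields inferred from SlugHistoryAdmin above; the real model may differ.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey("content_type", "object_id")
    old_slug = models.SlugField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)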
@@ -1,60 +0,0 @@
|
|||||||
from django.db import models
|
|
||||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
|
||||||
from django.contrib.contenttypes.models import ContentType
|
|
||||||
from django.utils import timezone
|
|
||||||
from django.db.models import Count
|
|
||||||
|
|
||||||
|
|
||||||
class PageView(models.Model):
|
|
||||||
content_type = models.ForeignKey(
|
|
||||||
ContentType, on_delete=models.CASCADE, related_name="page_views"
|
|
||||||
)
|
|
||||||
object_id = models.PositiveIntegerField()
|
|
||||||
content_object = GenericForeignKey("content_type", "object_id")
|
|
||||||
|
|
||||||
timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
|
|
||||||
ip_address = models.GenericIPAddressField()
|
|
||||||
user_agent = models.CharField(max_length=512, blank=True)
|
|
||||||
|
|
||||||
class Meta:
|
|
||||||
indexes = [
|
|
||||||
models.Index(fields=["timestamp"]),
|
|
||||||
models.Index(fields=["content_type", "object_id"]),
|
|
||||||
]
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_trending_items(cls, model_class, hours=24, limit=10):
|
|
||||||
"""Get trending items of a specific model class based on views in last X hours.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
model_class: The model class to get trending items for (e.g., Park, Ride)
|
|
||||||
hours (int): Number of hours to look back for views (default: 24)
|
|
||||||
limit (int): Maximum number of items to return (default: 10)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
QuerySet: The trending items ordered by view count
|
|
||||||
"""
|
|
||||||
content_type = ContentType.objects.get_for_model(model_class)
|
|
||||||
cutoff = timezone.now() - timezone.timedelta(hours=hours)
|
|
||||||
|
|
||||||
# Query through the ContentType relationship
|
|
||||||
item_ids = (
|
|
||||||
cls.objects.filter(content_type=content_type, timestamp__gte=cutoff)
|
|
||||||
.values("object_id")
|
|
||||||
.annotate(view_count=Count("id"))
|
|
||||||
.filter(view_count__gt=0)
|
|
||||||
.order_by("-view_count")
|
|
||||||
.values_list("object_id", flat=True)[:limit]
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get the actual items in the correct order
|
|
||||||
if item_ids:
|
|
||||||
# Materialize the queryset of IDs into a plain list
|
|
||||||
id_list = list(item_ids)
|
|
||||||
# Use Case/When to preserve the ordering
|
|
||||||
from django.db.models import Case, When
|
|
||||||
|
|
||||||
preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(id_list)])
|
|
||||||
return model_class.objects.filter(pk__in=id_list).order_by(preserved)
|
|
||||||
|
|
||||||
return model_class.objects.none()
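A minimal usage sketch of the classmethod above (Park is one of the models the docstring mentions; its import path is assumed):

from parks.models import Park  # assumed location of the Park model

trending_parks = PageView.get_trending_items(Park, hours=24, limit=5)
for park in trending_parks:
    print(park.pk)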
|
|
||||||
@@ -1 +0,0 @@
# Core API infrastructure for ThrillWiki
@@ -1,205 +0,0 @@
|
|||||||
"""
|
|
||||||
Custom exception handling for ThrillWiki API.
|
|
||||||
Provides standardized error responses following Django styleguide patterns.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
from django.http import Http404
|
|
||||||
from django.core.exceptions import (
|
|
||||||
PermissionDenied,
|
|
||||||
ValidationError as DjangoValidationError,
|
|
||||||
)
|
|
||||||
from rest_framework import status
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework.views import exception_handler
|
|
||||||
from rest_framework.exceptions import (
|
|
||||||
ValidationError as DRFValidationError,
|
|
||||||
NotFound,
|
|
||||||
PermissionDenied as DRFPermissionDenied,
|
|
||||||
)
|
|
||||||
|
|
||||||
from ..exceptions import ThrillWikiException
|
|
||||||
from ..logging import get_logger, log_exception
|
|
||||||
|
|
||||||
logger = get_logger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def custom_exception_handler(
|
|
||||||
exc: Exception, context: Dict[str, Any]
|
|
||||||
) -> Optional[Response]:
|
|
||||||
"""
|
|
||||||
Custom exception handler for DRF that provides standardized error responses.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Response with standardized error format or None to fallback to default handler
|
|
||||||
"""
|
|
||||||
# Call REST framework's default exception handler first
|
|
||||||
response = exception_handler(exc, context)
|
|
||||||
|
|
||||||
if response is not None:
|
|
||||||
# Standardize the error response format
|
|
||||||
custom_response_data = {
|
|
||||||
"status": "error",
|
|
||||||
"error": {
|
|
||||||
"code": _get_error_code(exc),
|
|
||||||
"message": _get_error_message(exc, response.data),
|
|
||||||
"details": _get_error_details(exc, response.data),
|
|
||||||
},
|
|
||||||
"data": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Add request context for debugging
|
|
||||||
if hasattr(context.get("request"), "user"):
|
|
||||||
custom_response_data["error"]["request_user"] = str(context["request"].user)
|
|
||||||
|
|
||||||
# Log the error for monitoring
|
|
||||||
log_exception(
|
|
||||||
logger,
|
|
||||||
exc,
|
|
||||||
context={"response_status": response.status_code},
|
|
||||||
request=context.get("request"),
|
|
||||||
)
|
|
||||||
|
|
||||||
response.data = custom_response_data
|
|
||||||
|
|
||||||
# Handle ThrillWiki custom exceptions
|
|
||||||
elif isinstance(exc, ThrillWikiException):
|
|
||||||
custom_response_data = {
|
|
||||||
"status": "error",
|
|
||||||
"error": exc.to_dict(),
|
|
||||||
"data": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
log_exception(
|
|
||||||
logger,
|
|
||||||
exc,
|
|
||||||
context={"response_status": exc.status_code},
|
|
||||||
request=context.get("request"),
|
|
||||||
)
|
|
||||||
response = Response(custom_response_data, status=exc.status_code)
|
|
||||||
|
|
||||||
# Handle specific Django exceptions that DRF doesn't catch
|
|
||||||
elif isinstance(exc, DjangoValidationError):
|
|
||||||
custom_response_data = {
|
|
||||||
"status": "error",
|
|
||||||
"error": {
|
|
||||||
"code": "VALIDATION_ERROR",
|
|
||||||
"message": "Validation failed",
|
|
||||||
"details": _format_django_validation_errors(exc),
|
|
||||||
},
|
|
||||||
"data": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
log_exception(
|
|
||||||
logger,
|
|
||||||
exc,
|
|
||||||
context={"response_status": status.HTTP_400_BAD_REQUEST},
|
|
||||||
request=context.get("request"),
|
|
||||||
)
|
|
||||||
response = Response(custom_response_data, status=status.HTTP_400_BAD_REQUEST)
|
|
||||||
|
|
||||||
elif isinstance(exc, Http404):
|
|
||||||
custom_response_data = {
|
|
||||||
"status": "error",
|
|
||||||
"error": {
|
|
||||||
"code": "NOT_FOUND",
|
|
||||||
"message": "Resource not found",
|
|
||||||
"details": str(exc) if str(exc) else None,
|
|
||||||
},
|
|
||||||
"data": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
log_exception(
|
|
||||||
logger,
|
|
||||||
exc,
|
|
||||||
context={"response_status": status.HTTP_404_NOT_FOUND},
|
|
||||||
request=context.get("request"),
|
|
||||||
)
|
|
||||||
response = Response(custom_response_data, status=status.HTTP_404_NOT_FOUND)
|
|
||||||
|
|
||||||
elif isinstance(exc, PermissionDenied):
|
|
||||||
custom_response_data = {
|
|
||||||
"status": "error",
|
|
||||||
"error": {
|
|
||||||
"code": "PERMISSION_DENIED",
|
|
||||||
"message": "Permission denied",
|
|
||||||
"details": str(exc) if str(exc) else None,
|
|
||||||
},
|
|
||||||
"data": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
log_exception(
|
|
||||||
logger,
|
|
||||||
exc,
|
|
||||||
context={"response_status": status.HTTP_403_FORBIDDEN},
|
|
||||||
request=context.get("request"),
|
|
||||||
)
|
|
||||||
response = Response(custom_response_data, status=status.HTTP_403_FORBIDDEN)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
def _get_error_code(exc: Exception) -> str:
|
|
||||||
"""Extract or determine error code from exception."""
|
|
||||||
if hasattr(exc, "default_code"):
|
|
||||||
return exc.default_code.upper()
|
|
||||||
|
|
||||||
if isinstance(exc, DRFValidationError):
|
|
||||||
return "VALIDATION_ERROR"
|
|
||||||
elif isinstance(exc, NotFound):
|
|
||||||
return "NOT_FOUND"
|
|
||||||
elif isinstance(exc, DRFPermissionDenied):
|
|
||||||
return "PERMISSION_DENIED"
|
|
||||||
|
|
||||||
return exc.__class__.__name__.upper()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_error_message(exc: Exception, response_data: Any) -> str:
|
|
||||||
"""Extract user-friendly error message."""
|
|
||||||
if isinstance(response_data, dict):
|
|
||||||
# Handle DRF validation errors
|
|
||||||
if "detail" in response_data:
|
|
||||||
return str(response_data["detail"])
|
|
||||||
elif "non_field_errors" in response_data:
|
|
||||||
errors = response_data["non_field_errors"]
|
|
||||||
return errors[0] if isinstance(errors, list) and errors else str(errors)
|
|
||||||
elif isinstance(response_data, dict) and len(response_data) == 1:
|
|
||||||
key, value = next(iter(response_data.items()))
|
|
||||||
if isinstance(value, list) and value:
|
|
||||||
return f"{key}: {value[0]}"
|
|
||||||
return f"{key}: {value}"
|
|
||||||
|
|
||||||
# Fallback to exception message
|
|
||||||
return str(exc) if str(exc) else "An error occurred"
|
|
||||||
|
|
||||||
|
|
||||||
def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str, Any]]:
|
|
||||||
"""Extract detailed error information for debugging."""
|
|
||||||
if isinstance(response_data, dict) and len(response_data) > 1:
|
|
||||||
return response_data
|
|
||||||
|
|
||||||
if hasattr(exc, "detail") and isinstance(exc.detail, dict):
|
|
||||||
return exc.detail
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def _format_django_validation_errors(
|
|
||||||
exc: DjangoValidationError,
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""Format Django ValidationError for API response."""
|
|
||||||
if hasattr(exc, "error_dict"):
|
|
||||||
# Field-specific errors
|
|
||||||
return {
|
|
||||||
field: [str(error) for error in errors]
|
|
||||||
for field, errors in exc.error_dict.items()
|
|
||||||
}
|
|
||||||
elif hasattr(exc, "error_list"):
|
|
||||||
# Non-field errors
|
|
||||||
return {"non_field_errors": [str(error) for error in exc.error_list]}
|
|
||||||
|
|
||||||
return {"non_field_errors": [str(exc)]}
|
|
||||||
|
|
||||||
|
|
||||||
# Removed _log_api_error - using centralized logging instead
|
|
||||||
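Every branch above produces the same envelope; as a concrete illustration, an Http404 handled by this function yields a response body equivalent to:

example_not_found_body = {
    "status": "error",
    "error": {
        "code": "NOT_FOUND",
        "message": "Resource not found",
        "details": None,
    },
    "data": None,
}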
@@ -1,260 +0,0 @@
|
|||||||
"""
|
|
||||||
Common mixins for API views following Django styleguide patterns.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from typing import Dict, Any, Optional
|
|
||||||
from rest_framework.request import Request
|
|
||||||
from rest_framework.response import Response
|
|
||||||
from rest_framework import status
|
|
||||||
|
|
||||||
|
|
||||||
class ApiMixin:
|
|
||||||
"""
|
|
||||||
Base mixin for API views providing standardized response formatting.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def create_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
data: Any = None,
|
|
||||||
message: Optional[str] = None,
|
|
||||||
status_code: int = status.HTTP_200_OK,
|
|
||||||
pagination: Optional[Dict[str, Any]] = None,
|
|
||||||
metadata: Optional[Dict[str, Any]] = None,
|
|
||||||
) -> Response:
|
|
||||||
"""
|
|
||||||
Create standardized API response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data: Response data
|
|
||||||
message: Optional success message
|
|
||||||
status_code: HTTP status code
|
|
||||||
pagination: Pagination information
|
|
||||||
metadata: Additional metadata
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Standardized Response object
|
|
||||||
"""
|
|
||||||
response_data = {
|
|
||||||
"status": "success" if status_code < 400 else "error",
|
|
||||||
"data": data,
|
|
||||||
}
|
|
||||||
|
|
||||||
if message:
|
|
||||||
response_data["message"] = message
|
|
||||||
|
|
||||||
if pagination:
|
|
||||||
response_data["pagination"] = pagination
|
|
||||||
|
|
||||||
if metadata:
|
|
||||||
response_data["metadata"] = metadata
|
|
||||||
|
|
||||||
return Response(response_data, status=status_code)
|
|
||||||
|
|
||||||
def create_error_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
message: str,
|
|
||||||
status_code: int = status.HTTP_400_BAD_REQUEST,
|
|
||||||
error_code: Optional[str] = None,
|
|
||||||
details: Optional[Dict[str, Any]] = None,
|
|
||||||
) -> Response:
|
|
||||||
"""
|
|
||||||
Create standardized error response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
message: Error message
|
|
||||||
status_code: HTTP status code
|
|
||||||
error_code: Optional error code
|
|
||||||
details: Additional error details
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Standardized error Response object
|
|
||||||
"""
|
|
||||||
error_data = {
|
|
||||||
"code": error_code or "GENERIC_ERROR",
|
|
||||||
"message": message,
|
|
||||||
}
|
|
||||||
|
|
||||||
if details:
|
|
||||||
error_data["details"] = details
|
|
||||||
|
|
||||||
response_data = {
|
|
||||||
"status": "error",
|
|
||||||
"error": error_data,
|
|
||||||
"data": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response(response_data, status=status_code)
|
|
||||||
|
|
||||||
|
|
||||||
class CreateApiMixin(ApiMixin):
|
|
||||||
"""
|
|
||||||
Mixin for create API endpoints with standardized input/output handling.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def create(self, request: Request, *args, **kwargs) -> Response:
|
|
||||||
"""Handle POST requests for creating resources."""
|
|
||||||
serializer = self.get_input_serializer(data=request.data)
|
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
|
|
||||||
# Create the object using the service layer
|
|
||||||
obj = self.perform_create(**serializer.validated_data)
|
|
||||||
|
|
||||||
# Serialize the output
|
|
||||||
output_serializer = self.get_output_serializer(obj)
|
|
||||||
|
|
||||||
return self.create_response(
|
|
||||||
data=output_serializer.data,
|
|
||||||
status_code=status.HTTP_201_CREATED,
|
|
||||||
message="Resource created successfully",
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_create(self, **validated_data):
|
|
||||||
"""
|
|
||||||
Override this method to implement object creation logic.
|
|
||||||
Should use service layer methods.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError("Subclasses must implement perform_create")
|
|
||||||
|
|
||||||
def get_input_serializer(self, *args, **kwargs):
|
|
||||||
"""Get the input serializer for validation."""
|
|
||||||
return self.InputSerializer(*args, **kwargs)
|
|
||||||
|
|
||||||
def get_output_serializer(self, *args, **kwargs):
|
|
||||||
"""Get the output serializer for response."""
|
|
||||||
return self.OutputSerializer(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdateApiMixin(ApiMixin):
|
|
||||||
"""
|
|
||||||
Mixin for update API endpoints with standardized input/output handling.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def update(self, request: Request, *args, **kwargs) -> Response:
|
|
||||||
"""Handle PUT/PATCH requests for updating resources."""
|
|
||||||
instance = self.get_object()
|
|
||||||
serializer = self.get_input_serializer(
|
|
||||||
data=request.data, partial=kwargs.get("partial", False)
|
|
||||||
)
|
|
||||||
serializer.is_valid(raise_exception=True)
|
|
||||||
|
|
||||||
# Update the object using the service layer
|
|
||||||
updated_obj = self.perform_update(instance, **serializer.validated_data)
|
|
||||||
|
|
||||||
# Serialize the output
|
|
||||||
output_serializer = self.get_output_serializer(updated_obj)
|
|
||||||
|
|
||||||
return self.create_response(
|
|
||||||
data=output_serializer.data,
|
|
||||||
message="Resource updated successfully",
|
|
||||||
)
|
|
||||||
|
|
||||||
def perform_update(self, instance, **validated_data):
|
|
||||||
"""
|
|
||||||
Override this method to implement object update logic.
|
|
||||||
Should use service layer methods.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError("Subclasses must implement perform_update")
|
|
||||||
|
|
||||||
def get_input_serializer(self, *args, **kwargs):
|
|
||||||
"""Get the input serializer for validation."""
|
|
||||||
return self.InputSerializer(*args, **kwargs)
|
|
||||||
    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class ListApiMixin(ApiMixin):
    """
    Mixin for list API endpoints with pagination and filtering.
    """

    def list(self, request: Request, *args, **kwargs) -> Response:
        """Handle GET requests for listing resources."""
        # Use selector to get filtered queryset
        queryset = self.get_queryset()

        # Apply pagination
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_output_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)

        # No pagination
        serializer = self.get_output_serializer(queryset, many=True)
        return self.create_response(data=serializer.data)

    def get_queryset(self):
        """
        Override this method to use selector patterns.
        Should call selector functions, not access model managers directly.
        """
        raise NotImplementedError(
            "Subclasses must implement get_queryset using selectors"
        )

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class RetrieveApiMixin(ApiMixin):
    """
    Mixin for retrieve API endpoints.
    """

    def retrieve(self, request: Request, *args, **kwargs) -> Response:
        """Handle GET requests for retrieving a single resource."""
        instance = self.get_object()
        serializer = self.get_output_serializer(instance)

        return self.create_response(data=serializer.data)

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError(
            "Subclasses must implement get_object using selectors"
        )

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


class DestroyApiMixin(ApiMixin):
    """
    Mixin for delete API endpoints.
    """

    def destroy(self, request: Request, *args, **kwargs) -> Response:
        """Handle DELETE requests for destroying resources."""
        instance = self.get_object()

        # Delete using service layer
        self.perform_destroy(instance)

        return self.create_response(
            status_code=status.HTTP_204_NO_CONTENT,
            message="Resource deleted successfully",
        )

    def perform_destroy(self, instance):
        """
        Override this method to implement object deletion logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_destroy")

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError(
            "Subclasses must implement get_object using selectors"
        )
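# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of wiring ListApiMixin into a DRF view, assuming the
# project's ApiMixin supplies create_response() and GenericAPIView supplies
# the pagination helpers. ParkListApi, ParkOutputSerializer and park_list are
# hypothetical names invented for this example only.
from rest_framework import serializers
from rest_framework.generics import GenericAPIView


class ParkListApi(ListApiMixin, GenericAPIView):
    class OutputSerializer(serializers.Serializer):
        name = serializers.CharField()
        slug = serializers.SlugField()

    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)

    def get_queryset(self):
        # Selector keeps ORM access out of the view layer (park_list is an
        # assumed selector function, not defined anywhere in this diff).
        from parks.selectors import park_list

        return park_list(filters=self.request.query_params)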
@@ -1,6 +0,0 @@
from django.apps import AppConfig


class CoreConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "core"
@@ -1 +0,0 @@
# Decorators module
@@ -1,409 +0,0 @@
"""
Advanced caching decorators for API views and functions.
"""

import hashlib
import json
import logging
import time
from functools import wraps
from typing import Callable, List, Optional

from django.utils.decorators import method_decorator
from django.views.decorators.vary import vary_on_headers

from core.services.enhanced_cache_service import EnhancedCacheService

logger = logging.getLogger(__name__)

def cache_api_response(
    timeout=1800, vary_on=None, key_prefix="api", cache_backend="api"
):
    """
    Advanced decorator for caching API responses with flexible configuration.

    Args:
        timeout: Cache timeout in seconds
        vary_on: List of request attributes to vary cache on
        key_prefix: Prefix for cache keys
        cache_backend: Cache backend to use
    """

    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            # Only cache GET requests
            if request.method != "GET":
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix,
                view_func.__name__,
                (
                    str(request.user.id)
                    if request.user.is_authenticated
                    else "anonymous"
                ),
                str(hash(frozenset(request.GET.items()))),
            ]

            # Add URL parameters to cache key
            if args:
                cache_key_parts.append(str(hash(args)))
            if kwargs:
                cache_key_parts.append(str(hash(frozenset(kwargs.items()))))

            # Add custom vary_on fields
            if vary_on:
                for field in vary_on:
                    value = getattr(request, field, "")
                    cache_key_parts.append(str(value))

            cache_key = ":".join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_response:
                logger.debug(
                    f"Cache hit for API view {view_func.__name__}",
                    extra={
                        "cache_key": cache_key,
                        "view": view_func.__name__,
                        "cache_hit": True,
                    },
                )
                return cached_response

            # Execute view and cache result
            start_time = time.time()
            response = view_func(self, request, *args, **kwargs)
            execution_time = time.time() - start_time

            # Only cache successful responses
            if hasattr(response, "status_code") and response.status_code == 200:
                getattr(cache_service, cache_backend + "_cache").set(
                    cache_key, response, timeout
                )
                logger.debug(
                    f"Cached API response for view {view_func.__name__}",
                    extra={
                        "cache_key": cache_key,
                        "view": view_func.__name__,
                        "execution_time": execution_time,
                        "cache_timeout": timeout,
                        "cache_miss": True,
                    },
                )
            else:
                status_code = getattr(response, "status_code", "unknown")
                logger.debug(
                    f"Not caching response for view {view_func.__name__} "
                    f"(status: {status_code})"
                )

            return response

        return wrapper

    return decorator
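# --- Hedged usage sketch (not part of the original module) ---
# cache_api_response wraps bound view methods (its inner wrapper receives
# `self, request`), so a typical application would look like the hypothetical
# view below; ParkStatsApi and its payload are invented for illustration.
from rest_framework.response import Response
from rest_framework.views import APIView


class ParkStatsApi(APIView):
    @cache_api_response(timeout=600, vary_on=["version"], key_prefix="park_stats")
    def get(self, request, *args, **kwargs):
        data = {"total_parks": 123}  # placeholder payload for illustration
        return Response(data)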
def cache_queryset_result(
    cache_key_template: str, timeout: int = 3600, cache_backend="default"
):
    """
    Decorator for caching expensive queryset operations.

    Args:
        cache_key_template: Template for cache key (can use format placeholders)
        timeout: Cache timeout in seconds
        cache_backend: Cache backend to use
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key from template and arguments
            try:
                cache_key = cache_key_template.format(*args, **kwargs)
            except (KeyError, IndexError):
                # Fallback to simpler key generation
                cache_key = f"{cache_key_template}:{hash(str(args) + str(kwargs))}"

            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_result is not None:
                logger.debug(f"Cache hit for queryset operation: {func.__name__}")
                return cached_result

            # Execute function and cache result
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            getattr(cache_service, cache_backend + "_cache").set(
                cache_key, result, timeout
            )
            logger.debug(
                f"Cached queryset result for {func.__name__}",
                extra={
                    "cache_key": cache_key,
                    "function": func.__name__,
                    "execution_time": execution_time,
                    "cache_timeout": timeout,
                },
            )

            return result

        return wrapper

    return decorator
def invalidate_cache_on_save(
    model_name: str, cache_patterns: Optional[List[str]] = None
):
    """
    Decorator to invalidate cache when model instances are saved.

    Args:
        model_name: Name of the model
        cache_patterns: List of cache key patterns to invalidate
    """

    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)

            # Invalidate related cache entries
            cache_service = EnhancedCacheService()

            # Standard model cache invalidation
            instance_id = getattr(self, "id", None)
            cache_service.invalidate_model_cache(model_name, instance_id)

            # Custom pattern invalidation
            if cache_patterns:
                for pattern in cache_patterns:
                    if instance_id:
                        pattern = pattern.format(model=model_name, id=instance_id)
                    cache_service.invalidate_pattern(pattern)

            logger.info(
                f"Invalidated cache for {model_name} after save",
                extra={
                    "model": model_name,
                    "instance_id": instance_id,
                    "patterns": cache_patterns,
                },
            )

            return result

        return wrapper

    return decorator
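# --- Hedged usage sketch (not part of the original module) ---
# The decorator targets instance methods (its wrapper receives `self`), so a
# natural place to apply it is a model's save(); the Park model and the
# cache-key patterns below are assumptions for illustration only.
from django.db import models


class Park(models.Model):
    name = models.CharField(max_length=255)

    @invalidate_cache_on_save("park", cache_patterns=["park_list:*", "{model}:{id}:*"])
    def save(self, *args, **kwargs):
        return super().save(*args, **kwargs)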
class CachedAPIViewMixin:
    """Mixin to add caching capabilities to API views"""

    cache_timeout = 1800  # 30 minutes default
    cache_vary_on = ["version"]
    cache_key_prefix = "api"
    cache_backend = "api"

    @method_decorator(vary_on_headers("User-Agent", "Accept-Language"))
    def dispatch(self, request, *args, **kwargs):
        """Add caching to the dispatch method"""
        if request.method == "GET" and getattr(self, "enable_caching", True):
            return self._cached_dispatch(request, *args, **kwargs)
        return super().dispatch(request, *args, **kwargs)

    def _cached_dispatch(self, request, *args, **kwargs):
        """Handle cached dispatch for GET requests"""
        cache_key = self._generate_cache_key(request, *args, **kwargs)

        cache_service = EnhancedCacheService()
        cached_response = getattr(cache_service, self.cache_backend + "_cache").get(
            cache_key
        )

        if cached_response:
            logger.debug(f"Cache hit for view {self.__class__.__name__}")
            return cached_response

        # Execute view
        response = super().dispatch(request, *args, **kwargs)

        # Cache successful responses
        if hasattr(response, "status_code") and response.status_code == 200:
            getattr(cache_service, self.cache_backend + "_cache").set(
                cache_key, response, self.cache_timeout
            )
            logger.debug(f"Cached response for view {self.__class__.__name__}")

        return response

    def _generate_cache_key(self, request, *args, **kwargs):
        """Generate cache key for the request"""
        key_parts = [
            self.cache_key_prefix,
            self.__class__.__name__,
            request.method,
            (str(request.user.id) if request.user.is_authenticated else "anonymous"),
            str(hash(frozenset(request.GET.items()))),
        ]

        if args:
            key_parts.append(str(hash(args)))
        if kwargs:
            key_parts.append(str(hash(frozenset(kwargs.items()))))

        # Add vary_on fields
        for field in self.cache_vary_on:
            value = getattr(request, field, "")
            key_parts.append(str(value))

        return ":".join(key_parts)
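# --- Hedged usage sketch (not part of the original module) ---
# Mixin order matters: CachedAPIViewMixin must precede the view base class so
# its dispatch() runs first. RideDetailApi is an invented example name.
from rest_framework.response import Response
from rest_framework.views import APIView


class RideDetailApi(CachedAPIViewMixin, APIView):
    cache_timeout = 300  # override the 30-minute default
    enable_caching = True

    def get(self, request, ride_slug=None):
        return Response({"slug": ride_slug})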
def smart_cache(
    timeout: int = 3600,
    key_func: Optional[Callable] = None,
    invalidate_on: Optional[List[str]] = None,
    cache_backend: str = "default",
):
    """
    Smart caching decorator that adapts to function arguments.

    Args:
        timeout: Cache timeout in seconds
        key_func: Custom function to generate cache key
        invalidate_on: List of signals to invalidate cache on
        cache_backend: Cache backend to use
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key
            if key_func:
                cache_key = key_func(*args, **kwargs)
            else:
                # Default key generation
                key_data = {
                    "func": f"{func.__module__}.{func.__name__}",
                    "args": str(args),
                    "kwargs": json.dumps(kwargs, sort_keys=True, default=str),
                }
                key_string = json.dumps(key_data, sort_keys=True)
                key_hash = hashlib.md5(key_string.encode()).hexdigest()
                cache_key = f"smart_cache:{key_hash}"

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_result is not None:
                logger.debug(f"Smart cache hit for {func.__name__}")
                return cached_result

            # Execute function
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            # Cache result
            getattr(cache_service, cache_backend + "_cache").set(
                cache_key, result, timeout
            )

            logger.debug(
                f"Smart cached result for {func.__name__}",
                extra={
                    "cache_key": cache_key,
                    "execution_time": execution_time,
                    "function": func.__name__,
                },
            )

            return result

        # Add cache invalidation if specified
        if invalidate_on:
            wrapper._cache_invalidate_on = invalidate_on
            wrapper._cache_backend = cache_backend

        return wrapper

    return decorator
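# --- Hedged usage sketch (not part of the original module) ---
# smart_cache with a custom key function; get_park_statistics, _stats_key and
# the key layout are illustrative assumptions, not part of the original code.
def _stats_key(park_id: int) -> str:
    return f"park_stats:{park_id}"


@smart_cache(timeout=900, key_func=_stats_key, invalidate_on=["post_save"])
def get_park_statistics(park_id: int) -> dict:
    # An expensive aggregation would normally happen here.
    return {"park_id": park_id, "ride_count": 42}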
def conditional_cache(condition_func: Callable, **cache_kwargs):
    """
    Cache decorator that only caches when condition is met.

    Args:
        condition_func: Function that returns True if caching should be applied
        **cache_kwargs: Arguments passed to smart_cache
    """

    def decorator(func):
        cached_func = smart_cache(**cache_kwargs)(func)

        @wraps(func)
        def wrapper(*args, **kwargs):
            if condition_func(*args, **kwargs):
                return cached_func(*args, **kwargs)
            else:
                return func(*args, **kwargs)

        return wrapper

    return decorator
# Utility functions for cache key generation
def generate_user_cache_key(user, suffix: str = ""):
    """Generate cache key based on user"""
    user_id = user.id if user.is_authenticated else "anonymous"
    return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}"


def generate_model_cache_key(model_instance, suffix: str = ""):
    """Generate cache key based on model instance"""
    model_name = model_instance._meta.model_name
    instance_id = model_instance.id
    return (
        f"{model_name}:{instance_id}:{suffix}"
        if suffix
        else f"{model_name}:{instance_id}"
    )


def generate_queryset_cache_key(queryset, params: Optional[dict] = None):
    """Generate cache key for queryset with parameters"""
    model_name = queryset.model._meta.model_name
    params_str = json.dumps(params or {}, sort_keys=True, default=str)
    params_hash = hashlib.md5(params_str.encode()).hexdigest()
    return f"queryset:{model_name}:{params_hash}"
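# --- Hedged usage sketch (not part of the original module) ---
# How the key helpers might be combined with a queryset; Park and its import
# path are assumptions for illustration only.
def parks_in_country_cache_key(country: str) -> str:
    from parks.models import Park  # hypothetical import

    queryset = Park.objects.filter(country=country)
    return generate_queryset_cache_key(queryset, {"country": country})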
@@ -1,224 +0,0 @@
"""
Custom exception classes for ThrillWiki.
Provides domain-specific exceptions with proper error codes and messages.
"""

from typing import Optional, Dict, Any


class ThrillWikiException(Exception):
    """Base exception for all ThrillWiki-specific errors."""

    default_message = "An error occurred"
    error_code = "THRILLWIKI_ERROR"
    status_code = 500

    def __init__(
        self,
        message: Optional[str] = None,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None,
    ):
        self.message = message or self.default_message
        self.error_code = error_code or self.error_code
        self.details = details or {}
        super().__init__(self.message)

    def to_dict(self) -> Dict[str, Any]:
        """Convert exception to dictionary for API responses."""
        return {
            "error_code": self.error_code,
            "message": self.message,
            "details": self.details,
        }
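# --- Hedged usage sketch (not part of the original module) ---
# Typical service-layer usage: raise a domain exception and let an API layer
# serialize it via to_dict(). The handler below is illustrative only.
def example_handler():
    try:
        raise ThrillWikiException(
            message="Something went wrong", details={"request_id": "abc123"}
        )
    except ThrillWikiException as exc:
        payload = exc.to_dict()
        # payload == {"error_code": "THRILLWIKI_ERROR",
        #             "message": "Something went wrong",
        #             "details": {"request_id": "abc123"}}
        return payload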
class ValidationException(ThrillWikiException):
    """Raised when data validation fails."""

    default_message = "Validation failed"
    error_code = "VALIDATION_ERROR"
    status_code = 400


class NotFoundError(ThrillWikiException):
    """Raised when a requested resource is not found."""

    default_message = "Resource not found"
    error_code = "NOT_FOUND"
    status_code = 404


class PermissionDeniedError(ThrillWikiException):
    """Raised when user lacks permission for an operation."""

    default_message = "Permission denied"
    error_code = "PERMISSION_DENIED"
    status_code = 403


class BusinessLogicError(ThrillWikiException):
    """Raised when business logic constraints are violated."""

    default_message = "Business logic violation"
    error_code = "BUSINESS_LOGIC_ERROR"
    status_code = 400


class ExternalServiceError(ThrillWikiException):
    """Raised when external service calls fail."""

    default_message = "External service error"
    error_code = "EXTERNAL_SERVICE_ERROR"
    status_code = 502
# Domain-specific exceptions


class ParkError(ThrillWikiException):
    """Base exception for park-related errors."""

    error_code = "PARK_ERROR"


class ParkNotFoundError(NotFoundError):
    """Raised when a park is not found."""

    default_message = "Park not found"
    error_code = "PARK_NOT_FOUND"

    def __init__(self, park_slug: Optional[str] = None, **kwargs):
        if park_slug:
            kwargs["details"] = {"park_slug": park_slug}
            kwargs["message"] = f"Park with slug '{park_slug}' not found"
        super().__init__(**kwargs)


class ParkOperationError(BusinessLogicError):
    """Raised when park operation constraints are violated."""

    default_message = "Invalid park operation"
    error_code = "PARK_OPERATION_ERROR"


class RideError(ThrillWikiException):
    """Base exception for ride-related errors."""

    error_code = "RIDE_ERROR"


class RideNotFoundError(NotFoundError):
    """Raised when a ride is not found."""

    default_message = "Ride not found"
    error_code = "RIDE_NOT_FOUND"

    def __init__(self, ride_slug: Optional[str] = None, **kwargs):
        if ride_slug:
            kwargs["details"] = {"ride_slug": ride_slug}
            kwargs["message"] = f"Ride with slug '{ride_slug}' not found"
        super().__init__(**kwargs)


class RideOperationError(BusinessLogicError):
    """Raised when ride operation constraints are violated."""

    default_message = "Invalid ride operation"
    error_code = "RIDE_OPERATION_ERROR"


class LocationError(ThrillWikiException):
    """Base exception for location-related errors."""

    error_code = "LOCATION_ERROR"


class InvalidCoordinatesError(ValidationException):
    """Raised when geographic coordinates are invalid."""

    default_message = "Invalid geographic coordinates"
    error_code = "INVALID_COORDINATES"

    def __init__(
        self,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
        **kwargs,
    ):
        if latitude is not None or longitude is not None:
            kwargs["details"] = {"latitude": latitude, "longitude": longitude}
        super().__init__(**kwargs)
class GeolocationError(ExternalServiceError):
    """Raised when geolocation services fail."""

    default_message = "Geolocation service unavailable"
    error_code = "GEOLOCATION_ERROR"


class ReviewError(ThrillWikiException):
    """Base exception for review-related errors."""

    error_code = "REVIEW_ERROR"


class ReviewModerationError(BusinessLogicError):
    """Raised when review moderation constraints are violated."""

    default_message = "Review moderation error"
    error_code = "REVIEW_MODERATION_ERROR"


class DuplicateReviewError(BusinessLogicError):
    """Raised when user tries to create duplicate reviews."""

    default_message = "User has already reviewed this item"
    error_code = "DUPLICATE_REVIEW"


class AccountError(ThrillWikiException):
    """Base exception for account-related errors."""

    error_code = "ACCOUNT_ERROR"


class InsufficientPermissionsError(PermissionDeniedError):
    """Raised when user lacks required permissions."""

    default_message = "Insufficient permissions"
    error_code = "INSUFFICIENT_PERMISSIONS"

    def __init__(self, required_permission: Optional[str] = None, **kwargs):
        if required_permission:
            kwargs["details"] = {"required_permission": required_permission}
            kwargs["message"] = f"Permission '{required_permission}' required"
        super().__init__(**kwargs)


class EmailError(ExternalServiceError):
    """Raised when email operations fail."""

    default_message = "Email service error"
    error_code = "EMAIL_ERROR"


class CacheError(ThrillWikiException):
    """Raised when cache operations fail."""

    default_message = "Cache operation failed"
    error_code = "CACHE_ERROR"
    status_code = 500


class RoadTripError(ExternalServiceError):
    """Raised when road trip planning fails."""

    default_message = "Road trip planning error"
    error_code = "ROADTRIP_ERROR"

    def __init__(self, service_name: Optional[str] = None, **kwargs):
        if service_name:
            kwargs["details"] = {"service": service_name}
        super().__init__(**kwargs)
@@ -1,43 +0,0 @@
"""Core forms and form components."""

from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.utils.translation import gettext_lazy as _

from autocomplete import Autocomplete


class BaseAutocomplete(Autocomplete):
    """Base autocomplete class for consistent autocomplete behavior across the project.

    This class extends django-htmx-autocomplete's base Autocomplete class to provide:
    - Project-wide defaults for autocomplete behavior
    - Translation strings
    - Authentication enforcement
    - Sensible search configuration
    """

    # Search configuration
    minimum_search_length = 2  # More responsive than default 3
    max_results = 10  # Reasonable limit for performance

    # UI text configuration using gettext for i18n
    no_result_text = _("No matches found")
    narrow_search_text = _(
        "Showing %(page_size)s of %(total)s matches. Please refine your search."
    )
    type_at_least_n_characters = _("Type at least %(n)s characters...")

    # Project-wide component settings
    placeholder = _("Search...")

    @staticmethod
    def auth_check(request):
        """Enforce authentication by default.

        This can be overridden in subclasses if public access is needed.
        Configure AUTOCOMPLETE_BLOCK_UNAUTHENTICATED in settings to disable.
        """
        block_unauth = getattr(settings, "AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", True)
        if block_unauth and not request.user.is_authenticated:
            raise PermissionDenied(_("Authentication required"))
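# --- Hedged usage sketch (not part of the original module) ---
# A hypothetical concrete autocomplete built on BaseAutocomplete. The exact
# hook names required by django-htmx-autocomplete may differ between versions;
# get_items(), the item dict shape, and the Park model are assumptions made
# purely for illustration.
class ParkAutocomplete(BaseAutocomplete):
    name = "park_autocomplete"

    def get_items(self, search=None, values=None):
        from parks.models import Park  # hypothetical import

        queryset = Park.objects.all()
        if search:
            queryset = queryset.filter(name__icontains=search)
        return [
            {"key": str(park.pk), "label": park.name}
            for park in queryset[: self.max_results]
        ]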
@@ -1,168 +0,0 @@
from django import forms
from django.utils.translation import gettext_lazy as _


class LocationSearchForm(forms.Form):
    """
    A comprehensive search form that includes text search, location-based
    search, and content type filtering for a unified search experience.
    """

    # Text search query
    q = forms.CharField(
        required=False,
        label=_("Search Query"),
        widget=forms.TextInput(
            attrs={
                "placeholder": _("Search parks, rides, companies..."),
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    # Location-based search
    location = forms.CharField(
        required=False,
        label=_("Near Location"),
        widget=forms.TextInput(
            attrs={
                "placeholder": _("City, address, or coordinates..."),
                "id": "location-input",
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    # Hidden fields for coordinates
    lat = forms.FloatField(
        required=False, widget=forms.HiddenInput(attrs={"id": "lat-input"})
    )
    lng = forms.FloatField(
        required=False, widget=forms.HiddenInput(attrs={"id": "lng-input"})
    )

    # Search radius
    radius_km = forms.ChoiceField(
        required=False,
        label=_("Search Radius"),
        choices=[
            ("", _("Any distance")),
            ("5", _("5 km")),
            ("10", _("10 km")),
            ("25", _("25 km")),
            ("50", _("50 km")),
            ("100", _("100 km")),
            ("200", _("200 km")),
        ],
        widget=forms.Select(
            attrs={
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                )
            }
        ),
    )
    # Content type filters
    search_parks = forms.BooleanField(
        required=False,
        initial=True,
        label=_("Search Parks"),
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    search_rides = forms.BooleanField(
        required=False,
        label=_("Search Rides"),
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    search_companies = forms.BooleanField(
        required=False,
        label=_("Search Companies"),
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    # Geographic filters
    country = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={
                "placeholder": _("Country"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    state = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={
                "placeholder": _("State/Region"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    city = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={
                "placeholder": _("City"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    def clean(self):
        cleaned_data = super().clean()

        # If lat/lng are provided, ensure the location field is populated for
        # display. Compare against None so 0.0 coordinates are not discarded.
        lat = cleaned_data.get("lat")
        lng = cleaned_data.get("lng")
        location = cleaned_data.get("location")

        if lat is not None and lng is not None and not location:
            cleaned_data["location"] = f"{lat}, {lng}"

        return cleaned_data
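# --- Hedged usage sketch (not part of the original module) ---
# Minimal view-side handling of LocationSearchForm; search_view and the
# downstream selector/service call are assumptions for illustration.
def search_view(request):
    form = LocationSearchForm(request.GET or None)
    if form.is_valid():
        query = form.cleaned_data.get("q", "")
        lat = form.cleaned_data.get("lat")
        lng = form.cleaned_data.get("lng")
        radius_km = form.cleaned_data.get("radius_km") or None
        # Hand the cleaned values to whatever search selector/service applies.
        ...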