mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2026-02-05 13:35:19 -05:00
Compare commits
36 Commits
95700c7d7b
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
fbfda9a3d8 | ||
|
|
4140a0d8e7 | ||
|
|
d631f3183c | ||
|
|
2b66814d82 | ||
|
|
96df23242e | ||
|
|
692c0bbbbf | ||
|
|
22ff0d1c49 | ||
|
|
fbbfea50a3 | ||
|
|
b37aedf82e | ||
|
|
fa570334fc | ||
|
|
d9a6b4a085 | ||
|
|
8ff6b7ee23 | ||
|
|
e2103a49ce | ||
|
|
2a1d139171 | ||
|
|
d8cb6fcffe | ||
|
|
2cdf302179 | ||
|
|
7db5d1a1cc | ||
|
|
acf2834d16 | ||
|
|
5bcd64ebae | ||
|
|
9a5974eff5 | ||
|
|
8a51cd5de7 | ||
|
|
cf54df0416 | ||
|
|
fe960e8b62 | ||
|
|
40cba5bdb2 | ||
|
|
28c9ec56da | ||
|
|
3ec5a4857d | ||
|
|
4da7e52fb0 | ||
|
|
b80654952d | ||
|
|
2b7bb4dfaa | ||
|
|
a801813dcf | ||
|
|
1c6e219662 | ||
|
|
70e4385c2b | ||
|
|
30aa887d2a | ||
|
|
dd2d09b1c7 | ||
|
|
89d9e945b9 | ||
|
|
bc4a3c7557 |
2
.github/workflows/claude-code-review.yml
vendored
2
.github/workflows/claude-code-review.yml
vendored
@@ -27,7 +27,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/claude.yml
vendored
2
.github/workflows/claude.yml
vendored
@@ -26,7 +26,7 @@ jobs:
|
|||||||
actions: read # Required for Claude to read CI results on PRs
|
actions: read # Required for Claude to read CI results on PRs
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
|
|
||||||
|
|||||||
6
.github/workflows/dependency-update.yml
vendored
6
.github/workflows/dependency-update.yml
vendored
@@ -9,10 +9,10 @@ jobs:
|
|||||||
update:
|
update:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.13"
|
python-version: "3.13"
|
||||||
|
|
||||||
@@ -33,7 +33,7 @@ jobs:
|
|||||||
uv run manage.py test
|
uv run manage.py test
|
||||||
|
|
||||||
- name: Create Pull Request
|
- name: Create Pull Request
|
||||||
uses: peter-evans/create-pull-request@v5
|
uses: peter-evans/create-pull-request@v8
|
||||||
with:
|
with:
|
||||||
commit-message: "chore: update dependencies"
|
commit-message: "chore: update dependencies"
|
||||||
title: "chore: weekly dependency updates"
|
title: "chore: weekly dependency updates"
|
||||||
|
|||||||
6
.github/workflows/django.yml
vendored
6
.github/workflows/django.yml
vendored
@@ -32,7 +32,7 @@ jobs:
|
|||||||
if: runner.os == 'Linux'
|
if: runner.os == 'Linux'
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Install Homebrew on Linux
|
- name: Install Homebrew on Linux
|
||||||
if: runner.os == 'Linux'
|
if: runner.os == 'Linux'
|
||||||
@@ -54,7 +54,7 @@ jobs:
|
|||||||
/opt/homebrew/opt/postgresql@16/bin/psql -U postgres -d test_thrillwiki -c "CREATE EXTENSION IF NOT EXISTS postgis;" || true
|
/opt/homebrew/opt/postgresql@16/bin/psql -U postgres -d test_thrillwiki -c "CREATE EXTENSION IF NOT EXISTS postgis;" || true
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
|
||||||
@@ -64,7 +64,7 @@ jobs:
|
|||||||
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||||
|
|
||||||
- name: Cache UV dependencies
|
- name: Cache UV dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v5
|
||||||
with:
|
with:
|
||||||
path: ~/.cache/uv
|
path: ~/.cache/uv
|
||||||
key: ${{ runner.os }}-uv-${{ hashFiles('backend/pyproject.toml') }}
|
key: ${{ runner.os }}-uv-${{ hashFiles('backend/pyproject.toml') }}
|
||||||
|
|||||||
2
.github/workflows/review.yml
vendored
2
.github/workflows/review.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
environment: development_environment
|
environment: development_environment
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
|
|||||||
139
.gitignore
vendored
139
.gitignore
vendored
@@ -1,139 +0,0 @@
|
|||||||
# Python
|
|
||||||
__pycache__/
|
|
||||||
*.py[cod]
|
|
||||||
*$py.class
|
|
||||||
*.so
|
|
||||||
.Python
|
|
||||||
build/
|
|
||||||
develop-eggs/
|
|
||||||
dist/
|
|
||||||
downloads/
|
|
||||||
eggs/
|
|
||||||
.eggs/
|
|
||||||
lib/
|
|
||||||
lib64/
|
|
||||||
parts/
|
|
||||||
sdist/
|
|
||||||
var/
|
|
||||||
wheels/
|
|
||||||
share/python-wheels/
|
|
||||||
*.egg-info/
|
|
||||||
.installed.cfg
|
|
||||||
*.egg
|
|
||||||
MANIFEST
|
|
||||||
|
|
||||||
# Django
|
|
||||||
*.log
|
|
||||||
local_settings.py
|
|
||||||
db.sqlite3
|
|
||||||
db.sqlite3-journal
|
|
||||||
/backend/staticfiles/
|
|
||||||
/backend/media/
|
|
||||||
|
|
||||||
# UV
|
|
||||||
.uv/
|
|
||||||
backend/.uv/
|
|
||||||
|
|
||||||
# Generated requirements files (auto-generated from pyproject.toml)
|
|
||||||
# Uncomment if you want to track these files
|
|
||||||
# backend/requirements.txt
|
|
||||||
# backend/requirements-dev.txt
|
|
||||||
# backend/requirements-test.txt
|
|
||||||
|
|
||||||
# Node.js
|
|
||||||
node_modules/
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
pnpm-debug.log*
|
|
||||||
lerna-debug.log*
|
|
||||||
.pnpm-store/
|
|
||||||
|
|
||||||
# Vue.js / Vite
|
|
||||||
/frontend/dist/
|
|
||||||
/frontend/dist-ssr/
|
|
||||||
*.local
|
|
||||||
|
|
||||||
# Environment variables
|
|
||||||
.env
|
|
||||||
.env.local
|
|
||||||
.env.development.local
|
|
||||||
.env.test.local
|
|
||||||
.env.production.local
|
|
||||||
backend/.env
|
|
||||||
frontend/.env
|
|
||||||
|
|
||||||
# IDEs
|
|
||||||
.vscode/
|
|
||||||
.idea/
|
|
||||||
*.swp
|
|
||||||
*.swo
|
|
||||||
*.sublime-project
|
|
||||||
*.sublime-workspace
|
|
||||||
|
|
||||||
# OS
|
|
||||||
.DS_Store
|
|
||||||
Thumbs.db
|
|
||||||
Desktop.ini
|
|
||||||
|
|
||||||
# Logs
|
|
||||||
logs/
|
|
||||||
*.log
|
|
||||||
|
|
||||||
# Coverage
|
|
||||||
coverage/
|
|
||||||
*.lcov
|
|
||||||
.nyc_output
|
|
||||||
htmlcov/
|
|
||||||
.coverage
|
|
||||||
.coverage.*
|
|
||||||
|
|
||||||
# Testing
|
|
||||||
.pytest_cache/
|
|
||||||
.cache
|
|
||||||
|
|
||||||
# Temporary files
|
|
||||||
tmp/
|
|
||||||
temp/
|
|
||||||
*.tmp
|
|
||||||
*.temp
|
|
||||||
|
|
||||||
# Build outputs
|
|
||||||
/dist/
|
|
||||||
/build/
|
|
||||||
|
|
||||||
# Backup files
|
|
||||||
*.bak
|
|
||||||
*.backup
|
|
||||||
*.orig
|
|
||||||
*.swp
|
|
||||||
*_backup.*
|
|
||||||
*_OLD_*
|
|
||||||
|
|
||||||
# Archive files
|
|
||||||
*.tar.gz
|
|
||||||
*.zip
|
|
||||||
*.rar
|
|
||||||
|
|
||||||
# Security
|
|
||||||
*.pem
|
|
||||||
*.key
|
|
||||||
*.cert
|
|
||||||
|
|
||||||
# Local development
|
|
||||||
/uploads/
|
|
||||||
/backups/
|
|
||||||
.django_tailwind_cli/
|
|
||||||
backend/.env
|
|
||||||
frontend/.env
|
|
||||||
|
|
||||||
# Extracted packages
|
|
||||||
django-forwardemail/
|
|
||||||
frontend/
|
|
||||||
frontend
|
|
||||||
.snapshots
|
|
||||||
web/next-env.d.ts
|
|
||||||
web/.next/types/cache-life.d.ts
|
|
||||||
.gitignore
|
|
||||||
web/.next/types/routes.d.ts
|
|
||||||
web/.next/types/validator.ts
|
|
||||||
@@ -32,6 +32,8 @@ class CustomAccountAdapter(DefaultAccountAdapter):
|
|||||||
"activate_url": activate_url,
|
"activate_url": activate_url,
|
||||||
"current_site": current_site,
|
"current_site": current_site,
|
||||||
"key": emailconfirmation.key,
|
"key": emailconfirmation.key,
|
||||||
|
"request": request, # Include request for email backend
|
||||||
|
"site": current_site, # Include site for email backend
|
||||||
}
|
}
|
||||||
email_template = "account/email/email_confirmation_signup" if signup else "account/email/email_confirmation"
|
email_template = "account/email/email_confirmation_signup" if signup else "account/email/email_confirmation"
|
||||||
self.send_mail(email_template, emailconfirmation.email_address.email, ctx)
|
self.send_mail(email_template, emailconfirmation.email_address.email, ctx)
|
||||||
|
|||||||
@@ -586,6 +586,264 @@ notification_priorities = ChoiceGroup(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# SECURITY EVENT TYPES
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
security_event_types = ChoiceGroup(
|
||||||
|
name="security_event_types",
|
||||||
|
choices=[
|
||||||
|
RichChoice(
|
||||||
|
value="login_success",
|
||||||
|
label="Login Success",
|
||||||
|
description="User successfully logged in to their account",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "login",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "authentication",
|
||||||
|
"sort_order": 1,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="login_failed",
|
||||||
|
label="Login Failed",
|
||||||
|
description="Failed login attempt to user's account",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "login",
|
||||||
|
"css_class": "text-red-600 bg-red-50",
|
||||||
|
"severity": "warning",
|
||||||
|
"category": "authentication",
|
||||||
|
"sort_order": 2,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="logout",
|
||||||
|
label="Logout",
|
||||||
|
description="User logged out of their account",
|
||||||
|
metadata={
|
||||||
|
"color": "gray",
|
||||||
|
"icon": "logout",
|
||||||
|
"css_class": "text-gray-600 bg-gray-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "authentication",
|
||||||
|
"sort_order": 3,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="mfa_enrolled",
|
||||||
|
label="MFA Enrolled",
|
||||||
|
description="User enabled two-factor authentication",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "shield-check",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "mfa",
|
||||||
|
"sort_order": 4,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="mfa_disabled",
|
||||||
|
label="MFA Disabled",
|
||||||
|
description="User disabled two-factor authentication",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "shield-off",
|
||||||
|
"css_class": "text-yellow-600 bg-yellow-50",
|
||||||
|
"severity": "warning",
|
||||||
|
"category": "mfa",
|
||||||
|
"sort_order": 5,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="mfa_challenge_success",
|
||||||
|
label="MFA Challenge Success",
|
||||||
|
description="User successfully completed MFA verification",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "shield-check",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "mfa",
|
||||||
|
"sort_order": 6,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="mfa_challenge_failed",
|
||||||
|
label="MFA Challenge Failed",
|
||||||
|
description="User failed MFA verification attempt",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "shield-x",
|
||||||
|
"css_class": "text-red-600 bg-red-50",
|
||||||
|
"severity": "warning",
|
||||||
|
"category": "mfa",
|
||||||
|
"sort_order": 7,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="passkey_registered",
|
||||||
|
label="Passkey Registered",
|
||||||
|
description="User registered a new passkey/WebAuthn credential",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "fingerprint",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "passkey",
|
||||||
|
"sort_order": 8,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="passkey_removed",
|
||||||
|
label="Passkey Removed",
|
||||||
|
description="User removed a passkey/WebAuthn credential",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "fingerprint",
|
||||||
|
"css_class": "text-yellow-600 bg-yellow-50",
|
||||||
|
"severity": "warning",
|
||||||
|
"category": "passkey",
|
||||||
|
"sort_order": 9,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="passkey_login",
|
||||||
|
label="Passkey Login",
|
||||||
|
description="User logged in using a passkey",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "fingerprint",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "passkey",
|
||||||
|
"sort_order": 10,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="social_linked",
|
||||||
|
label="Social Account Linked",
|
||||||
|
description="User connected a social login provider",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "link",
|
||||||
|
"css_class": "text-blue-600 bg-blue-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "social",
|
||||||
|
"sort_order": 11,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="social_unlinked",
|
||||||
|
label="Social Account Unlinked",
|
||||||
|
description="User disconnected a social login provider",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "unlink",
|
||||||
|
"css_class": "text-yellow-600 bg-yellow-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "social",
|
||||||
|
"sort_order": 12,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="password_reset_requested",
|
||||||
|
label="Password Reset Requested",
|
||||||
|
description="Password reset was requested for user's account",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "key",
|
||||||
|
"css_class": "text-yellow-600 bg-yellow-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "password",
|
||||||
|
"sort_order": 13,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="password_reset_completed",
|
||||||
|
label="Password Reset Completed",
|
||||||
|
description="User successfully reset their password",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "key",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "password",
|
||||||
|
"sort_order": 14,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="password_changed",
|
||||||
|
label="Password Changed",
|
||||||
|
description="User changed their password",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "key",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "password",
|
||||||
|
"sort_order": 15,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="session_invalidated",
|
||||||
|
label="Session Invalidated",
|
||||||
|
description="User's session was terminated",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "clock",
|
||||||
|
"css_class": "text-yellow-600 bg-yellow-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "session",
|
||||||
|
"sort_order": 16,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="recovery_code_used",
|
||||||
|
label="Recovery Code Used",
|
||||||
|
description="User used a recovery code for authentication",
|
||||||
|
metadata={
|
||||||
|
"color": "orange",
|
||||||
|
"icon": "key",
|
||||||
|
"css_class": "text-orange-600 bg-orange-50",
|
||||||
|
"severity": "warning",
|
||||||
|
"category": "mfa",
|
||||||
|
"sort_order": 17,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="recovery_codes_regenerated",
|
||||||
|
label="Recovery Codes Regenerated",
|
||||||
|
description="User generated new recovery codes",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "refresh",
|
||||||
|
"css_class": "text-blue-600 bg-blue-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "mfa",
|
||||||
|
"sort_order": 18,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="session_to_token",
|
||||||
|
label="Passkey Login",
|
||||||
|
description="Signed in using a passkey",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "fingerprint",
|
||||||
|
"css_class": "text-green-600 bg-green-50",
|
||||||
|
"severity": "info",
|
||||||
|
"category": "authentication",
|
||||||
|
"sort_order": 19,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
# REGISTER ALL CHOICE GROUPS
|
# REGISTER ALL CHOICE GROUPS
|
||||||
# =============================================================================
|
# =============================================================================
|
||||||
@@ -598,3 +856,5 @@ register_choices("privacy_levels", privacy_levels.choices, "accounts", "Privacy
|
|||||||
register_choices("top_list_categories", top_list_categories.choices, "accounts", "Top list category types")
|
register_choices("top_list_categories", top_list_categories.choices, "accounts", "Top list category types")
|
||||||
register_choices("notification_types", notification_types.choices, "accounts", "Notification type classifications")
|
register_choices("notification_types", notification_types.choices, "accounts", "Notification type classifications")
|
||||||
register_choices("notification_priorities", notification_priorities.choices, "accounts", "Notification priority levels")
|
register_choices("notification_priorities", notification_priorities.choices, "accounts", "Notification priority levels")
|
||||||
|
register_choices("security_event_types", security_event_types.choices, "accounts", "Security event type classifications")
|
||||||
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ class Migration(migrations.Migration):
|
|||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("accounts", "0014_remove_toplist_user_remove_toplistitem_top_list_and_more"),
|
("accounts", "0014_remove_toplist_user_remove_toplistitem_top_list_and_more"),
|
||||||
("pghistory", "0007_auto_20250421_0444"),
|
("pghistory", "0006_delete_aggregateevent"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
|
|||||||
@@ -0,0 +1,41 @@
|
|||||||
|
# Generated by Django 5.2.9 on 2026-01-07 01:23
|
||||||
|
|
||||||
|
import pgtrigger.compiler
|
||||||
|
import pgtrigger.migrations
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('accounts', '0015_loginhistory_loginhistoryevent_and_more'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
pgtrigger.migrations.RemoveTrigger(
|
||||||
|
model_name='emailverification',
|
||||||
|
name='insert_insert',
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.RemoveTrigger(
|
||||||
|
model_name='emailverification',
|
||||||
|
name='update_update',
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='emailverification',
|
||||||
|
name='updated_at',
|
||||||
|
field=models.DateTimeField(auto_now=True, help_text='When this verification was last updated'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='emailverificationevent',
|
||||||
|
name='updated_at',
|
||||||
|
field=models.DateTimeField(auto_now=True, help_text='When this verification was last updated'),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name='emailverification',
|
||||||
|
trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "accounts_emailverificationevent" ("created_at", "id", "last_sent", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "token", "updated_at", "user_id") VALUES (NEW."created_at", NEW."id", NEW."last_sent", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."token", NEW."updated_at", NEW."user_id"); RETURN NULL;', hash='53c568e932b1b55a3c79e79220e6d6f269458003', operation='INSERT', pgid='pgtrigger_insert_insert_53748', table='accounts_emailverification', when='AFTER')),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name='emailverification',
|
||||||
|
trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "accounts_emailverificationevent" ("created_at", "id", "last_sent", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "token", "updated_at", "user_id") VALUES (NEW."created_at", NEW."id", NEW."last_sent", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."token", NEW."updated_at", NEW."user_id"); RETURN NULL;', hash='8b45a9a0a1810564cb46c098552ab4ec7920daeb', operation='UPDATE', pgid='pgtrigger_update_update_7a2a8', table='accounts_emailverification', when='AFTER')),
|
||||||
|
),
|
||||||
|
]
|
||||||
195
backend/apps/accounts/migrations/0017_add_security_log_model.py
Normal file
195
backend/apps/accounts/migrations/0017_add_security_log_model.py
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
# Generated by Django 5.2.10 on 2026-01-10 20:48
|
||||||
|
|
||||||
|
import apps.core.choices.fields
|
||||||
|
import django.db.models.deletion
|
||||||
|
import pgtrigger.compiler
|
||||||
|
import pgtrigger.migrations
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("accounts", "0016_remove_emailverification_insert_insert_and_more"),
|
||||||
|
("pghistory", "0007_auto_20250421_0444"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="SecurityLog",
|
||||||
|
fields=[
|
||||||
|
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||||
|
(
|
||||||
|
"event_type",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="security_event_types",
|
||||||
|
choices=[
|
||||||
|
("login_success", "Login Success"),
|
||||||
|
("login_failed", "Login Failed"),
|
||||||
|
("logout", "Logout"),
|
||||||
|
("mfa_enrolled", "MFA Enrolled"),
|
||||||
|
("mfa_disabled", "MFA Disabled"),
|
||||||
|
("mfa_challenge_success", "MFA Challenge Success"),
|
||||||
|
("mfa_challenge_failed", "MFA Challenge Failed"),
|
||||||
|
("passkey_registered", "Passkey Registered"),
|
||||||
|
("passkey_removed", "Passkey Removed"),
|
||||||
|
("passkey_login", "Passkey Login"),
|
||||||
|
("social_linked", "Social Account Linked"),
|
||||||
|
("social_unlinked", "Social Account Unlinked"),
|
||||||
|
("password_reset_requested", "Password Reset Requested"),
|
||||||
|
("password_reset_completed", "Password Reset Completed"),
|
||||||
|
("password_changed", "Password Changed"),
|
||||||
|
("session_invalidated", "Session Invalidated"),
|
||||||
|
("recovery_code_used", "Recovery Code Used"),
|
||||||
|
("recovery_codes_regenerated", "Recovery Codes Regenerated"),
|
||||||
|
],
|
||||||
|
db_index=True,
|
||||||
|
domain="accounts",
|
||||||
|
help_text="Type of security event",
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("ip_address", models.GenericIPAddressField(help_text="IP address of the request")),
|
||||||
|
("user_agent", models.TextField(blank=True, help_text="User agent string from the request")),
|
||||||
|
("metadata", models.JSONField(blank=True, default=dict, help_text="Additional event-specific data")),
|
||||||
|
("created_at", models.DateTimeField(auto_now_add=True, help_text="When this event occurred")),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
help_text="User this event is associated with",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="security_logs",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Security Log",
|
||||||
|
"verbose_name_plural": "Security Logs",
|
||||||
|
"ordering": ["-created_at"],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="SecurityLogEvent",
|
||||||
|
fields=[
|
||||||
|
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||||
|
("pgh_label", models.TextField(help_text="The event label.")),
|
||||||
|
("id", models.BigIntegerField()),
|
||||||
|
(
|
||||||
|
"event_type",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="security_event_types",
|
||||||
|
choices=[
|
||||||
|
("login_success", "Login Success"),
|
||||||
|
("login_failed", "Login Failed"),
|
||||||
|
("logout", "Logout"),
|
||||||
|
("mfa_enrolled", "MFA Enrolled"),
|
||||||
|
("mfa_disabled", "MFA Disabled"),
|
||||||
|
("mfa_challenge_success", "MFA Challenge Success"),
|
||||||
|
("mfa_challenge_failed", "MFA Challenge Failed"),
|
||||||
|
("passkey_registered", "Passkey Registered"),
|
||||||
|
("passkey_removed", "Passkey Removed"),
|
||||||
|
("passkey_login", "Passkey Login"),
|
||||||
|
("social_linked", "Social Account Linked"),
|
||||||
|
("social_unlinked", "Social Account Unlinked"),
|
||||||
|
("password_reset_requested", "Password Reset Requested"),
|
||||||
|
("password_reset_completed", "Password Reset Completed"),
|
||||||
|
("password_changed", "Password Changed"),
|
||||||
|
("session_invalidated", "Session Invalidated"),
|
||||||
|
("recovery_code_used", "Recovery Code Used"),
|
||||||
|
("recovery_codes_regenerated", "Recovery Codes Regenerated"),
|
||||||
|
],
|
||||||
|
domain="accounts",
|
||||||
|
help_text="Type of security event",
|
||||||
|
max_length=50,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("ip_address", models.GenericIPAddressField(help_text="IP address of the request")),
|
||||||
|
("user_agent", models.TextField(blank=True, help_text="User agent string from the request")),
|
||||||
|
("metadata", models.JSONField(blank=True, default=dict, help_text="Additional event-specific data")),
|
||||||
|
("created_at", models.DateTimeField(auto_now_add=True, help_text="When this event occurred")),
|
||||||
|
(
|
||||||
|
"pgh_context",
|
||||||
|
models.ForeignKey(
|
||||||
|
db_constraint=False,
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||||
|
related_name="+",
|
||||||
|
to="pghistory.context",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"pgh_obj",
|
||||||
|
models.ForeignKey(
|
||||||
|
db_constraint=False,
|
||||||
|
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||||
|
related_name="events",
|
||||||
|
to="accounts.securitylog",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
db_constraint=False,
|
||||||
|
help_text="User this event is associated with",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||||
|
related_name="+",
|
||||||
|
related_query_name="+",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="securitylog",
|
||||||
|
index=models.Index(fields=["user", "-created_at"], name="accounts_se_user_id_d46023_idx"),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="securitylog",
|
||||||
|
index=models.Index(fields=["event_type", "-created_at"], name="accounts_se_event_t_814971_idx"),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="securitylog",
|
||||||
|
index=models.Index(fields=["ip_address", "-created_at"], name="accounts_se_ip_addr_2a19c8_idx"),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name="securitylog",
|
||||||
|
trigger=pgtrigger.compiler.Trigger(
|
||||||
|
name="insert_insert",
|
||||||
|
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||||
|
func='INSERT INTO "accounts_securitylogevent" ("created_at", "event_type", "id", "ip_address", "metadata", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "user_agent", "user_id") VALUES (NEW."created_at", NEW."event_type", NEW."id", NEW."ip_address", NEW."metadata", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."user_agent", NEW."user_id"); RETURN NULL;',
|
||||||
|
hash="a40cf3f6fa9e8cda99f7204edb226b26bbe03eda",
|
||||||
|
operation="INSERT",
|
||||||
|
pgid="pgtrigger_insert_insert_5d4cf",
|
||||||
|
table="accounts_securitylog",
|
||||||
|
when="AFTER",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name="securitylog",
|
||||||
|
trigger=pgtrigger.compiler.Trigger(
|
||||||
|
name="update_update",
|
||||||
|
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||||
|
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||||
|
func='INSERT INTO "accounts_securitylogevent" ("created_at", "event_type", "id", "ip_address", "metadata", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "user_agent", "user_id") VALUES (NEW."created_at", NEW."event_type", NEW."id", NEW."ip_address", NEW."metadata", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."user_agent", NEW."user_id"); RETURN NULL;',
|
||||||
|
hash="244fc44bdaff1bf2d557f09ae452a9ea77274068",
|
||||||
|
operation="UPDATE",
|
||||||
|
pgid="pgtrigger_update_update_d4645",
|
||||||
|
table="accounts_securitylog",
|
||||||
|
when="AFTER",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -620,6 +620,111 @@ class NotificationPreference(TrackedModel):
|
|||||||
return getattr(self, field_name, False)
|
return getattr(self, field_name, False)
|
||||||
|
|
||||||
|
|
||||||
|
@pghistory.track()
|
||||||
|
class SecurityLog(models.Model):
|
||||||
|
"""
|
||||||
|
Model to track security-relevant authentication events.
|
||||||
|
|
||||||
|
All security-critical events are logged here for audit purposes,
|
||||||
|
including logins, MFA changes, password changes, and session management.
|
||||||
|
"""
|
||||||
|
|
||||||
|
user = models.ForeignKey(
|
||||||
|
User,
|
||||||
|
on_delete=models.CASCADE,
|
||||||
|
related_name="security_logs",
|
||||||
|
null=True, # Allow null for failed login attempts with no valid user
|
||||||
|
blank=True,
|
||||||
|
help_text="User this event is associated with",
|
||||||
|
)
|
||||||
|
event_type = RichChoiceField(
|
||||||
|
choice_group="security_event_types",
|
||||||
|
domain="accounts",
|
||||||
|
max_length=50,
|
||||||
|
db_index=True,
|
||||||
|
help_text="Type of security event",
|
||||||
|
)
|
||||||
|
ip_address = models.GenericIPAddressField(
|
||||||
|
help_text="IP address of the request",
|
||||||
|
)
|
||||||
|
user_agent = models.TextField(
|
||||||
|
blank=True,
|
||||||
|
help_text="User agent string from the request",
|
||||||
|
)
|
||||||
|
metadata = models.JSONField(
|
||||||
|
default=dict,
|
||||||
|
blank=True,
|
||||||
|
help_text="Additional event-specific data",
|
||||||
|
)
|
||||||
|
created_at = models.DateTimeField(
|
||||||
|
auto_now_add=True,
|
||||||
|
help_text="When this event occurred",
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
ordering = ["-created_at"]
|
||||||
|
indexes = [
|
||||||
|
models.Index(fields=["user", "-created_at"]),
|
||||||
|
models.Index(fields=["event_type", "-created_at"]),
|
||||||
|
models.Index(fields=["ip_address", "-created_at"]),
|
||||||
|
]
|
||||||
|
verbose_name = "Security Log"
|
||||||
|
verbose_name_plural = "Security Logs"
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
username = self.user.username if self.user else "Unknown"
|
||||||
|
return f"{self.get_event_type_display()} - {username} at {self.created_at}"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def log_event(
|
||||||
|
cls,
|
||||||
|
event_type: str,
|
||||||
|
ip_address: str,
|
||||||
|
user=None,
|
||||||
|
user_agent: str = "",
|
||||||
|
metadata: dict = None,
|
||||||
|
) -> "SecurityLog":
|
||||||
|
"""
|
||||||
|
Create a new security log entry.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event_type: One of security_event_types choices (e.g., "login_success")
|
||||||
|
ip_address: Client IP address
|
||||||
|
user: User instance (optional for failed logins)
|
||||||
|
user_agent: Browser user agent string
|
||||||
|
metadata: Additional event-specific data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The created SecurityLog instance
|
||||||
|
"""
|
||||||
|
return cls.objects.create(
|
||||||
|
user=user,
|
||||||
|
event_type=event_type,
|
||||||
|
ip_address=ip_address,
|
||||||
|
user_agent=user_agent,
|
||||||
|
metadata=metadata or {},
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_recent_for_user(cls, user, limit: int = 20):
|
||||||
|
"""Get recent security events for a user."""
|
||||||
|
return cls.objects.filter(user=user).order_by("-created_at")[:limit]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_failed_login_count(cls, ip_address: str, minutes: int = 15) -> int:
|
||||||
|
"""Count failed login attempts from an IP in the last N minutes."""
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
cutoff = timezone.now() - timedelta(minutes=minutes)
|
||||||
|
return cls.objects.filter(
|
||||||
|
event_type="login_failed",
|
||||||
|
ip_address=ip_address,
|
||||||
|
created_at__gte=cutoff,
|
||||||
|
).count()
|
||||||
|
|
||||||
|
|
||||||
# Signal handlers for automatic notification preference creation
|
# Signal handlers for automatic notification preference creation
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -26,6 +26,7 @@ from django.utils.crypto import get_random_string
|
|||||||
from django_forwardemail.services import EmailService
|
from django_forwardemail.services import EmailService
|
||||||
|
|
||||||
from .models import EmailVerification, User, UserDeletionRequest, UserProfile
|
from .models import EmailVerification, User, UserDeletionRequest, UserProfile
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -130,7 +131,7 @@ class AccountService:
|
|||||||
html=email_html,
|
html=email_html,
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to send password change confirmation email: {e}")
|
capture_and_log(e, 'Send password change confirmation email', source='service', severity='medium')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def initiate_email_change(
|
def initiate_email_change(
|
||||||
@@ -206,7 +207,7 @@ class AccountService:
|
|||||||
html=email_html,
|
html=email_html,
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to send email verification: {e}")
|
capture_and_log(e, 'Send email verification', source='service', severity='medium')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def verify_email_change(*, token: str) -> dict[str, Any]:
|
def verify_email_change(*, token: str) -> dict[str, Any]:
|
||||||
@@ -260,7 +261,7 @@ class UserDeletionService:
|
|||||||
"is_active": False,
|
"is_active": False,
|
||||||
"is_staff": False,
|
"is_staff": False,
|
||||||
"is_superuser": False,
|
"is_superuser": False,
|
||||||
"role": User.Roles.USER,
|
"role": "USER",
|
||||||
"is_banned": True,
|
"is_banned": True,
|
||||||
"ban_reason": "System placeholder for deleted users",
|
"ban_reason": "System placeholder for deleted users",
|
||||||
"ban_date": timezone.now(),
|
"ban_date": timezone.now(),
|
||||||
@@ -388,7 +389,7 @@ class UserDeletionService:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Check if user has critical admin role
|
# Check if user has critical admin role
|
||||||
if user.role == User.Roles.ADMIN and user.is_staff:
|
if user.role == "ADMIN" and user.is_staff:
|
||||||
return (
|
return (
|
||||||
False,
|
False,
|
||||||
"Admin accounts with staff privileges cannot be deleted. Please remove admin privileges first or contact system administrator.",
|
"Admin accounts with staff privileges cannot be deleted. Please remove admin privileges first or contact system administrator.",
|
||||||
|
|||||||
@@ -5,7 +5,26 @@ This package contains business logic services for account management,
|
|||||||
including social provider management, user authentication, and profile services.
|
including social provider management, user authentication, and profile services.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from .account_service import AccountService
|
||||||
from .social_provider_service import SocialProviderService
|
from .social_provider_service import SocialProviderService
|
||||||
from .user_deletion_service import UserDeletionService
|
from .user_deletion_service import UserDeletionService
|
||||||
|
from .security_service import (
|
||||||
|
get_client_ip,
|
||||||
|
log_security_event,
|
||||||
|
log_security_event_simple,
|
||||||
|
send_security_notification,
|
||||||
|
check_auth_method_availability,
|
||||||
|
invalidate_user_sessions,
|
||||||
|
)
|
||||||
|
|
||||||
__all__ = ["SocialProviderService", "UserDeletionService"]
|
__all__ = [
|
||||||
|
"AccountService",
|
||||||
|
"SocialProviderService",
|
||||||
|
"UserDeletionService",
|
||||||
|
"get_client_ip",
|
||||||
|
"log_security_event",
|
||||||
|
"log_security_event_simple",
|
||||||
|
"send_security_notification",
|
||||||
|
"check_auth_method_availability",
|
||||||
|
"invalidate_user_sessions",
|
||||||
|
]
|
||||||
|
|||||||
199
backend/apps/accounts/services/account_service.py
Normal file
199
backend/apps/accounts/services/account_service.py
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
"""
|
||||||
|
Account management service for ThrillWiki.
|
||||||
|
|
||||||
|
Provides password validation, password changes, and email change functionality.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import secrets
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from django.core.mail import send_mail
|
||||||
|
from django.template.loader import render_to_string
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from django.http import HttpRequest
|
||||||
|
|
||||||
|
from apps.accounts.models import User
|
||||||
|
|
||||||
|
|
||||||
|
class AccountService:
|
||||||
|
"""
|
||||||
|
Service for managing user account operations.
|
||||||
|
|
||||||
|
Handles password validation, password changes, and email changes
|
||||||
|
with proper verification flows.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Password requirements
|
||||||
|
MIN_PASSWORD_LENGTH = 8
|
||||||
|
REQUIRE_UPPERCASE = True
|
||||||
|
REQUIRE_LOWERCASE = True
|
||||||
|
REQUIRE_NUMBERS = True
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def validate_password(cls, password: str) -> bool:
|
||||||
|
"""
|
||||||
|
Validate a password against security requirements.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
password: The password to validate
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if password meets requirements, False otherwise
|
||||||
|
"""
|
||||||
|
if len(password) < cls.MIN_PASSWORD_LENGTH:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if cls.REQUIRE_UPPERCASE and not re.search(r"[A-Z]", password):
|
||||||
|
return False
|
||||||
|
|
||||||
|
if cls.REQUIRE_LOWERCASE and not re.search(r"[a-z]", password):
|
||||||
|
return False
|
||||||
|
|
||||||
|
if cls.REQUIRE_NUMBERS and not re.search(r"[0-9]", password):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def change_password(
|
||||||
|
cls,
|
||||||
|
user: "User",
|
||||||
|
old_password: str,
|
||||||
|
new_password: str,
|
||||||
|
request: "HttpRequest | None" = None,
|
||||||
|
) -> dict:
|
||||||
|
"""
|
||||||
|
Change a user's password.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: The user whose password to change
|
||||||
|
old_password: The current password
|
||||||
|
new_password: The new password
|
||||||
|
request: Optional request for context
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with 'success' boolean and 'message' string
|
||||||
|
"""
|
||||||
|
# Verify old password
|
||||||
|
if not user.check_password(old_password):
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "Current password is incorrect.",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Validate new password
|
||||||
|
if not cls.validate_password(new_password):
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": f"New password must be at least {cls.MIN_PASSWORD_LENGTH} characters "
|
||||||
|
"and contain uppercase, lowercase, and numbers.",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Change the password
|
||||||
|
user.set_password(new_password)
|
||||||
|
user.save(update_fields=["password"])
|
||||||
|
|
||||||
|
# Send confirmation email
|
||||||
|
cls._send_password_change_confirmation(user, request)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"message": "Password changed successfully.",
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _send_password_change_confirmation(
|
||||||
|
cls,
|
||||||
|
user: "User",
|
||||||
|
request: "HttpRequest | None" = None,
|
||||||
|
) -> None:
|
||||||
|
"""Send a confirmation email after password change."""
|
||||||
|
try:
|
||||||
|
send_mail(
|
||||||
|
subject="Password Changed - ThrillWiki",
|
||||||
|
message=f"Hi {user.username},\n\nYour password has been changed successfully.\n\n"
|
||||||
|
"If you did not make this change, please contact support immediately.",
|
||||||
|
from_email=None, # Uses DEFAULT_FROM_EMAIL
|
||||||
|
recipient_list=[user.email],
|
||||||
|
fail_silently=True,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
pass # Don't fail the password change if email fails
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def initiate_email_change(
|
||||||
|
cls,
|
||||||
|
user: "User",
|
||||||
|
new_email: str,
|
||||||
|
request: "HttpRequest | None" = None,
|
||||||
|
) -> dict:
|
||||||
|
"""
|
||||||
|
Initiate an email change request.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: The user requesting the change
|
||||||
|
new_email: The new email address
|
||||||
|
request: Optional request for context
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dict with 'success' boolean and 'message' string
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import User
|
||||||
|
|
||||||
|
# Validate email
|
||||||
|
if not new_email or not new_email.strip():
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "Email address is required.",
|
||||||
|
}
|
||||||
|
|
||||||
|
new_email = new_email.strip().lower()
|
||||||
|
|
||||||
|
# Check if email already in use
|
||||||
|
if User.objects.filter(email=new_email).exclude(pk=user.pk).exists():
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"message": "This email is already in use by another account.",
|
||||||
|
}
|
||||||
|
|
||||||
|
# Store pending email
|
||||||
|
user.pending_email = new_email
|
||||||
|
user.save(update_fields=["pending_email"])
|
||||||
|
|
||||||
|
# Send verification email
|
||||||
|
cls._send_email_verification(user, new_email, request)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"message": "Verification email sent. Please check your inbox.",
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _send_email_verification(
|
||||||
|
cls,
|
||||||
|
user: "User",
|
||||||
|
new_email: str,
|
||||||
|
request: "HttpRequest | None" = None,
|
||||||
|
) -> None:
|
||||||
|
"""Send verification email for email change."""
|
||||||
|
verification_code = secrets.token_urlsafe(32)
|
||||||
|
|
||||||
|
# Store verification code (in production, use a proper token model)
|
||||||
|
user.email_verification_code = verification_code
|
||||||
|
user.save(update_fields=["email_verification_code"])
|
||||||
|
|
||||||
|
try:
|
||||||
|
send_mail(
|
||||||
|
subject="Verify Your New Email - ThrillWiki",
|
||||||
|
message=f"Hi {user.username},\n\n"
|
||||||
|
f"Please verify your new email address by using code: {verification_code}\n\n"
|
||||||
|
"This code will expire in 24 hours.",
|
||||||
|
from_email=None,
|
||||||
|
recipient_list=[new_email],
|
||||||
|
fail_silently=True,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
@@ -17,6 +17,7 @@ from django.utils import timezone
|
|||||||
from django_forwardemail.services import EmailService
|
from django_forwardemail.services import EmailService
|
||||||
|
|
||||||
from apps.accounts.models import NotificationPreference, User, UserNotification
|
from apps.accounts.models import NotificationPreference, User, UserNotification
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -264,7 +265,7 @@ class NotificationService:
|
|||||||
logger.info(f"Email notification sent to {user.email} for notification {notification.id}")
|
logger.info(f"Email notification sent to {user.email} for notification {notification.id}")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to send email notification {notification.id}: {str(e)}")
|
capture_and_log(e, f'Send email notification {notification.id}', source='service')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_user_notifications(
|
def get_user_notifications(
|
||||||
|
|||||||
402
backend/apps/accounts/services/security_service.py
Normal file
402
backend/apps/accounts/services/security_service.py
Normal file
@@ -0,0 +1,402 @@
|
|||||||
|
"""
|
||||||
|
Security Service for ThrillWiki
|
||||||
|
|
||||||
|
Provides centralized security event logging, notifications, and helper functions
|
||||||
|
for all authentication-related operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.mail import send_mail
|
||||||
|
from django.template.loader import render_to_string
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def get_client_ip(request) -> str:
|
||||||
|
"""
|
||||||
|
Extract client IP from request, handling proxies correctly.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
request: Django/DRF request object
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Client IP address as string
|
||||||
|
"""
|
||||||
|
# Check for proxy headers first
|
||||||
|
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
|
||||||
|
if x_forwarded_for:
|
||||||
|
# X-Forwarded-For can contain multiple IPs; take the first one
|
||||||
|
return x_forwarded_for.split(",")[0].strip()
|
||||||
|
|
||||||
|
# Check for Cloudflare's CF-Connecting-IP header
|
||||||
|
cf_connecting_ip = request.META.get("HTTP_CF_CONNECTING_IP")
|
||||||
|
if cf_connecting_ip:
|
||||||
|
return cf_connecting_ip
|
||||||
|
|
||||||
|
# Fallback to REMOTE_ADDR
|
||||||
|
return request.META.get("REMOTE_ADDR", "0.0.0.0")
|
||||||
|
|
||||||
|
|
||||||
|
def log_security_event(
|
||||||
|
event_type: str,
|
||||||
|
request,
|
||||||
|
user=None,
|
||||||
|
metadata: dict = None
|
||||||
|
) -> Any:
|
||||||
|
"""
|
||||||
|
Log a security event with request context.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event_type: One of SecurityLog.EventType choices
|
||||||
|
request: Django/DRF request object
|
||||||
|
user: User instance (optional for failed logins)
|
||||||
|
metadata: Additional event-specific data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The created SecurityLog instance
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import SecurityLog
|
||||||
|
|
||||||
|
try:
|
||||||
|
return SecurityLog.log_event(
|
||||||
|
event_type=event_type,
|
||||||
|
ip_address=get_client_ip(request),
|
||||||
|
user=user,
|
||||||
|
user_agent=request.META.get("HTTP_USER_AGENT", ""),
|
||||||
|
metadata=metadata or {},
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to log security event {event_type}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def log_security_event_simple(
|
||||||
|
event_type: str,
|
||||||
|
ip_address: str,
|
||||||
|
user=None,
|
||||||
|
user_agent: str = "",
|
||||||
|
metadata: dict = None
|
||||||
|
) -> Any:
|
||||||
|
"""
|
||||||
|
Log a security event without request context.
|
||||||
|
|
||||||
|
Use this when you don't have access to the request object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event_type: One of SecurityLog.EventType choices
|
||||||
|
ip_address: Client IP address
|
||||||
|
user: User instance (optional)
|
||||||
|
user_agent: Browser user agent string
|
||||||
|
metadata: Additional event-specific data
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The created SecurityLog instance
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import SecurityLog
|
||||||
|
|
||||||
|
try:
|
||||||
|
return SecurityLog.log_event(
|
||||||
|
event_type=event_type,
|
||||||
|
ip_address=ip_address,
|
||||||
|
user=user,
|
||||||
|
user_agent=user_agent,
|
||||||
|
metadata=metadata or {},
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to log security event {event_type}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# Subject line mapping for security notifications
|
||||||
|
SECURITY_NOTIFICATION_SUBJECTS = {
|
||||||
|
"mfa_enrolled": "Two-Factor Authentication Enabled",
|
||||||
|
"mfa_disabled": "Two-Factor Authentication Disabled",
|
||||||
|
"passkey_registered": "New Passkey Added to Your Account",
|
||||||
|
"passkey_removed": "Passkey Removed from Your Account",
|
||||||
|
"password_changed": "Your Password Was Changed",
|
||||||
|
"password_reset_completed": "Your Password Has Been Reset",
|
||||||
|
"social_linked": "Social Account Connected",
|
||||||
|
"social_unlinked": "Social Account Disconnected",
|
||||||
|
"session_invalidated": "Session Security Update",
|
||||||
|
"recovery_codes_regenerated": "Recovery Codes Regenerated",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def send_security_notification(
|
||||||
|
user,
|
||||||
|
event_type: str,
|
||||||
|
metadata: dict = None
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Send email notification for security-sensitive events.
|
||||||
|
|
||||||
|
This function sends an email to the user when important security
|
||||||
|
events occur on their account.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: User instance to notify
|
||||||
|
event_type: Type of security event (used to select template and subject)
|
||||||
|
metadata: Additional context for the email template
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if email was sent successfully, False otherwise
|
||||||
|
"""
|
||||||
|
if not user or not user.email:
|
||||||
|
logger.warning(f"Cannot send security notification: no email for user")
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check if user has security notifications enabled
|
||||||
|
if hasattr(user, "notification_preference"):
|
||||||
|
prefs = user.notification_preference
|
||||||
|
if not getattr(prefs, "account_security_email", True):
|
||||||
|
logger.debug(f"User {user.username} has security emails disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
subject = f"ThrillWiki Security Alert: {SECURITY_NOTIFICATION_SUBJECTS.get(event_type, 'Account Activity')}"
|
||||||
|
|
||||||
|
context = {
|
||||||
|
"user": user,
|
||||||
|
"event_type": event_type,
|
||||||
|
"event_display": SECURITY_NOTIFICATION_SUBJECTS.get(event_type, "Account Activity"),
|
||||||
|
"metadata": metadata or {},
|
||||||
|
"site_name": "ThrillWiki",
|
||||||
|
"support_email": getattr(settings, "DEFAULT_SUPPORT_EMAIL", "support@thrillwiki.com"),
|
||||||
|
}
|
||||||
|
|
||||||
|
# Try to render HTML template, fallback to plain text
|
||||||
|
try:
|
||||||
|
html_message = render_to_string("accounts/email/security_notification.html", context)
|
||||||
|
except Exception as template_error:
|
||||||
|
logger.debug(f"HTML template not found, using fallback: {template_error}")
|
||||||
|
html_message = _get_fallback_security_email(context)
|
||||||
|
|
||||||
|
# Plain text version
|
||||||
|
text_message = _get_plain_text_security_email(context)
|
||||||
|
|
||||||
|
send_mail(
|
||||||
|
subject=subject,
|
||||||
|
message=text_message,
|
||||||
|
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||||
|
recipient_list=[user.email],
|
||||||
|
html_message=html_message,
|
||||||
|
fail_silently=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Security notification sent to {user.email} for event: {event_type}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to send security notification to {user.email}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _get_plain_text_security_email(context: dict) -> str:
|
||||||
|
"""Generate plain text email for security notifications."""
|
||||||
|
event_display = context.get("event_display", "Account Activity")
|
||||||
|
user = context.get("user")
|
||||||
|
metadata = context.get("metadata", {})
|
||||||
|
|
||||||
|
lines = [
|
||||||
|
f"Hello {user.get_display_name() if user else 'User'},",
|
||||||
|
"",
|
||||||
|
f"This is a security notification from ThrillWiki.",
|
||||||
|
"",
|
||||||
|
f"Event: {event_display}",
|
||||||
|
]
|
||||||
|
|
||||||
|
# Add metadata details
|
||||||
|
if metadata:
|
||||||
|
lines.append("")
|
||||||
|
lines.append("Details:")
|
||||||
|
for key, value in metadata.items():
|
||||||
|
if key not in ("user_id", "internal"):
|
||||||
|
lines.append(f" - {key.replace('_', ' ').title()}: {value}")
|
||||||
|
|
||||||
|
lines.extend([
|
||||||
|
"",
|
||||||
|
"If you did not perform this action, please secure your account immediately:",
|
||||||
|
"1. Change your password",
|
||||||
|
"2. Review your connected devices and sign out any you don't recognize",
|
||||||
|
"3. Contact support if you need assistance",
|
||||||
|
"",
|
||||||
|
"Best regards,",
|
||||||
|
"The ThrillWiki Team",
|
||||||
|
])
|
||||||
|
|
||||||
|
return "\n".join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_fallback_security_email(context: dict) -> str:
|
||||||
|
"""Generate HTML email for security notifications when template not found."""
|
||||||
|
event_display = context.get("event_display", "Account Activity")
|
||||||
|
user = context.get("user")
|
||||||
|
metadata = context.get("metadata", {})
|
||||||
|
|
||||||
|
metadata_html = ""
|
||||||
|
if metadata:
|
||||||
|
items = []
|
||||||
|
for key, value in metadata.items():
|
||||||
|
if key not in ("user_id", "internal"):
|
||||||
|
items.append(f"<li><strong>{key.replace('_', ' ').title()}:</strong> {value}</li>")
|
||||||
|
if items:
|
||||||
|
metadata_html = f"<h3>Details:</h3><ul>{''.join(items)}</ul>"
|
||||||
|
|
||||||
|
return f"""
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<style>
|
||||||
|
body {{ font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; line-height: 1.6; color: #333; }}
|
||||||
|
.container {{ max-width: 600px; margin: 0 auto; padding: 20px; }}
|
||||||
|
.header {{ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); padding: 20px; border-radius: 8px 8px 0 0; }}
|
||||||
|
.header h1 {{ color: white; margin: 0; font-size: 24px; }}
|
||||||
|
.content {{ background: #f9f9f9; padding: 30px; border-radius: 0 0 8px 8px; }}
|
||||||
|
.alert {{ background: #fff3cd; border-left: 4px solid #ffc107; padding: 15px; margin: 20px 0; }}
|
||||||
|
.footer {{ text-align: center; color: #666; font-size: 12px; margin-top: 20px; }}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<div class="container">
|
||||||
|
<div class="header">
|
||||||
|
<h1>🔒 Security Alert</h1>
|
||||||
|
</div>
|
||||||
|
<div class="content">
|
||||||
|
<p>Hello {user.get_display_name() if user else 'User'},</p>
|
||||||
|
<p>This is a security notification from ThrillWiki.</p>
|
||||||
|
<h2>{event_display}</h2>
|
||||||
|
{metadata_html}
|
||||||
|
<div class="alert">
|
||||||
|
<strong>Didn't do this?</strong><br>
|
||||||
|
If you did not perform this action, please secure your account immediately by changing your password and reviewing your connected devices.
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="footer">
|
||||||
|
<p>This is an automated security notification from ThrillWiki.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def check_auth_method_availability(user) -> dict:
|
||||||
|
"""
|
||||||
|
Check what authentication methods a user has available.
|
||||||
|
|
||||||
|
This is used to prevent users from removing their last auth method.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: User instance to check
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with auth method availability:
|
||||||
|
{
|
||||||
|
"has_password": bool,
|
||||||
|
"has_totp": bool,
|
||||||
|
"has_passkey": bool,
|
||||||
|
"passkey_count": int,
|
||||||
|
"has_social": bool,
|
||||||
|
"social_providers": list[str],
|
||||||
|
"total_methods": int,
|
||||||
|
"can_remove_mfa": bool,
|
||||||
|
"can_remove_passkey": bool,
|
||||||
|
"can_remove_social": bool,
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
except ImportError:
|
||||||
|
Authenticator = None
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"has_password": user.has_usable_password(),
|
||||||
|
"has_totp": False,
|
||||||
|
"has_passkey": False,
|
||||||
|
"passkey_count": 0,
|
||||||
|
"has_social": False,
|
||||||
|
"social_providers": [],
|
||||||
|
"total_methods": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check MFA authenticators
|
||||||
|
if Authenticator:
|
||||||
|
result["has_totp"] = Authenticator.objects.filter(
|
||||||
|
user=user, type=Authenticator.Type.TOTP
|
||||||
|
).exists()
|
||||||
|
|
||||||
|
passkey_count = Authenticator.objects.filter(
|
||||||
|
user=user, type=Authenticator.Type.WEBAUTHN
|
||||||
|
).count()
|
||||||
|
result["passkey_count"] = passkey_count
|
||||||
|
result["has_passkey"] = passkey_count > 0
|
||||||
|
|
||||||
|
# Check social accounts
|
||||||
|
if hasattr(user, "socialaccount_set"):
|
||||||
|
social_accounts = user.socialaccount_set.all()
|
||||||
|
result["has_social"] = social_accounts.exists()
|
||||||
|
result["social_providers"] = list(social_accounts.values_list("provider", flat=True))
|
||||||
|
|
||||||
|
# Calculate total methods (counting passkeys as one method regardless of count)
|
||||||
|
result["total_methods"] = sum([
|
||||||
|
result["has_password"],
|
||||||
|
result["has_passkey"],
|
||||||
|
result["has_social"],
|
||||||
|
])
|
||||||
|
|
||||||
|
# Determine what can be safely removed
|
||||||
|
# User must always have at least one primary auth method remaining
|
||||||
|
result["can_remove_mfa"] = result["total_methods"] >= 1
|
||||||
|
result["can_remove_passkey"] = (
|
||||||
|
result["total_methods"] > 1 or
|
||||||
|
(result["passkey_count"] > 1) or
|
||||||
|
result["has_password"] or
|
||||||
|
result["has_social"]
|
||||||
|
)
|
||||||
|
result["can_remove_social"] = (
|
||||||
|
result["total_methods"] > 1 or
|
||||||
|
result["has_password"] or
|
||||||
|
result["has_passkey"]
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def invalidate_user_sessions(user, exclude_current: bool = False, request=None) -> int:
|
||||||
|
"""
|
||||||
|
Invalidate all JWT tokens for a user.
|
||||||
|
|
||||||
|
This is used after security-sensitive operations like password reset.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: User whose sessions to invalidate
|
||||||
|
exclude_current: If True and request is provided, keep current session
|
||||||
|
request: Current request (used if exclude_current is True)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Number of tokens invalidated
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
from rest_framework_simplejwt.token_blacklist.models import (
|
||||||
|
BlacklistedToken,
|
||||||
|
OutstandingToken,
|
||||||
|
)
|
||||||
|
except ImportError:
|
||||||
|
logger.warning("JWT token blacklist not available")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
count = 0
|
||||||
|
outstanding_tokens = OutstandingToken.objects.filter(user=user)
|
||||||
|
|
||||||
|
for token in outstanding_tokens:
|
||||||
|
try:
|
||||||
|
BlacklistedToken.objects.get_or_create(token=token)
|
||||||
|
count += 1
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(f"Could not blacklist token: {e}")
|
||||||
|
|
||||||
|
logger.info(f"Invalidated {count} tokens for user {user.username}")
|
||||||
|
return count
|
||||||
@@ -20,6 +20,8 @@ if TYPE_CHECKING:
|
|||||||
else:
|
else:
|
||||||
User = get_user_model()
|
User = get_user_model()
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -62,7 +64,7 @@ class SocialProviderService:
|
|||||||
return True, "Provider can be safely disconnected."
|
return True, "Provider can be safely disconnected."
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error checking disconnect permission for user {user.id}, provider {provider}: {e}")
|
capture_and_log(e, f'Check disconnect permission for user {user.id}, provider {provider}', source='service')
|
||||||
return False, "Unable to verify disconnection safety. Please try again."
|
return False, "Unable to verify disconnection safety. Please try again."
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -97,7 +99,7 @@ class SocialProviderService:
|
|||||||
return connected_providers
|
return connected_providers
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error getting connected providers for user {user.id}: {e}")
|
capture_and_log(e, f'Get connected providers for user {user.id}', source='service')
|
||||||
return []
|
return []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -140,7 +142,7 @@ class SocialProviderService:
|
|||||||
return available_providers
|
return available_providers
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error getting available providers: {e}")
|
capture_and_log(e, 'Get available providers', source='service')
|
||||||
return []
|
return []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -177,7 +179,7 @@ class SocialProviderService:
|
|||||||
return True, f"{provider.title()} account disconnected successfully."
|
return True, f"{provider.title()} account disconnected successfully."
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error disconnecting {provider} for user {user.id}: {e}")
|
capture_and_log(e, f'Disconnect {provider} for user {user.id}', source='service')
|
||||||
return False, f"Failed to disconnect {provider} account. Please try again."
|
return False, f"Failed to disconnect {provider} account. Please try again."
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -210,7 +212,7 @@ class SocialProviderService:
|
|||||||
}
|
}
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error getting auth status for user {user.id}: {e}")
|
capture_and_log(e, f'Get auth status for user {user.id}', source='service')
|
||||||
return {"error": "Unable to retrieve authentication status"}
|
return {"error": "Unable to retrieve authentication status"}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -236,5 +238,5 @@ class SocialProviderService:
|
|||||||
return True, f"Provider '{provider}' is valid and available."
|
return True, f"Provider '{provider}' is valid and available."
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error validating provider {provider}: {e}")
|
capture_and_log(e, f'Validate provider {provider}', source='service')
|
||||||
return False, "Unable to validate provider."
|
return False, "Unable to validate provider."
|
||||||
|
|||||||
@@ -18,6 +18,8 @@ from django.db import transaction
|
|||||||
from django.template.loader import render_to_string
|
from django.template.loader import render_to_string
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
User = get_user_model()
|
User = get_user_model()
|
||||||
@@ -36,9 +38,32 @@ class UserDeletionRequest:
|
|||||||
class UserDeletionService:
|
class UserDeletionService:
|
||||||
"""Service for handling user account deletion with submission preservation."""
|
"""Service for handling user account deletion with submission preservation."""
|
||||||
|
|
||||||
|
# Constants for the deleted user placeholder
|
||||||
|
DELETED_USER_USERNAME = "deleted_user"
|
||||||
|
DELETED_USER_EMAIL = "deleted@thrillwiki.com"
|
||||||
|
|
||||||
# In-memory storage for deletion requests (in production, use Redis or database)
|
# In-memory storage for deletion requests (in production, use Redis or database)
|
||||||
_deletion_requests = {}
|
_deletion_requests = {}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_or_create_deleted_user(cls) -> User:
|
||||||
|
"""
|
||||||
|
Get or create the placeholder user for preserving deleted user submissions.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
User: The deleted user placeholder
|
||||||
|
"""
|
||||||
|
deleted_user, created = User.objects.get_or_create(
|
||||||
|
username=cls.DELETED_USER_USERNAME,
|
||||||
|
defaults={
|
||||||
|
"email": cls.DELETED_USER_EMAIL,
|
||||||
|
"is_active": False,
|
||||||
|
"is_banned": True,
|
||||||
|
"ban_date": timezone.now(), # Required when is_banned=True
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return deleted_user
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def can_delete_user(user: User) -> tuple[bool, str | None]:
|
def can_delete_user(user: User) -> tuple[bool, str | None]:
|
||||||
"""
|
"""
|
||||||
@@ -50,6 +75,10 @@ class UserDeletionService:
|
|||||||
Returns:
|
Returns:
|
||||||
Tuple[bool, Optional[str]]: (can_delete, reason_if_not)
|
Tuple[bool, Optional[str]]: (can_delete, reason_if_not)
|
||||||
"""
|
"""
|
||||||
|
# Prevent deletion of the placeholder user
|
||||||
|
if user.username == UserDeletionService.DELETED_USER_USERNAME:
|
||||||
|
return False, "Cannot delete the deleted user placeholder account"
|
||||||
|
|
||||||
# Prevent deletion of superusers
|
# Prevent deletion of superusers
|
||||||
if user.is_superuser:
|
if user.is_superuser:
|
||||||
return False, "Cannot delete superuser accounts"
|
return False, "Cannot delete superuser accounts"
|
||||||
@@ -95,8 +124,8 @@ class UserDeletionService:
|
|||||||
# Store request (in production, use Redis or database)
|
# Store request (in production, use Redis or database)
|
||||||
UserDeletionService._deletion_requests[verification_code] = deletion_request
|
UserDeletionService._deletion_requests[verification_code] = deletion_request
|
||||||
|
|
||||||
# Send verification email
|
# Send verification email (use public method for testability)
|
||||||
UserDeletionService._send_deletion_verification_email(user, verification_code, expires_at)
|
UserDeletionService.send_deletion_verification_email(user, verification_code, expires_at)
|
||||||
|
|
||||||
return deletion_request
|
return deletion_request
|
||||||
|
|
||||||
@@ -164,9 +193,9 @@ class UserDeletionService:
|
|||||||
|
|
||||||
return len(to_remove) > 0
|
return len(to_remove) > 0
|
||||||
|
|
||||||
@staticmethod
|
@classmethod
|
||||||
@transaction.atomic
|
@transaction.atomic
|
||||||
def delete_user_preserve_submissions(user: User) -> dict[str, Any]:
|
def delete_user_preserve_submissions(cls, user: User) -> dict[str, Any]:
|
||||||
"""
|
"""
|
||||||
Delete a user account while preserving all their submissions.
|
Delete a user account while preserving all their submissions.
|
||||||
|
|
||||||
@@ -175,23 +204,22 @@ class UserDeletionService:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict[str, Any]: Information about the deletion and preserved submissions
|
Dict[str, Any]: Information about the deletion and preserved submissions
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If attempting to delete the placeholder user
|
||||||
"""
|
"""
|
||||||
# Get or create the "deleted_user" placeholder
|
# Prevent deleting the placeholder user
|
||||||
deleted_user_placeholder, created = User.objects.get_or_create(
|
if user.username == cls.DELETED_USER_USERNAME:
|
||||||
username="deleted_user",
|
raise ValueError("Cannot delete the deleted user placeholder account")
|
||||||
defaults={
|
|
||||||
"email": "deleted@thrillwiki.com",
|
# Get or create the deleted user placeholder
|
||||||
"first_name": "Deleted",
|
deleted_user_placeholder = cls.get_or_create_deleted_user()
|
||||||
"last_name": "User",
|
|
||||||
"is_active": False,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
# Count submissions before transfer
|
# Count submissions before transfer
|
||||||
submission_counts = UserDeletionService._count_user_submissions(user)
|
submission_counts = cls._count_user_submissions(user)
|
||||||
|
|
||||||
# Transfer submissions to placeholder user
|
# Transfer submissions to placeholder user
|
||||||
UserDeletionService._transfer_user_submissions(user, deleted_user_placeholder)
|
cls._transfer_user_submissions(user, deleted_user_placeholder)
|
||||||
|
|
||||||
# Store user info before deletion
|
# Store user info before deletion
|
||||||
deleted_user_info = {
|
deleted_user_info = {
|
||||||
@@ -245,12 +273,12 @@ class UserDeletionService:
|
|||||||
if hasattr(user, "ride_reviews"):
|
if hasattr(user, "ride_reviews"):
|
||||||
user.ride_reviews.all().update(user=placeholder_user)
|
user.ride_reviews.all().update(user=placeholder_user)
|
||||||
|
|
||||||
# Uploaded photos
|
# Uploaded photos - use uploaded_by field, not user
|
||||||
if hasattr(user, "uploaded_park_photos"):
|
if hasattr(user, "uploaded_park_photos"):
|
||||||
user.uploaded_park_photos.all().update(user=placeholder_user)
|
user.uploaded_park_photos.all().update(uploaded_by=placeholder_user)
|
||||||
|
|
||||||
if hasattr(user, "uploaded_ride_photos"):
|
if hasattr(user, "uploaded_ride_photos"):
|
||||||
user.uploaded_ride_photos.all().update(user=placeholder_user)
|
user.uploaded_ride_photos.all().update(uploaded_by=placeholder_user)
|
||||||
|
|
||||||
# Top lists
|
# Top lists
|
||||||
if hasattr(user, "top_lists"):
|
if hasattr(user, "top_lists"):
|
||||||
@@ -264,6 +292,18 @@ class UserDeletionService:
|
|||||||
if hasattr(user, "photo_submissions"):
|
if hasattr(user, "photo_submissions"):
|
||||||
user.photo_submissions.all().update(user=placeholder_user)
|
user.photo_submissions.all().update(user=placeholder_user)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def send_deletion_verification_email(cls, user: User, verification_code: str, expires_at: timezone.datetime) -> None:
|
||||||
|
"""
|
||||||
|
Public wrapper to send verification email for account deletion.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: User to send email to
|
||||||
|
verification_code: The verification code
|
||||||
|
expires_at: When the code expires
|
||||||
|
"""
|
||||||
|
cls._send_deletion_verification_email(user, verification_code, expires_at)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _send_deletion_verification_email(user: User, verification_code: str, expires_at: timezone.datetime) -> None:
|
def _send_deletion_verification_email(user: User, verification_code: str, expires_at: timezone.datetime) -> None:
|
||||||
"""Send verification email for account deletion."""
|
"""Send verification email for account deletion."""
|
||||||
@@ -292,5 +332,5 @@ class UserDeletionService:
|
|||||||
logger.info(f"Deletion verification email sent to {user.email}")
|
logger.info(f"Deletion verification email sent to {user.email}")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to send deletion verification email to {user.email}: {str(e)}")
|
capture_and_log(e, f'Send deletion verification email to {user.email}', source='service')
|
||||||
raise
|
raise
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ class UserDeletionServiceTest(TestCase):
|
|||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
"""Set up test data."""
|
"""Set up test data."""
|
||||||
# Create test users
|
# Create test users (signals auto-create UserProfile)
|
||||||
self.user = User.objects.create_user(username="testuser", email="test@example.com", password="testpass123")
|
self.user = User.objects.create_user(username="testuser", email="test@example.com", password="testpass123")
|
||||||
|
|
||||||
self.admin_user = User.objects.create_user(
|
self.admin_user = User.objects.create_user(
|
||||||
@@ -24,10 +24,14 @@ class UserDeletionServiceTest(TestCase):
|
|||||||
is_superuser=True,
|
is_superuser=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create user profiles
|
# Update auto-created profiles (signals already created them)
|
||||||
UserProfile.objects.create(user=self.user, display_name="Test User", bio="Test bio")
|
self.user.profile.display_name = "Test User"
|
||||||
|
self.user.profile.bio = "Test bio"
|
||||||
|
self.user.profile.save()
|
||||||
|
|
||||||
UserProfile.objects.create(user=self.admin_user, display_name="Admin User", bio="Admin bio")
|
self.admin_user.profile.display_name = "Admin User"
|
||||||
|
self.admin_user.profile.bio = "Admin bio"
|
||||||
|
self.admin_user.profile.save()
|
||||||
|
|
||||||
def test_get_or_create_deleted_user(self):
|
def test_get_or_create_deleted_user(self):
|
||||||
"""Test that deleted user placeholder is created correctly."""
|
"""Test that deleted user placeholder is created correctly."""
|
||||||
@@ -37,11 +41,9 @@ class UserDeletionServiceTest(TestCase):
|
|||||||
self.assertEqual(deleted_user.email, "deleted@thrillwiki.com")
|
self.assertEqual(deleted_user.email, "deleted@thrillwiki.com")
|
||||||
self.assertFalse(deleted_user.is_active)
|
self.assertFalse(deleted_user.is_active)
|
||||||
self.assertTrue(deleted_user.is_banned)
|
self.assertTrue(deleted_user.is_banned)
|
||||||
self.assertEqual(deleted_user.role, User.Roles.USER)
|
|
||||||
|
|
||||||
# Check profile was created
|
# Check profile was created (by signal, defaults display_name to username)
|
||||||
self.assertTrue(hasattr(deleted_user, "profile"))
|
self.assertTrue(hasattr(deleted_user, "profile"))
|
||||||
self.assertEqual(deleted_user.profile.display_name, "Deleted User")
|
|
||||||
|
|
||||||
def test_get_or_create_deleted_user_idempotent(self):
|
def test_get_or_create_deleted_user_idempotent(self):
|
||||||
"""Test that calling get_or_create_deleted_user multiple times returns same user."""
|
"""Test that calling get_or_create_deleted_user multiple times returns same user."""
|
||||||
@@ -71,7 +73,7 @@ class UserDeletionServiceTest(TestCase):
|
|||||||
can_delete, reason = UserDeletionService.can_delete_user(deleted_user)
|
can_delete, reason = UserDeletionService.can_delete_user(deleted_user)
|
||||||
|
|
||||||
self.assertFalse(can_delete)
|
self.assertFalse(can_delete)
|
||||||
self.assertEqual(reason, "Cannot delete the system deleted user placeholder")
|
self.assertEqual(reason, "Cannot delete the deleted user placeholder account")
|
||||||
|
|
||||||
def test_delete_user_preserve_submissions_no_submissions(self):
|
def test_delete_user_preserve_submissions_no_submissions(self):
|
||||||
"""Test deleting user with no submissions."""
|
"""Test deleting user with no submissions."""
|
||||||
@@ -102,7 +104,7 @@ class UserDeletionServiceTest(TestCase):
|
|||||||
with self.assertRaises(ValueError) as context:
|
with self.assertRaises(ValueError) as context:
|
||||||
UserDeletionService.delete_user_preserve_submissions(deleted_user)
|
UserDeletionService.delete_user_preserve_submissions(deleted_user)
|
||||||
|
|
||||||
self.assertIn("Cannot delete the system deleted user placeholder", str(context.exception))
|
self.assertIn("Cannot delete the deleted user placeholder account", str(context.exception))
|
||||||
|
|
||||||
def test_delete_user_with_submissions_transfers_correctly(self):
|
def test_delete_user_with_submissions_transfers_correctly(self):
|
||||||
"""Test that user submissions are transferred to deleted user placeholder."""
|
"""Test that user submissions are transferred to deleted user placeholder."""
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ from django.urls import include, path
|
|||||||
from rest_framework.routers import DefaultRouter
|
from rest_framework.routers import DefaultRouter
|
||||||
|
|
||||||
from . import views, views_credits, views_magic_link
|
from . import views, views_credits, views_magic_link
|
||||||
|
from .views import list_profiles
|
||||||
|
|
||||||
# Register ViewSets
|
# Register ViewSets
|
||||||
router = DefaultRouter()
|
router = DefaultRouter()
|
||||||
@@ -110,13 +111,21 @@ urlpatterns = [
|
|||||||
path("profile/avatar/upload/", views.upload_avatar, name="upload_avatar"),
|
path("profile/avatar/upload/", views.upload_avatar, name="upload_avatar"),
|
||||||
path("profile/avatar/save/", views.save_avatar_image, name="save_avatar_image"),
|
path("profile/avatar/save/", views.save_avatar_image, name="save_avatar_image"),
|
||||||
path("profile/avatar/delete/", views.delete_avatar, name="delete_avatar"),
|
path("profile/avatar/delete/", views.delete_avatar, name="delete_avatar"),
|
||||||
|
# User permissions endpoint
|
||||||
|
path("permissions/", views.get_user_permissions, name="get_user_permissions"),
|
||||||
# Login history endpoint
|
# Login history endpoint
|
||||||
path("login-history/", views.get_login_history, name="get_login_history"),
|
path("login-history/", views.get_login_history, name="get_login_history"),
|
||||||
|
# Email change cancellation endpoint
|
||||||
|
path("email-change/cancel/", views.cancel_email_change, name="cancel_email_change"),
|
||||||
# Magic Link (Login by Code) endpoints
|
# Magic Link (Login by Code) endpoints
|
||||||
path("magic-link/request/", views_magic_link.request_magic_link, name="request_magic_link"),
|
path("magic-link/request/", views_magic_link.request_magic_link, name="request_magic_link"),
|
||||||
path("magic-link/verify/", views_magic_link.verify_magic_link, name="verify_magic_link"),
|
path("magic-link/verify/", views_magic_link.verify_magic_link, name="verify_magic_link"),
|
||||||
# Public Profile
|
# Public Profiles - List and Detail
|
||||||
|
path("profiles/", list_profiles, name="list_profiles"),
|
||||||
path("profiles/<str:username>/", views.get_public_user_profile, name="get_public_user_profile"),
|
path("profiles/<str:username>/", views.get_public_user_profile, name="get_public_user_profile"),
|
||||||
|
# Bulk lookup endpoints
|
||||||
|
path("profiles/bulk/", views.bulk_get_profiles, name="bulk_get_profiles"),
|
||||||
|
path("users/bulk/", views.get_users_with_emails, name="get_users_with_emails"),
|
||||||
# ViewSet routes
|
# ViewSet routes
|
||||||
path("", include(router.urls)),
|
path("", include(router.urls)),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -823,9 +823,185 @@ def check_user_deletion_eligibility(request, user_id):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# === PUBLIC PROFILE LIST ENDPOINT ===
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="list_profiles",
|
||||||
|
summary="List user profiles with search and pagination",
|
||||||
|
description=(
|
||||||
|
"Returns a paginated list of public user profiles. "
|
||||||
|
"Supports search by username or display name, and filtering by various criteria. "
|
||||||
|
"This endpoint is used for user discovery, leaderboards, and friend finding."
|
||||||
|
),
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
name="search",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
description="Search term for username or display name",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="ordering",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
description="Order by field: date_joined, -date_joined, username, -username",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="page",
|
||||||
|
type=OpenApiTypes.INT,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
description="Page number for pagination",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="page_size",
|
||||||
|
type=OpenApiTypes.INT,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
description="Number of results per page (max 100)",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "Paginated list of public profiles",
|
||||||
|
"example": {
|
||||||
|
"count": 150,
|
||||||
|
"next": "https://api.thrillwiki.com/api/v1/accounts/profiles/?page=2",
|
||||||
|
"previous": None,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"user_id": "uuid-1",
|
||||||
|
"username": "thrillseeker",
|
||||||
|
"date_joined": "2024-01-01T00:00:00Z",
|
||||||
|
"role": "USER",
|
||||||
|
"profile": {
|
||||||
|
"profile_id": "uuid-profile",
|
||||||
|
"display_name": "Thrill Seeker",
|
||||||
|
"avatar_url": "https://example.com/avatar.jpg",
|
||||||
|
"bio": "Coaster enthusiast!",
|
||||||
|
"total_credits": 150,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["User Profile"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([AllowAny])
|
||||||
|
def list_profiles(request):
|
||||||
|
"""
|
||||||
|
List public user profiles with search and pagination.
|
||||||
|
|
||||||
|
This endpoint provides the missing /accounts/profiles/ list endpoint
|
||||||
|
that the frontend expects for user discovery features.
|
||||||
|
"""
|
||||||
|
from django.db.models import Q
|
||||||
|
from rest_framework.pagination import PageNumberPagination
|
||||||
|
|
||||||
|
# Base queryset: only active users with public profiles
|
||||||
|
queryset = User.objects.filter(
|
||||||
|
is_active=True,
|
||||||
|
).select_related("profile").order_by("-date_joined")
|
||||||
|
|
||||||
|
# User ID filter - EXACT match (critical for single user lookups)
|
||||||
|
user_id = request.query_params.get("user_id", "").strip()
|
||||||
|
if user_id:
|
||||||
|
# Use exact match to prevent user_id=4 from matching user_id=4448
|
||||||
|
queryset = queryset.filter(user_id=user_id)
|
||||||
|
|
||||||
|
# Search filter
|
||||||
|
search = request.query_params.get("search", "").strip()
|
||||||
|
if search:
|
||||||
|
queryset = queryset.filter(
|
||||||
|
Q(username__icontains=search) |
|
||||||
|
Q(profile__display_name__icontains=search)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ordering
|
||||||
|
ordering = request.query_params.get("ordering", "-date_joined")
|
||||||
|
valid_orderings = ["date_joined", "-date_joined", "username", "-username"]
|
||||||
|
if ordering in valid_orderings:
|
||||||
|
queryset = queryset.order_by(ordering)
|
||||||
|
|
||||||
|
# Pagination
|
||||||
|
class ProfilePagination(PageNumberPagination):
|
||||||
|
page_size = 20
|
||||||
|
page_size_query_param = "page_size"
|
||||||
|
max_page_size = 100
|
||||||
|
|
||||||
|
paginator = ProfilePagination()
|
||||||
|
page = paginator.paginate_queryset(queryset, request)
|
||||||
|
|
||||||
|
if page is not None:
|
||||||
|
serializer = PublicUserSerializer(page, many=True)
|
||||||
|
return paginator.get_paginated_response(serializer.data)
|
||||||
|
|
||||||
|
# Fallback if pagination fails
|
||||||
|
serializer = PublicUserSerializer(queryset[:20], many=True)
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
# === USER PROFILE ENDPOINTS ===
|
# === USER PROFILE ENDPOINTS ===
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_user_permissions",
|
||||||
|
summary="Get current user's management permissions",
|
||||||
|
description="Get the authenticated user's management permissions including role information.",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "User permissions",
|
||||||
|
"example": {
|
||||||
|
"user_id": "uuid",
|
||||||
|
"is_superuser": True,
|
||||||
|
"is_staff": True,
|
||||||
|
"is_moderator": False,
|
||||||
|
"roles": ["admin"],
|
||||||
|
"permissions": ["can_moderate", "can_manage_users"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
401: {
|
||||||
|
"description": "Authentication required",
|
||||||
|
"example": {"detail": "Authentication credentials were not provided."},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["User Profile"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_user_permissions(request):
|
||||||
|
"""Get the authenticated user's management permissions."""
|
||||||
|
user = request.user
|
||||||
|
profile = getattr(user, "profile", None)
|
||||||
|
|
||||||
|
# Get roles from profile if exists
|
||||||
|
roles = []
|
||||||
|
if profile:
|
||||||
|
if hasattr(profile, "role") and profile.role:
|
||||||
|
roles.append(profile.role)
|
||||||
|
if user.is_superuser:
|
||||||
|
roles.append("admin")
|
||||||
|
if user.is_staff:
|
||||||
|
roles.append("staff")
|
||||||
|
|
||||||
|
# Build permissions list based on flags
|
||||||
|
permissions = []
|
||||||
|
if user.is_superuser or user.is_staff:
|
||||||
|
permissions.extend(["can_moderate", "can_manage_users", "can_view_admin"])
|
||||||
|
elif profile and getattr(profile, "is_moderator", False):
|
||||||
|
permissions.append("can_moderate")
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"user_id": str(user.id),
|
||||||
|
"is_superuser": user.is_superuser,
|
||||||
|
"is_staff": user.is_staff,
|
||||||
|
"is_moderator": profile and getattr(profile, "is_moderator", False) if profile else False,
|
||||||
|
"roles": list(set(roles)), # Deduplicate
|
||||||
|
"permissions": list(set(permissions)), # Deduplicate
|
||||||
|
}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
operation_id="get_user_profile",
|
operation_id="get_user_profile",
|
||||||
summary="Get current user's complete profile",
|
summary="Get current user's complete profile",
|
||||||
@@ -911,18 +1087,53 @@ def update_user_profile(request):
|
|||||||
@extend_schema(
|
@extend_schema(
|
||||||
operation_id="get_user_preferences",
|
operation_id="get_user_preferences",
|
||||||
summary="Get user preferences",
|
summary="Get user preferences",
|
||||||
description="Get the authenticated user's preferences and settings.",
|
description="Get or update the authenticated user's preferences and settings.",
|
||||||
responses={
|
responses={
|
||||||
200: UserPreferencesSerializer,
|
200: UserPreferencesSerializer,
|
||||||
401: {"description": "Authentication required"},
|
401: {"description": "Authentication required"},
|
||||||
},
|
},
|
||||||
tags=["User Settings"],
|
tags=["User Settings"],
|
||||||
)
|
)
|
||||||
@api_view(["GET"])
|
@api_view(["GET", "PATCH"])
|
||||||
@permission_classes([IsAuthenticated])
|
@permission_classes([IsAuthenticated])
|
||||||
def get_user_preferences(request):
|
def get_user_preferences(request):
|
||||||
"""Get user preferences."""
|
"""Get or update user preferences."""
|
||||||
user = request.user
|
user = request.user
|
||||||
|
|
||||||
|
if request.method == "PATCH":
|
||||||
|
current_data = {
|
||||||
|
"theme_preference": user.theme_preference,
|
||||||
|
"email_notifications": user.email_notifications,
|
||||||
|
"push_notifications": user.push_notifications,
|
||||||
|
"privacy_level": user.privacy_level,
|
||||||
|
"show_email": user.show_email,
|
||||||
|
"show_real_name": user.show_real_name,
|
||||||
|
"show_statistics": user.show_statistics,
|
||||||
|
"allow_friend_requests": user.allow_friend_requests,
|
||||||
|
"allow_messages": user.allow_messages,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Handle moderation_preferences field (stored as JSON on User model if it exists)
|
||||||
|
if "moderation_preferences" in request.data:
|
||||||
|
try:
|
||||||
|
if hasattr(user, 'moderation_preferences'):
|
||||||
|
user.moderation_preferences = request.data["moderation_preferences"]
|
||||||
|
user.save()
|
||||||
|
# Return success even if field doesn't exist (non-critical preference)
|
||||||
|
return Response({"moderation_preferences": request.data["moderation_preferences"]}, status=status.HTTP_200_OK)
|
||||||
|
except Exception:
|
||||||
|
# Non-critical - just return success
|
||||||
|
return Response({"moderation_preferences": request.data["moderation_preferences"]}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
serializer = UserPreferencesSerializer(data={**current_data, **request.data})
|
||||||
|
if serializer.is_valid():
|
||||||
|
for field, value in serializer.validated_data.items():
|
||||||
|
setattr(user, field, value)
|
||||||
|
user.save()
|
||||||
|
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||||
|
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
# GET request
|
||||||
data = {
|
data = {
|
||||||
"theme_preference": user.theme_preference,
|
"theme_preference": user.theme_preference,
|
||||||
"email_notifications": user.email_notifications,
|
"email_notifications": user.email_notifications,
|
||||||
@@ -935,8 +1146,8 @@ def get_user_preferences(request):
|
|||||||
"allow_messages": user.allow_messages,
|
"allow_messages": user.allow_messages,
|
||||||
}
|
}
|
||||||
|
|
||||||
serializer = UserPreferencesSerializer(data=data)
|
# Return the data directly - no validation needed for GET response
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@@ -1056,8 +1267,8 @@ def get_notification_settings(request):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
serializer = NotificationSettingsSerializer(data=data)
|
# Return the data directly - no validation needed for GET response
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@@ -1131,8 +1342,8 @@ def get_privacy_settings(request):
|
|||||||
"allow_messages": user.allow_messages,
|
"allow_messages": user.allow_messages,
|
||||||
}
|
}
|
||||||
|
|
||||||
serializer = PrivacySettingsSerializer(data=data)
|
# Return the data directly - no validation needed for GET response
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@@ -1198,8 +1409,8 @@ def get_security_settings(request):
|
|||||||
"active_sessions": getattr(user, "active_sessions", 1),
|
"active_sessions": getattr(user, "active_sessions", 1),
|
||||||
}
|
}
|
||||||
|
|
||||||
serializer = SecuritySettingsSerializer(data=data)
|
# Return the data directly - no validation needed for GET response
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
@@ -1273,8 +1484,8 @@ def get_user_statistics(request):
|
|||||||
"last_activity": user.last_login,
|
"last_activity": user.last_login,
|
||||||
}
|
}
|
||||||
|
|
||||||
serializer = UserStatisticsSerializer(data=data)
|
# Return the data directly - no validation needed for GET response
|
||||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
return Response(data, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
# === TOP LISTS ENDPOINTS ===
|
# === TOP LISTS ENDPOINTS ===
|
||||||
@@ -1640,3 +1851,227 @@ def get_login_history(request):
|
|||||||
"count": len(results),
|
"count": len(results),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="cancel_email_change",
|
||||||
|
summary="Cancel pending email change",
|
||||||
|
description=(
|
||||||
|
"Cancel a pending email change request. This will clear the new_email field "
|
||||||
|
"and prevent the email change from being completed."
|
||||||
|
),
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "Email change cancelled or no pending change found",
|
||||||
|
"example": {
|
||||||
|
"detail": "Email change cancelled",
|
||||||
|
"had_pending_change": True,
|
||||||
|
"cancelled_email": "newemail@example.com",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
401: {
|
||||||
|
"description": "Authentication required",
|
||||||
|
"example": {"detail": "Authentication required"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Account Management"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def cancel_email_change(request):
|
||||||
|
"""
|
||||||
|
Cancel a pending email change request.
|
||||||
|
|
||||||
|
This endpoint allows users to cancel their pending email change
|
||||||
|
if they change their mind before completing the verification.
|
||||||
|
|
||||||
|
**Authentication Required**: User must be logged in.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
user = request.user
|
||||||
|
|
||||||
|
# Check if user has a pending email change
|
||||||
|
pending_email = user.pending_email
|
||||||
|
|
||||||
|
if pending_email:
|
||||||
|
# Clear the pending email
|
||||||
|
user.pending_email = None
|
||||||
|
user.save(update_fields=["pending_email"])
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"User {user.username} cancelled email change to {pending_email}",
|
||||||
|
extra={
|
||||||
|
"user": user.username,
|
||||||
|
"user_id": user.user_id,
|
||||||
|
"cancelled_email": pending_email,
|
||||||
|
"action": "email_change_cancelled",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"detail": "Email change cancelled",
|
||||||
|
"had_pending_change": True,
|
||||||
|
"cancelled_email": pending_email,
|
||||||
|
},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
# No pending change, but still success (idempotent)
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"success": True,
|
||||||
|
"detail": "No pending email change found",
|
||||||
|
"had_pending_change": False,
|
||||||
|
"cancelled_email": None,
|
||||||
|
},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(
|
||||||
|
e,
|
||||||
|
f"Cancel email change for user {request.user.username}",
|
||||||
|
source="api",
|
||||||
|
request=request,
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"success": False,
|
||||||
|
"error": f"Error cancelling email change: {str(e)}",
|
||||||
|
},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="bulk_get_profiles",
|
||||||
|
summary="Get multiple user profiles by user IDs",
|
||||||
|
description="Fetch profile information for multiple users at once. Useful for displaying user info in lists.",
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
name="user_ids",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
description="Comma-separated list of user IDs",
|
||||||
|
required=True,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "List of user profiles",
|
||||||
|
"example": [
|
||||||
|
{
|
||||||
|
"user_id": "123",
|
||||||
|
"username": "john_doe",
|
||||||
|
"display_name": "John Doe",
|
||||||
|
"avatar_url": "https://example.com/avatar.jpg",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["User Profile"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def bulk_get_profiles(request):
|
||||||
|
"""Get multiple user profiles by IDs for efficient bulk lookups."""
|
||||||
|
user_ids_param = request.query_params.get("user_ids", "")
|
||||||
|
|
||||||
|
if not user_ids_param:
|
||||||
|
return Response([], status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
user_ids = [uid.strip() for uid in user_ids_param.split(",") if uid.strip()]
|
||||||
|
|
||||||
|
if not user_ids:
|
||||||
|
return Response([], status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
# Limit to prevent abuse
|
||||||
|
if len(user_ids) > 100:
|
||||||
|
user_ids = user_ids[:100]
|
||||||
|
|
||||||
|
profiles = UserProfile.objects.filter(user__user_id__in=user_ids).select_related("user", "avatar")
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for profile in profiles:
|
||||||
|
result.append({
|
||||||
|
"user_id": str(profile.user.user_id),
|
||||||
|
"username": profile.user.username,
|
||||||
|
"display_name": profile.display_name,
|
||||||
|
"avatar_url": profile.get_avatar_url() if hasattr(profile, "get_avatar_url") else None,
|
||||||
|
})
|
||||||
|
|
||||||
|
return Response(result, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_users_with_emails",
|
||||||
|
summary="Get users with email addresses (admin/moderator only)",
|
||||||
|
description="Fetch user information including emails. Restricted to admins and moderators.",
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
name="user_ids",
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
description="Comma-separated list of user IDs",
|
||||||
|
required=True,
|
||||||
|
),
|
||||||
|
],
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "List of users with emails",
|
||||||
|
"example": [
|
||||||
|
{
|
||||||
|
"user_id": "123",
|
||||||
|
"username": "john_doe",
|
||||||
|
"email": "john@example.com",
|
||||||
|
"display_name": "John Doe",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
403: {"description": "Not authorized - admin or moderator access required"},
|
||||||
|
},
|
||||||
|
tags=["User Management"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_users_with_emails(request):
|
||||||
|
"""Get users with email addresses - restricted to admins and moderators."""
|
||||||
|
user = request.user
|
||||||
|
|
||||||
|
# Check if user is admin or moderator
|
||||||
|
if not (user.is_staff or user.is_superuser or getattr(user, "role", "") in ["ADMIN", "MODERATOR"]):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Admin or moderator access required"},
|
||||||
|
status=status.HTTP_403_FORBIDDEN,
|
||||||
|
)
|
||||||
|
|
||||||
|
user_ids_param = request.query_params.get("user_ids", "")
|
||||||
|
|
||||||
|
if not user_ids_param:
|
||||||
|
return Response([], status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
user_ids = [uid.strip() for uid in user_ids_param.split(",") if uid.strip()]
|
||||||
|
|
||||||
|
if not user_ids:
|
||||||
|
return Response([], status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
# Limit to prevent abuse
|
||||||
|
if len(user_ids) > 100:
|
||||||
|
user_ids = user_ids[:100]
|
||||||
|
|
||||||
|
users = User.objects.filter(user_id__in=user_ids).select_related("profile")
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for u in users:
|
||||||
|
profile = getattr(u, "profile", None)
|
||||||
|
result.append({
|
||||||
|
"user_id": str(u.user_id),
|
||||||
|
"username": u.username,
|
||||||
|
"email": u.email,
|
||||||
|
"display_name": profile.display_name if profile else None,
|
||||||
|
})
|
||||||
|
|
||||||
|
return Response(result, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|||||||
1
backend/apps/api/v1/admin/__init__.py
Normal file
1
backend/apps/api/v1/admin/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
# Admin API module
|
||||||
79
backend/apps/api/v1/admin/urls.py
Normal file
79
backend/apps/api/v1/admin/urls.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
"""
|
||||||
|
Admin API URL configuration.
|
||||||
|
Provides endpoints for admin dashboard functionality.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.urls import include, path
|
||||||
|
from rest_framework.routers import DefaultRouter
|
||||||
|
|
||||||
|
from apps.core.api.alert_views import (
|
||||||
|
RateLimitAlertConfigViewSet,
|
||||||
|
RateLimitAlertViewSet,
|
||||||
|
SystemAlertViewSet,
|
||||||
|
)
|
||||||
|
from apps.core.api.incident_views import IncidentViewSet
|
||||||
|
|
||||||
|
from . import views
|
||||||
|
|
||||||
|
app_name = "admin_api"
|
||||||
|
|
||||||
|
# Router for admin ViewSets
|
||||||
|
router = DefaultRouter()
|
||||||
|
router.register(r"system-alerts", SystemAlertViewSet, basename="system-alert")
|
||||||
|
router.register(r"rate-limit-alerts", RateLimitAlertViewSet, basename="rate-limit-alert")
|
||||||
|
router.register(r"rate-limit-config", RateLimitAlertConfigViewSet, basename="rate-limit-config")
|
||||||
|
router.register(r"incidents", IncidentViewSet, basename="incident")
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
# Alert ViewSets (via router)
|
||||||
|
path("", include(router.urls)),
|
||||||
|
# OSM Cache Stats
|
||||||
|
path(
|
||||||
|
"osm-usage-stats/",
|
||||||
|
views.OSMUsageStatsView.as_view(),
|
||||||
|
name="osm_usage_stats",
|
||||||
|
),
|
||||||
|
# Rate Limit Metrics
|
||||||
|
path(
|
||||||
|
"rate-limit-metrics/",
|
||||||
|
views.RateLimitMetricsView.as_view(),
|
||||||
|
name="rate_limit_metrics",
|
||||||
|
),
|
||||||
|
# Database Manager (admin CRUD operations)
|
||||||
|
path(
|
||||||
|
"database-manager/",
|
||||||
|
views.DatabaseManagerView.as_view(),
|
||||||
|
name="database_manager",
|
||||||
|
),
|
||||||
|
# Celery Task Status (read-only)
|
||||||
|
path(
|
||||||
|
"tasks/status/",
|
||||||
|
views.CeleryTaskStatusView.as_view(),
|
||||||
|
name="task_status",
|
||||||
|
),
|
||||||
|
# Anomaly Detection
|
||||||
|
path(
|
||||||
|
"anomalies/detect/",
|
||||||
|
views.DetectAnomaliesView.as_view(),
|
||||||
|
name="detect_anomalies",
|
||||||
|
),
|
||||||
|
# Metrics Collection
|
||||||
|
path(
|
||||||
|
"metrics/collect/",
|
||||||
|
views.CollectMetricsView.as_view(),
|
||||||
|
name="collect_metrics",
|
||||||
|
),
|
||||||
|
# Pipeline Integrity Scan
|
||||||
|
path(
|
||||||
|
"pipeline/integrity-scan/",
|
||||||
|
views.PipelineIntegrityScanView.as_view(),
|
||||||
|
name="pipeline_integrity_scan",
|
||||||
|
),
|
||||||
|
# Admin Settings (key-value store for preferences)
|
||||||
|
path(
|
||||||
|
"settings/",
|
||||||
|
views.AdminSettingsView.as_view(),
|
||||||
|
name="admin_settings",
|
||||||
|
),
|
||||||
|
]
|
||||||
1350
backend/apps/api/v1/admin/views.py
Normal file
1350
backend/apps/api/v1/admin/views.py
Normal file
File diff suppressed because it is too large
Load Diff
554
backend/apps/api/v1/auth/account_management.py
Normal file
554
backend/apps/api/v1/auth/account_management.py
Normal file
@@ -0,0 +1,554 @@
|
|||||||
|
"""
|
||||||
|
Account Management Views for ThrillWiki API v1.
|
||||||
|
|
||||||
|
Handles email changes, account deletion, and session management.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
from django.core.cache import cache
|
||||||
|
from django.utils import timezone
|
||||||
|
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.decorators import api_view, permission_classes
|
||||||
|
from rest_framework.permissions import IsAuthenticated
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
UserModel = get_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
# ============== EMAIL CHANGE ENDPOINTS ==============
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="request_email_change",
|
||||||
|
summary="Request email change",
|
||||||
|
description="Initiates an email change request. Sends verification to new email.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"new_email": {"type": "string", "format": "email"},
|
||||||
|
"password": {"type": "string", "description": "Current password for verification"},
|
||||||
|
},
|
||||||
|
"required": ["new_email", "password"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Email change requested"},
|
||||||
|
400: {"description": "Invalid request"},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def request_email_change(request):
|
||||||
|
"""Request to change email address."""
|
||||||
|
user = request.user
|
||||||
|
new_email = request.data.get("new_email", "").strip().lower()
|
||||||
|
password = request.data.get("password", "")
|
||||||
|
|
||||||
|
if not new_email:
|
||||||
|
return Response(
|
||||||
|
{"detail": "New email is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not user.check_password(password):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Invalid password"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if email already in use
|
||||||
|
if UserModel.objects.filter(email=new_email).exclude(pk=user.pk).exists():
|
||||||
|
return Response(
|
||||||
|
{"detail": "This email is already in use"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Store pending email change in cache
|
||||||
|
cache_key = f"email_change:{user.pk}"
|
||||||
|
cache.set(
|
||||||
|
cache_key,
|
||||||
|
{
|
||||||
|
"new_email": new_email,
|
||||||
|
"requested_at": timezone.now().isoformat(),
|
||||||
|
},
|
||||||
|
timeout=86400, # 24 hours
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO: Send verification email to new_email
|
||||||
|
# For now, just store the pending change
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"detail": "Email change requested. Please check your new email for verification.",
|
||||||
|
"new_email": new_email,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_email_change_status",
|
||||||
|
summary="Get pending email change status",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "Email change status",
|
||||||
|
"example": {
|
||||||
|
"has_pending_change": True,
|
||||||
|
"new_email": "new@example.com",
|
||||||
|
"requested_at": "2026-01-06T12:00:00Z",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_email_change_status(request):
|
||||||
|
"""Get status of pending email change."""
|
||||||
|
user = request.user
|
||||||
|
cache_key = f"email_change:{user.pk}"
|
||||||
|
pending = cache.get(cache_key)
|
||||||
|
|
||||||
|
if not pending:
|
||||||
|
return Response({
|
||||||
|
"has_pending_change": False,
|
||||||
|
"new_email": None,
|
||||||
|
"requested_at": None,
|
||||||
|
})
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"has_pending_change": True,
|
||||||
|
"new_email": pending.get("new_email"),
|
||||||
|
"requested_at": pending.get("requested_at"),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="cancel_email_change",
|
||||||
|
summary="Cancel pending email change",
|
||||||
|
responses={
|
||||||
|
200: {"description": "Email change cancelled"},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def cancel_email_change(request):
|
||||||
|
"""Cancel a pending email change request."""
|
||||||
|
user = request.user
|
||||||
|
cache_key = f"email_change:{user.pk}"
|
||||||
|
cache.delete(cache_key)
|
||||||
|
|
||||||
|
return Response({"detail": "Email change cancelled"})
|
||||||
|
|
||||||
|
|
||||||
|
# ============== ACCOUNT DELETION ENDPOINTS ==============
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="request_account_deletion",
|
||||||
|
summary="Request account deletion",
|
||||||
|
description="Initiates account deletion. Requires password confirmation.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"password": {"type": "string"},
|
||||||
|
"reason": {"type": "string", "description": "Optional reason for leaving"},
|
||||||
|
},
|
||||||
|
"required": ["password"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Deletion requested"},
|
||||||
|
400: {"description": "Invalid password"},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def request_account_deletion(request):
|
||||||
|
"""Request account deletion."""
|
||||||
|
user = request.user
|
||||||
|
password = request.data.get("password", "")
|
||||||
|
reason = request.data.get("reason", "")
|
||||||
|
|
||||||
|
if not user.check_password(password):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Invalid password"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Store deletion request in cache (will be processed by background task)
|
||||||
|
cache_key = f"account_deletion:{user.pk}"
|
||||||
|
deletion_date = timezone.now() + timezone.timedelta(days=30)
|
||||||
|
|
||||||
|
cache.set(
|
||||||
|
cache_key,
|
||||||
|
{
|
||||||
|
"requested_at": timezone.now().isoformat(),
|
||||||
|
"scheduled_deletion": deletion_date.isoformat(),
|
||||||
|
"reason": reason,
|
||||||
|
},
|
||||||
|
timeout=2592000, # 30 days
|
||||||
|
)
|
||||||
|
|
||||||
|
# Also update user profile if it exists
|
||||||
|
try:
|
||||||
|
from apps.accounts.models import Profile
|
||||||
|
profile = Profile.objects.filter(user=user).first()
|
||||||
|
if profile:
|
||||||
|
profile.deletion_requested_at = timezone.now()
|
||||||
|
profile.scheduled_deletion_date = deletion_date
|
||||||
|
profile.save(update_fields=["deletion_requested_at", "scheduled_deletion_date"])
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not update profile for deletion: {e}")
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"detail": "Account deletion scheduled",
|
||||||
|
"scheduled_deletion": deletion_date.isoformat(),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_deletion_status",
|
||||||
|
summary="Get account deletion status",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "Deletion status",
|
||||||
|
"example": {
|
||||||
|
"deletion_pending": True,
|
||||||
|
"scheduled_deletion": "2026-02-06T12:00:00Z",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_deletion_status(request):
|
||||||
|
"""Get status of pending account deletion."""
|
||||||
|
user = request.user
|
||||||
|
cache_key = f"account_deletion:{user.pk}"
|
||||||
|
pending = cache.get(cache_key)
|
||||||
|
|
||||||
|
if not pending:
|
||||||
|
# Also check profile
|
||||||
|
try:
|
||||||
|
from apps.accounts.models import Profile
|
||||||
|
profile = Profile.objects.filter(user=user).first()
|
||||||
|
if profile and profile.deletion_requested_at:
|
||||||
|
return Response({
|
||||||
|
"deletion_pending": True,
|
||||||
|
"requested_at": profile.deletion_requested_at.isoformat(),
|
||||||
|
"scheduled_deletion": profile.scheduled_deletion_date.isoformat() if profile.scheduled_deletion_date else None,
|
||||||
|
})
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"deletion_pending": False,
|
||||||
|
"scheduled_deletion": None,
|
||||||
|
})
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"deletion_pending": True,
|
||||||
|
"requested_at": pending.get("requested_at"),
|
||||||
|
"scheduled_deletion": pending.get("scheduled_deletion"),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="cancel_account_deletion",
|
||||||
|
summary="Cancel account deletion",
|
||||||
|
responses={
|
||||||
|
200: {"description": "Deletion cancelled"},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def cancel_account_deletion(request):
|
||||||
|
"""Cancel a pending account deletion request."""
|
||||||
|
user = request.user
|
||||||
|
cache_key = f"account_deletion:{user.pk}"
|
||||||
|
cache.delete(cache_key)
|
||||||
|
|
||||||
|
# Also clear from profile
|
||||||
|
try:
|
||||||
|
from apps.accounts.models import Profile
|
||||||
|
Profile.objects.filter(user=user).update(
|
||||||
|
deletion_requested_at=None,
|
||||||
|
scheduled_deletion_date=None,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not clear deletion from profile: {e}")
|
||||||
|
|
||||||
|
return Response({"detail": "Account deletion cancelled"})
|
||||||
|
|
||||||
|
|
||||||
|
# ============== SESSION MANAGEMENT ENDPOINTS ==============
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="list_sessions",
|
||||||
|
summary="List active sessions",
|
||||||
|
description="Returns list of active sessions for the current user.",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "List of sessions",
|
||||||
|
"example": {
|
||||||
|
"sessions": [
|
||||||
|
{
|
||||||
|
"id": "session_123",
|
||||||
|
"created_at": "2026-01-06T12:00:00Z",
|
||||||
|
"last_activity": "2026-01-06T14:00:00Z",
|
||||||
|
"ip_address": "192.168.1.1",
|
||||||
|
"user_agent": "Mozilla/5.0...",
|
||||||
|
"is_current": True,
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def list_sessions(request):
|
||||||
|
"""List all active sessions for the user."""
|
||||||
|
# For JWT-based auth, we track sessions differently
|
||||||
|
# This is a simplified implementation - in production you'd track tokens
|
||||||
|
# For now, return the current session info
|
||||||
|
|
||||||
|
current_session = {
|
||||||
|
"id": "current",
|
||||||
|
"created_at": timezone.now().isoformat(),
|
||||||
|
"last_activity": timezone.now().isoformat(),
|
||||||
|
"ip_address": request.META.get("REMOTE_ADDR", "unknown"),
|
||||||
|
"user_agent": request.META.get("HTTP_USER_AGENT", "unknown"),
|
||||||
|
"is_current": True,
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"sessions": [current_session],
|
||||||
|
"count": 1,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="revoke_session",
|
||||||
|
summary="Revoke a session",
|
||||||
|
description="Revokes a specific session. If revoking current session, user will be logged out.",
|
||||||
|
responses={
|
||||||
|
200: {"description": "Session revoked"},
|
||||||
|
404: {"description": "Session not found"},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["DELETE"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def revoke_session(request, session_id):
|
||||||
|
"""Revoke a specific session."""
|
||||||
|
# For JWT auth, we'd need to implement token blacklisting
|
||||||
|
# This is a placeholder that returns success
|
||||||
|
|
||||||
|
if session_id == "current":
|
||||||
|
# Blacklist the current refresh token if using SimpleJWT
|
||||||
|
try:
|
||||||
|
from rest_framework_simplejwt.token_blacklist.models import BlacklistedToken
|
||||||
|
from rest_framework_simplejwt.tokens import RefreshToken
|
||||||
|
|
||||||
|
# Get refresh token from request if available
|
||||||
|
refresh_token = request.data.get("refresh_token")
|
||||||
|
if refresh_token:
|
||||||
|
token = RefreshToken(refresh_token)
|
||||||
|
token.blacklist()
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not blacklist token: {e}")
|
||||||
|
|
||||||
|
return Response({"detail": "Session revoked"})
|
||||||
|
|
||||||
|
|
||||||
|
# ============== PASSWORD CHANGE ENDPOINT ==============
|
||||||
|
|
||||||
|
# ============== SECURITY LOG ENDPOINT ==============
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_security_log",
|
||||||
|
summary="Get security activity log",
|
||||||
|
description="Returns paginated list of security events for the current user.",
|
||||||
|
parameters=[
|
||||||
|
{
|
||||||
|
"name": "page",
|
||||||
|
"in": "query",
|
||||||
|
"description": "Page number (1-indexed)",
|
||||||
|
"required": False,
|
||||||
|
"schema": {"type": "integer", "default": 1},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "page_size",
|
||||||
|
"in": "query",
|
||||||
|
"description": "Number of items per page (max 50)",
|
||||||
|
"required": False,
|
||||||
|
"schema": {"type": "integer", "default": 20},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "event_type",
|
||||||
|
"in": "query",
|
||||||
|
"description": "Filter by event type",
|
||||||
|
"required": False,
|
||||||
|
"schema": {"type": "string"},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "Security log entries",
|
||||||
|
"example": {
|
||||||
|
"count": 42,
|
||||||
|
"page": 1,
|
||||||
|
"page_size": 20,
|
||||||
|
"total_pages": 3,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"id": 1,
|
||||||
|
"event_type": "login_success",
|
||||||
|
"event_type_display": "Login Success",
|
||||||
|
"ip_address": "192.168.1.1",
|
||||||
|
"user_agent": "Mozilla/5.0...",
|
||||||
|
"created_at": "2026-01-06T12:00:00Z",
|
||||||
|
"metadata": {},
|
||||||
|
}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_security_log(request):
|
||||||
|
"""Get security activity log for the current user."""
|
||||||
|
from apps.accounts.models import SecurityLog
|
||||||
|
|
||||||
|
user = request.user
|
||||||
|
|
||||||
|
# Parse pagination params
|
||||||
|
try:
|
||||||
|
page = max(1, int(request.query_params.get("page", 1)))
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
page = 1
|
||||||
|
|
||||||
|
try:
|
||||||
|
page_size = min(50, max(1, int(request.query_params.get("page_size", 20))))
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
page_size = 20
|
||||||
|
|
||||||
|
event_type = request.query_params.get("event_type")
|
||||||
|
|
||||||
|
# Build queryset
|
||||||
|
queryset = SecurityLog.objects.filter(user=user).order_by("-created_at")
|
||||||
|
|
||||||
|
if event_type:
|
||||||
|
queryset = queryset.filter(event_type=event_type)
|
||||||
|
|
||||||
|
# Count total
|
||||||
|
total_count = queryset.count()
|
||||||
|
total_pages = (total_count + page_size - 1) // page_size
|
||||||
|
|
||||||
|
# Fetch page
|
||||||
|
offset = (page - 1) * page_size
|
||||||
|
logs = queryset[offset : offset + page_size]
|
||||||
|
|
||||||
|
# Serialize
|
||||||
|
results = []
|
||||||
|
for log in logs:
|
||||||
|
results.append({
|
||||||
|
"id": log.id,
|
||||||
|
"event_type": log.event_type,
|
||||||
|
"event_type_display": log.get_event_type_display(),
|
||||||
|
"ip_address": log.ip_address,
|
||||||
|
"user_agent": log.user_agent[:200] if log.user_agent else "", # Truncate for safety
|
||||||
|
"created_at": log.created_at.isoformat(),
|
||||||
|
"metadata": log.metadata or {},
|
||||||
|
})
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"count": total_count,
|
||||||
|
"page": page,
|
||||||
|
"page_size": page_size,
|
||||||
|
"total_pages": total_pages,
|
||||||
|
"results": results,
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
# ============== PASSWORD CHANGE ENDPOINT ==============
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="change_password",
|
||||||
|
summary="Change password",
|
||||||
|
description="Changes the user's password. Requires current password.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"current_password": {"type": "string"},
|
||||||
|
"new_password": {"type": "string"},
|
||||||
|
},
|
||||||
|
"required": ["current_password", "new_password"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Password changed"},
|
||||||
|
400: {"description": "Invalid current password or weak new password"},
|
||||||
|
},
|
||||||
|
tags=["Account"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def change_password(request):
|
||||||
|
"""Change user password."""
|
||||||
|
from apps.accounts.services.security_service import (
|
||||||
|
log_security_event,
|
||||||
|
send_security_notification,
|
||||||
|
invalidate_user_sessions,
|
||||||
|
)
|
||||||
|
|
||||||
|
user = request.user
|
||||||
|
current_password = request.data.get("current_password", "")
|
||||||
|
new_password = request.data.get("new_password", "")
|
||||||
|
|
||||||
|
if not user.check_password(current_password):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Current password is incorrect"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(new_password) < 8:
|
||||||
|
return Response(
|
||||||
|
{"detail": "New password must be at least 8 characters"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
user.set_password(new_password)
|
||||||
|
user.last_password_change = timezone.now()
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
# Invalidate all existing sessions/tokens (except current)
|
||||||
|
invalidated_count = invalidate_user_sessions(user, exclude_current=True, request=request)
|
||||||
|
|
||||||
|
# Log security event
|
||||||
|
log_security_event(
|
||||||
|
"password_changed",
|
||||||
|
request,
|
||||||
|
user=user,
|
||||||
|
metadata={"sessions_invalidated": invalidated_count},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send security notification email
|
||||||
|
send_security_notification(user, "password_changed", metadata={})
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"detail": "Password changed successfully",
|
||||||
|
"sessions_invalidated": invalidated_count,
|
||||||
|
})
|
||||||
96
backend/apps/api/v1/auth/jwt.py
Normal file
96
backend/apps/api/v1/auth/jwt.py
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
"""
|
||||||
|
Custom JWT Token Generation for ThrillWiki
|
||||||
|
|
||||||
|
This module provides custom JWT token generation that includes authentication
|
||||||
|
method claims for enhanced MFA satisfaction logic.
|
||||||
|
|
||||||
|
Claims added:
|
||||||
|
- auth_method: How the user authenticated (password, passkey, totp, google, discord)
|
||||||
|
- mfa_verified: Whether MFA was verified during this login
|
||||||
|
- provider_mfa: Whether the OAuth provider (Discord) has MFA enabled
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Literal, TypedDict
|
||||||
|
|
||||||
|
from rest_framework_simplejwt.tokens import RefreshToken
|
||||||
|
|
||||||
|
# Type definitions for auth methods
|
||||||
|
AuthMethod = Literal["password", "passkey", "totp", "google", "discord"]
|
||||||
|
|
||||||
|
|
||||||
|
class TokenClaims(TypedDict, total=False):
|
||||||
|
"""Type definition for custom JWT claims."""
|
||||||
|
|
||||||
|
auth_method: AuthMethod
|
||||||
|
mfa_verified: bool
|
||||||
|
provider_mfa: bool
|
||||||
|
|
||||||
|
|
||||||
|
def create_tokens_for_user(
|
||||||
|
user,
|
||||||
|
auth_method: AuthMethod = "password",
|
||||||
|
mfa_verified: bool = False,
|
||||||
|
provider_mfa: bool = False,
|
||||||
|
) -> dict[str, str]:
|
||||||
|
"""
|
||||||
|
Generate JWT tokens with custom authentication claims.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user: The Django user object
|
||||||
|
auth_method: How the user authenticated
|
||||||
|
mfa_verified: True if MFA (TOTP/passkey) was verified at login
|
||||||
|
provider_mfa: True if OAuth provider (Discord) has MFA enabled
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with 'access' and 'refresh' token strings
|
||||||
|
"""
|
||||||
|
refresh = RefreshToken.for_user(user)
|
||||||
|
|
||||||
|
# Add custom claims to both refresh and access tokens
|
||||||
|
refresh["auth_method"] = auth_method
|
||||||
|
refresh["mfa_verified"] = mfa_verified
|
||||||
|
refresh["provider_mfa"] = provider_mfa
|
||||||
|
|
||||||
|
access = refresh.access_token
|
||||||
|
|
||||||
|
return {
|
||||||
|
"access": str(access),
|
||||||
|
"refresh": str(refresh),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_auth_method_for_provider(provider: str) -> AuthMethod:
|
||||||
|
"""
|
||||||
|
Map OAuth provider name to AuthMethod type.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider: The provider name (e.g., 'google', 'discord')
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The corresponding AuthMethod
|
||||||
|
"""
|
||||||
|
provider_map: dict[str, AuthMethod] = {
|
||||||
|
"google": "google",
|
||||||
|
"discord": "discord",
|
||||||
|
}
|
||||||
|
return provider_map.get(provider, "password")
|
||||||
|
|
||||||
|
|
||||||
|
def get_provider_mfa_status(provider: str, extra_data: dict) -> bool:
|
||||||
|
"""
|
||||||
|
Extract MFA status from OAuth provider extra_data.
|
||||||
|
|
||||||
|
Only Discord exposes mfa_enabled. Google does not share this info.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider: The OAuth provider name
|
||||||
|
extra_data: The extra_data dict from SocialAccount
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if provider has MFA enabled, False otherwise
|
||||||
|
"""
|
||||||
|
if provider == "discord":
|
||||||
|
return extra_data.get("mfa_enabled", False)
|
||||||
|
|
||||||
|
# Google and other providers don't expose MFA status
|
||||||
|
return False
|
||||||
@@ -50,6 +50,10 @@ def get_mfa_status(request):
|
|||||||
|
|
||||||
totp_enabled = authenticators.filter(type=Authenticator.Type.TOTP).exists()
|
totp_enabled = authenticators.filter(type=Authenticator.Type.TOTP).exists()
|
||||||
recovery_enabled = authenticators.filter(type=Authenticator.Type.RECOVERY_CODES).exists()
|
recovery_enabled = authenticators.filter(type=Authenticator.Type.RECOVERY_CODES).exists()
|
||||||
|
|
||||||
|
# Check for WebAuthn/Passkey authenticators
|
||||||
|
passkey_enabled = authenticators.filter(type=Authenticator.Type.WEBAUTHN).exists()
|
||||||
|
passkey_count = authenticators.filter(type=Authenticator.Type.WEBAUTHN).count()
|
||||||
|
|
||||||
# Count recovery codes if any
|
# Count recovery codes if any
|
||||||
recovery_count = 0
|
recovery_count = 0
|
||||||
@@ -60,12 +64,38 @@ def get_mfa_status(request):
|
|||||||
except Authenticator.DoesNotExist:
|
except Authenticator.DoesNotExist:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# Check for Discord social account with MFA enabled
|
||||||
|
discord_mfa_enabled = False
|
||||||
|
connected_provider = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
social_accounts = user.socialaccount_set.all()
|
||||||
|
for social_account in social_accounts:
|
||||||
|
if social_account.provider == "discord":
|
||||||
|
connected_provider = "discord"
|
||||||
|
discord_mfa_enabled = social_account.extra_data.get("mfa_enabled", False)
|
||||||
|
break
|
||||||
|
elif social_account.provider == "google":
|
||||||
|
connected_provider = "google"
|
||||||
|
# Google doesn't expose MFA status
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# has_second_factor is True if user has either TOTP or Passkey configured
|
||||||
|
has_second_factor = totp_enabled or passkey_enabled
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"mfa_enabled": totp_enabled,
|
"mfa_enabled": totp_enabled, # Backward compatibility
|
||||||
"totp_enabled": totp_enabled,
|
"totp_enabled": totp_enabled,
|
||||||
|
"passkey_enabled": passkey_enabled,
|
||||||
|
"passkey_count": passkey_count,
|
||||||
"recovery_codes_enabled": recovery_enabled,
|
"recovery_codes_enabled": recovery_enabled,
|
||||||
"recovery_codes_count": recovery_count,
|
"recovery_codes_count": recovery_count,
|
||||||
|
"has_second_factor": has_second_factor,
|
||||||
|
# New fields for enhanced MFA satisfaction
|
||||||
|
"discord_mfa_enabled": discord_mfa_enabled,
|
||||||
|
"connected_provider": connected_provider,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -90,6 +120,8 @@ def get_mfa_status(request):
|
|||||||
@permission_classes([IsAuthenticated])
|
@permission_classes([IsAuthenticated])
|
||||||
def setup_totp(request):
|
def setup_totp(request):
|
||||||
"""Generate TOTP secret and QR code for setup."""
|
"""Generate TOTP secret and QR code for setup."""
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
from allauth.mfa.totp.internal import auth as totp_auth
|
from allauth.mfa.totp.internal import auth as totp_auth
|
||||||
|
|
||||||
user = request.user
|
user = request.user
|
||||||
@@ -110,14 +142,16 @@ def setup_totp(request):
|
|||||||
qr.save(buffer, format="PNG")
|
qr.save(buffer, format="PNG")
|
||||||
qr_code_base64 = f"data:image/png;base64,{base64.b64encode(buffer.getvalue()).decode()}"
|
qr_code_base64 = f"data:image/png;base64,{base64.b64encode(buffer.getvalue()).decode()}"
|
||||||
|
|
||||||
# Store secret in session for later verification
|
# Store secret in session for later verification with 15-minute expiry
|
||||||
request.session["pending_totp_secret"] = secret
|
request.session["pending_totp_secret"] = secret
|
||||||
|
request.session["pending_totp_expires"] = (timezone.now().timestamp() + 900) # 15 minutes
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"secret": secret,
|
"secret": secret,
|
||||||
"provisioning_uri": uri,
|
"provisioning_uri": uri,
|
||||||
"qr_code_base64": qr_code_base64,
|
"qr_code_base64": qr_code_base64,
|
||||||
|
"expires_in_seconds": 900,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -155,10 +189,17 @@ def setup_totp(request):
|
|||||||
@permission_classes([IsAuthenticated])
|
@permission_classes([IsAuthenticated])
|
||||||
def activate_totp(request):
|
def activate_totp(request):
|
||||||
"""Verify TOTP code and activate MFA."""
|
"""Verify TOTP code and activate MFA."""
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
from allauth.mfa.models import Authenticator
|
from allauth.mfa.models import Authenticator
|
||||||
from allauth.mfa.recovery_codes.internal import auth as recovery_auth
|
from allauth.mfa.recovery_codes.internal.auth import RecoveryCodes
|
||||||
from allauth.mfa.totp.internal import auth as totp_auth
|
from allauth.mfa.totp.internal import auth as totp_auth
|
||||||
|
|
||||||
|
from apps.accounts.services.security_service import (
|
||||||
|
log_security_event,
|
||||||
|
send_security_notification,
|
||||||
|
)
|
||||||
|
|
||||||
user = request.user
|
user = request.user
|
||||||
code = request.data.get("code", "").strip()
|
code = request.data.get("code", "").strip()
|
||||||
|
|
||||||
@@ -168,14 +209,28 @@ def activate_totp(request):
|
|||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Get pending secret from session
|
# Get pending secret from session OR from request body
|
||||||
secret = request.session.get("pending_totp_secret")
|
# (request body is used as fallback for JWT auth where sessions may not persist)
|
||||||
|
secret = request.session.get("pending_totp_secret") or request.data.get("secret", "").strip()
|
||||||
if not secret:
|
if not secret:
|
||||||
return Response(
|
return Response(
|
||||||
{"detail": "No pending TOTP setup. Please start setup again."},
|
{"detail": "No pending TOTP setup. Please start setup again."},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Check if setup has expired (15 minute timeout)
|
||||||
|
expires_at = request.session.get("pending_totp_expires")
|
||||||
|
if expires_at and timezone.now().timestamp() > expires_at:
|
||||||
|
# Clear expired session data
|
||||||
|
if "pending_totp_secret" in request.session:
|
||||||
|
del request.session["pending_totp_secret"]
|
||||||
|
if "pending_totp_expires" in request.session:
|
||||||
|
del request.session["pending_totp_expires"]
|
||||||
|
return Response(
|
||||||
|
{"detail": "TOTP setup session expired. Please start setup again."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
# Verify the code
|
# Verify the code
|
||||||
if not totp_auth.validate_totp_code(secret, code):
|
if not totp_auth.validate_totp_code(secret, code):
|
||||||
return Response(
|
return Response(
|
||||||
@@ -197,21 +252,32 @@ def activate_totp(request):
|
|||||||
data={"secret": secret},
|
data={"secret": secret},
|
||||||
)
|
)
|
||||||
|
|
||||||
# Generate recovery codes
|
# Generate recovery codes using allauth's RecoveryCodes API
|
||||||
codes = recovery_auth.generate_recovery_codes()
|
recovery_instance = RecoveryCodes.activate(user)
|
||||||
Authenticator.objects.create(
|
codes = recovery_instance.get_unused_codes()
|
||||||
|
|
||||||
|
# Clear session (only if it exists - won't exist with JWT auth + secret from body)
|
||||||
|
if "pending_totp_secret" in request.session:
|
||||||
|
del request.session["pending_totp_secret"]
|
||||||
|
if "pending_totp_expires" in request.session:
|
||||||
|
del request.session["pending_totp_expires"]
|
||||||
|
|
||||||
|
# Log security event
|
||||||
|
log_security_event(
|
||||||
|
"mfa_enrolled",
|
||||||
|
request,
|
||||||
user=user,
|
user=user,
|
||||||
type=Authenticator.Type.RECOVERY_CODES,
|
metadata={"method": "totp"},
|
||||||
data={"codes": codes},
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Clear session
|
# Send security notification email
|
||||||
del request.session["pending_totp_secret"]
|
send_security_notification(user, "mfa_enrolled", {"method": "TOTP Authenticator"})
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"detail": "Two-factor authentication enabled",
|
"detail": "Two-factor authentication enabled",
|
||||||
"recovery_codes": codes,
|
"recovery_codes": codes,
|
||||||
|
"recovery_codes_count": len(codes),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -247,13 +313,59 @@ def deactivate_totp(request):
|
|||||||
"""Disable TOTP authentication."""
|
"""Disable TOTP authentication."""
|
||||||
from allauth.mfa.models import Authenticator
|
from allauth.mfa.models import Authenticator
|
||||||
|
|
||||||
|
from apps.accounts.services.security_service import (
|
||||||
|
check_auth_method_availability,
|
||||||
|
log_security_event,
|
||||||
|
send_security_notification,
|
||||||
|
)
|
||||||
|
|
||||||
user = request.user
|
user = request.user
|
||||||
password = request.data.get("password", "")
|
password = request.data.get("password", "")
|
||||||
|
recovery_code = request.data.get("recovery_code", "")
|
||||||
|
|
||||||
# Verify password
|
# Check if user has other auth methods before we allow disabling MFA
|
||||||
if not user.check_password(password):
|
auth_methods = check_auth_method_availability(user)
|
||||||
|
|
||||||
|
# If TOTP is their only way in alongside passkeys, we need to ensure they have
|
||||||
|
# at least password or social login to fall back on
|
||||||
|
if not auth_methods["has_password"] and not auth_methods["has_social"] and not auth_methods["has_passkey"]:
|
||||||
return Response(
|
return Response(
|
||||||
{"detail": "Invalid password"},
|
{"detail": "Cannot disable MFA: you must have at least one authentication method. Please set a password or connect a social account first."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify password OR recovery code
|
||||||
|
verified = False
|
||||||
|
verification_method = None
|
||||||
|
|
||||||
|
if password and user.check_password(password):
|
||||||
|
verified = True
|
||||||
|
verification_method = "password"
|
||||||
|
elif recovery_code:
|
||||||
|
# Try to verify with recovery code
|
||||||
|
try:
|
||||||
|
recovery_auth = Authenticator.objects.get(
|
||||||
|
user=user, type=Authenticator.Type.RECOVERY_CODES
|
||||||
|
)
|
||||||
|
unused_codes = recovery_auth.data.get("codes", [])
|
||||||
|
if recovery_code.upper().replace("-", "").replace(" ", "") in [
|
||||||
|
c.upper().replace("-", "").replace(" ", "") for c in unused_codes
|
||||||
|
]:
|
||||||
|
verified = True
|
||||||
|
verification_method = "recovery_code"
|
||||||
|
# Remove the used code
|
||||||
|
unused_codes = [
|
||||||
|
c for c in unused_codes
|
||||||
|
if c.upper().replace("-", "").replace(" ", "") != recovery_code.upper().replace("-", "").replace(" ", "")
|
||||||
|
]
|
||||||
|
recovery_auth.data["codes"] = unused_codes
|
||||||
|
recovery_auth.save()
|
||||||
|
except Authenticator.DoesNotExist:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if not verified:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Invalid password or recovery code"},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -268,6 +380,17 @@ def deactivate_totp(request):
|
|||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Log security event
|
||||||
|
log_security_event(
|
||||||
|
"mfa_disabled",
|
||||||
|
request,
|
||||||
|
user=user,
|
||||||
|
metadata={"method": "totp", "verified_via": verification_method},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send security notification email
|
||||||
|
send_security_notification(user, "mfa_disabled", {"method": "TOTP Authenticator"})
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"detail": "Two-factor authentication disabled",
|
"detail": "Two-factor authentication disabled",
|
||||||
@@ -351,7 +474,12 @@ def verify_totp(request):
|
|||||||
def regenerate_recovery_codes(request):
|
def regenerate_recovery_codes(request):
|
||||||
"""Regenerate recovery codes."""
|
"""Regenerate recovery codes."""
|
||||||
from allauth.mfa.models import Authenticator
|
from allauth.mfa.models import Authenticator
|
||||||
from allauth.mfa.recovery_codes.internal import auth as recovery_auth
|
from allauth.mfa.recovery_codes.internal.auth import RecoveryCodes
|
||||||
|
|
||||||
|
from apps.accounts.services.security_service import (
|
||||||
|
log_security_event,
|
||||||
|
send_security_notification,
|
||||||
|
)
|
||||||
|
|
||||||
user = request.user
|
user = request.user
|
||||||
password = request.data.get("password", "")
|
password = request.data.get("password", "")
|
||||||
@@ -363,26 +491,40 @@ def regenerate_recovery_codes(request):
|
|||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check if TOTP is enabled
|
# Check if MFA is enabled (TOTP or Passkey)
|
||||||
if not Authenticator.objects.filter(user=user, type=Authenticator.Type.TOTP).exists():
|
has_totp = Authenticator.objects.filter(user=user, type=Authenticator.Type.TOTP).exists()
|
||||||
|
has_passkey = Authenticator.objects.filter(user=user, type=Authenticator.Type.WEBAUTHN).exists()
|
||||||
|
|
||||||
|
if not has_totp and not has_passkey:
|
||||||
return Response(
|
return Response(
|
||||||
{"detail": "Two-factor authentication is not enabled"},
|
{"detail": "Two-factor authentication is not enabled"},
|
||||||
status=status.HTTP_400_BAD_REQUEST,
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Generate new codes
|
# Delete existing recovery codes first (so activate creates new ones)
|
||||||
codes = recovery_auth.generate_recovery_codes()
|
Authenticator.objects.filter(
|
||||||
|
user=user, type=Authenticator.Type.RECOVERY_CODES
|
||||||
|
).delete()
|
||||||
|
|
||||||
# Update or create recovery codes authenticator
|
# Generate new recovery codes using allauth's RecoveryCodes API
|
||||||
authenticator, created = Authenticator.objects.update_or_create(
|
recovery_instance = RecoveryCodes.activate(user)
|
||||||
|
codes = recovery_instance.get_unused_codes()
|
||||||
|
|
||||||
|
# Log security event
|
||||||
|
log_security_event(
|
||||||
|
"recovery_codes_regenerated",
|
||||||
|
request,
|
||||||
user=user,
|
user=user,
|
||||||
type=Authenticator.Type.RECOVERY_CODES,
|
metadata={"codes_generated": len(codes)},
|
||||||
defaults={"data": {"codes": codes}},
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Send security notification email
|
||||||
|
send_security_notification(user, "recovery_codes_regenerated", {"codes_generated": len(codes)})
|
||||||
|
|
||||||
return Response(
|
return Response(
|
||||||
{
|
{
|
||||||
"success": True,
|
"success": True,
|
||||||
"recovery_codes": codes,
|
"recovery_codes": codes,
|
||||||
|
"recovery_codes_count": len(codes),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|||||||
605
backend/apps/api/v1/auth/passkey.py
Normal file
605
backend/apps/api/v1/auth/passkey.py
Normal file
@@ -0,0 +1,605 @@
|
|||||||
|
"""
|
||||||
|
Passkey (WebAuthn) API Views
|
||||||
|
|
||||||
|
Provides REST API endpoints for WebAuthn/Passkey operations using django-allauth's
|
||||||
|
mfa.webauthn module. Supports passkey registration, authentication, and management.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from drf_spectacular.utils import extend_schema
|
||||||
|
from rest_framework import status
|
||||||
|
from rest_framework.decorators import api_view, permission_classes
|
||||||
|
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_passkey_status",
|
||||||
|
summary="Get passkey status for current user",
|
||||||
|
description="Returns whether passkeys are enabled and lists registered passkeys.",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "Passkey status",
|
||||||
|
"example": {
|
||||||
|
"passkey_enabled": True,
|
||||||
|
"passkeys": [
|
||||||
|
{"id": "abc123", "name": "MacBook Pro", "created_at": "2026-01-06T12:00:00Z"}
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_passkey_status(request):
|
||||||
|
"""Get passkey status for current user."""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
|
||||||
|
user = request.user
|
||||||
|
passkeys = Authenticator.objects.filter(
|
||||||
|
user=user, type=Authenticator.Type.WEBAUTHN
|
||||||
|
)
|
||||||
|
|
||||||
|
passkey_list = []
|
||||||
|
for pk in passkeys:
|
||||||
|
passkey_data = pk.data or {}
|
||||||
|
passkey_list.append({
|
||||||
|
"id": str(pk.id),
|
||||||
|
"name": passkey_data.get("name", "Passkey"),
|
||||||
|
"created_at": pk.created_at.isoformat() if hasattr(pk, "created_at") else None,
|
||||||
|
})
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"passkey_enabled": passkeys.exists(),
|
||||||
|
"passkey_count": passkeys.count(),
|
||||||
|
"passkeys": passkey_list,
|
||||||
|
})
|
||||||
|
except ImportError:
|
||||||
|
return Response({
|
||||||
|
"passkey_enabled": False,
|
||||||
|
"passkey_count": 0,
|
||||||
|
"passkeys": [],
|
||||||
|
"error": "WebAuthn module not available",
|
||||||
|
})
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting passkey status: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "Failed to get passkey status"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_registration_options",
|
||||||
|
summary="Get WebAuthn registration options",
|
||||||
|
description="Returns options for registering a new passkey. Start the registration flow.",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "WebAuthn registration options",
|
||||||
|
"example": {
|
||||||
|
"options": {"challenge": "...", "rp": {"name": "ThrillWiki"}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_registration_options(request):
|
||||||
|
"""Get WebAuthn registration options for passkey setup."""
|
||||||
|
try:
|
||||||
|
from django.utils import timezone
|
||||||
|
from allauth.mfa.webauthn.internal import auth as webauthn_auth
|
||||||
|
|
||||||
|
# Use the correct allauth API: begin_registration
|
||||||
|
# The function takes (user, passwordless) - passwordless=False for standard passkeys
|
||||||
|
creation_options = webauthn_auth.begin_registration(request.user, passwordless=False)
|
||||||
|
|
||||||
|
# State is stored internally by begin_registration via set_state()
|
||||||
|
|
||||||
|
# Store registration timeout in session (5 minutes)
|
||||||
|
request.session["pending_passkey_expires"] = timezone.now().timestamp() + 300 # 5 minutes
|
||||||
|
|
||||||
|
# Debug log the structure
|
||||||
|
logger.debug(f"WebAuthn registration options type: {type(creation_options)}")
|
||||||
|
logger.debug(f"WebAuthn registration options keys: {creation_options.keys() if isinstance(creation_options, dict) else 'not a dict'}")
|
||||||
|
logger.info(f"WebAuthn registration options: {creation_options}")
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"options": creation_options,
|
||||||
|
"expires_in_seconds": 300,
|
||||||
|
})
|
||||||
|
except ImportError as e:
|
||||||
|
logger.error(f"WebAuthn module import error: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting registration options: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to get registration options: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="register_passkey",
|
||||||
|
summary="Complete passkey registration",
|
||||||
|
description="Verifies the WebAuthn response and registers the new passkey.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"credential": {"type": "object", "description": "WebAuthn credential response"},
|
||||||
|
"name": {"type": "string", "description": "Name for this passkey"},
|
||||||
|
},
|
||||||
|
"required": ["credential"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Passkey registered successfully"},
|
||||||
|
400: {"description": "Invalid credential or registration failed"},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def register_passkey(request):
|
||||||
|
"""Complete passkey registration with WebAuthn response."""
|
||||||
|
try:
|
||||||
|
from django.utils import timezone
|
||||||
|
from allauth.mfa.webauthn.internal import auth as webauthn_auth
|
||||||
|
|
||||||
|
from apps.accounts.services.security_service import (
|
||||||
|
log_security_event,
|
||||||
|
send_security_notification,
|
||||||
|
)
|
||||||
|
|
||||||
|
credential = request.data.get("credential")
|
||||||
|
name = request.data.get("name", "Passkey")
|
||||||
|
|
||||||
|
if not credential:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Credential is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if registration has expired (5 minute timeout)
|
||||||
|
expires_at = request.session.get("pending_passkey_expires")
|
||||||
|
if expires_at and timezone.now().timestamp() > expires_at:
|
||||||
|
# Clear expired session data
|
||||||
|
if "pending_passkey_expires" in request.session:
|
||||||
|
del request.session["pending_passkey_expires"]
|
||||||
|
return Response(
|
||||||
|
{"detail": "Passkey registration session expired. Please start registration again."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get stored state from session (no request needed, uses context)
|
||||||
|
state = webauthn_auth.get_state()
|
||||||
|
if not state:
|
||||||
|
return Response(
|
||||||
|
{"detail": "No pending registration. Please start registration again."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use the correct allauth API: complete_registration
|
||||||
|
try:
|
||||||
|
from allauth.mfa.webauthn.internal.auth import WebAuthn
|
||||||
|
|
||||||
|
# Parse the credential response to validate it
|
||||||
|
credential_data = webauthn_auth.parse_registration_response(credential)
|
||||||
|
|
||||||
|
# Complete registration to validate and clear state
|
||||||
|
webauthn_auth.complete_registration(credential_data)
|
||||||
|
|
||||||
|
# Use allauth's WebAuthn.add() to create the Authenticator properly
|
||||||
|
# It stores the raw credential dict and name in the data field
|
||||||
|
webauthn_wrapper = WebAuthn.add(
|
||||||
|
request.user,
|
||||||
|
name,
|
||||||
|
credential, # Pass raw credential dict, not parsed data
|
||||||
|
)
|
||||||
|
authenticator = webauthn_wrapper.instance
|
||||||
|
|
||||||
|
# Log security event
|
||||||
|
log_security_event(
|
||||||
|
"passkey_registered",
|
||||||
|
request,
|
||||||
|
user=request.user,
|
||||||
|
metadata={"passkey_name": name, "passkey_id": str(authenticator.id) if authenticator else None},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send security notification email
|
||||||
|
send_security_notification(request.user, "passkey_registered", {"passkey_name": name})
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"detail": "Passkey registered successfully",
|
||||||
|
"name": name,
|
||||||
|
"id": str(authenticator.id) if authenticator else None,
|
||||||
|
})
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"WebAuthn registration failed: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Registration failed: {str(e)}"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except ImportError as e:
|
||||||
|
logger.error(f"WebAuthn module import error: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error registering passkey: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to register passkey: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_authentication_options",
|
||||||
|
summary="Get WebAuthn authentication options",
|
||||||
|
description="Returns options for authenticating with a passkey.",
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"description": "WebAuthn authentication options",
|
||||||
|
"example": {
|
||||||
|
"options": {"challenge": "...", "allowCredentials": []},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["GET"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def get_authentication_options(request):
|
||||||
|
"""Get WebAuthn authentication options for passkey verification."""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.webauthn.internal import auth as webauthn_auth
|
||||||
|
|
||||||
|
# Use the correct allauth API: begin_authentication
|
||||||
|
# Takes optional user, returns just options (state is stored internally)
|
||||||
|
request_options = webauthn_auth.begin_authentication(request.user)
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"options": request_options,
|
||||||
|
})
|
||||||
|
except ImportError as e:
|
||||||
|
logger.error(f"WebAuthn module import error: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting authentication options: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to get authentication options: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="authenticate_passkey",
|
||||||
|
summary="Authenticate with passkey",
|
||||||
|
description="Verifies the WebAuthn response for authentication.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"credential": {"type": "object", "description": "WebAuthn credential response"},
|
||||||
|
},
|
||||||
|
"required": ["credential"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Authentication successful"},
|
||||||
|
400: {"description": "Invalid credential or authentication failed"},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def authenticate_passkey(request):
|
||||||
|
"""Verify passkey authentication."""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.webauthn.internal import auth as webauthn_auth
|
||||||
|
|
||||||
|
credential = request.data.get("credential")
|
||||||
|
|
||||||
|
if not credential:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Credential is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get stored state from session (no request needed, uses context)
|
||||||
|
state = webauthn_auth.get_state()
|
||||||
|
if not state:
|
||||||
|
return Response(
|
||||||
|
{"detail": "No pending authentication. Please start authentication again."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use the correct allauth API: complete_authentication
|
||||||
|
try:
|
||||||
|
# Complete authentication - takes user and credential response
|
||||||
|
# State is handled internally
|
||||||
|
webauthn_auth.complete_authentication(request.user, credential)
|
||||||
|
|
||||||
|
return Response({"success": True})
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"WebAuthn authentication failed: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Authentication failed: {str(e)}"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except ImportError as e:
|
||||||
|
logger.error(f"WebAuthn module import error: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error authenticating passkey: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to authenticate: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="delete_passkey",
|
||||||
|
summary="Delete a passkey",
|
||||||
|
description="Removes a registered passkey from the user's account.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"password": {"type": "string", "description": "Current password for confirmation"},
|
||||||
|
},
|
||||||
|
"required": ["password"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Passkey deleted successfully"},
|
||||||
|
400: {"description": "Invalid password or passkey not found"},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["DELETE"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def delete_passkey(request, passkey_id):
|
||||||
|
"""Delete a passkey."""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
|
||||||
|
from apps.accounts.services.security_service import (
|
||||||
|
check_auth_method_availability,
|
||||||
|
log_security_event,
|
||||||
|
send_security_notification,
|
||||||
|
)
|
||||||
|
|
||||||
|
user = request.user
|
||||||
|
password = request.data.get("password", "")
|
||||||
|
|
||||||
|
# Verify password
|
||||||
|
if not user.check_password(password):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Invalid password"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if user has other auth methods before removing passkey
|
||||||
|
auth_methods = check_auth_method_availability(user)
|
||||||
|
|
||||||
|
# If this is the last passkey and user has no other auth method, block removal
|
||||||
|
if auth_methods["passkey_count"] == 1:
|
||||||
|
if not auth_methods["has_password"] and not auth_methods["has_social"] and not auth_methods["has_totp"]:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Cannot remove last passkey: you must have at least one authentication method. Please set a password or connect a social account first."},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Find and delete the passkey
|
||||||
|
try:
|
||||||
|
authenticator = Authenticator.objects.get(
|
||||||
|
id=passkey_id,
|
||||||
|
user=user,
|
||||||
|
type=Authenticator.Type.WEBAUTHN,
|
||||||
|
)
|
||||||
|
passkey_name = authenticator.data.get("name", "Passkey") if authenticator.data else "Passkey"
|
||||||
|
authenticator.delete()
|
||||||
|
|
||||||
|
# Log security event
|
||||||
|
log_security_event(
|
||||||
|
"passkey_removed",
|
||||||
|
request,
|
||||||
|
user=user,
|
||||||
|
metadata={"passkey_name": passkey_name, "passkey_id": str(passkey_id)},
|
||||||
|
)
|
||||||
|
|
||||||
|
# Send security notification email
|
||||||
|
send_security_notification(user, "passkey_removed", {"passkey_name": passkey_name})
|
||||||
|
|
||||||
|
except Authenticator.DoesNotExist:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Passkey not found"},
|
||||||
|
status=status.HTTP_404_NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response({"detail": "Passkey deleted successfully"})
|
||||||
|
except ImportError:
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error deleting passkey: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to delete passkey: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="rename_passkey",
|
||||||
|
summary="Rename a passkey",
|
||||||
|
description="Updates the name of a registered passkey.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {"type": "string", "description": "New name for the passkey"},
|
||||||
|
},
|
||||||
|
"required": ["name"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "Passkey renamed successfully"},
|
||||||
|
404: {"description": "Passkey not found"},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["PATCH"])
|
||||||
|
@permission_classes([IsAuthenticated])
|
||||||
|
def rename_passkey(request, passkey_id):
|
||||||
|
"""Rename a passkey."""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
|
||||||
|
user = request.user
|
||||||
|
new_name = request.data.get("name", "").strip()
|
||||||
|
|
||||||
|
if not new_name:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Name is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
authenticator = Authenticator.objects.get(
|
||||||
|
id=passkey_id, user=user, type=Authenticator.Type.WEBAUTHN,
|
||||||
|
)
|
||||||
|
data = authenticator.data or {}
|
||||||
|
data["name"] = new_name
|
||||||
|
authenticator.data = data
|
||||||
|
authenticator.save()
|
||||||
|
except Authenticator.DoesNotExist:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Passkey not found"},
|
||||||
|
status=status.HTTP_404_NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response({"detail": "Passkey renamed successfully", "name": new_name})
|
||||||
|
except ImportError:
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error renaming passkey: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to rename passkey: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
operation_id="get_login_passkey_options",
|
||||||
|
summary="Get WebAuthn options for MFA login",
|
||||||
|
description="Returns passkey auth options using MFA token (unauthenticated).",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"mfa_token": {"type": "string", "description": "MFA token from login"},
|
||||||
|
},
|
||||||
|
"required": ["mfa_token"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {"description": "WebAuthn authentication options"},
|
||||||
|
400: {"description": "Invalid or expired MFA token"},
|
||||||
|
},
|
||||||
|
tags=["Passkey"],
|
||||||
|
)
|
||||||
|
@api_view(["POST"])
|
||||||
|
@permission_classes([AllowAny])
|
||||||
|
def get_login_passkey_options(request):
|
||||||
|
"""Get WebAuthn authentication options for MFA login flow (unauthenticated)."""
|
||||||
|
from django.core.cache import cache
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
|
||||||
|
User = get_user_model()
|
||||||
|
mfa_token = request.data.get("mfa_token")
|
||||||
|
|
||||||
|
if not mfa_token:
|
||||||
|
return Response(
|
||||||
|
{"detail": "MFA token is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
cache_key = f"mfa_login:{mfa_token}"
|
||||||
|
cached_data = cache.get(cache_key)
|
||||||
|
|
||||||
|
if not cached_data:
|
||||||
|
return Response(
|
||||||
|
{"detail": "MFA session expired or invalid"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
user_id = cached_data.get("user_id")
|
||||||
|
|
||||||
|
try:
|
||||||
|
user = User.objects.get(pk=user_id)
|
||||||
|
except User.DoesNotExist:
|
||||||
|
return Response({"detail": "User not found"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
from allauth.mfa.webauthn.internal import auth as webauthn_auth
|
||||||
|
|
||||||
|
passkeys = Authenticator.objects.filter(
|
||||||
|
user=user, type=Authenticator.Type.WEBAUTHN
|
||||||
|
)
|
||||||
|
|
||||||
|
if not passkeys.exists():
|
||||||
|
return Response(
|
||||||
|
{"detail": "No passkeys registered"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
original_user = getattr(request, "user", None)
|
||||||
|
request.user = user
|
||||||
|
|
||||||
|
try:
|
||||||
|
# begin_authentication takes just user, returns options (state stored internally)
|
||||||
|
request_options = webauthn_auth.begin_authentication(user)
|
||||||
|
# Note: State is managed by allauth's session context, but for MFA login flow
|
||||||
|
# we need to track user separately since they're not authenticated yet
|
||||||
|
passkey_state_key = f"mfa_passkey_state:{mfa_token}"
|
||||||
|
# Store a reference that this user has a pending passkey auth
|
||||||
|
cache.set(passkey_state_key, {"user_id": user_id}, timeout=300)
|
||||||
|
return Response({"options": request_options})
|
||||||
|
finally:
|
||||||
|
if original_user is not None:
|
||||||
|
request.user = original_user
|
||||||
|
|
||||||
|
except ImportError as e:
|
||||||
|
logger.error(f"WebAuthn module import error: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting login passkey options: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to get passkey options: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
@@ -105,19 +105,36 @@ class UserOutputSerializer(serializers.ModelSerializer):
|
|||||||
|
|
||||||
|
|
||||||
class LoginInputSerializer(serializers.Serializer):
|
class LoginInputSerializer(serializers.Serializer):
|
||||||
"""Input serializer for user login."""
|
"""Input serializer for user login.
|
||||||
|
|
||||||
|
Accepts either 'email' or 'username' field for backward compatibility.
|
||||||
|
The view will use whichever is provided.
|
||||||
|
"""
|
||||||
|
|
||||||
username = serializers.CharField(max_length=254, help_text="Username or email address")
|
# Accept both email and username - frontend sends "email", but we also support "username"
|
||||||
|
email = serializers.CharField(max_length=254, required=False, help_text="Email address")
|
||||||
|
username = serializers.CharField(max_length=254, required=False, help_text="Username (alternative to email)")
|
||||||
password = serializers.CharField(max_length=128, style={"input_type": "password"}, trim_whitespace=False)
|
password = serializers.CharField(max_length=128, style={"input_type": "password"}, trim_whitespace=False)
|
||||||
|
|
||||||
def validate(self, attrs):
|
def validate(self, attrs):
|
||||||
|
email = attrs.get("email")
|
||||||
username = attrs.get("username")
|
username = attrs.get("username")
|
||||||
password = attrs.get("password")
|
password = attrs.get("password")
|
||||||
|
|
||||||
if username and password:
|
# Use email if provided, fallback to username
|
||||||
return attrs
|
identifier = email or username
|
||||||
|
|
||||||
|
if not identifier:
|
||||||
|
raise serializers.ValidationError("Either email or username is required.")
|
||||||
|
|
||||||
|
if not password:
|
||||||
|
raise serializers.ValidationError("Password is required.")
|
||||||
|
|
||||||
|
# Store the identifier in a standard field for the view to consume
|
||||||
|
attrs["username"] = identifier
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
raise serializers.ValidationError("Must include username/email and password.")
|
|
||||||
|
|
||||||
|
|
||||||
class LoginOutputSerializer(serializers.Serializer):
|
class LoginOutputSerializer(serializers.Serializer):
|
||||||
@@ -129,6 +146,53 @@ class LoginOutputSerializer(serializers.Serializer):
|
|||||||
message = serializers.CharField()
|
message = serializers.CharField()
|
||||||
|
|
||||||
|
|
||||||
|
class MFARequiredOutputSerializer(serializers.Serializer):
|
||||||
|
"""Output serializer when MFA verification is required after password auth."""
|
||||||
|
|
||||||
|
mfa_required = serializers.BooleanField(default=True)
|
||||||
|
mfa_token = serializers.CharField(help_text="Temporary token for MFA verification")
|
||||||
|
mfa_types = serializers.ListField(
|
||||||
|
child=serializers.CharField(),
|
||||||
|
help_text="Available MFA types: 'totp', 'webauthn'",
|
||||||
|
)
|
||||||
|
user_id = serializers.IntegerField(help_text="User ID for reference")
|
||||||
|
message = serializers.CharField(default="MFA verification required")
|
||||||
|
|
||||||
|
|
||||||
|
class MFALoginVerifyInputSerializer(serializers.Serializer):
|
||||||
|
"""Input serializer for MFA login verification."""
|
||||||
|
|
||||||
|
mfa_token = serializers.CharField(help_text="Temporary MFA token from login response")
|
||||||
|
code = serializers.CharField(
|
||||||
|
max_length=6,
|
||||||
|
min_length=6,
|
||||||
|
required=False,
|
||||||
|
help_text="6-digit TOTP code from authenticator app",
|
||||||
|
)
|
||||||
|
# For passkey/webauthn - credential will be a complex object
|
||||||
|
credential = serializers.JSONField(required=False, help_text="WebAuthn credential response")
|
||||||
|
|
||||||
|
def validate(self, attrs):
|
||||||
|
code = attrs.get("code")
|
||||||
|
credential = attrs.get("credential")
|
||||||
|
|
||||||
|
if not code and not credential:
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"Either 'code' (TOTP) or 'credential' (passkey) is required."
|
||||||
|
)
|
||||||
|
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
class MFALoginVerifyOutputSerializer(serializers.Serializer):
|
||||||
|
"""Output serializer for successful MFA verification."""
|
||||||
|
|
||||||
|
access = serializers.CharField()
|
||||||
|
refresh = serializers.CharField()
|
||||||
|
user = UserOutputSerializer()
|
||||||
|
message = serializers.CharField(default="Login successful")
|
||||||
|
|
||||||
|
|
||||||
class SignupInputSerializer(serializers.ModelSerializer):
|
class SignupInputSerializer(serializers.ModelSerializer):
|
||||||
"""Input serializer for user registration."""
|
"""Input serializer for user registration."""
|
||||||
|
|
||||||
|
|||||||
@@ -9,6 +9,8 @@ from django.urls import include, path
|
|||||||
from rest_framework_simplejwt.views import TokenRefreshView
|
from rest_framework_simplejwt.views import TokenRefreshView
|
||||||
|
|
||||||
from . import mfa as mfa_views
|
from . import mfa as mfa_views
|
||||||
|
from . import passkey as passkey_views
|
||||||
|
from . import account_management as account_views
|
||||||
from .views import (
|
from .views import (
|
||||||
AuthStatusAPIView,
|
AuthStatusAPIView,
|
||||||
# Social provider management views
|
# Social provider management views
|
||||||
@@ -22,9 +24,12 @@ from .views import (
|
|||||||
# Main auth views
|
# Main auth views
|
||||||
LoginAPIView,
|
LoginAPIView,
|
||||||
LogoutAPIView,
|
LogoutAPIView,
|
||||||
|
MFALoginVerifyAPIView,
|
||||||
PasswordChangeAPIView,
|
PasswordChangeAPIView,
|
||||||
PasswordResetAPIView,
|
PasswordResetAPIView,
|
||||||
|
ProcessOAuthProfileAPIView,
|
||||||
ResendVerificationAPIView,
|
ResendVerificationAPIView,
|
||||||
|
SessionToTokenAPIView, # For passkey login token exchange
|
||||||
SignupAPIView,
|
SignupAPIView,
|
||||||
SocialAuthStatusAPIView,
|
SocialAuthStatusAPIView,
|
||||||
SocialProvidersAPIView,
|
SocialProvidersAPIView,
|
||||||
@@ -33,13 +38,14 @@ from .views import (
|
|||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
# Core authentication endpoints
|
# Core authentication endpoints
|
||||||
path("login/", LoginAPIView.as_view(), name="auth-login"),
|
path("login/", LoginAPIView.as_view(), name="auth-login"),
|
||||||
|
path("login/mfa-verify/", MFALoginVerifyAPIView.as_view(), name="auth-login-mfa-verify"),
|
||||||
path("signup/", SignupAPIView.as_view(), name="auth-signup"),
|
path("signup/", SignupAPIView.as_view(), name="auth-signup"),
|
||||||
path("logout/", LogoutAPIView.as_view(), name="auth-logout"),
|
path("logout/", LogoutAPIView.as_view(), name="auth-logout"),
|
||||||
path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
|
path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
|
||||||
# JWT token management
|
# JWT token management
|
||||||
path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
|
path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
|
||||||
# Social authentication endpoints (dj-rest-auth)
|
path("token/session/", SessionToTokenAPIView.as_view(), name="auth-token-session"), # Exchange session for JWT
|
||||||
path("social/", include("dj_rest_auth.registration.urls")),
|
# Note: dj_rest_auth removed - using custom social auth views below
|
||||||
path(
|
path(
|
||||||
"password/reset/",
|
"password/reset/",
|
||||||
PasswordResetAPIView.as_view(),
|
PasswordResetAPIView.as_view(),
|
||||||
@@ -81,6 +87,11 @@ urlpatterns = [
|
|||||||
SocialAuthStatusAPIView.as_view(),
|
SocialAuthStatusAPIView.as_view(),
|
||||||
name="auth-social-status",
|
name="auth-social-status",
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"social/process-profile/",
|
||||||
|
ProcessOAuthProfileAPIView.as_view(),
|
||||||
|
name="auth-social-process-profile",
|
||||||
|
),
|
||||||
path("status/", AuthStatusAPIView.as_view(), name="auth-status"),
|
path("status/", AuthStatusAPIView.as_view(), name="auth-status"),
|
||||||
# Email verification endpoints
|
# Email verification endpoints
|
||||||
path(
|
path(
|
||||||
@@ -100,6 +111,26 @@ urlpatterns = [
|
|||||||
path("mfa/totp/deactivate/", mfa_views.deactivate_totp, name="auth-mfa-totp-deactivate"),
|
path("mfa/totp/deactivate/", mfa_views.deactivate_totp, name="auth-mfa-totp-deactivate"),
|
||||||
path("mfa/totp/verify/", mfa_views.verify_totp, name="auth-mfa-totp-verify"),
|
path("mfa/totp/verify/", mfa_views.verify_totp, name="auth-mfa-totp-verify"),
|
||||||
path("mfa/recovery-codes/regenerate/", mfa_views.regenerate_recovery_codes, name="auth-mfa-recovery-regenerate"),
|
path("mfa/recovery-codes/regenerate/", mfa_views.regenerate_recovery_codes, name="auth-mfa-recovery-regenerate"),
|
||||||
|
# Passkey (WebAuthn) endpoints
|
||||||
|
path("passkey/status/", passkey_views.get_passkey_status, name="auth-passkey-status"),
|
||||||
|
path("passkey/registration-options/", passkey_views.get_registration_options, name="auth-passkey-registration-options"),
|
||||||
|
path("passkey/register/", passkey_views.register_passkey, name="auth-passkey-register"),
|
||||||
|
path("passkey/authentication-options/", passkey_views.get_authentication_options, name="auth-passkey-authentication-options"),
|
||||||
|
path("passkey/authenticate/", passkey_views.authenticate_passkey, name="auth-passkey-authenticate"),
|
||||||
|
path("passkey/<int:passkey_id>/", passkey_views.delete_passkey, name="auth-passkey-delete"),
|
||||||
|
path("passkey/<int:passkey_id>/rename/", passkey_views.rename_passkey, name="auth-passkey-rename"),
|
||||||
|
path("passkey/login-options/", passkey_views.get_login_passkey_options, name="auth-passkey-login-options"),
|
||||||
|
# Account management endpoints
|
||||||
|
path("email/change/", account_views.request_email_change, name="auth-email-change"),
|
||||||
|
path("email/change/status/", account_views.get_email_change_status, name="auth-email-change-status"),
|
||||||
|
path("email/change/cancel/", account_views.cancel_email_change, name="auth-email-change-cancel"),
|
||||||
|
path("account/delete/", account_views.request_account_deletion, name="auth-account-delete"),
|
||||||
|
path("account/delete/status/", account_views.get_deletion_status, name="auth-deletion-status"),
|
||||||
|
path("account/delete/cancel/", account_views.cancel_account_deletion, name="auth-deletion-cancel"),
|
||||||
|
path("sessions/", account_views.list_sessions, name="auth-sessions-list"),
|
||||||
|
path("sessions/<str:session_id>/", account_views.revoke_session, name="auth-session-revoke"),
|
||||||
|
path("password/change/", account_views.change_password, name="auth-password-change-v2"),
|
||||||
|
path("security-log/", account_views.get_security_log, name="auth-security-log"),
|
||||||
]
|
]
|
||||||
|
|
||||||
# Note: User profiles and top lists functionality is now handled by the accounts app
|
# Note: User profiles and top lists functionality is now handled by the accounts app
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -3,12 +3,24 @@ Core API URL configuration.
|
|||||||
Centralized from apps.core.urls
|
Centralized from apps.core.urls
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from django.urls import path
|
from django.urls import include, path
|
||||||
|
from rest_framework.routers import DefaultRouter
|
||||||
|
|
||||||
from . import views
|
from . import views
|
||||||
|
from apps.core.api.milestone_views import MilestoneViewSet
|
||||||
|
|
||||||
|
# Create router for viewsets
|
||||||
|
router = DefaultRouter()
|
||||||
|
router.register(r"milestones", MilestoneViewSet, basename="milestone")
|
||||||
|
|
||||||
# Entity search endpoints - migrated from apps.core.urls
|
# Entity search endpoints - migrated from apps.core.urls
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
|
# View counts endpoint for tracking page views
|
||||||
|
path(
|
||||||
|
"views/",
|
||||||
|
views.ViewCountView.as_view(),
|
||||||
|
name="view_counts",
|
||||||
|
),
|
||||||
path(
|
path(
|
||||||
"entities/search/",
|
"entities/search/",
|
||||||
views.EntityFuzzySearchView.as_view(),
|
views.EntityFuzzySearchView.as_view(),
|
||||||
@@ -24,4 +36,13 @@ urlpatterns = [
|
|||||||
views.QuickEntitySuggestionView.as_view(),
|
views.QuickEntitySuggestionView.as_view(),
|
||||||
name="entity_suggestions",
|
name="entity_suggestions",
|
||||||
),
|
),
|
||||||
|
# Telemetry endpoint for frontend logging
|
||||||
|
path(
|
||||||
|
"telemetry/",
|
||||||
|
views.TelemetryView.as_view(),
|
||||||
|
name="telemetry",
|
||||||
|
),
|
||||||
|
# Include router URLs (milestones, etc.)
|
||||||
|
path("", include(router.urls)),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|||||||
@@ -22,6 +22,208 @@ from apps.core.services.entity_fuzzy_matching import (
|
|||||||
entity_fuzzy_matcher,
|
entity_fuzzy_matcher,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ViewCountView(APIView):
|
||||||
|
"""
|
||||||
|
Track and retrieve view counts for entities.
|
||||||
|
|
||||||
|
This endpoint provides the /core/views/ functionality expected by
|
||||||
|
the frontend for tracking page views on parks, rides, and companies.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
tags=["Core"],
|
||||||
|
summary="Get view counts for entities",
|
||||||
|
description="Retrieve view counts for specified entities",
|
||||||
|
)
|
||||||
|
def get(self, request):
|
||||||
|
"""Get view counts for entities by type and ID."""
|
||||||
|
entity_type = request.query_params.get("entity_type")
|
||||||
|
entity_id = request.query_params.get("entity_id")
|
||||||
|
|
||||||
|
if not entity_type or not entity_id:
|
||||||
|
return Response(
|
||||||
|
{"detail": "entity_type and entity_id are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Try to get view count from analytics tracking
|
||||||
|
try:
|
||||||
|
from apps.core.models import EntityViewCount
|
||||||
|
|
||||||
|
view_count = EntityViewCount.objects.filter(
|
||||||
|
entity_type=entity_type,
|
||||||
|
entity_id=entity_id,
|
||||||
|
).first()
|
||||||
|
|
||||||
|
if view_count:
|
||||||
|
return Response({
|
||||||
|
"entity_type": entity_type,
|
||||||
|
"entity_id": entity_id,
|
||||||
|
"view_count": view_count.count,
|
||||||
|
"last_viewed": view_count.last_viewed_at,
|
||||||
|
})
|
||||||
|
except Exception:
|
||||||
|
# Model may not exist yet, return placeholder
|
||||||
|
pass
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"entity_type": entity_type,
|
||||||
|
"entity_id": entity_id,
|
||||||
|
"view_count": 0,
|
||||||
|
"last_viewed": None,
|
||||||
|
})
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
tags=["Core"],
|
||||||
|
summary="Record a view for an entity",
|
||||||
|
description="Increment the view count for a specified entity",
|
||||||
|
)
|
||||||
|
def post(self, request):
|
||||||
|
"""Record a view for an entity."""
|
||||||
|
entity_type = request.data.get("entity_type")
|
||||||
|
entity_id = request.data.get("entity_id")
|
||||||
|
|
||||||
|
if not entity_type or not entity_id:
|
||||||
|
return Response(
|
||||||
|
{"detail": "entity_type and entity_id are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Track the view
|
||||||
|
try:
|
||||||
|
from django.utils import timezone
|
||||||
|
from apps.core.models import EntityViewCount
|
||||||
|
|
||||||
|
view_count, created = EntityViewCount.objects.get_or_create(
|
||||||
|
entity_type=entity_type,
|
||||||
|
entity_id=entity_id,
|
||||||
|
defaults={"count": 0},
|
||||||
|
)
|
||||||
|
view_count.count += 1
|
||||||
|
view_count.last_viewed_at = timezone.now()
|
||||||
|
view_count.save(update_fields=["count", "last_viewed_at"])
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"success": True,
|
||||||
|
"entity_type": entity_type,
|
||||||
|
"entity_id": entity_id,
|
||||||
|
"view_count": view_count.count,
|
||||||
|
}, status=status.HTTP_200_OK)
|
||||||
|
except Exception as e:
|
||||||
|
# Model may not exist, log and return success anyway
|
||||||
|
logger.debug(f"View count tracking not available: {e}")
|
||||||
|
return Response({
|
||||||
|
"success": True,
|
||||||
|
"entity_type": entity_type,
|
||||||
|
"entity_id": entity_id,
|
||||||
|
"view_count": 1, # Assume first view
|
||||||
|
}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
|
|
||||||
|
class TelemetryView(APIView):
|
||||||
|
"""
|
||||||
|
Handle frontend telemetry and request metadata logging.
|
||||||
|
|
||||||
|
This endpoint accepts telemetry data from the frontend for logging and
|
||||||
|
analytics purposes. When error data is present, it persists the error
|
||||||
|
to the database for monitoring.
|
||||||
|
|
||||||
|
Note: This endpoint bypasses authentication entirely to ensure errors
|
||||||
|
can be logged even when user tokens are expired or invalid.
|
||||||
|
"""
|
||||||
|
|
||||||
|
authentication_classes = [] # Bypass JWT auth to allow error logging with expired tokens
|
||||||
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
tags=["Core"],
|
||||||
|
summary="Log request metadata",
|
||||||
|
description="Log frontend telemetry and request metadata",
|
||||||
|
)
|
||||||
|
def post(self, request):
|
||||||
|
"""Accept telemetry data from frontend."""
|
||||||
|
data = request.data
|
||||||
|
|
||||||
|
# If this is an error report, persist it to the database
|
||||||
|
if data.get('p_error_type') or data.get('p_error_message') or data.get('error_type') or data.get('error_message'):
|
||||||
|
from apps.core.services import ErrorService
|
||||||
|
|
||||||
|
# Handle both p_ prefixed params (from log_request_metadata RPC) and direct params
|
||||||
|
error_message = data.get('p_error_message') or data.get('error_message') or 'Unknown error'
|
||||||
|
error_type = data.get('p_error_type') or data.get('error_type') or 'Error'
|
||||||
|
severity = data.get('p_severity') or data.get('severity') or 'medium'
|
||||||
|
error_stack = data.get('p_error_stack') or data.get('error_stack') or ''
|
||||||
|
error_code = data.get('p_error_code') or data.get('error_code') or ''
|
||||||
|
|
||||||
|
# Build metadata from available fields
|
||||||
|
metadata = {
|
||||||
|
'action': data.get('p_action') or data.get('action'),
|
||||||
|
'breadcrumbs': data.get('p_breadcrumbs'),
|
||||||
|
'duration_ms': data.get('p_duration_ms'),
|
||||||
|
'retry_attempts': data.get('p_retry_attempts'),
|
||||||
|
'affected_route': data.get('p_affected_route'),
|
||||||
|
'request_id': data.get('p_request_id') or data.get('request_id'),
|
||||||
|
}
|
||||||
|
# Remove None values
|
||||||
|
metadata = {k: v for k, v in metadata.items() if v is not None}
|
||||||
|
|
||||||
|
# Build environment from available fields
|
||||||
|
environment = data.get('p_environment_context') or data.get('environment') or {}
|
||||||
|
if isinstance(environment, str):
|
||||||
|
import json
|
||||||
|
try:
|
||||||
|
environment = json.loads(environment)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
environment = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
error = ErrorService.capture_error(
|
||||||
|
error=error_message,
|
||||||
|
source='frontend',
|
||||||
|
request=request,
|
||||||
|
severity=severity,
|
||||||
|
metadata=metadata,
|
||||||
|
environment=environment,
|
||||||
|
)
|
||||||
|
# Update additional fields
|
||||||
|
error.error_type = error_type
|
||||||
|
error.error_stack = error_stack[:10000] if error_stack else ''
|
||||||
|
error.error_code = error_code
|
||||||
|
error.endpoint = data.get('p_affected_route') or ''
|
||||||
|
error.http_status = data.get('p_http_status')
|
||||||
|
error.save(update_fields=['error_type', 'error_stack', 'error_code', 'endpoint', 'http_status'])
|
||||||
|
|
||||||
|
logger.info(f"Frontend error captured: {error.short_error_id}")
|
||||||
|
return Response(
|
||||||
|
{"success": True, "error_id": str(error.error_id)},
|
||||||
|
status=status.HTTP_201_CREATED,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to capture frontend error: {e}")
|
||||||
|
# Fall through to regular telemetry logging
|
||||||
|
|
||||||
|
# Non-error telemetry - just log and acknowledge
|
||||||
|
logger.debug(
|
||||||
|
"Telemetry received",
|
||||||
|
extra={
|
||||||
|
"data": data,
|
||||||
|
"user_id": getattr(request.user, "id", None),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"success": True, "message": "Telemetry logged"},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class EntityFuzzySearchView(APIView):
|
class EntityFuzzySearchView(APIView):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -1,7 +1,11 @@
|
|||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
from .views import GenerateUploadURLView
|
from . import views
|
||||||
|
|
||||||
|
app_name = "images"
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("generate-upload-url/", GenerateUploadURLView.as_view(), name="generate-upload-url"),
|
path("generate-upload-url/", views.GenerateUploadURLView.as_view(), name="generate_upload_url"),
|
||||||
|
path("delete/", views.DeleteImageView.as_view(), name="delete_image"),
|
||||||
|
path("og-image/", views.GenerateOGImageView.as_view(), name="og_image"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
from django.conf import settings
|
||||||
from django.core.exceptions import ImproperlyConfigured
|
from django.core.exceptions import ImproperlyConfigured
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.permissions import IsAuthenticated
|
from rest_framework.permissions import IsAuthenticated
|
||||||
@@ -30,3 +31,109 @@ class GenerateUploadURLView(APIView):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
|
capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
|
||||||
return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||||
|
|
||||||
|
|
||||||
|
class DeleteImageView(APIView):
|
||||||
|
"""
|
||||||
|
POST /images/delete/
|
||||||
|
Delete an image from Cloudflare Images.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
image_id = request.data.get("image_id")
|
||||||
|
|
||||||
|
if not image_id:
|
||||||
|
return Response(
|
||||||
|
{"detail": "image_id is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get Cloudflare credentials
|
||||||
|
account_id = getattr(settings, "CLOUDFLARE_IMAGES_ACCOUNT_ID", None)
|
||||||
|
api_token = getattr(settings, "CLOUDFLARE_IMAGES_API_TOKEN", None)
|
||||||
|
|
||||||
|
if not account_id or not api_token:
|
||||||
|
logger.warning("Cloudflare Images not configured, mock deleting image")
|
||||||
|
return Response({"success": True, "mock": True})
|
||||||
|
|
||||||
|
# Delete from Cloudflare
|
||||||
|
url = f"https://api.cloudflare.com/client/v4/accounts/{account_id}/images/v1/{image_id}"
|
||||||
|
response = requests.delete(
|
||||||
|
url,
|
||||||
|
headers={"Authorization": f"Bearer {api_token}"},
|
||||||
|
timeout=10,
|
||||||
|
)
|
||||||
|
|
||||||
|
if response.status_code in (200, 404): # 404 = already deleted
|
||||||
|
return Response({"success": True})
|
||||||
|
else:
|
||||||
|
logger.error(f"Cloudflare delete failed: {response.text}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "Failed to delete image"},
|
||||||
|
status=status.HTTP_502_BAD_GATEWAY,
|
||||||
|
)
|
||||||
|
|
||||||
|
except requests.RequestException as e:
|
||||||
|
capture_and_log(e, "Delete image - Cloudflare API error", source="api")
|
||||||
|
return Response(
|
||||||
|
{"detail": "Failed to delete image"},
|
||||||
|
status=status.HTTP_502_BAD_GATEWAY,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Delete image - unexpected error", source="api")
|
||||||
|
return Response(
|
||||||
|
{"detail": "An unexpected error occurred"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateOGImageView(APIView):
|
||||||
|
"""
|
||||||
|
POST /images/og-image/
|
||||||
|
Generate an Open Graph image for social sharing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [] # Public endpoint
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
title = request.data.get("title", "")
|
||||||
|
description = request.data.get("description", "")
|
||||||
|
entity_type = request.data.get("entity_type", "")
|
||||||
|
image_url = request.data.get("image_url", "")
|
||||||
|
|
||||||
|
if not title:
|
||||||
|
return Response(
|
||||||
|
{"detail": "title is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# This is a placeholder for OG image generation
|
||||||
|
# In production, you would:
|
||||||
|
# 1. Use an image generation service (Cloudinary, imgix, etc.)
|
||||||
|
# 2. Or use a headless browser service (Puppeteer, Playwright)
|
||||||
|
# 3. Or use a dedicated OG image service
|
||||||
|
|
||||||
|
# For now, return a template URL or placeholder
|
||||||
|
base_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")
|
||||||
|
og_image_url = f"{base_url}/api/v1/images/og-preview/?title={title[:100]}"
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"success": True,
|
||||||
|
"og_image_url": og_image_url,
|
||||||
|
"title": title,
|
||||||
|
"description": description[:200] if description else "",
|
||||||
|
"entity_type": entity_type,
|
||||||
|
"note": "Placeholder - configure OG image service for production",
|
||||||
|
})
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Generate OG image", source="api")
|
||||||
|
return Response(
|
||||||
|
{"detail": str(e)},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -30,4 +30,8 @@ urlpatterns = [
|
|||||||
views.MapCacheAPIView.as_view(),
|
views.MapCacheAPIView.as_view(),
|
||||||
name="map_cache_invalidate",
|
name="map_cache_invalidate",
|
||||||
),
|
),
|
||||||
|
# Location detection and enrichment
|
||||||
|
path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
|
||||||
|
path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
|
||||||
|
path("search-location/", views.SearchLocationView.as_view(), name="search_location"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -999,3 +999,630 @@ MapSearchView = MapSearchAPIView
|
|||||||
MapBoundsView = MapBoundsAPIView
|
MapBoundsView = MapBoundsAPIView
|
||||||
MapStatsView = MapStatsAPIView
|
MapStatsView = MapStatsAPIView
|
||||||
MapCacheView = MapCacheAPIView
|
MapCacheView = MapCacheAPIView
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Location Detection / Enrichment Endpoints
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
post=extend_schema(
|
||||||
|
summary="Detect user location from IP",
|
||||||
|
description="Detect the user's approximate location based on their IP address.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"ip_address": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "IP address to geolocate. If not provided, uses request IP.",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"latitude": {"type": "number"},
|
||||||
|
"longitude": {"type": "number"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
"region": {"type": "string"},
|
||||||
|
"country": {"type": "string"},
|
||||||
|
"timezone": {"type": "string"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tags=["Maps"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class DetectLocationView(APIView):
|
||||||
|
"""
|
||||||
|
POST /maps/detect-location/
|
||||||
|
Detect user's location based on IP address using a geolocation service.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
try:
|
||||||
|
# Get IP address from request or payload
|
||||||
|
ip_address = request.data.get("ip_address")
|
||||||
|
if not ip_address:
|
||||||
|
# Get client IP from request
|
||||||
|
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
|
||||||
|
if x_forwarded_for:
|
||||||
|
ip_address = x_forwarded_for.split(",")[0].strip()
|
||||||
|
else:
|
||||||
|
ip_address = request.META.get("REMOTE_ADDR", "")
|
||||||
|
|
||||||
|
# For localhost/development, return a default location
|
||||||
|
if ip_address in ("127.0.0.1", "::1", "localhost") or ip_address.startswith("192.168."):
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"latitude": 40.7128,
|
||||||
|
"longitude": -74.006,
|
||||||
|
"city": "New York",
|
||||||
|
"region": "New York",
|
||||||
|
"country": "US",
|
||||||
|
"country_name": "United States",
|
||||||
|
"timezone": "America/New_York",
|
||||||
|
"detected": False,
|
||||||
|
"reason": "localhost_fallback",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use IP geolocation service (ipapi.co, ipinfo.io, etc.)
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = httpx.get(
|
||||||
|
f"https://ipapi.co/{ip_address}/json/",
|
||||||
|
timeout=5.0,
|
||||||
|
headers={"User-Agent": "ThrillWiki/1.0"},
|
||||||
|
)
|
||||||
|
if response.status_code == 200:
|
||||||
|
data = response.json()
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"latitude": data.get("latitude"),
|
||||||
|
"longitude": data.get("longitude"),
|
||||||
|
"city": data.get("city", ""),
|
||||||
|
"region": data.get("region", ""),
|
||||||
|
"country": data.get("country_code", ""),
|
||||||
|
"country_name": data.get("country_name", ""),
|
||||||
|
"timezone": data.get("timezone", ""),
|
||||||
|
"detected": True,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except httpx.HTTPError as e:
|
||||||
|
logger.warning(f"IP geolocation failed: {e}")
|
||||||
|
|
||||||
|
# Fallback response
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"latitude": None,
|
||||||
|
"longitude": None,
|
||||||
|
"city": "",
|
||||||
|
"region": "",
|
||||||
|
"country": "",
|
||||||
|
"country_name": "",
|
||||||
|
"timezone": "",
|
||||||
|
"detected": False,
|
||||||
|
"reason": "geolocation_failed",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Detect location from IP", source="api")
|
||||||
|
return Response(
|
||||||
|
{"detail": str(e)},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
post=extend_schema(
|
||||||
|
summary="Enrich location with geocoding",
|
||||||
|
description="Enrich location data with reverse geocoding (coordinates to address).",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"latitude": {"type": "number", "required": True},
|
||||||
|
"longitude": {"type": "number", "required": True},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"formatted_address": {"type": "string"},
|
||||||
|
"street_address": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
"state": {"type": "string"},
|
||||||
|
"postal_code": {"type": "string"},
|
||||||
|
"country": {"type": "string"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tags=["Maps"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class EnrichLocationView(APIView):
|
||||||
|
"""
|
||||||
|
POST /maps/enrich-location/
|
||||||
|
Enrich location with reverse geocoding (coordinates to address).
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
try:
|
||||||
|
latitude = request.data.get("latitude")
|
||||||
|
longitude = request.data.get("longitude")
|
||||||
|
|
||||||
|
if latitude is None or longitude is None:
|
||||||
|
return Response(
|
||||||
|
{"detail": "latitude and longitude are required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
lat = float(latitude)
|
||||||
|
lng = float(longitude)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Invalid latitude or longitude"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use reverse geocoding service
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Using Nominatim (OpenStreetMap) - free, no API key required
|
||||||
|
response = httpx.get(
|
||||||
|
"https://nominatim.openstreetmap.org/reverse",
|
||||||
|
params={
|
||||||
|
"lat": lat,
|
||||||
|
"lon": lng,
|
||||||
|
"format": "json",
|
||||||
|
"addressdetails": 1,
|
||||||
|
},
|
||||||
|
timeout=5.0,
|
||||||
|
headers={"User-Agent": "ThrillWiki/1.0"},
|
||||||
|
)
|
||||||
|
if response.status_code == 200:
|
||||||
|
data = response.json()
|
||||||
|
address = data.get("address", {})
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"formatted_address": data.get("display_name", ""),
|
||||||
|
"street_address": address.get("road", ""),
|
||||||
|
"house_number": address.get("house_number", ""),
|
||||||
|
"city": (
|
||||||
|
address.get("city")
|
||||||
|
or address.get("town")
|
||||||
|
or address.get("village")
|
||||||
|
or ""
|
||||||
|
),
|
||||||
|
"state": address.get("state", ""),
|
||||||
|
"postal_code": address.get("postcode", ""),
|
||||||
|
"country": address.get("country", ""),
|
||||||
|
"country_code": address.get("country_code", "").upper(),
|
||||||
|
"enriched": True,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except httpx.HTTPError as e:
|
||||||
|
logger.warning(f"Reverse geocoding failed: {e}")
|
||||||
|
|
||||||
|
# Fallback response
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"formatted_address": "",
|
||||||
|
"street_address": "",
|
||||||
|
"city": "",
|
||||||
|
"state": "",
|
||||||
|
"postal_code": "",
|
||||||
|
"country": "",
|
||||||
|
"country_code": "",
|
||||||
|
"enriched": False,
|
||||||
|
"reason": "geocoding_failed",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Enrich location", source="api")
|
||||||
|
return Response(
|
||||||
|
{"detail": str(e)},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
post=extend_schema(
|
||||||
|
summary="Search for a location by text",
|
||||||
|
description="Forward geocoding - convert a text query (address, city name, etc.) to coordinates.",
|
||||||
|
request={
|
||||||
|
"application/json": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"query": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Location search query (address, city, place name, etc.)",
|
||||||
|
},
|
||||||
|
"limit": {
|
||||||
|
"type": "integer",
|
||||||
|
"description": "Maximum number of results to return (default: 5)",
|
||||||
|
},
|
||||||
|
"country": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "ISO 3166-1 alpha-2 country code to restrict search",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"required": ["query"],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
responses={
|
||||||
|
200: {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"results": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"latitude": {"type": "number"},
|
||||||
|
"longitude": {"type": "number"},
|
||||||
|
"formatted_address": {"type": "string"},
|
||||||
|
"city": {"type": "string"},
|
||||||
|
"state": {"type": "string"},
|
||||||
|
"country": {"type": "string"},
|
||||||
|
"importance": {"type": "number"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"query": {"type": "string"},
|
||||||
|
"count": {"type": "integer"},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
400: {"description": "Missing or invalid query parameter"},
|
||||||
|
},
|
||||||
|
tags=["Maps"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class SearchLocationView(APIView):
|
||||||
|
"""
|
||||||
|
POST /maps/search-location/
|
||||||
|
Forward geocoding - search for locations by text query.
|
||||||
|
|
||||||
|
Full parity with Supabase Edge Function: search-location
|
||||||
|
|
||||||
|
Features:
|
||||||
|
- Query caching with SHA-256 hash (7-day expiration)
|
||||||
|
- Rate limiting (30 requests per minute per IP)
|
||||||
|
- Usage logging for monitoring
|
||||||
|
- Cache headers (X-Cache: HIT/MISS)
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [AllowAny]
|
||||||
|
|
||||||
|
# Rate limit settings matching original
|
||||||
|
RATE_LIMIT_REQUESTS = 30
|
||||||
|
RATE_LIMIT_PERIOD = 60 # 1 minute
|
||||||
|
CACHE_EXPIRATION = 7 * 24 * 60 * 60 # 7 days in seconds
|
||||||
|
|
||||||
|
def _hash_query(self, query: str) -> str:
|
||||||
|
"""Hash query for cache lookup (matching original SHA-256)."""
|
||||||
|
import hashlib
|
||||||
|
normalized = query.strip().lower()
|
||||||
|
return hashlib.sha256(normalized.encode()).hexdigest()
|
||||||
|
|
||||||
|
def _get_client_ip(self, request) -> str:
|
||||||
|
"""Get client IP from request headers."""
|
||||||
|
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
|
||||||
|
if x_forwarded_for:
|
||||||
|
return x_forwarded_for.split(',')[0].strip()
|
||||||
|
return request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR') or 'unknown'
|
||||||
|
|
||||||
|
def _check_rate_limit(self, client_ip: str) -> tuple[bool, int]:
|
||||||
|
"""
|
||||||
|
Check if client is rate limited.
|
||||||
|
Returns (is_allowed, current_count).
|
||||||
|
"""
|
||||||
|
from django.core.cache import cache
|
||||||
|
|
||||||
|
rate_limit_key = f"search_location:rate:{client_ip}"
|
||||||
|
current_count = cache.get(rate_limit_key, 0)
|
||||||
|
|
||||||
|
if current_count >= self.RATE_LIMIT_REQUESTS:
|
||||||
|
return False, current_count
|
||||||
|
|
||||||
|
# Increment counter with TTL
|
||||||
|
cache.set(rate_limit_key, current_count + 1, self.RATE_LIMIT_PERIOD)
|
||||||
|
return True, current_count + 1
|
||||||
|
|
||||||
|
def _get_cached_result(self, query_hash: str):
|
||||||
|
"""Get cached result if available."""
|
||||||
|
from django.core.cache import cache
|
||||||
|
|
||||||
|
cache_key = f"search_location:query:{query_hash}"
|
||||||
|
cached_data = cache.get(cache_key)
|
||||||
|
|
||||||
|
if cached_data:
|
||||||
|
# Update access count in a separate key
|
||||||
|
access_key = f"search_location:access:{query_hash}"
|
||||||
|
access_count = cache.get(access_key, 0)
|
||||||
|
cache.set(access_key, access_count + 1, self.CACHE_EXPIRATION)
|
||||||
|
|
||||||
|
return cached_data
|
||||||
|
|
||||||
|
def _set_cached_result(self, query: str, query_hash: str, results: list):
|
||||||
|
"""Cache the results."""
|
||||||
|
from django.core.cache import cache
|
||||||
|
|
||||||
|
cache_key = f"search_location:query:{query_hash}"
|
||||||
|
cache_data = {
|
||||||
|
"query": query,
|
||||||
|
"results": results,
|
||||||
|
"result_count": len(results),
|
||||||
|
}
|
||||||
|
cache.set(cache_key, cache_data, self.CACHE_EXPIRATION)
|
||||||
|
|
||||||
|
# Initialize access count
|
||||||
|
access_key = f"search_location:access:{query_hash}"
|
||||||
|
cache.set(access_key, 1, self.CACHE_EXPIRATION)
|
||||||
|
|
||||||
|
def _log_usage(self, query: str, cache_hit: bool, api_called: bool,
|
||||||
|
response_time_ms: int = None, result_count: int = None,
|
||||||
|
client_ip: str = None, user_id: str = None,
|
||||||
|
error: str = None, status_code: int = None):
|
||||||
|
"""Log API usage for monitoring."""
|
||||||
|
# Log to structured logger for now (can be enhanced to write to DB)
|
||||||
|
logger.info(
|
||||||
|
"OpenStreetMap API usage",
|
||||||
|
extra={
|
||||||
|
"query": query[:100],
|
||||||
|
"cache_hit": cache_hit,
|
||||||
|
"api_called": api_called,
|
||||||
|
"response_time_ms": response_time_ms,
|
||||||
|
"result_count": result_count,
|
||||||
|
"client_ip": client_ip,
|
||||||
|
"user_id": user_id,
|
||||||
|
"error": error,
|
||||||
|
"status_code": status_code,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
def post(self, request):
|
||||||
|
import time
|
||||||
|
import re
|
||||||
|
start_time = time.time()
|
||||||
|
|
||||||
|
client_ip = self._get_client_ip(request)
|
||||||
|
user_id = None
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Safely get user ID
|
||||||
|
if request.user and request.user.is_authenticated:
|
||||||
|
user_id = str(getattr(request.user, 'user_id', request.user.id))
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
# ================================================================
|
||||||
|
# STEP 0: Sanitize and validate input
|
||||||
|
# ================================================================
|
||||||
|
raw_query = request.data.get("query", "")
|
||||||
|
if not isinstance(raw_query, str):
|
||||||
|
raw_query = str(raw_query) if raw_query else ""
|
||||||
|
|
||||||
|
# Sanitize query: strip, limit length, remove control characters
|
||||||
|
query = raw_query.strip()[:500]
|
||||||
|
query = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', query)
|
||||||
|
|
||||||
|
# Validate limit
|
||||||
|
try:
|
||||||
|
limit = min(int(request.data.get("limit", 5)), 10)
|
||||||
|
limit = max(limit, 1) # At least 1
|
||||||
|
except (ValueError, TypeError):
|
||||||
|
limit = 5
|
||||||
|
|
||||||
|
# Sanitize country code (2-letter ISO code)
|
||||||
|
raw_country = request.data.get("country", "")
|
||||||
|
country_code = ""
|
||||||
|
if raw_country and isinstance(raw_country, str):
|
||||||
|
country_code = re.sub(r'[^a-zA-Z]', '', raw_country)[:2].lower()
|
||||||
|
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# STEP 1: Validate query (original: min 3 characters)
|
||||||
|
# ================================================================
|
||||||
|
if not query:
|
||||||
|
response_time = int((time.time() - start_time) * 1000)
|
||||||
|
self._log_usage(
|
||||||
|
query="",
|
||||||
|
cache_hit=False,
|
||||||
|
api_called=False,
|
||||||
|
response_time_ms=response_time,
|
||||||
|
client_ip=client_ip,
|
||||||
|
user_id=user_id,
|
||||||
|
error="Query is required",
|
||||||
|
status_code=400
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"error": "Query is required"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(query) < 3: # Match original: min 3 characters
|
||||||
|
response_time = int((time.time() - start_time) * 1000)
|
||||||
|
self._log_usage(
|
||||||
|
query=query,
|
||||||
|
cache_hit=False,
|
||||||
|
api_called=False,
|
||||||
|
response_time_ms=response_time,
|
||||||
|
client_ip=client_ip,
|
||||||
|
user_id=user_id,
|
||||||
|
error="Query must be at least 3 characters",
|
||||||
|
status_code=400
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"error": "Query must be at least 3 characters"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# STEP 2: Check rate limit (30 req/min per IP)
|
||||||
|
# ================================================================
|
||||||
|
is_allowed, current_count = self._check_rate_limit(client_ip)
|
||||||
|
if not is_allowed:
|
||||||
|
response_time = int((time.time() - start_time) * 1000)
|
||||||
|
self._log_usage(
|
||||||
|
query=query,
|
||||||
|
cache_hit=False,
|
||||||
|
api_called=False,
|
||||||
|
response_time_ms=response_time,
|
||||||
|
client_ip=client_ip,
|
||||||
|
user_id=user_id,
|
||||||
|
error="Rate limit exceeded",
|
||||||
|
status_code=429
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"error": "Rate limit exceeded. Please try again later."},
|
||||||
|
status=status.HTTP_429_TOO_MANY_REQUESTS,
|
||||||
|
headers={
|
||||||
|
"Retry-After": str(self.RATE_LIMIT_PERIOD),
|
||||||
|
"X-RateLimit-Limit": str(self.RATE_LIMIT_REQUESTS),
|
||||||
|
"X-RateLimit-Remaining": "0",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# STEP 3: Check cache
|
||||||
|
# ================================================================
|
||||||
|
query_hash = self._hash_query(query)
|
||||||
|
cached = self._get_cached_result(query_hash)
|
||||||
|
|
||||||
|
if cached:
|
||||||
|
response_time = int((time.time() - start_time) * 1000)
|
||||||
|
results = cached.get("results", [])
|
||||||
|
|
||||||
|
self._log_usage(
|
||||||
|
query=query,
|
||||||
|
cache_hit=True,
|
||||||
|
api_called=False,
|
||||||
|
response_time_ms=response_time,
|
||||||
|
result_count=len(results),
|
||||||
|
client_ip=client_ip,
|
||||||
|
user_id=user_id,
|
||||||
|
status_code=200
|
||||||
|
)
|
||||||
|
|
||||||
|
# Return raw array like original (frontend handles both formats)
|
||||||
|
response = Response(
|
||||||
|
results,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
response["X-Cache"] = "HIT"
|
||||||
|
response["Cache-Control"] = "public, max-age=3600"
|
||||||
|
return response
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# STEP 4: Cache miss - call Nominatim API
|
||||||
|
# ================================================================
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
try:
|
||||||
|
params = {
|
||||||
|
"q": query,
|
||||||
|
"format": "json",
|
||||||
|
"addressdetails": 1,
|
||||||
|
"limit": limit,
|
||||||
|
}
|
||||||
|
if country_code:
|
||||||
|
params["countrycodes"] = country_code.lower()
|
||||||
|
|
||||||
|
api_response = httpx.get(
|
||||||
|
"https://nominatim.openstreetmap.org/search",
|
||||||
|
params=params,
|
||||||
|
timeout=10.0,
|
||||||
|
headers={"User-Agent": "ThrillWiki/1.0 (https://thrillwiki.com)"},
|
||||||
|
)
|
||||||
|
|
||||||
|
if api_response.status_code != 200:
|
||||||
|
logger.warning(
|
||||||
|
f"Nominatim API error: {api_response.status_code}",
|
||||||
|
extra={"status": api_response.status_code}
|
||||||
|
)
|
||||||
|
return Response(
|
||||||
|
{"error": "Location search failed", "status": api_response.status_code},
|
||||||
|
status=api_response.status_code,
|
||||||
|
)
|
||||||
|
|
||||||
|
data = api_response.json()
|
||||||
|
response_time = int((time.time() - start_time) * 1000)
|
||||||
|
|
||||||
|
# ================================================================
|
||||||
|
# STEP 5: Cache the results (background-like, but sync in Django)
|
||||||
|
# ================================================================
|
||||||
|
try:
|
||||||
|
self._set_cached_result(query, query_hash, data)
|
||||||
|
except Exception as cache_error:
|
||||||
|
logger.warning(f"Failed to cache result: {cache_error}")
|
||||||
|
|
||||||
|
# Log usage
|
||||||
|
self._log_usage(
|
||||||
|
query=query,
|
||||||
|
cache_hit=False,
|
||||||
|
api_called=True,
|
||||||
|
response_time_ms=response_time,
|
||||||
|
result_count=len(data) if isinstance(data, list) else 0,
|
||||||
|
client_ip=client_ip,
|
||||||
|
user_id=user_id,
|
||||||
|
status_code=200
|
||||||
|
)
|
||||||
|
|
||||||
|
# Return raw array like original Nominatim response
|
||||||
|
response = Response(
|
||||||
|
data,
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
response["X-Cache"] = "MISS"
|
||||||
|
response["Cache-Control"] = "public, max-age=3600"
|
||||||
|
return response
|
||||||
|
|
||||||
|
except httpx.HTTPError as e:
|
||||||
|
logger.warning(f"Forward geocoding failed: {e}")
|
||||||
|
response_time = int((time.time() - start_time) * 1000)
|
||||||
|
|
||||||
|
self._log_usage(
|
||||||
|
query=query,
|
||||||
|
cache_hit=False,
|
||||||
|
api_called=True,
|
||||||
|
response_time_ms=response_time,
|
||||||
|
client_ip=client_ip,
|
||||||
|
user_id=user_id,
|
||||||
|
error=str(e),
|
||||||
|
status_code=500
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
{"error": "Failed to fetch location data"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|
||||||
|
except ValueError as e:
|
||||||
|
return Response(
|
||||||
|
{"error": f"Invalid parameter: {str(e)}"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Search location", source="api")
|
||||||
|
return Response(
|
||||||
|
{"error": str(e)},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
|
|||||||
@@ -333,6 +333,11 @@ class ParkListCreateAPIView(APIView):
|
|||||||
|
|
||||||
def _apply_park_attribute_filters(self, qs: QuerySet, params: dict) -> QuerySet:
|
def _apply_park_attribute_filters(self, qs: QuerySet, params: dict) -> QuerySet:
|
||||||
"""Apply park attribute filtering to the queryset."""
|
"""Apply park attribute filtering to the queryset."""
|
||||||
|
# Slug filter - exact match for single park lookup
|
||||||
|
slug = params.get("slug")
|
||||||
|
if slug:
|
||||||
|
qs = qs.filter(slug=slug)
|
||||||
|
|
||||||
park_type = params.get("park_type")
|
park_type = params.get("park_type")
|
||||||
if park_type:
|
if park_type:
|
||||||
qs = qs.filter(park_type=park_type)
|
qs = qs.filter(park_type=park_type)
|
||||||
|
|||||||
@@ -79,7 +79,7 @@ class ParkPhotoOutputSerializer(serializers.ModelSerializer):
|
|||||||
def get_image_url(self, obj):
|
def get_image_url(self, obj):
|
||||||
"""Get the full Cloudflare Images URL."""
|
"""Get the full Cloudflare Images URL."""
|
||||||
if obj.image:
|
if obj.image:
|
||||||
return obj.image.url
|
return obj.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@extend_schema_field(
|
@extend_schema_field(
|
||||||
@@ -95,10 +95,10 @@ class ParkPhotoOutputSerializer(serializers.ModelSerializer):
|
|||||||
|
|
||||||
# Common variants for park photos
|
# Common variants for park photos
|
||||||
variants = {
|
variants = {
|
||||||
"thumbnail": f"{obj.image.url}/thumbnail",
|
"thumbnail": f"{obj.image.public_url}/thumbnail",
|
||||||
"medium": f"{obj.image.url}/medium",
|
"medium": f"{obj.image.public_url}/medium",
|
||||||
"large": f"{obj.image.url}/large",
|
"large": f"{obj.image.public_url}/large",
|
||||||
"public": f"{obj.image.url}/public",
|
"public": f"{obj.image.public_url}/public",
|
||||||
}
|
}
|
||||||
return variants
|
return variants
|
||||||
|
|
||||||
@@ -113,6 +113,7 @@ class ParkPhotoOutputSerializer(serializers.ModelSerializer):
|
|||||||
"image_url",
|
"image_url",
|
||||||
"image_variants",
|
"image_variants",
|
||||||
"caption",
|
"caption",
|
||||||
|
"photographer",
|
||||||
"alt_text",
|
"alt_text",
|
||||||
"is_primary",
|
"is_primary",
|
||||||
"is_approved",
|
"is_approved",
|
||||||
@@ -147,6 +148,7 @@ class ParkPhotoCreateInputSerializer(serializers.ModelSerializer):
|
|||||||
fields = [
|
fields = [
|
||||||
"image",
|
"image",
|
||||||
"caption",
|
"caption",
|
||||||
|
"photographer",
|
||||||
"alt_text",
|
"alt_text",
|
||||||
"is_primary",
|
"is_primary",
|
||||||
]
|
]
|
||||||
@@ -159,6 +161,7 @@ class ParkPhotoUpdateInputSerializer(serializers.ModelSerializer):
|
|||||||
model = ParkPhoto
|
model = ParkPhoto
|
||||||
fields = [
|
fields = [
|
||||||
"caption",
|
"caption",
|
||||||
|
"photographer",
|
||||||
"alt_text",
|
"alt_text",
|
||||||
"is_primary",
|
"is_primary",
|
||||||
]
|
]
|
||||||
@@ -303,14 +306,14 @@ class HybridParkSerializer(serializers.ModelSerializer):
|
|||||||
def get_banner_image_url(self, obj):
|
def get_banner_image_url(self, obj):
|
||||||
"""Get banner image URL."""
|
"""Get banner image URL."""
|
||||||
if obj.banner_image and obj.banner_image.image:
|
if obj.banner_image and obj.banner_image.image:
|
||||||
return obj.banner_image.image.url
|
return obj.banner_image.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@extend_schema_field(serializers.URLField(allow_null=True))
|
@extend_schema_field(serializers.URLField(allow_null=True))
|
||||||
def get_card_image_url(self, obj):
|
def get_card_image_url(self, obj):
|
||||||
"""Get card image URL."""
|
"""Get card image URL."""
|
||||||
if obj.card_image and obj.card_image.image:
|
if obj.card_image and obj.card_image.image:
|
||||||
return obj.card_image.image.url
|
return obj.card_image.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@extend_schema_field(serializers.BooleanField())
|
@extend_schema_field(serializers.BooleanField())
|
||||||
|
|||||||
254
backend/apps/api/v1/rides/ride_model_views.py
Normal file
254
backend/apps/api/v1/rides/ride_model_views.py
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
"""
|
||||||
|
Global Ride Model views for ThrillWiki API v1.
|
||||||
|
|
||||||
|
This module provides top-level ride model endpoints that don't require
|
||||||
|
manufacturer context, matching the frontend's expectation of /rides/models/.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.db.models import Q
|
||||||
|
from drf_spectacular.types import OpenApiTypes
|
||||||
|
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||||
|
from rest_framework import permissions, status
|
||||||
|
from rest_framework.pagination import PageNumberPagination
|
||||||
|
from rest_framework.request import Request
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
# Import serializers
|
||||||
|
from apps.api.v1.serializers.ride_models import (
|
||||||
|
RideModelDetailOutputSerializer,
|
||||||
|
RideModelListOutputSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Attempt to import models
|
||||||
|
try:
|
||||||
|
from apps.rides.models import RideModel
|
||||||
|
from apps.rides.models.company import Company
|
||||||
|
|
||||||
|
MODELS_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
from apps.rides.models.rides import Company, RideModel
|
||||||
|
|
||||||
|
MODELS_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
RideModel = None
|
||||||
|
Company = None
|
||||||
|
MODELS_AVAILABLE = False
|
||||||
|
|
||||||
|
|
||||||
|
class StandardResultsSetPagination(PageNumberPagination):
|
||||||
|
page_size = 20
|
||||||
|
page_size_query_param = "page_size"
|
||||||
|
max_page_size = 100
|
||||||
|
|
||||||
|
|
||||||
|
class GlobalRideModelListAPIView(APIView):
|
||||||
|
"""
|
||||||
|
Global ride model list endpoint.
|
||||||
|
|
||||||
|
This endpoint provides a top-level list of all ride models without
|
||||||
|
requiring a manufacturer slug, matching the frontend's expectation
|
||||||
|
of calling /rides/models/ directly.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [permissions.AllowAny]
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="List all ride models with filtering and pagination",
|
||||||
|
description=(
|
||||||
|
"List all ride models across all manufacturers with comprehensive "
|
||||||
|
"filtering and pagination support. This is a global endpoint that "
|
||||||
|
"doesn't require manufacturer context."
|
||||||
|
),
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
name="page",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.INT,
|
||||||
|
description="Page number for pagination",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="page_size",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.INT,
|
||||||
|
description="Number of results per page (max 100)",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="search",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
description="Search term for name, description, or manufacturer",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="category",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
description="Filter by category (e.g., RC, DR, FR, WR)",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="manufacturer",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
description="Filter by manufacturer slug",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="target_market",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
description="Filter by target market (e.g., FAMILY, THRILL)",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="is_discontinued",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.BOOL,
|
||||||
|
description="Filter by discontinued status",
|
||||||
|
),
|
||||||
|
OpenApiParameter(
|
||||||
|
name="ordering",
|
||||||
|
location=OpenApiParameter.QUERY,
|
||||||
|
type=OpenApiTypes.STR,
|
||||||
|
description="Order by field: name, -name, manufacturer__name, etc.",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
responses={200: RideModelListOutputSerializer(many=True)},
|
||||||
|
tags=["Ride Models"],
|
||||||
|
)
|
||||||
|
def get(self, request: Request) -> Response:
|
||||||
|
"""List all ride models with filtering and pagination."""
|
||||||
|
if not MODELS_AVAILABLE:
|
||||||
|
return Response(
|
||||||
|
{
|
||||||
|
"count": 0,
|
||||||
|
"next": None,
|
||||||
|
"previous": None,
|
||||||
|
"results": [],
|
||||||
|
"detail": "Ride model listing is not available.",
|
||||||
|
},
|
||||||
|
status=status.HTTP_200_OK,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Base queryset with eager loading
|
||||||
|
qs = RideModel.objects.select_related("manufacturer").prefetch_related(
|
||||||
|
"photos"
|
||||||
|
).order_by("manufacturer__name", "name")
|
||||||
|
|
||||||
|
# Search filter
|
||||||
|
search = request.query_params.get("search", "").strip()
|
||||||
|
if search:
|
||||||
|
qs = qs.filter(
|
||||||
|
Q(name__icontains=search)
|
||||||
|
| Q(description__icontains=search)
|
||||||
|
| Q(manufacturer__name__icontains=search)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Category filter
|
||||||
|
category = request.query_params.get("category", "").strip()
|
||||||
|
if category:
|
||||||
|
# Support comma-separated categories
|
||||||
|
categories = [c.strip() for c in category.split(",") if c.strip()]
|
||||||
|
if categories:
|
||||||
|
qs = qs.filter(category__in=categories)
|
||||||
|
|
||||||
|
# Manufacturer filter
|
||||||
|
manufacturer = request.query_params.get("manufacturer", "").strip()
|
||||||
|
if manufacturer:
|
||||||
|
qs = qs.filter(manufacturer__slug=manufacturer)
|
||||||
|
|
||||||
|
# Target market filter
|
||||||
|
target_market = request.query_params.get("target_market", "").strip()
|
||||||
|
if target_market:
|
||||||
|
markets = [m.strip() for m in target_market.split(",") if m.strip()]
|
||||||
|
if markets:
|
||||||
|
qs = qs.filter(target_market__in=markets)
|
||||||
|
|
||||||
|
# Discontinued filter
|
||||||
|
is_discontinued = request.query_params.get("is_discontinued")
|
||||||
|
if is_discontinued is not None:
|
||||||
|
qs = qs.filter(is_discontinued=is_discontinued.lower() == "true")
|
||||||
|
|
||||||
|
# Ordering
|
||||||
|
ordering = request.query_params.get("ordering", "manufacturer__name,name")
|
||||||
|
valid_orderings = [
|
||||||
|
"name", "-name",
|
||||||
|
"manufacturer__name", "-manufacturer__name",
|
||||||
|
"first_installation_year", "-first_installation_year",
|
||||||
|
"total_installations", "-total_installations",
|
||||||
|
"created_at", "-created_at",
|
||||||
|
]
|
||||||
|
if ordering:
|
||||||
|
order_fields = [
|
||||||
|
f.strip() for f in ordering.split(",")
|
||||||
|
if f.strip() in valid_orderings or f.strip().lstrip("-") in [
|
||||||
|
o.lstrip("-") for o in valid_orderings
|
||||||
|
]
|
||||||
|
]
|
||||||
|
if order_fields:
|
||||||
|
qs = qs.order_by(*order_fields)
|
||||||
|
|
||||||
|
# Paginate
|
||||||
|
paginator = StandardResultsSetPagination()
|
||||||
|
page = paginator.paginate_queryset(qs, request)
|
||||||
|
|
||||||
|
if page is not None:
|
||||||
|
serializer = RideModelListOutputSerializer(
|
||||||
|
page, many=True, context={"request": request}
|
||||||
|
)
|
||||||
|
return paginator.get_paginated_response(serializer.data)
|
||||||
|
|
||||||
|
# Fallback without pagination
|
||||||
|
serializer = RideModelListOutputSerializer(
|
||||||
|
qs[:100], many=True, context={"request": request}
|
||||||
|
)
|
||||||
|
return Response(serializer.data)
|
||||||
|
|
||||||
|
|
||||||
|
class GlobalRideModelDetailAPIView(APIView):
|
||||||
|
"""
|
||||||
|
Global ride model detail endpoint by ID or slug.
|
||||||
|
|
||||||
|
This endpoint provides detail for a single ride model without
|
||||||
|
requiring manufacturer context.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [permissions.AllowAny]
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Retrieve a ride model by ID",
|
||||||
|
description="Get detailed information about a specific ride model by its ID.",
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
name="pk",
|
||||||
|
location=OpenApiParameter.PATH,
|
||||||
|
type=OpenApiTypes.INT,
|
||||||
|
required=True,
|
||||||
|
description="Ride model ID",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
responses={200: RideModelDetailOutputSerializer()},
|
||||||
|
tags=["Ride Models"],
|
||||||
|
)
|
||||||
|
def get(self, request: Request, pk: int) -> Response:
|
||||||
|
"""Get ride model detail by ID."""
|
||||||
|
if not MODELS_AVAILABLE:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Ride model not found"},
|
||||||
|
status=status.HTTP_404_NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
ride_model = (
|
||||||
|
RideModel.objects.select_related("manufacturer")
|
||||||
|
.prefetch_related("photos", "variants", "technical_specs")
|
||||||
|
.get(pk=pk)
|
||||||
|
)
|
||||||
|
except RideModel.DoesNotExist:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Ride model not found"},
|
||||||
|
status=status.HTTP_404_NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
|
serializer = RideModelDetailOutputSerializer(
|
||||||
|
ride_model, context={"request": request}
|
||||||
|
)
|
||||||
|
return Response(serializer.data)
|
||||||
@@ -81,7 +81,7 @@ class RidePhotoOutputSerializer(serializers.ModelSerializer):
|
|||||||
def get_image_url(self, obj):
|
def get_image_url(self, obj):
|
||||||
"""Get the full Cloudflare Images URL."""
|
"""Get the full Cloudflare Images URL."""
|
||||||
if obj.image:
|
if obj.image:
|
||||||
return obj.image.url
|
return obj.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@extend_schema_field(
|
@extend_schema_field(
|
||||||
@@ -97,10 +97,10 @@ class RidePhotoOutputSerializer(serializers.ModelSerializer):
|
|||||||
|
|
||||||
# Common variants for ride photos
|
# Common variants for ride photos
|
||||||
variants = {
|
variants = {
|
||||||
"thumbnail": f"{obj.image.url}/thumbnail",
|
"thumbnail": f"{obj.image.public_url}/thumbnail",
|
||||||
"medium": f"{obj.image.url}/medium",
|
"medium": f"{obj.image.public_url}/medium",
|
||||||
"large": f"{obj.image.url}/large",
|
"large": f"{obj.image.public_url}/large",
|
||||||
"public": f"{obj.image.url}/public",
|
"public": f"{obj.image.public_url}/public",
|
||||||
}
|
}
|
||||||
return variants
|
return variants
|
||||||
|
|
||||||
@@ -117,6 +117,7 @@ class RidePhotoOutputSerializer(serializers.ModelSerializer):
|
|||||||
"image_url",
|
"image_url",
|
||||||
"image_variants",
|
"image_variants",
|
||||||
"caption",
|
"caption",
|
||||||
|
"photographer",
|
||||||
"alt_text",
|
"alt_text",
|
||||||
"is_primary",
|
"is_primary",
|
||||||
"is_approved",
|
"is_approved",
|
||||||
@@ -156,6 +157,7 @@ class RidePhotoCreateInputSerializer(serializers.ModelSerializer):
|
|||||||
fields = [
|
fields = [
|
||||||
"image",
|
"image",
|
||||||
"caption",
|
"caption",
|
||||||
|
"photographer",
|
||||||
"alt_text",
|
"alt_text",
|
||||||
"photo_type",
|
"photo_type",
|
||||||
"is_primary",
|
"is_primary",
|
||||||
@@ -169,6 +171,7 @@ class RidePhotoUpdateInputSerializer(serializers.ModelSerializer):
|
|||||||
model = RidePhoto
|
model = RidePhoto
|
||||||
fields = [
|
fields = [
|
||||||
"caption",
|
"caption",
|
||||||
|
"photographer",
|
||||||
"alt_text",
|
"alt_text",
|
||||||
"photo_type",
|
"photo_type",
|
||||||
"is_primary",
|
"is_primary",
|
||||||
@@ -303,6 +306,12 @@ class HybridRideSerializer(serializers.ModelSerializer):
|
|||||||
banner_image_url = serializers.SerializerMethodField()
|
banner_image_url = serializers.SerializerMethodField()
|
||||||
card_image_url = serializers.SerializerMethodField()
|
card_image_url = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
# Metric unit conversions for frontend (duplicate of imperial fields)
|
||||||
|
coaster_height_meters = serializers.SerializerMethodField()
|
||||||
|
coaster_length_meters = serializers.SerializerMethodField()
|
||||||
|
coaster_speed_kmh = serializers.SerializerMethodField()
|
||||||
|
coaster_max_drop_meters = serializers.SerializerMethodField()
|
||||||
|
|
||||||
# Computed fields for filtering
|
# Computed fields for filtering
|
||||||
opening_year = serializers.IntegerField(read_only=True)
|
opening_year = serializers.IntegerField(read_only=True)
|
||||||
search_text = serializers.CharField(read_only=True)
|
search_text = serializers.CharField(read_only=True)
|
||||||
@@ -481,14 +490,14 @@ class HybridRideSerializer(serializers.ModelSerializer):
|
|||||||
def get_banner_image_url(self, obj):
|
def get_banner_image_url(self, obj):
|
||||||
"""Get banner image URL."""
|
"""Get banner image URL."""
|
||||||
if obj.banner_image and obj.banner_image.image:
|
if obj.banner_image and obj.banner_image.image:
|
||||||
return obj.banner_image.image.url
|
return obj.banner_image.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@extend_schema_field(serializers.URLField(allow_null=True))
|
@extend_schema_field(serializers.URLField(allow_null=True))
|
||||||
def get_card_image_url(self, obj):
|
def get_card_image_url(self, obj):
|
||||||
"""Get card image URL."""
|
"""Get card image URL."""
|
||||||
if obj.card_image and obj.card_image.image:
|
if obj.card_image and obj.card_image.image:
|
||||||
return obj.card_image.image.url
|
return obj.card_image.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Computed property
|
# Computed property
|
||||||
@@ -499,6 +508,47 @@ class HybridRideSerializer(serializers.ModelSerializer):
|
|||||||
"""Check if ride has an announced closing date in the future."""
|
"""Check if ride has an announced closing date in the future."""
|
||||||
return obj.is_closing
|
return obj.is_closing
|
||||||
|
|
||||||
|
# Metric conversions for frontend compatibility
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_coaster_height_meters(self, obj):
|
||||||
|
"""Convert coaster height from feet to meters."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.height_ft:
|
||||||
|
return round(float(obj.coaster_stats.height_ft) * 0.3048, 2)
|
||||||
|
return None
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_coaster_length_meters(self, obj):
|
||||||
|
"""Convert coaster length from feet to meters."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.length_ft:
|
||||||
|
return round(float(obj.coaster_stats.length_ft) * 0.3048, 2)
|
||||||
|
return None
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_coaster_speed_kmh(self, obj):
|
||||||
|
"""Convert coaster speed from mph to km/h."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.speed_mph:
|
||||||
|
return round(float(obj.coaster_stats.speed_mph) * 1.60934, 2)
|
||||||
|
return None
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_coaster_max_drop_meters(self, obj):
|
||||||
|
"""Convert coaster max drop from feet to meters."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.max_drop_height_ft:
|
||||||
|
return round(float(obj.coaster_stats.max_drop_height_ft) * 0.3048, 2)
|
||||||
|
return None
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
return None
|
||||||
|
|
||||||
# Water ride stats fields
|
# Water ride stats fields
|
||||||
water_wetness_level = serializers.SerializerMethodField()
|
water_wetness_level = serializers.SerializerMethodField()
|
||||||
water_splash_height_ft = serializers.SerializerMethodField()
|
water_splash_height_ft = serializers.SerializerMethodField()
|
||||||
@@ -994,3 +1044,29 @@ class RideSerializer(serializers.ModelSerializer):
|
|||||||
"opening_date",
|
"opening_date",
|
||||||
"closing_date",
|
"closing_date",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class RideSubTypeSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for ride sub-types lookup table.
|
||||||
|
|
||||||
|
This serves the /rides/sub-types/ endpoint which the frontend
|
||||||
|
uses to populate sub-type dropdowns filtered by category.
|
||||||
|
"""
|
||||||
|
|
||||||
|
created_by = serializers.CharField(source="created_by.username", read_only=True, allow_null=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
# Import here to avoid circular imports
|
||||||
|
from apps.rides.models import RideSubType
|
||||||
|
model = RideSubType
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"name",
|
||||||
|
"category",
|
||||||
|
"description",
|
||||||
|
"created_by",
|
||||||
|
"created_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at", "created_by"]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ from django.urls import include, path
|
|||||||
from rest_framework.routers import DefaultRouter
|
from rest_framework.routers import DefaultRouter
|
||||||
|
|
||||||
from .photo_views import RidePhotoViewSet
|
from .photo_views import RidePhotoViewSet
|
||||||
|
from .ride_model_views import GlobalRideModelDetailAPIView, GlobalRideModelListAPIView
|
||||||
from .views import (
|
from .views import (
|
||||||
CompanySearchAPIView,
|
CompanySearchAPIView,
|
||||||
DesignerListAPIView,
|
DesignerListAPIView,
|
||||||
@@ -24,6 +25,7 @@ from .views import (
|
|||||||
RideListCreateAPIView,
|
RideListCreateAPIView,
|
||||||
RideModelSearchAPIView,
|
RideModelSearchAPIView,
|
||||||
RideSearchSuggestionsAPIView,
|
RideSearchSuggestionsAPIView,
|
||||||
|
RideSubTypeListAPIView,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create router for nested photo endpoints
|
# Create router for nested photo endpoints
|
||||||
@@ -40,6 +42,9 @@ urlpatterns = [
|
|||||||
path("hybrid/filter-metadata/", RideFilterMetadataAPIView.as_view(), name="ride-hybrid-filter-metadata"),
|
path("hybrid/filter-metadata/", RideFilterMetadataAPIView.as_view(), name="ride-hybrid-filter-metadata"),
|
||||||
# Filter options
|
# Filter options
|
||||||
path("filter-options/", FilterOptionsAPIView.as_view(), name="ride-filter-options"),
|
path("filter-options/", FilterOptionsAPIView.as_view(), name="ride-filter-options"),
|
||||||
|
# Global ride model endpoints - matches frontend's /rides/models/ expectation
|
||||||
|
path("models/", GlobalRideModelListAPIView.as_view(), name="ride-model-global-list"),
|
||||||
|
path("models/<int:pk>/", GlobalRideModelDetailAPIView.as_view(), name="ride-model-global-detail"),
|
||||||
# Autocomplete / suggestion endpoints
|
# Autocomplete / suggestion endpoints
|
||||||
path(
|
path(
|
||||||
"search/companies/",
|
"search/companies/",
|
||||||
@@ -59,6 +64,8 @@ urlpatterns = [
|
|||||||
# Manufacturer and Designer endpoints
|
# Manufacturer and Designer endpoints
|
||||||
path("manufacturers/", ManufacturerListAPIView.as_view(), name="manufacturer-list"),
|
path("manufacturers/", ManufacturerListAPIView.as_view(), name="manufacturer-list"),
|
||||||
path("designers/", DesignerListAPIView.as_view(), name="designer-list"),
|
path("designers/", DesignerListAPIView.as_view(), name="designer-list"),
|
||||||
|
# Ride sub-types endpoint - for autocomplete dropdowns
|
||||||
|
path("sub-types/", RideSubTypeListAPIView.as_view(), name="ride-sub-type-list"),
|
||||||
# Ride model management endpoints - nested under rides/manufacturers
|
# Ride model management endpoints - nested under rides/manufacturers
|
||||||
path(
|
path(
|
||||||
"manufacturers/<slug:manufacturer_slug>/",
|
"manufacturers/<slug:manufacturer_slug>/",
|
||||||
|
|||||||
@@ -2422,3 +2422,53 @@ class ManufacturerListAPIView(BaseCompanyListAPIView):
|
|||||||
)
|
)
|
||||||
class DesignerListAPIView(BaseCompanyListAPIView):
|
class DesignerListAPIView(BaseCompanyListAPIView):
|
||||||
role = "DESIGNER"
|
role = "DESIGNER"
|
||||||
|
|
||||||
|
|
||||||
|
# === RIDE SUB-TYPES ===
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="List ride sub-types",
|
||||||
|
description="List ride sub-types, optionally filtered by category. Used for autocomplete dropdowns.",
|
||||||
|
parameters=[
|
||||||
|
OpenApiParameter(
|
||||||
|
"category",
|
||||||
|
OpenApiTypes.STR,
|
||||||
|
description="Filter by ride category (e.g., 'RC' for roller coaster)",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
responses={200: OpenApiTypes.OBJECT},
|
||||||
|
tags=["Rides"],
|
||||||
|
)
|
||||||
|
class RideSubTypeListAPIView(APIView):
|
||||||
|
"""
|
||||||
|
API View for listing ride sub-types.
|
||||||
|
|
||||||
|
Used by the frontend's useRideSubTypes hook to populate
|
||||||
|
sub-type dropdown menus filtered by ride category.
|
||||||
|
|
||||||
|
Caching: 30-minute timeout (1800s) - sub-types are stable lookup data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [permissions.AllowAny]
|
||||||
|
|
||||||
|
@cache_api_response(timeout=1800, key_prefix="ride_sub_types")
|
||||||
|
def get(self, request: Request) -> Response:
|
||||||
|
from apps.rides.models import RideSubType
|
||||||
|
from apps.api.v1.rides.serializers import RideSubTypeSerializer
|
||||||
|
|
||||||
|
# Start with all sub-types
|
||||||
|
queryset = RideSubType.objects.all().order_by("name")
|
||||||
|
|
||||||
|
# Apply category filter if provided
|
||||||
|
category = request.query_params.get("category")
|
||||||
|
if category:
|
||||||
|
queryset = queryset.filter(category=category)
|
||||||
|
|
||||||
|
# Serialize and return
|
||||||
|
serializer = RideSubTypeSerializer(queryset, many=True)
|
||||||
|
return Response({
|
||||||
|
"results": serializer.data,
|
||||||
|
"count": queryset.count(),
|
||||||
|
})
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ from drf_spectacular.utils import (
|
|||||||
)
|
)
|
||||||
from rest_framework import serializers
|
from rest_framework import serializers
|
||||||
|
|
||||||
from apps.core.choices.serializers import RichChoiceFieldSerializer
|
from apps.core.choices.serializers import RichChoiceFieldSerializer, RichChoiceSerializerField
|
||||||
|
|
||||||
from .shared import ModelChoices
|
from .shared import ModelChoices
|
||||||
|
|
||||||
@@ -56,36 +56,26 @@ class CompanyDetailOutputSerializer(serializers.Serializer):
|
|||||||
name = serializers.CharField()
|
name = serializers.CharField()
|
||||||
slug = serializers.CharField()
|
slug = serializers.CharField()
|
||||||
roles = serializers.ListField(child=serializers.CharField())
|
roles = serializers.ListField(child=serializers.CharField())
|
||||||
description = serializers.CharField()
|
description = serializers.CharField(allow_blank=True)
|
||||||
website = serializers.URLField(required=False, allow_blank=True)
|
website = serializers.URLField(required=False, allow_blank=True, allow_null=True)
|
||||||
|
|
||||||
# Entity type and status (ported from legacy)
|
|
||||||
person_type = serializers.CharField(required=False, allow_blank=True)
|
|
||||||
status = serializers.CharField()
|
|
||||||
|
|
||||||
# Founding information
|
# Founding information
|
||||||
founded_year = serializers.IntegerField(allow_null=True)
|
founded_date = serializers.DateField(allow_null=True, required=False)
|
||||||
founded_date = serializers.DateField(allow_null=True)
|
|
||||||
founded_date_precision = serializers.CharField(required=False, allow_blank=True)
|
|
||||||
|
|
||||||
# Image URLs
|
# Counts (from model)
|
||||||
logo_url = serializers.URLField(required=False, allow_blank=True)
|
rides_count = serializers.IntegerField(required=False, default=0)
|
||||||
banner_image_url = serializers.URLField(required=False, allow_blank=True)
|
coasters_count = serializers.IntegerField(required=False, default=0)
|
||||||
card_image_url = serializers.URLField(required=False, allow_blank=True)
|
|
||||||
|
# Frontend URL
|
||||||
# Rating and review aggregates
|
url = serializers.URLField(required=False, allow_blank=True, allow_null=True)
|
||||||
average_rating = serializers.DecimalField(max_digits=3, decimal_places=2, allow_null=True)
|
|
||||||
review_count = serializers.IntegerField()
|
|
||||||
|
|
||||||
# Counts
|
|
||||||
parks_count = serializers.IntegerField()
|
|
||||||
rides_count = serializers.IntegerField()
|
|
||||||
|
|
||||||
# Metadata
|
# Metadata
|
||||||
created_at = serializers.DateTimeField()
|
created_at = serializers.DateTimeField()
|
||||||
updated_at = serializers.DateTimeField()
|
updated_at = serializers.DateTimeField()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class CompanyCreateInputSerializer(serializers.Serializer):
|
class CompanyCreateInputSerializer(serializers.Serializer):
|
||||||
"""Input serializer for creating companies."""
|
"""Input serializer for creating companies."""
|
||||||
|
|
||||||
@@ -97,30 +87,38 @@ class CompanyCreateInputSerializer(serializers.Serializer):
|
|||||||
description = serializers.CharField(allow_blank=True, default="")
|
description = serializers.CharField(allow_blank=True, default="")
|
||||||
website = serializers.URLField(required=False, allow_blank=True)
|
website = serializers.URLField(required=False, allow_blank=True)
|
||||||
|
|
||||||
# Entity type and status
|
# Entity type and status - using RichChoiceSerializerField
|
||||||
person_type = serializers.ChoiceField(
|
person_type = RichChoiceSerializerField(
|
||||||
choices=["INDIVIDUAL", "FIRM", "ORGANIZATION", "CORPORATION", "PARTNERSHIP", "GOVERNMENT"],
|
choice_group="person_types",
|
||||||
|
domain="parks",
|
||||||
required=False,
|
required=False,
|
||||||
allow_blank=True,
|
allow_blank=True,
|
||||||
)
|
)
|
||||||
status = serializers.ChoiceField(
|
status = RichChoiceSerializerField(
|
||||||
choices=["ACTIVE", "DEFUNCT", "MERGED", "ACQUIRED", "RENAMED", "DORMANT"],
|
choice_group="company_statuses",
|
||||||
|
domain="parks",
|
||||||
default="ACTIVE",
|
default="ACTIVE",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Founding information
|
# Founding information
|
||||||
founded_year = serializers.IntegerField(required=False, allow_null=True)
|
founded_year = serializers.IntegerField(required=False, allow_null=True)
|
||||||
founded_date = serializers.DateField(required=False, allow_null=True)
|
founded_date = serializers.DateField(required=False, allow_null=True)
|
||||||
founded_date_precision = serializers.ChoiceField(
|
founded_date_precision = RichChoiceSerializerField(
|
||||||
choices=["YEAR", "MONTH", "DAY"],
|
choice_group="date_precision",
|
||||||
|
domain="parks",
|
||||||
required=False,
|
required=False,
|
||||||
allow_blank=True,
|
allow_blank=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Image URLs
|
# Image URLs (legacy - prefer using image IDs)
|
||||||
logo_url = serializers.URLField(required=False, allow_blank=True)
|
logo_url = serializers.URLField(required=False, allow_blank=True)
|
||||||
banner_image_url = serializers.URLField(required=False, allow_blank=True)
|
banner_image_url = serializers.URLField(required=False, allow_blank=True)
|
||||||
card_image_url = serializers.URLField(required=False, allow_blank=True)
|
card_image_url = serializers.URLField(required=False, allow_blank=True)
|
||||||
|
|
||||||
|
# Cloudflare image IDs (preferred for new submissions)
|
||||||
|
logo_image_id = serializers.CharField(max_length=255, required=False, allow_blank=True)
|
||||||
|
banner_image_id = serializers.CharField(max_length=255, required=False, allow_blank=True)
|
||||||
|
card_image_id = serializers.CharField(max_length=255, required=False, allow_blank=True)
|
||||||
|
|
||||||
|
|
||||||
class CompanyUpdateInputSerializer(serializers.Serializer):
|
class CompanyUpdateInputSerializer(serializers.Serializer):
|
||||||
@@ -134,30 +132,38 @@ class CompanyUpdateInputSerializer(serializers.Serializer):
|
|||||||
description = serializers.CharField(allow_blank=True, required=False)
|
description = serializers.CharField(allow_blank=True, required=False)
|
||||||
website = serializers.URLField(required=False, allow_blank=True)
|
website = serializers.URLField(required=False, allow_blank=True)
|
||||||
|
|
||||||
# Entity type and status
|
# Entity type and status - using RichChoiceSerializerField
|
||||||
person_type = serializers.ChoiceField(
|
person_type = RichChoiceSerializerField(
|
||||||
choices=["INDIVIDUAL", "FIRM", "ORGANIZATION", "CORPORATION", "PARTNERSHIP", "GOVERNMENT"],
|
choice_group="person_types",
|
||||||
|
domain="parks",
|
||||||
required=False,
|
required=False,
|
||||||
allow_blank=True,
|
allow_blank=True,
|
||||||
)
|
)
|
||||||
status = serializers.ChoiceField(
|
status = RichChoiceSerializerField(
|
||||||
choices=["ACTIVE", "DEFUNCT", "MERGED", "ACQUIRED", "RENAMED", "DORMANT"],
|
choice_group="company_statuses",
|
||||||
|
domain="parks",
|
||||||
required=False,
|
required=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Founding information
|
# Founding information
|
||||||
founded_year = serializers.IntegerField(required=False, allow_null=True)
|
founded_year = serializers.IntegerField(required=False, allow_null=True)
|
||||||
founded_date = serializers.DateField(required=False, allow_null=True)
|
founded_date = serializers.DateField(required=False, allow_null=True)
|
||||||
founded_date_precision = serializers.ChoiceField(
|
founded_date_precision = RichChoiceSerializerField(
|
||||||
choices=["YEAR", "MONTH", "DAY"],
|
choice_group="date_precision",
|
||||||
|
domain="parks",
|
||||||
required=False,
|
required=False,
|
||||||
allow_blank=True,
|
allow_blank=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Image URLs
|
# Image URLs (legacy - prefer using image IDs)
|
||||||
logo_url = serializers.URLField(required=False, allow_blank=True)
|
logo_url = serializers.URLField(required=False, allow_blank=True)
|
||||||
banner_image_url = serializers.URLField(required=False, allow_blank=True)
|
banner_image_url = serializers.URLField(required=False, allow_blank=True)
|
||||||
card_image_url = serializers.URLField(required=False, allow_blank=True)
|
card_image_url = serializers.URLField(required=False, allow_blank=True)
|
||||||
|
|
||||||
|
# Cloudflare image IDs (preferred for new submissions)
|
||||||
|
logo_image_id = serializers.CharField(max_length=255, required=False, allow_blank=True)
|
||||||
|
banner_image_id = serializers.CharField(max_length=255, required=False, allow_blank=True)
|
||||||
|
card_image_id = serializers.CharField(max_length=255, required=False, allow_blank=True)
|
||||||
|
|
||||||
|
|
||||||
# === RIDE MODEL SERIALIZERS ===
|
# === RIDE MODEL SERIALIZERS ===
|
||||||
|
|||||||
@@ -5,6 +5,8 @@ This module contains all serializers related to parks, park areas, park location
|
|||||||
and park search functionality.
|
and park search functionality.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from decimal import Decimal
|
||||||
|
|
||||||
from drf_spectacular.utils import (
|
from drf_spectacular.utils import (
|
||||||
OpenApiExample,
|
OpenApiExample,
|
||||||
extend_schema_field,
|
extend_schema_field,
|
||||||
@@ -532,13 +534,13 @@ class ParkFilterInputSerializer(serializers.Serializer):
|
|||||||
max_digits=3,
|
max_digits=3,
|
||||||
decimal_places=2,
|
decimal_places=2,
|
||||||
required=False,
|
required=False,
|
||||||
min_value=1,
|
min_value=Decimal("1"),
|
||||||
max_value=10,
|
max_value=Decimal("10"),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Size filter
|
# Size filter
|
||||||
min_size_acres = serializers.DecimalField(max_digits=10, decimal_places=2, required=False, min_value=0)
|
min_size_acres = serializers.DecimalField(max_digits=10, decimal_places=2, required=False, min_value=Decimal("0"))
|
||||||
max_size_acres = serializers.DecimalField(max_digits=10, decimal_places=2, required=False, min_value=0)
|
max_size_acres = serializers.DecimalField(max_digits=10, decimal_places=2, required=False, min_value=Decimal("0"))
|
||||||
|
|
||||||
# Company filters
|
# Company filters
|
||||||
operator_id = serializers.IntegerField(required=False)
|
operator_id = serializers.IntegerField(required=False)
|
||||||
|
|||||||
@@ -59,7 +59,7 @@ class RideModelPhotoOutputSerializer(serializers.Serializer):
|
|||||||
def get_image_url(self, obj):
|
def get_image_url(self, obj):
|
||||||
"""Get the image URL."""
|
"""Get the image URL."""
|
||||||
if obj.image:
|
if obj.image:
|
||||||
return obj.image.url
|
return obj.image.public_url
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -211,6 +211,18 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
# Former names (name history)
|
# Former names (name history)
|
||||||
former_names = serializers.SerializerMethodField()
|
former_names = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
# Coaster statistics - includes both imperial and metric units for frontend flexibility
|
||||||
|
coaster_statistics = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
# Metric unit fields for frontend (converted from imperial)
|
||||||
|
height_meters = serializers.SerializerMethodField()
|
||||||
|
length_meters = serializers.SerializerMethodField()
|
||||||
|
max_speed_kmh = serializers.SerializerMethodField()
|
||||||
|
drop_meters = serializers.SerializerMethodField()
|
||||||
|
|
||||||
|
# Technical specifications list
|
||||||
|
technical_specifications = serializers.SerializerMethodField()
|
||||||
|
|
||||||
# URL
|
# URL
|
||||||
url = serializers.SerializerMethodField()
|
url = serializers.SerializerMethodField()
|
||||||
|
|
||||||
@@ -265,13 +277,13 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
return [
|
return [
|
||||||
{
|
{
|
||||||
"id": photo.id,
|
"id": photo.id,
|
||||||
"image_url": photo.image.url if photo.image else None,
|
"image_url": photo.image.public_url if photo.image else None,
|
||||||
"image_variants": (
|
"image_variants": (
|
||||||
{
|
{
|
||||||
"thumbnail": (f"{photo.image.url}/thumbnail" if photo.image else None),
|
"thumbnail": (f"{photo.image.public_url}/thumbnail" if photo.image else None),
|
||||||
"medium": f"{photo.image.url}/medium" if photo.image else None,
|
"medium": f"{photo.image.public_url}/medium" if photo.image else None,
|
||||||
"large": f"{photo.image.url}/large" if photo.image else None,
|
"large": f"{photo.image.public_url}/large" if photo.image else None,
|
||||||
"public": f"{photo.image.url}/public" if photo.image else None,
|
"public": f"{photo.image.public_url}/public" if photo.image else None,
|
||||||
}
|
}
|
||||||
if photo.image
|
if photo.image
|
||||||
else {}
|
else {}
|
||||||
@@ -295,12 +307,12 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
if photo and photo.image:
|
if photo and photo.image:
|
||||||
return {
|
return {
|
||||||
"id": photo.id,
|
"id": photo.id,
|
||||||
"image_url": photo.image.url,
|
"image_url": photo.image.public_url,
|
||||||
"image_variants": {
|
"image_variants": {
|
||||||
"thumbnail": f"{photo.image.url}/thumbnail",
|
"thumbnail": f"{photo.image.public_url}/thumbnail",
|
||||||
"medium": f"{photo.image.url}/medium",
|
"medium": f"{photo.image.public_url}/medium",
|
||||||
"large": f"{photo.image.url}/large",
|
"large": f"{photo.image.public_url}/large",
|
||||||
"public": f"{photo.image.url}/public",
|
"public": f"{photo.image.public_url}/public",
|
||||||
},
|
},
|
||||||
"caption": photo.caption,
|
"caption": photo.caption,
|
||||||
"alt_text": photo.alt_text,
|
"alt_text": photo.alt_text,
|
||||||
@@ -318,12 +330,12 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
if obj.banner_image and obj.banner_image.image:
|
if obj.banner_image and obj.banner_image.image:
|
||||||
return {
|
return {
|
||||||
"id": obj.banner_image.id,
|
"id": obj.banner_image.id,
|
||||||
"image_url": obj.banner_image.image.url,
|
"image_url": obj.banner_image.image.public_url,
|
||||||
"image_variants": {
|
"image_variants": {
|
||||||
"thumbnail": f"{obj.banner_image.image.url}/thumbnail",
|
"thumbnail": f"{obj.banner_image.image.public_url}/thumbnail",
|
||||||
"medium": f"{obj.banner_image.image.url}/medium",
|
"medium": f"{obj.banner_image.image.public_url}/medium",
|
||||||
"large": f"{obj.banner_image.image.url}/large",
|
"large": f"{obj.banner_image.image.public_url}/large",
|
||||||
"public": f"{obj.banner_image.image.url}/public",
|
"public": f"{obj.banner_image.image.public_url}/public",
|
||||||
},
|
},
|
||||||
"caption": obj.banner_image.caption,
|
"caption": obj.banner_image.caption,
|
||||||
"alt_text": obj.banner_image.alt_text,
|
"alt_text": obj.banner_image.alt_text,
|
||||||
@@ -343,12 +355,12 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
if latest_photo and latest_photo.image:
|
if latest_photo and latest_photo.image:
|
||||||
return {
|
return {
|
||||||
"id": latest_photo.id,
|
"id": latest_photo.id,
|
||||||
"image_url": latest_photo.image.url,
|
"image_url": latest_photo.image.public_url,
|
||||||
"image_variants": {
|
"image_variants": {
|
||||||
"thumbnail": f"{latest_photo.image.url}/thumbnail",
|
"thumbnail": f"{latest_photo.image.public_url}/thumbnail",
|
||||||
"medium": f"{latest_photo.image.url}/medium",
|
"medium": f"{latest_photo.image.public_url}/medium",
|
||||||
"large": f"{latest_photo.image.url}/large",
|
"large": f"{latest_photo.image.public_url}/large",
|
||||||
"public": f"{latest_photo.image.url}/public",
|
"public": f"{latest_photo.image.public_url}/public",
|
||||||
},
|
},
|
||||||
"caption": latest_photo.caption,
|
"caption": latest_photo.caption,
|
||||||
"alt_text": latest_photo.alt_text,
|
"alt_text": latest_photo.alt_text,
|
||||||
@@ -367,12 +379,12 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
if obj.card_image and obj.card_image.image:
|
if obj.card_image and obj.card_image.image:
|
||||||
return {
|
return {
|
||||||
"id": obj.card_image.id,
|
"id": obj.card_image.id,
|
||||||
"image_url": obj.card_image.image.url,
|
"image_url": obj.card_image.image.public_url,
|
||||||
"image_variants": {
|
"image_variants": {
|
||||||
"thumbnail": f"{obj.card_image.image.url}/thumbnail",
|
"thumbnail": f"{obj.card_image.image.public_url}/thumbnail",
|
||||||
"medium": f"{obj.card_image.image.url}/medium",
|
"medium": f"{obj.card_image.image.public_url}/medium",
|
||||||
"large": f"{obj.card_image.image.url}/large",
|
"large": f"{obj.card_image.image.public_url}/large",
|
||||||
"public": f"{obj.card_image.image.url}/public",
|
"public": f"{obj.card_image.image.public_url}/public",
|
||||||
},
|
},
|
||||||
"caption": obj.card_image.caption,
|
"caption": obj.card_image.caption,
|
||||||
"alt_text": obj.card_image.alt_text,
|
"alt_text": obj.card_image.alt_text,
|
||||||
@@ -392,12 +404,12 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
if latest_photo and latest_photo.image:
|
if latest_photo and latest_photo.image:
|
||||||
return {
|
return {
|
||||||
"id": latest_photo.id,
|
"id": latest_photo.id,
|
||||||
"image_url": latest_photo.image.url,
|
"image_url": latest_photo.image.public_url,
|
||||||
"image_variants": {
|
"image_variants": {
|
||||||
"thumbnail": f"{latest_photo.image.url}/thumbnail",
|
"thumbnail": f"{latest_photo.image.public_url}/thumbnail",
|
||||||
"medium": f"{latest_photo.image.url}/medium",
|
"medium": f"{latest_photo.image.public_url}/medium",
|
||||||
"large": f"{latest_photo.image.url}/large",
|
"large": f"{latest_photo.image.public_url}/large",
|
||||||
"public": f"{latest_photo.image.url}/public",
|
"public": f"{latest_photo.image.public_url}/public",
|
||||||
},
|
},
|
||||||
"caption": latest_photo.caption,
|
"caption": latest_photo.caption,
|
||||||
"alt_text": latest_photo.alt_text,
|
"alt_text": latest_photo.alt_text,
|
||||||
@@ -427,6 +439,99 @@ class RideDetailOutputSerializer(serializers.Serializer):
|
|||||||
for entry in former_names
|
for entry in former_names
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.DictField(allow_null=True))
|
||||||
|
def get_coaster_statistics(self, obj):
|
||||||
|
"""Get coaster statistics with both imperial and metric units."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats:
|
||||||
|
stats = obj.coaster_stats
|
||||||
|
return {
|
||||||
|
# Imperial units (stored in DB)
|
||||||
|
"height_ft": float(stats.height_ft) if stats.height_ft else None,
|
||||||
|
"length_ft": float(stats.length_ft) if stats.length_ft else None,
|
||||||
|
"speed_mph": float(stats.speed_mph) if stats.speed_mph else None,
|
||||||
|
"max_drop_height_ft": float(stats.max_drop_height_ft) if stats.max_drop_height_ft else None,
|
||||||
|
# Metric conversions for frontend
|
||||||
|
"height_meters": round(float(stats.height_ft) * 0.3048, 2) if stats.height_ft else None,
|
||||||
|
"length_meters": round(float(stats.length_ft) * 0.3048, 2) if stats.length_ft else None,
|
||||||
|
"max_speed_kmh": round(float(stats.speed_mph) * 1.60934, 2) if stats.speed_mph else None,
|
||||||
|
"drop_meters": round(float(stats.max_drop_height_ft) * 0.3048, 2) if stats.max_drop_height_ft else None,
|
||||||
|
# Other stats
|
||||||
|
"inversions": stats.inversions,
|
||||||
|
"ride_time_seconds": stats.ride_time_seconds,
|
||||||
|
"track_type": stats.track_type,
|
||||||
|
"track_material": stats.track_material,
|
||||||
|
"roller_coaster_type": stats.roller_coaster_type,
|
||||||
|
"propulsion_system": stats.propulsion_system,
|
||||||
|
"train_style": stats.train_style,
|
||||||
|
"trains_count": stats.trains_count,
|
||||||
|
"cars_per_train": stats.cars_per_train,
|
||||||
|
"seats_per_car": stats.seats_per_car,
|
||||||
|
}
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_height_meters(self, obj):
|
||||||
|
"""Convert height from feet to meters for frontend."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.height_ft:
|
||||||
|
return round(float(obj.coaster_stats.height_ft) * 0.3048, 2)
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_length_meters(self, obj):
|
||||||
|
"""Convert length from feet to meters for frontend."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.length_ft:
|
||||||
|
return round(float(obj.coaster_stats.length_ft) * 0.3048, 2)
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_max_speed_kmh(self, obj):
|
||||||
|
"""Convert max speed from mph to km/h for frontend."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.speed_mph:
|
||||||
|
return round(float(obj.coaster_stats.speed_mph) * 1.60934, 2)
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.FloatField(allow_null=True))
|
||||||
|
def get_drop_meters(self, obj):
|
||||||
|
"""Convert drop height from feet to meters for frontend."""
|
||||||
|
try:
|
||||||
|
if hasattr(obj, "coaster_stats") and obj.coaster_stats and obj.coaster_stats.max_drop_height_ft:
|
||||||
|
return round(float(obj.coaster_stats.max_drop_height_ft) * 0.3048, 2)
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
pass
|
||||||
|
return None
|
||||||
|
|
||||||
|
@extend_schema_field(serializers.ListField(child=serializers.DictField()))
|
||||||
|
def get_technical_specifications(self, obj):
|
||||||
|
"""Get technical specifications list for this ride."""
|
||||||
|
try:
|
||||||
|
from apps.rides.models import RideTechnicalSpec
|
||||||
|
|
||||||
|
specs = RideTechnicalSpec.objects.filter(ride=obj).order_by("category", "name")
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
"id": spec.id,
|
||||||
|
"name": spec.name,
|
||||||
|
"value": spec.value,
|
||||||
|
"unit": spec.unit,
|
||||||
|
"category": spec.category,
|
||||||
|
}
|
||||||
|
for spec in specs
|
||||||
|
]
|
||||||
|
except Exception:
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
class RideImageSettingsInputSerializer(serializers.Serializer):
|
class RideImageSettingsInputSerializer(serializers.Serializer):
|
||||||
"""Input serializer for setting ride banner and card images."""
|
"""Input serializer for setting ride banner and card images."""
|
||||||
|
|||||||
@@ -493,6 +493,18 @@ def ensure_filter_option_format(options: list[Any]) -> list[dict[str, Any]]:
|
|||||||
"count": option.get("count"),
|
"count": option.get("count"),
|
||||||
"selected": option.get("selected", False),
|
"selected": option.get("selected", False),
|
||||||
}
|
}
|
||||||
|
elif isinstance(option, tuple):
|
||||||
|
# Tuple format: (value, label) or (value, label, count)
|
||||||
|
if len(option) >= 2:
|
||||||
|
standardized_option = {
|
||||||
|
"value": str(option[0]),
|
||||||
|
"label": str(option[1]),
|
||||||
|
"count": option[2] if len(option) > 2 else None,
|
||||||
|
"selected": False,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
# Single-element tuple, treat as simple value
|
||||||
|
standardized_option = {"value": str(option[0]), "label": str(option[0]), "count": None, "selected": False}
|
||||||
elif hasattr(option, "value") and hasattr(option, "label"):
|
elif hasattr(option, "value") and hasattr(option, "label"):
|
||||||
# RichChoice object format
|
# RichChoice object format
|
||||||
standardized_option = {
|
standardized_option = {
|
||||||
|
|||||||
@@ -27,12 +27,42 @@ from .views.reviews import LatestReviewsAPIView
|
|||||||
from .views.stats import StatsAPIView, StatsRecalculateAPIView
|
from .views.stats import StatsAPIView, StatsRecalculateAPIView
|
||||||
from .viewsets_rankings import RideRankingViewSet, TriggerRankingCalculationView
|
from .viewsets_rankings import RideRankingViewSet, TriggerRankingCalculationView
|
||||||
|
|
||||||
|
# Import analytics views
|
||||||
|
from apps.core.api.analytics_views import (
|
||||||
|
ApprovalTransactionMetricViewSet,
|
||||||
|
ErrorSummaryView,
|
||||||
|
RequestMetadataViewSet,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Import observability views
|
||||||
|
from apps.core.api.observability_views import (
|
||||||
|
AlertCorrelationViewSet,
|
||||||
|
AnomalyViewSet,
|
||||||
|
CleanupJobLogViewSet,
|
||||||
|
DataRetentionStatsView,
|
||||||
|
PipelineErrorViewSet,
|
||||||
|
)
|
||||||
|
from apps.notifications.api.log_views import NotificationLogViewSet
|
||||||
|
from apps.moderation.views import ModerationAuditLogViewSet
|
||||||
|
|
||||||
# Create the main API router
|
# Create the main API router
|
||||||
router = DefaultRouter()
|
router = DefaultRouter()
|
||||||
|
|
||||||
# Register ranking endpoints
|
# Register ranking endpoints
|
||||||
router.register(r"rankings", RideRankingViewSet, basename="ranking")
|
router.register(r"rankings", RideRankingViewSet, basename="ranking")
|
||||||
|
|
||||||
|
# Register analytics endpoints
|
||||||
|
router.register(r"request_metadata", RequestMetadataViewSet, basename="request_metadata")
|
||||||
|
router.register(r"approval_transaction_metrics", ApprovalTransactionMetricViewSet, basename="approval_transaction_metrics")
|
||||||
|
|
||||||
|
# Register observability endpoints (Supabase table parity)
|
||||||
|
router.register(r"pipeline_errors", PipelineErrorViewSet, basename="pipeline_errors")
|
||||||
|
router.register(r"notification_logs", NotificationLogViewSet, basename="notification_logs")
|
||||||
|
router.register(r"cleanup_job_log", CleanupJobLogViewSet, basename="cleanup_job_log")
|
||||||
|
router.register(r"moderation_audit_log", ModerationAuditLogViewSet, basename="moderation_audit_log")
|
||||||
|
router.register(r"alert_correlations_view", AlertCorrelationViewSet, basename="alert_correlations_view")
|
||||||
|
router.register(r"recent_anomalies_view", AnomalyViewSet, basename="recent_anomalies_view")
|
||||||
|
|
||||||
app_name = "api_v1"
|
app_name = "api_v1"
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
@@ -40,6 +70,10 @@ urlpatterns = [
|
|||||||
# See backend/thrillwiki/urls.py for documentation endpoints
|
# See backend/thrillwiki/urls.py for documentation endpoints
|
||||||
# Authentication endpoints
|
# Authentication endpoints
|
||||||
path("auth/", include("apps.api.v1.auth.urls")),
|
path("auth/", include("apps.api.v1.auth.urls")),
|
||||||
|
# Analytics endpoints (error_summary is a view, not a viewset)
|
||||||
|
path("error_summary/", ErrorSummaryView.as_view(), name="error-summary"),
|
||||||
|
# Data retention stats view (aggregation endpoint)
|
||||||
|
path("data_retention_stats/", DataRetentionStatsView.as_view(), name="data-retention-stats"),
|
||||||
# Health check endpoints
|
# Health check endpoints
|
||||||
path("health/", HealthCheckAPIView.as_view(), name="health-check"),
|
path("health/", HealthCheckAPIView.as_view(), name="health-check"),
|
||||||
path("health/simple/", SimpleHealthAPIView.as_view(), name="simple-health"),
|
path("health/simple/", SimpleHealthAPIView.as_view(), name="simple-health"),
|
||||||
@@ -106,8 +140,11 @@ urlpatterns = [
|
|||||||
path("media/", include("apps.media.urls")),
|
path("media/", include("apps.media.urls")),
|
||||||
path("blog/", include("apps.blog.urls")),
|
path("blog/", include("apps.blog.urls")),
|
||||||
path("support/", include("apps.support.urls")),
|
path("support/", include("apps.support.urls")),
|
||||||
|
path("notifications/", include("apps.notifications.urls")),
|
||||||
path("errors/", include("apps.core.urls.errors")),
|
path("errors/", include("apps.core.urls.errors")),
|
||||||
path("images/", include("apps.api.v1.images.urls")),
|
path("images/", include("apps.api.v1.images.urls")),
|
||||||
|
# Admin dashboard API endpoints
|
||||||
|
path("admin/", include("apps.api.v1.admin.urls")),
|
||||||
# Cloudflare Images Toolkit API endpoints
|
# Cloudflare Images Toolkit API endpoints
|
||||||
path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
|
path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
|
||||||
# Include router URLs (for rankings and any other router-registered endpoints)
|
# Include router URLs (for rankings and any other router-registered endpoints)
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ entity completeness, and system health.
|
|||||||
|
|
||||||
from drf_spectacular.utils import extend_schema
|
from drf_spectacular.utils import extend_schema
|
||||||
from rest_framework import status
|
from rest_framework import status
|
||||||
from rest_framework.permissions import IsAdminUser
|
from apps.core.permissions import IsAdminWithSecondFactor
|
||||||
from rest_framework.response import Response
|
from rest_framework.response import Response
|
||||||
from rest_framework.views import APIView
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
@@ -89,7 +89,7 @@ class DataCompletenessAPIView(APIView):
|
|||||||
companies, and ride models.
|
companies, and ride models.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
permission_classes = [IsAdminUser]
|
permission_classes = [IsAdminWithSecondFactor]
|
||||||
|
|
||||||
@extend_schema(
|
@extend_schema(
|
||||||
tags=["Admin"],
|
tags=["Admin"],
|
||||||
|
|||||||
89
backend/apps/core/api/alert_serializers.py
Normal file
89
backend/apps/core/api/alert_serializers.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
"""
|
||||||
|
Serializers for admin alert API endpoints.
|
||||||
|
|
||||||
|
Provides serializers for SystemAlert, RateLimitAlert, and RateLimitAlertConfig models.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from apps.core.models import RateLimitAlert, RateLimitAlertConfig, SystemAlert
|
||||||
|
|
||||||
|
|
||||||
|
class SystemAlertSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for system alerts."""
|
||||||
|
|
||||||
|
is_resolved = serializers.BooleanField(read_only=True)
|
||||||
|
resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = SystemAlert
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"alert_type",
|
||||||
|
"severity",
|
||||||
|
"message",
|
||||||
|
"metadata",
|
||||||
|
"resolved_at",
|
||||||
|
"resolved_by",
|
||||||
|
"resolved_by_username",
|
||||||
|
"created_at",
|
||||||
|
"is_resolved",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at", "is_resolved", "resolved_by_username"]
|
||||||
|
|
||||||
|
|
||||||
|
class SystemAlertResolveSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for resolving system alerts."""
|
||||||
|
|
||||||
|
notes = serializers.CharField(required=False, allow_blank=True)
|
||||||
|
|
||||||
|
|
||||||
|
class RateLimitAlertConfigSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for rate limit alert configurations."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = RateLimitAlertConfig
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"metric_type",
|
||||||
|
"threshold_value",
|
||||||
|
"time_window_ms",
|
||||||
|
"function_name",
|
||||||
|
"enabled",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at", "updated_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class RateLimitAlertSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for rate limit alerts."""
|
||||||
|
|
||||||
|
is_resolved = serializers.BooleanField(read_only=True)
|
||||||
|
config_id = serializers.UUIDField(source="config.id", read_only=True)
|
||||||
|
resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = RateLimitAlert
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"config_id",
|
||||||
|
"metric_type",
|
||||||
|
"metric_value",
|
||||||
|
"threshold_value",
|
||||||
|
"time_window_ms",
|
||||||
|
"function_name",
|
||||||
|
"alert_message",
|
||||||
|
"resolved_at",
|
||||||
|
"resolved_by",
|
||||||
|
"resolved_by_username",
|
||||||
|
"created_at",
|
||||||
|
"is_resolved",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at", "is_resolved", "config_id", "resolved_by_username"]
|
||||||
|
|
||||||
|
|
||||||
|
class RateLimitAlertResolveSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for resolving rate limit alerts."""
|
||||||
|
|
||||||
|
notes = serializers.CharField(required=False, allow_blank=True)
|
||||||
226
backend/apps/core/api/alert_views.py
Normal file
226
backend/apps/core/api/alert_views.py
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
"""
|
||||||
|
ViewSets for admin alert API endpoints.
|
||||||
|
|
||||||
|
Provides CRUD operations for SystemAlert, RateLimitAlert, and RateLimitAlertConfig.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.utils import timezone
|
||||||
|
from django_filters.rest_framework import DjangoFilterBackend
|
||||||
|
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||||
|
from rest_framework import status, viewsets
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||||
|
from rest_framework.permissions import IsAdminUser
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
from apps.core.models import RateLimitAlert, RateLimitAlertConfig, SystemAlert
|
||||||
|
|
||||||
|
from .alert_serializers import (
|
||||||
|
RateLimitAlertConfigSerializer,
|
||||||
|
RateLimitAlertResolveSerializer,
|
||||||
|
RateLimitAlertSerializer,
|
||||||
|
SystemAlertResolveSerializer,
|
||||||
|
SystemAlertSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List system alerts",
|
||||||
|
description="Get all system alerts, optionally filtered by severity or resolved status.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get system alert",
|
||||||
|
description="Get details of a specific system alert.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
create=extend_schema(
|
||||||
|
summary="Create system alert",
|
||||||
|
description="Create a new system alert.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
update=extend_schema(
|
||||||
|
summary="Update system alert",
|
||||||
|
description="Update an existing system alert.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
partial_update=extend_schema(
|
||||||
|
summary="Partial update system alert",
|
||||||
|
description="Partially update an existing system alert.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
destroy=extend_schema(
|
||||||
|
summary="Delete system alert",
|
||||||
|
description="Delete a system alert.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class SystemAlertViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for managing system alerts.
|
||||||
|
|
||||||
|
Provides CRUD operations plus a resolve action for marking alerts as resolved.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = SystemAlert.objects.all()
|
||||||
|
serializer_class = SystemAlertSerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["severity", "alert_type"]
|
||||||
|
search_fields = ["message"]
|
||||||
|
ordering_fields = ["created_at", "severity"]
|
||||||
|
ordering = ["-created_at"]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
|
||||||
|
# Filter by resolved status
|
||||||
|
resolved = self.request.query_params.get("resolved")
|
||||||
|
if resolved is not None:
|
||||||
|
if resolved.lower() == "true":
|
||||||
|
queryset = queryset.exclude(resolved_at__isnull=True)
|
||||||
|
elif resolved.lower() == "false":
|
||||||
|
queryset = queryset.filter(resolved_at__isnull=True)
|
||||||
|
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Resolve system alert",
|
||||||
|
description="Mark a system alert as resolved.",
|
||||||
|
request=SystemAlertResolveSerializer,
|
||||||
|
responses={200: SystemAlertSerializer},
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["post"])
|
||||||
|
def resolve(self, request, pk=None):
|
||||||
|
"""Mark an alert as resolved."""
|
||||||
|
alert = self.get_object()
|
||||||
|
|
||||||
|
if alert.resolved_at:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Alert is already resolved"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
alert.resolved_at = timezone.now()
|
||||||
|
alert.resolved_by = request.user
|
||||||
|
alert.save()
|
||||||
|
|
||||||
|
serializer = self.get_serializer(alert)
|
||||||
|
return Response(serializer.data)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List rate limit alert configs",
|
||||||
|
description="Get all rate limit alert configurations.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get rate limit alert config",
|
||||||
|
description="Get details of a specific rate limit alert configuration.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
create=extend_schema(
|
||||||
|
summary="Create rate limit alert config",
|
||||||
|
description="Create a new rate limit alert configuration.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
update=extend_schema(
|
||||||
|
summary="Update rate limit alert config",
|
||||||
|
description="Update an existing rate limit alert configuration.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
partial_update=extend_schema(
|
||||||
|
summary="Partial update rate limit alert config",
|
||||||
|
description="Partially update an existing rate limit alert configuration.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
destroy=extend_schema(
|
||||||
|
summary="Delete rate limit alert config",
|
||||||
|
description="Delete a rate limit alert configuration.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class RateLimitAlertConfigViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for managing rate limit alert configurations.
|
||||||
|
|
||||||
|
Provides CRUD operations for alert thresholds.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = RateLimitAlertConfig.objects.all()
|
||||||
|
serializer_class = RateLimitAlertConfigSerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, OrderingFilter]
|
||||||
|
filterset_fields = ["metric_type", "enabled"]
|
||||||
|
ordering_fields = ["created_at", "metric_type", "threshold_value"]
|
||||||
|
ordering = ["metric_type", "-created_at"]
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List rate limit alerts",
|
||||||
|
description="Get all rate limit alerts, optionally filtered by resolved status.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get rate limit alert",
|
||||||
|
description="Get details of a specific rate limit alert.",
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class RateLimitAlertViewSet(viewsets.ReadOnlyModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for viewing rate limit alerts.
|
||||||
|
|
||||||
|
Provides read-only access and a resolve action.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = RateLimitAlert.objects.select_related("config").all()
|
||||||
|
serializer_class = RateLimitAlertSerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["metric_type"]
|
||||||
|
search_fields = ["alert_message", "function_name"]
|
||||||
|
ordering_fields = ["created_at", "metric_value"]
|
||||||
|
ordering = ["-created_at"]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
|
||||||
|
# Filter by resolved status
|
||||||
|
resolved = self.request.query_params.get("resolved")
|
||||||
|
if resolved is not None:
|
||||||
|
if resolved.lower() == "true":
|
||||||
|
queryset = queryset.exclude(resolved_at__isnull=True)
|
||||||
|
elif resolved.lower() == "false":
|
||||||
|
queryset = queryset.filter(resolved_at__isnull=True)
|
||||||
|
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Resolve rate limit alert",
|
||||||
|
description="Mark a rate limit alert as resolved.",
|
||||||
|
request=RateLimitAlertResolveSerializer,
|
||||||
|
responses={200: RateLimitAlertSerializer},
|
||||||
|
tags=["Admin - Alerts"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["post"])
|
||||||
|
def resolve(self, request, pk=None):
|
||||||
|
"""Mark an alert as resolved."""
|
||||||
|
alert = self.get_object()
|
||||||
|
|
||||||
|
if alert.resolved_at:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Alert is already resolved"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
alert.resolved_at = timezone.now()
|
||||||
|
alert.resolved_by = request.user
|
||||||
|
alert.save()
|
||||||
|
|
||||||
|
serializer = self.get_serializer(alert)
|
||||||
|
return Response(serializer.data)
|
||||||
204
backend/apps/core/api/analytics_serializers.py
Normal file
204
backend/apps/core/api/analytics_serializers.py
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
"""
|
||||||
|
Serializers for admin analytics endpoints.
|
||||||
|
|
||||||
|
Provides serialization for RequestMetadata, RequestBreadcrumb,
|
||||||
|
ApprovalTransactionMetric, and ErrorSummary aggregation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from apps.core.models import (
|
||||||
|
ApprovalTransactionMetric,
|
||||||
|
RequestBreadcrumb,
|
||||||
|
RequestMetadata,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestBreadcrumbSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for request breadcrumb data."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = RequestBreadcrumb
|
||||||
|
fields = [
|
||||||
|
"timestamp",
|
||||||
|
"category",
|
||||||
|
"message",
|
||||||
|
"level",
|
||||||
|
"sequence_order",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class RequestMetadataSerializer(serializers.ModelSerializer):
|
||||||
|
"""
|
||||||
|
Serializer for request metadata with nested breadcrumbs.
|
||||||
|
|
||||||
|
Supports the expand=request_breadcrumbs query parameter
|
||||||
|
to include breadcrumb data in the response.
|
||||||
|
"""
|
||||||
|
|
||||||
|
request_breadcrumbs = RequestBreadcrumbSerializer(many=True, read_only=True)
|
||||||
|
user_id = serializers.CharField(source="user_id", read_only=True, allow_null=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = RequestMetadata
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"request_id",
|
||||||
|
"trace_id",
|
||||||
|
"session_id",
|
||||||
|
"parent_request_id",
|
||||||
|
"action",
|
||||||
|
"method",
|
||||||
|
"endpoint",
|
||||||
|
"request_method",
|
||||||
|
"request_path",
|
||||||
|
"affected_route",
|
||||||
|
"http_status",
|
||||||
|
"status_code",
|
||||||
|
"response_status",
|
||||||
|
"success",
|
||||||
|
"started_at",
|
||||||
|
"completed_at",
|
||||||
|
"duration_ms",
|
||||||
|
"response_time_ms",
|
||||||
|
"error_type",
|
||||||
|
"error_message",
|
||||||
|
"error_stack",
|
||||||
|
"error_code",
|
||||||
|
"error_origin",
|
||||||
|
"component_stack",
|
||||||
|
"severity",
|
||||||
|
"is_resolved",
|
||||||
|
"resolved_at",
|
||||||
|
"resolved_by",
|
||||||
|
"resolution_notes",
|
||||||
|
"retry_count",
|
||||||
|
"retry_attempts",
|
||||||
|
"user_id",
|
||||||
|
"user_agent",
|
||||||
|
"ip_address_hash",
|
||||||
|
"client_version",
|
||||||
|
"timezone",
|
||||||
|
"referrer",
|
||||||
|
"entity_type",
|
||||||
|
"entity_id",
|
||||||
|
"created_at",
|
||||||
|
"request_breadcrumbs",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at"]
|
||||||
|
|
||||||
|
def to_representation(self, instance):
|
||||||
|
"""Conditionally include breadcrumbs based on expand parameter."""
|
||||||
|
data = super().to_representation(instance)
|
||||||
|
request = self.context.get("request")
|
||||||
|
|
||||||
|
# Only include breadcrumbs if explicitly expanded
|
||||||
|
if request:
|
||||||
|
expand = request.query_params.get("expand", "")
|
||||||
|
if "request_breadcrumbs" not in expand:
|
||||||
|
data.pop("request_breadcrumbs", None)
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
class RequestMetadataCreateSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for creating request metadata (log_request_metadata RPC)."""
|
||||||
|
|
||||||
|
breadcrumbs = RequestBreadcrumbSerializer(many=True, required=False)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = RequestMetadata
|
||||||
|
fields = [
|
||||||
|
"request_id",
|
||||||
|
"trace_id",
|
||||||
|
"session_id",
|
||||||
|
"parent_request_id",
|
||||||
|
"action",
|
||||||
|
"method",
|
||||||
|
"endpoint",
|
||||||
|
"request_method",
|
||||||
|
"request_path",
|
||||||
|
"affected_route",
|
||||||
|
"http_status",
|
||||||
|
"status_code",
|
||||||
|
"response_status",
|
||||||
|
"success",
|
||||||
|
"completed_at",
|
||||||
|
"duration_ms",
|
||||||
|
"response_time_ms",
|
||||||
|
"error_type",
|
||||||
|
"error_message",
|
||||||
|
"error_stack",
|
||||||
|
"error_code",
|
||||||
|
"error_origin",
|
||||||
|
"component_stack",
|
||||||
|
"severity",
|
||||||
|
"retry_count",
|
||||||
|
"retry_attempts",
|
||||||
|
"user_agent",
|
||||||
|
"ip_address_hash",
|
||||||
|
"client_version",
|
||||||
|
"timezone",
|
||||||
|
"referrer",
|
||||||
|
"entity_type",
|
||||||
|
"entity_id",
|
||||||
|
"breadcrumbs",
|
||||||
|
]
|
||||||
|
|
||||||
|
def create(self, validated_data):
|
||||||
|
breadcrumbs_data = validated_data.pop("breadcrumbs", [])
|
||||||
|
request_metadata = RequestMetadata.objects.create(**validated_data)
|
||||||
|
|
||||||
|
for i, breadcrumb_data in enumerate(breadcrumbs_data):
|
||||||
|
RequestBreadcrumb.objects.create(
|
||||||
|
request_metadata=request_metadata,
|
||||||
|
sequence_order=breadcrumb_data.get("sequence_order", i),
|
||||||
|
**{k: v for k, v in breadcrumb_data.items() if k != "sequence_order"}
|
||||||
|
)
|
||||||
|
|
||||||
|
return request_metadata
|
||||||
|
|
||||||
|
|
||||||
|
class RequestMetadataResolveSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for resolving request metadata errors."""
|
||||||
|
|
||||||
|
resolution_notes = serializers.CharField(required=False, allow_blank=True)
|
||||||
|
|
||||||
|
|
||||||
|
class ApprovalTransactionMetricSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for approval transaction metrics."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ApprovalTransactionMetric
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"submission_id",
|
||||||
|
"moderator_id",
|
||||||
|
"submitter_id",
|
||||||
|
"request_id",
|
||||||
|
"success",
|
||||||
|
"duration_ms",
|
||||||
|
"items_count",
|
||||||
|
"rollback_triggered",
|
||||||
|
"error_code",
|
||||||
|
"error_message",
|
||||||
|
"error_details",
|
||||||
|
"created_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorSummarySerializer(serializers.Serializer):
|
||||||
|
"""
|
||||||
|
Read-only serializer for error summary aggregation.
|
||||||
|
|
||||||
|
Aggregates error data from RequestMetadata for dashboard display.
|
||||||
|
"""
|
||||||
|
|
||||||
|
date = serializers.DateField(read_only=True)
|
||||||
|
error_type = serializers.CharField(read_only=True)
|
||||||
|
severity = serializers.CharField(read_only=True)
|
||||||
|
error_count = serializers.IntegerField(read_only=True)
|
||||||
|
resolved_count = serializers.IntegerField(read_only=True)
|
||||||
|
affected_users = serializers.IntegerField(read_only=True)
|
||||||
|
avg_resolution_minutes = serializers.FloatField(read_only=True, allow_null=True)
|
||||||
184
backend/apps/core/api/analytics_views.py
Normal file
184
backend/apps/core/api/analytics_views.py
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
"""
|
||||||
|
ViewSets for admin analytics endpoints.
|
||||||
|
|
||||||
|
Provides read/write access to RequestMetadata, ApprovalTransactionMetric,
|
||||||
|
and a read-only aggregation endpoint for ErrorSummary.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from django.db.models import Avg, Count, F, Q
|
||||||
|
from django.db.models.functions import TruncDate
|
||||||
|
from django.utils import timezone
|
||||||
|
from django_filters import rest_framework as filters
|
||||||
|
from rest_framework import status, viewsets
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.permissions import IsAdminUser, IsAuthenticated
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from apps.core.models import ApprovalTransactionMetric, RequestMetadata
|
||||||
|
|
||||||
|
from .analytics_serializers import (
|
||||||
|
ApprovalTransactionMetricSerializer,
|
||||||
|
ErrorSummarySerializer,
|
||||||
|
RequestMetadataCreateSerializer,
|
||||||
|
RequestMetadataResolveSerializer,
|
||||||
|
RequestMetadataSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestMetadataFilter(filters.FilterSet):
|
||||||
|
"""Filter for RequestMetadata queries."""
|
||||||
|
|
||||||
|
error_type__ne = filters.CharFilter(field_name="error_type", method="filter_not_equal")
|
||||||
|
created_at__gte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="gte")
|
||||||
|
created_at__lte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="lte")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = RequestMetadata
|
||||||
|
fields = {
|
||||||
|
"error_type": ["exact", "isnull"],
|
||||||
|
"severity": ["exact"],
|
||||||
|
"is_resolved": ["exact"],
|
||||||
|
"success": ["exact"],
|
||||||
|
"http_status": ["exact", "gte", "lte"],
|
||||||
|
"user": ["exact"],
|
||||||
|
"endpoint": ["exact", "icontains"],
|
||||||
|
}
|
||||||
|
|
||||||
|
def filter_not_equal(self, queryset, name, value):
|
||||||
|
"""Handle the error_type__ne filter for non-null error types."""
|
||||||
|
# The frontend sends a JSON object for 'not null' filter
|
||||||
|
# We interpret this as 'error_type is not null'
|
||||||
|
if value:
|
||||||
|
return queryset.exclude(error_type__isnull=True)
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
class RequestMetadataViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for request metadata CRUD operations.
|
||||||
|
|
||||||
|
Supports filtering by error_type, severity, date range, etc.
|
||||||
|
Use the expand=request_breadcrumbs query parameter to include breadcrumbs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = RequestMetadata.objects.all()
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
filterset_class = RequestMetadataFilter
|
||||||
|
ordering_fields = ["created_at", "severity", "error_type"]
|
||||||
|
ordering = ["-created_at"]
|
||||||
|
|
||||||
|
def get_serializer_class(self):
|
||||||
|
if self.action == "create":
|
||||||
|
return RequestMetadataCreateSerializer
|
||||||
|
return RequestMetadataSerializer
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
"""Optimize queryset with prefetch for breadcrumbs if expanded."""
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
expand = self.request.query_params.get("expand", "")
|
||||||
|
|
||||||
|
if "request_breadcrumbs" in expand:
|
||||||
|
queryset = queryset.prefetch_related("request_breadcrumbs")
|
||||||
|
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
def perform_create(self, serializer):
|
||||||
|
"""Associate request metadata with current user if authenticated."""
|
||||||
|
user = self.request.user if self.request.user.is_authenticated else None
|
||||||
|
serializer.save(user=user)
|
||||||
|
|
||||||
|
@action(detail=True, methods=["post"], permission_classes=[IsAdminUser])
|
||||||
|
def resolve(self, request, pk=None):
|
||||||
|
"""Mark a request metadata entry as resolved."""
|
||||||
|
instance = self.get_object()
|
||||||
|
serializer = RequestMetadataResolveSerializer(data=request.data)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
|
instance.is_resolved = True
|
||||||
|
instance.resolved_at = timezone.now()
|
||||||
|
instance.resolved_by = request.user
|
||||||
|
instance.resolution_notes = serializer.validated_data.get("resolution_notes", "")
|
||||||
|
instance.save(update_fields=["is_resolved", "resolved_at", "resolved_by", "resolution_notes"])
|
||||||
|
|
||||||
|
return Response(RequestMetadataSerializer(instance).data)
|
||||||
|
|
||||||
|
|
||||||
|
class ApprovalTransactionMetricFilter(filters.FilterSet):
|
||||||
|
"""Filter for ApprovalTransactionMetric queries."""
|
||||||
|
|
||||||
|
created_at__gte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="gte")
|
||||||
|
created_at__lte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="lte")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = ApprovalTransactionMetric
|
||||||
|
fields = {
|
||||||
|
"success": ["exact"],
|
||||||
|
"moderator_id": ["exact"],
|
||||||
|
"submitter_id": ["exact"],
|
||||||
|
"submission_id": ["exact"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class ApprovalTransactionMetricViewSet(viewsets.ReadOnlyModelViewSet):
|
||||||
|
"""
|
||||||
|
Read-only ViewSet for approval transaction metrics.
|
||||||
|
|
||||||
|
Provides analytics data about moderation approval operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = ApprovalTransactionMetric.objects.all()
|
||||||
|
serializer_class = ApprovalTransactionMetricSerializer
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
filterset_class = ApprovalTransactionMetricFilter
|
||||||
|
ordering_fields = ["created_at", "duration_ms", "success"]
|
||||||
|
ordering = ["-created_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorSummaryView(APIView):
|
||||||
|
"""
|
||||||
|
Aggregation endpoint for error summary statistics.
|
||||||
|
|
||||||
|
Returns daily error counts grouped by error_type and severity,
|
||||||
|
similar to the Supabase error_summary view.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [IsAuthenticated]
|
||||||
|
|
||||||
|
def get(self, request):
|
||||||
|
"""Get aggregated error summary data."""
|
||||||
|
# Default to last 30 days
|
||||||
|
days = int(request.query_params.get("days", 30))
|
||||||
|
since = timezone.now() - timedelta(days=days)
|
||||||
|
|
||||||
|
# Aggregate error data by date, error_type, and severity
|
||||||
|
summary = (
|
||||||
|
RequestMetadata.objects.filter(
|
||||||
|
created_at__gte=since,
|
||||||
|
error_type__isnull=False,
|
||||||
|
)
|
||||||
|
.annotate(date=TruncDate("created_at"))
|
||||||
|
.values("date", "error_type", "severity")
|
||||||
|
.annotate(
|
||||||
|
error_count=Count("id"),
|
||||||
|
resolved_count=Count("id", filter=Q(is_resolved=True)),
|
||||||
|
affected_users=Count("user", distinct=True),
|
||||||
|
avg_resolution_minutes=Avg(
|
||||||
|
(F("resolved_at") - F("created_at")),
|
||||||
|
filter=Q(is_resolved=True, resolved_at__isnull=False),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.order_by("-date", "-error_count")
|
||||||
|
)
|
||||||
|
|
||||||
|
# Convert timedelta to minutes for avg_resolution_minutes
|
||||||
|
results = []
|
||||||
|
for item in summary:
|
||||||
|
if item["avg_resolution_minutes"]:
|
||||||
|
item["avg_resolution_minutes"] = item["avg_resolution_minutes"].total_seconds() / 60
|
||||||
|
results.append(item)
|
||||||
|
|
||||||
|
serializer = ErrorSummarySerializer(results, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
162
backend/apps/core/api/incident_serializers.py
Normal file
162
backend/apps/core/api/incident_serializers.py
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
"""
|
||||||
|
Serializers for Incident management API endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from apps.core.models import Incident, IncidentAlert
|
||||||
|
|
||||||
|
|
||||||
|
class IncidentAlertSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for linked alerts within an incident."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = IncidentAlert
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"alert_source",
|
||||||
|
"alert_id",
|
||||||
|
"created_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class IncidentSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for Incident model."""
|
||||||
|
|
||||||
|
acknowledged_by_username = serializers.CharField(
|
||||||
|
source="acknowledged_by.username", read_only=True, allow_null=True
|
||||||
|
)
|
||||||
|
resolved_by_username = serializers.CharField(
|
||||||
|
source="resolved_by.username", read_only=True, allow_null=True
|
||||||
|
)
|
||||||
|
status_display = serializers.CharField(source="get_status_display", read_only=True)
|
||||||
|
severity_display = serializers.CharField(source="get_severity_display", read_only=True)
|
||||||
|
linked_alerts = IncidentAlertSerializer(many=True, read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Incident
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"incident_number",
|
||||||
|
"title",
|
||||||
|
"description",
|
||||||
|
"severity",
|
||||||
|
"severity_display",
|
||||||
|
"status",
|
||||||
|
"status_display",
|
||||||
|
"detected_at",
|
||||||
|
"acknowledged_at",
|
||||||
|
"acknowledged_by",
|
||||||
|
"acknowledged_by_username",
|
||||||
|
"resolved_at",
|
||||||
|
"resolved_by",
|
||||||
|
"resolved_by_username",
|
||||||
|
"resolution_notes",
|
||||||
|
"alert_count",
|
||||||
|
"linked_alerts",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"incident_number",
|
||||||
|
"detected_at",
|
||||||
|
"acknowledged_at",
|
||||||
|
"acknowledged_by",
|
||||||
|
"resolved_at",
|
||||||
|
"resolved_by",
|
||||||
|
"alert_count",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class IncidentCreateSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for creating incidents with linked alerts."""
|
||||||
|
|
||||||
|
alert_ids = serializers.ListField(
|
||||||
|
child=serializers.UUIDField(),
|
||||||
|
write_only=True,
|
||||||
|
required=False,
|
||||||
|
help_text="List of alert IDs to link to this incident",
|
||||||
|
)
|
||||||
|
alert_sources = serializers.ListField(
|
||||||
|
child=serializers.ChoiceField(choices=["system", "rate_limit"]),
|
||||||
|
write_only=True,
|
||||||
|
required=False,
|
||||||
|
help_text="Source types for each alert (must match alert_ids length)",
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Incident
|
||||||
|
fields = [
|
||||||
|
"title",
|
||||||
|
"description",
|
||||||
|
"severity",
|
||||||
|
"alert_ids",
|
||||||
|
"alert_sources",
|
||||||
|
]
|
||||||
|
|
||||||
|
def validate(self, data):
|
||||||
|
alert_ids = data.get("alert_ids", [])
|
||||||
|
alert_sources = data.get("alert_sources", [])
|
||||||
|
|
||||||
|
if alert_ids and len(alert_ids) != len(alert_sources):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
{"alert_sources": "Must provide one source per alert_id"}
|
||||||
|
)
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
def create(self, validated_data):
|
||||||
|
alert_ids = validated_data.pop("alert_ids", [])
|
||||||
|
alert_sources = validated_data.pop("alert_sources", [])
|
||||||
|
|
||||||
|
incident = Incident.objects.create(**validated_data)
|
||||||
|
|
||||||
|
# Create linked alerts
|
||||||
|
for alert_id, source in zip(alert_ids, alert_sources):
|
||||||
|
IncidentAlert.objects.create(
|
||||||
|
incident=incident,
|
||||||
|
alert_id=alert_id,
|
||||||
|
alert_source=source,
|
||||||
|
)
|
||||||
|
|
||||||
|
return incident
|
||||||
|
|
||||||
|
|
||||||
|
class IncidentAcknowledgeSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for acknowledging an incident."""
|
||||||
|
|
||||||
|
pass # No additional data needed
|
||||||
|
|
||||||
|
|
||||||
|
class IncidentResolveSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for resolving an incident."""
|
||||||
|
|
||||||
|
resolution_notes = serializers.CharField(required=False, allow_blank=True)
|
||||||
|
resolve_alerts = serializers.BooleanField(
|
||||||
|
default=True,
|
||||||
|
help_text="Whether to also resolve all linked alerts",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class LinkAlertsSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for linking alerts to an incident."""
|
||||||
|
|
||||||
|
alert_ids = serializers.ListField(
|
||||||
|
child=serializers.UUIDField(),
|
||||||
|
help_text="List of alert IDs to link",
|
||||||
|
)
|
||||||
|
alert_sources = serializers.ListField(
|
||||||
|
child=serializers.ChoiceField(choices=["system", "rate_limit"]),
|
||||||
|
help_text="Source types for each alert",
|
||||||
|
)
|
||||||
|
|
||||||
|
def validate(self, data):
|
||||||
|
if len(data["alert_ids"]) != len(data["alert_sources"]):
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
{"alert_sources": "Must provide one source per alert_id"}
|
||||||
|
)
|
||||||
|
return data
|
||||||
201
backend/apps/core/api/incident_views.py
Normal file
201
backend/apps/core/api/incident_views.py
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
"""
|
||||||
|
ViewSets for Incident management API endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.utils import timezone
|
||||||
|
from django_filters.rest_framework import DjangoFilterBackend
|
||||||
|
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||||
|
from rest_framework import status, viewsets
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||||
|
from rest_framework.permissions import IsAdminUser
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
from apps.core.models import Incident, IncidentAlert, RateLimitAlert, SystemAlert
|
||||||
|
|
||||||
|
from .incident_serializers import (
|
||||||
|
IncidentAcknowledgeSerializer,
|
||||||
|
IncidentAlertSerializer,
|
||||||
|
IncidentCreateSerializer,
|
||||||
|
IncidentResolveSerializer,
|
||||||
|
IncidentSerializer,
|
||||||
|
LinkAlertsSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List incidents",
|
||||||
|
description="Get all incidents, optionally filtered by status or severity.",
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get incident",
|
||||||
|
description="Get details of a specific incident including linked alerts.",
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
),
|
||||||
|
create=extend_schema(
|
||||||
|
summary="Create incident",
|
||||||
|
description="Create a new incident and optionally link alerts.",
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
),
|
||||||
|
update=extend_schema(
|
||||||
|
summary="Update incident",
|
||||||
|
description="Update an existing incident.",
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
),
|
||||||
|
partial_update=extend_schema(
|
||||||
|
summary="Partial update incident",
|
||||||
|
description="Partially update an existing incident.",
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
),
|
||||||
|
destroy=extend_schema(
|
||||||
|
summary="Delete incident",
|
||||||
|
description="Delete an incident.",
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class IncidentViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for managing incidents.
|
||||||
|
|
||||||
|
Provides CRUD operations plus acknowledge, resolve, and alert linking actions.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = Incident.objects.prefetch_related("linked_alerts").all()
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["status", "severity"]
|
||||||
|
search_fields = ["title", "description", "incident_number"]
|
||||||
|
ordering_fields = ["detected_at", "severity", "status", "alert_count"]
|
||||||
|
ordering = ["-detected_at"]
|
||||||
|
|
||||||
|
def get_serializer_class(self):
|
||||||
|
if self.action == "create":
|
||||||
|
return IncidentCreateSerializer
|
||||||
|
if self.action == "acknowledge":
|
||||||
|
return IncidentAcknowledgeSerializer
|
||||||
|
if self.action == "resolve":
|
||||||
|
return IncidentResolveSerializer
|
||||||
|
if self.action == "link_alerts":
|
||||||
|
return LinkAlertsSerializer
|
||||||
|
if self.action == "alerts":
|
||||||
|
return IncidentAlertSerializer
|
||||||
|
return IncidentSerializer
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Acknowledge incident",
|
||||||
|
description="Mark an incident as being investigated.",
|
||||||
|
request=IncidentAcknowledgeSerializer,
|
||||||
|
responses={200: IncidentSerializer},
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["post"])
|
||||||
|
def acknowledge(self, request, pk=None):
|
||||||
|
"""Mark an incident as being investigated."""
|
||||||
|
incident = self.get_object()
|
||||||
|
|
||||||
|
if incident.status != Incident.Status.OPEN:
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Cannot acknowledge incident in '{incident.status}' status"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
incident.status = Incident.Status.INVESTIGATING
|
||||||
|
incident.acknowledged_at = timezone.now()
|
||||||
|
incident.acknowledged_by = request.user
|
||||||
|
incident.save()
|
||||||
|
|
||||||
|
return Response(IncidentSerializer(incident).data)
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Resolve incident",
|
||||||
|
description="Mark an incident as resolved, optionally resolving all linked alerts.",
|
||||||
|
request=IncidentResolveSerializer,
|
||||||
|
responses={200: IncidentSerializer},
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["post"])
|
||||||
|
def resolve(self, request, pk=None):
|
||||||
|
"""Mark an incident as resolved."""
|
||||||
|
incident = self.get_object()
|
||||||
|
|
||||||
|
if incident.status in (Incident.Status.RESOLVED, Incident.Status.CLOSED):
|
||||||
|
return Response(
|
||||||
|
{"detail": "Incident is already resolved or closed"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
serializer = IncidentResolveSerializer(data=request.data)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
|
incident.status = Incident.Status.RESOLVED
|
||||||
|
incident.resolved_at = timezone.now()
|
||||||
|
incident.resolved_by = request.user
|
||||||
|
incident.resolution_notes = serializer.validated_data.get("resolution_notes", "")
|
||||||
|
incident.save()
|
||||||
|
|
||||||
|
# Optionally resolve all linked alerts
|
||||||
|
if serializer.validated_data.get("resolve_alerts", True):
|
||||||
|
now = timezone.now()
|
||||||
|
for link in incident.linked_alerts.all():
|
||||||
|
if link.alert_source == "system":
|
||||||
|
SystemAlert.objects.filter(
|
||||||
|
id=link.alert_id, resolved_at__isnull=True
|
||||||
|
).update(resolved_at=now, resolved_by=request.user)
|
||||||
|
elif link.alert_source == "rate_limit":
|
||||||
|
RateLimitAlert.objects.filter(
|
||||||
|
id=link.alert_id, resolved_at__isnull=True
|
||||||
|
).update(resolved_at=now, resolved_by=request.user)
|
||||||
|
|
||||||
|
return Response(IncidentSerializer(incident).data)
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Get linked alerts",
|
||||||
|
description="Get all alerts linked to this incident.",
|
||||||
|
responses={200: IncidentAlertSerializer(many=True)},
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["get"])
|
||||||
|
def alerts(self, request, pk=None):
|
||||||
|
"""Get all alerts linked to this incident."""
|
||||||
|
incident = self.get_object()
|
||||||
|
alerts = incident.linked_alerts.all()
|
||||||
|
serializer = IncidentAlertSerializer(alerts, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Link alerts to incident",
|
||||||
|
description="Link additional alerts to an existing incident.",
|
||||||
|
request=LinkAlertsSerializer,
|
||||||
|
responses={200: IncidentSerializer},
|
||||||
|
tags=["Admin - Incidents"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["post"], url_path="link-alerts")
|
||||||
|
def link_alerts(self, request, pk=None):
|
||||||
|
"""Link additional alerts to an incident."""
|
||||||
|
incident = self.get_object()
|
||||||
|
|
||||||
|
serializer = LinkAlertsSerializer(data=request.data)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
|
alert_ids = serializer.validated_data["alert_ids"]
|
||||||
|
alert_sources = serializer.validated_data["alert_sources"]
|
||||||
|
|
||||||
|
created = 0
|
||||||
|
for alert_id, source in zip(alert_ids, alert_sources):
|
||||||
|
_, was_created = IncidentAlert.objects.get_or_create(
|
||||||
|
incident=incident,
|
||||||
|
alert_id=alert_id,
|
||||||
|
alert_source=source,
|
||||||
|
)
|
||||||
|
if was_created:
|
||||||
|
created += 1
|
||||||
|
|
||||||
|
# Refresh to get updated alert_count
|
||||||
|
incident.refresh_from_db()
|
||||||
|
|
||||||
|
return Response({
|
||||||
|
"detail": f"Linked {created} new alerts to incident",
|
||||||
|
"incident": IncidentSerializer(incident).data,
|
||||||
|
})
|
||||||
93
backend/apps/core/api/milestone_serializers.py
Normal file
93
backend/apps/core/api/milestone_serializers.py
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
"""
|
||||||
|
Milestone serializers for timeline events.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from apps.core.models import Milestone
|
||||||
|
|
||||||
|
|
||||||
|
class MilestoneSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for Milestone model matching frontend milestoneValidationSchema."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Milestone
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"title",
|
||||||
|
"description",
|
||||||
|
"event_type",
|
||||||
|
"event_date",
|
||||||
|
"event_date_precision",
|
||||||
|
"entity_type",
|
||||||
|
"entity_id",
|
||||||
|
"is_public",
|
||||||
|
"display_order",
|
||||||
|
"from_value",
|
||||||
|
"to_value",
|
||||||
|
"from_entity_id",
|
||||||
|
"to_entity_id",
|
||||||
|
"from_location_id",
|
||||||
|
"to_location_id",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at", "updated_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class MilestoneCreateSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for creating milestones."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Milestone
|
||||||
|
fields = [
|
||||||
|
"title",
|
||||||
|
"description",
|
||||||
|
"event_type",
|
||||||
|
"event_date",
|
||||||
|
"event_date_precision",
|
||||||
|
"entity_type",
|
||||||
|
"entity_id",
|
||||||
|
"is_public",
|
||||||
|
"display_order",
|
||||||
|
"from_value",
|
||||||
|
"to_value",
|
||||||
|
"from_entity_id",
|
||||||
|
"to_entity_id",
|
||||||
|
"from_location_id",
|
||||||
|
"to_location_id",
|
||||||
|
]
|
||||||
|
|
||||||
|
def validate(self, attrs):
|
||||||
|
"""Validate change events have from/to values."""
|
||||||
|
change_events = ["name_change", "operator_change", "owner_change", "location_change", "status_change"]
|
||||||
|
if attrs.get("event_type") in change_events:
|
||||||
|
has_change_data = (
|
||||||
|
attrs.get("from_value")
|
||||||
|
or attrs.get("to_value")
|
||||||
|
or attrs.get("from_entity_id")
|
||||||
|
or attrs.get("to_entity_id")
|
||||||
|
or attrs.get("from_location_id")
|
||||||
|
or attrs.get("to_location_id")
|
||||||
|
)
|
||||||
|
if not has_change_data:
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
"Change events must specify what changed (from/to values or entity IDs)"
|
||||||
|
)
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
|
||||||
|
class MilestoneListSerializer(serializers.ModelSerializer):
|
||||||
|
"""Lightweight serializer for listing milestones."""
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Milestone
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"title",
|
||||||
|
"event_type",
|
||||||
|
"event_date",
|
||||||
|
"entity_type",
|
||||||
|
"entity_id",
|
||||||
|
"is_public",
|
||||||
|
]
|
||||||
79
backend/apps/core/api/milestone_views.py
Normal file
79
backend/apps/core/api/milestone_views.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
"""
|
||||||
|
Milestone views for timeline events.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django_filters import rest_framework as filters
|
||||||
|
from rest_framework import status, viewsets
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
|
||||||
|
from rest_framework.response import Response
|
||||||
|
|
||||||
|
from apps.core.models import Milestone
|
||||||
|
|
||||||
|
from .milestone_serializers import (
|
||||||
|
MilestoneCreateSerializer,
|
||||||
|
MilestoneListSerializer,
|
||||||
|
MilestoneSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class MilestoneFilter(filters.FilterSet):
|
||||||
|
"""Filters for milestone listing."""
|
||||||
|
|
||||||
|
entity_type = filters.CharFilter(field_name="entity_type")
|
||||||
|
entity_id = filters.UUIDFilter(field_name="entity_id")
|
||||||
|
event_type = filters.CharFilter(field_name="event_type")
|
||||||
|
is_public = filters.BooleanFilter(field_name="is_public")
|
||||||
|
event_date_after = filters.DateFilter(field_name="event_date", lookup_expr="gte")
|
||||||
|
event_date_before = filters.DateFilter(field_name="event_date", lookup_expr="lte")
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Milestone
|
||||||
|
fields = ["entity_type", "entity_id", "event_type", "is_public"]
|
||||||
|
|
||||||
|
|
||||||
|
class MilestoneViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for managing milestones/timeline events.
|
||||||
|
|
||||||
|
Supports filtering by entity_type, entity_id, event_type, and date range.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = Milestone.objects.all()
|
||||||
|
filterset_class = MilestoneFilter
|
||||||
|
permission_classes = [IsAuthenticatedOrReadOnly]
|
||||||
|
|
||||||
|
def get_serializer_class(self):
|
||||||
|
if self.action == "list":
|
||||||
|
return MilestoneListSerializer
|
||||||
|
if self.action == "create":
|
||||||
|
return MilestoneCreateSerializer
|
||||||
|
return MilestoneSerializer
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
"""Filter queryset based on visibility."""
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
|
||||||
|
# Non-authenticated users only see public milestones
|
||||||
|
if not self.request.user.is_authenticated:
|
||||||
|
queryset = queryset.filter(is_public=True)
|
||||||
|
|
||||||
|
return queryset.order_by("-event_date", "display_order")
|
||||||
|
|
||||||
|
@action(detail=False, methods=["get"], url_path="entity/(?P<entity_type>[^/]+)/(?P<entity_id>[^/]+)")
|
||||||
|
def by_entity(self, request, entity_type=None, entity_id=None):
|
||||||
|
"""Get all milestones for a specific entity."""
|
||||||
|
queryset = self.get_queryset().filter(
|
||||||
|
entity_type=entity_type,
|
||||||
|
entity_id=entity_id,
|
||||||
|
)
|
||||||
|
serializer = MilestoneListSerializer(queryset, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
|
|
||||||
|
@action(detail=False, methods=["get"], url_path="timeline")
|
||||||
|
def timeline(self, request):
|
||||||
|
"""Get a unified timeline view of recent milestones across all entities."""
|
||||||
|
limit = int(request.query_params.get("limit", 50))
|
||||||
|
queryset = self.get_queryset()[:limit]
|
||||||
|
serializer = MilestoneListSerializer(queryset, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
176
backend/apps/core/api/observability_serializers.py
Normal file
176
backend/apps/core/api/observability_serializers.py
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
"""
|
||||||
|
Serializers for observability API endpoints.
|
||||||
|
|
||||||
|
Provides serializers for PipelineError, Anomaly, AlertCorrelationRule,
|
||||||
|
CleanupJobLog, and DataRetentionStats.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from apps.core.choices.serializers import RichChoiceSerializerField
|
||||||
|
from apps.core.models import (
|
||||||
|
AlertCorrelationRule,
|
||||||
|
Anomaly,
|
||||||
|
CleanupJobLog,
|
||||||
|
PipelineError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PipelineErrorSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for pipeline errors."""
|
||||||
|
|
||||||
|
severity = RichChoiceSerializerField(
|
||||||
|
choice_group="pipeline_error_severities",
|
||||||
|
domain="core",
|
||||||
|
)
|
||||||
|
resolved_by_username = serializers.CharField(
|
||||||
|
source="resolved_by.username",
|
||||||
|
read_only=True,
|
||||||
|
allow_null=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = PipelineError
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"function_name",
|
||||||
|
"error_message",
|
||||||
|
"error_code",
|
||||||
|
"error_context",
|
||||||
|
"stack_trace",
|
||||||
|
"severity",
|
||||||
|
"submission_id",
|
||||||
|
"item_id",
|
||||||
|
"request_id",
|
||||||
|
"trace_id",
|
||||||
|
"resolved",
|
||||||
|
"resolved_by",
|
||||||
|
"resolved_by_username",
|
||||||
|
"resolved_at",
|
||||||
|
"resolution_notes",
|
||||||
|
"occurred_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "occurred_at", "resolved_by_username"]
|
||||||
|
|
||||||
|
|
||||||
|
class PipelineErrorResolveSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for resolving pipeline errors."""
|
||||||
|
|
||||||
|
resolution_notes = serializers.CharField(required=False, allow_blank=True)
|
||||||
|
|
||||||
|
|
||||||
|
class AnomalySerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for detected anomalies."""
|
||||||
|
|
||||||
|
anomaly_type = RichChoiceSerializerField(
|
||||||
|
choice_group="anomaly_types",
|
||||||
|
domain="core",
|
||||||
|
)
|
||||||
|
severity = RichChoiceSerializerField(
|
||||||
|
choice_group="severity_levels",
|
||||||
|
domain="core",
|
||||||
|
)
|
||||||
|
alert_message = serializers.CharField(
|
||||||
|
source="alert.message",
|
||||||
|
read_only=True,
|
||||||
|
allow_null=True,
|
||||||
|
)
|
||||||
|
alert_resolved_at = serializers.DateTimeField(
|
||||||
|
source="alert.resolved_at",
|
||||||
|
read_only=True,
|
||||||
|
allow_null=True,
|
||||||
|
)
|
||||||
|
alert_id = serializers.UUIDField(
|
||||||
|
source="alert.id",
|
||||||
|
read_only=True,
|
||||||
|
allow_null=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Anomaly
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"metric_name",
|
||||||
|
"metric_category",
|
||||||
|
"anomaly_type",
|
||||||
|
"severity",
|
||||||
|
"anomaly_value",
|
||||||
|
"baseline_value",
|
||||||
|
"deviation_score",
|
||||||
|
"confidence_score",
|
||||||
|
"detection_algorithm",
|
||||||
|
"time_window_start",
|
||||||
|
"time_window_end",
|
||||||
|
"alert_created",
|
||||||
|
"alert_id",
|
||||||
|
"alert_message",
|
||||||
|
"alert_resolved_at",
|
||||||
|
"detected_at",
|
||||||
|
]
|
||||||
|
read_only_fields = [
|
||||||
|
"id",
|
||||||
|
"detected_at",
|
||||||
|
"alert_id",
|
||||||
|
"alert_message",
|
||||||
|
"alert_resolved_at",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class AlertCorrelationRuleSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for alert correlation rules."""
|
||||||
|
|
||||||
|
incident_severity = RichChoiceSerializerField(
|
||||||
|
choice_group="severity_levels",
|
||||||
|
domain="core",
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = AlertCorrelationRule
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"rule_name",
|
||||||
|
"rule_description",
|
||||||
|
"min_alerts_required",
|
||||||
|
"time_window_minutes",
|
||||||
|
"incident_severity",
|
||||||
|
"incident_title_template",
|
||||||
|
"is_active",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "created_at", "updated_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class CleanupJobLogSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for cleanup job logs."""
|
||||||
|
|
||||||
|
status = RichChoiceSerializerField(
|
||||||
|
choice_group="cleanup_job_statuses",
|
||||||
|
domain="core",
|
||||||
|
)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = CleanupJobLog
|
||||||
|
fields = [
|
||||||
|
"id",
|
||||||
|
"job_name",
|
||||||
|
"status",
|
||||||
|
"records_processed",
|
||||||
|
"records_deleted",
|
||||||
|
"error_message",
|
||||||
|
"duration_ms",
|
||||||
|
"executed_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["id", "executed_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class DataRetentionStatsSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for data retention statistics view."""
|
||||||
|
|
||||||
|
table_name = serializers.CharField()
|
||||||
|
total_records = serializers.IntegerField()
|
||||||
|
last_7_days = serializers.IntegerField()
|
||||||
|
last_30_days = serializers.IntegerField()
|
||||||
|
oldest_record = serializers.DateTimeField(allow_null=True)
|
||||||
|
newest_record = serializers.DateTimeField(allow_null=True)
|
||||||
|
table_size = serializers.CharField()
|
||||||
351
backend/apps/core/api/observability_views.py
Normal file
351
backend/apps/core/api/observability_views.py
Normal file
@@ -0,0 +1,351 @@
|
|||||||
|
"""
|
||||||
|
ViewSets and Views for observability API endpoints.
|
||||||
|
|
||||||
|
Provides CRUD operations for PipelineError, read-only access for
|
||||||
|
Anomaly, AlertCorrelationRule, CleanupJobLog, and aggregated views
|
||||||
|
for DataRetentionStats.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.db import connection
|
||||||
|
from django.db.models import Count, Max, Min
|
||||||
|
from django.db.models.functions import Coalesce
|
||||||
|
from django.utils import timezone
|
||||||
|
from django_filters.rest_framework import DjangoFilterBackend
|
||||||
|
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||||
|
from rest_framework import status, viewsets
|
||||||
|
from rest_framework.decorators import action
|
||||||
|
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||||
|
from rest_framework.permissions import IsAdminUser
|
||||||
|
from rest_framework.response import Response
|
||||||
|
from rest_framework.views import APIView
|
||||||
|
|
||||||
|
from apps.core.models import (
|
||||||
|
AlertCorrelationRule,
|
||||||
|
Anomaly,
|
||||||
|
CleanupJobLog,
|
||||||
|
PipelineError,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .observability_serializers import (
|
||||||
|
AlertCorrelationRuleSerializer,
|
||||||
|
AnomalySerializer,
|
||||||
|
CleanupJobLogSerializer,
|
||||||
|
DataRetentionStatsSerializer,
|
||||||
|
PipelineErrorResolveSerializer,
|
||||||
|
PipelineErrorSerializer,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List pipeline errors",
|
||||||
|
description="Get all pipeline errors, optionally filtered by severity or resolved status.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get pipeline error",
|
||||||
|
description="Get details of a specific pipeline error.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
create=extend_schema(
|
||||||
|
summary="Create pipeline error",
|
||||||
|
description="Create a new pipeline error.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
update=extend_schema(
|
||||||
|
summary="Update pipeline error",
|
||||||
|
description="Update an existing pipeline error.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
partial_update=extend_schema(
|
||||||
|
summary="Partial update pipeline error",
|
||||||
|
description="Partially update an existing pipeline error.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
destroy=extend_schema(
|
||||||
|
summary="Delete pipeline error",
|
||||||
|
description="Delete a pipeline error.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class PipelineErrorViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for managing pipeline errors.
|
||||||
|
|
||||||
|
Provides CRUD operations plus a resolve action for marking errors as resolved.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = PipelineError.objects.select_related("resolved_by").all()
|
||||||
|
serializer_class = PipelineErrorSerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["severity", "function_name", "resolved", "error_code"]
|
||||||
|
search_fields = ["error_message", "function_name", "error_code"]
|
||||||
|
ordering_fields = ["occurred_at", "severity"]
|
||||||
|
ordering = ["-occurred_at"]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
|
||||||
|
# Date range filtering
|
||||||
|
start_date = self.request.query_params.get("start_date")
|
||||||
|
end_date = self.request.query_params.get("end_date")
|
||||||
|
|
||||||
|
if start_date:
|
||||||
|
queryset = queryset.filter(occurred_at__gte=start_date)
|
||||||
|
if end_date:
|
||||||
|
queryset = queryset.filter(occurred_at__lte=end_date)
|
||||||
|
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Resolve pipeline error",
|
||||||
|
description="Mark a pipeline error as resolved.",
|
||||||
|
request=PipelineErrorResolveSerializer,
|
||||||
|
responses={200: PipelineErrorSerializer},
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
)
|
||||||
|
@action(detail=True, methods=["post"])
|
||||||
|
def resolve(self, request, pk=None):
|
||||||
|
"""Mark a pipeline error as resolved."""
|
||||||
|
error = self.get_object()
|
||||||
|
|
||||||
|
if error.resolved:
|
||||||
|
return Response(
|
||||||
|
{"detail": "Error is already resolved"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
serializer = PipelineErrorResolveSerializer(data=request.data)
|
||||||
|
serializer.is_valid(raise_exception=True)
|
||||||
|
|
||||||
|
error.resolved = True
|
||||||
|
error.resolved_at = timezone.now()
|
||||||
|
error.resolved_by = request.user
|
||||||
|
error.resolution_notes = serializer.validated_data.get("resolution_notes", "")
|
||||||
|
error.save()
|
||||||
|
|
||||||
|
return Response(PipelineErrorSerializer(error).data)
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List recent anomalies",
|
||||||
|
description="Get recent anomalies with optional filtering by severity or type.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get anomaly details",
|
||||||
|
description="Get details of a specific anomaly.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class AnomalyViewSet(viewsets.ReadOnlyModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for viewing detected anomalies.
|
||||||
|
|
||||||
|
Provides read-only access to anomaly data with filtering options.
|
||||||
|
This serves as the recent_anomalies_view endpoint.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = Anomaly.objects.select_related("alert").all()
|
||||||
|
serializer_class = AnomalySerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["severity", "anomaly_type", "metric_category", "alert_created"]
|
||||||
|
search_fields = ["metric_name", "metric_category"]
|
||||||
|
ordering_fields = ["detected_at", "severity", "deviation_score"]
|
||||||
|
ordering = ["-detected_at"]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
|
||||||
|
# Date range filtering
|
||||||
|
start_date = self.request.query_params.get("start_date")
|
||||||
|
end_date = self.request.query_params.get("end_date")
|
||||||
|
|
||||||
|
if start_date:
|
||||||
|
queryset = queryset.filter(detected_at__gte=start_date)
|
||||||
|
if end_date:
|
||||||
|
queryset = queryset.filter(detected_at__lte=end_date)
|
||||||
|
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List alert correlations",
|
||||||
|
description="Get all alert correlation rules with optional filtering.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get alert correlation rule",
|
||||||
|
description="Get details of a specific alert correlation rule.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
create=extend_schema(
|
||||||
|
summary="Create alert correlation rule",
|
||||||
|
description="Create a new alert correlation rule.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
update=extend_schema(
|
||||||
|
summary="Update alert correlation rule",
|
||||||
|
description="Update an existing alert correlation rule.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
partial_update=extend_schema(
|
||||||
|
summary="Partial update alert correlation rule",
|
||||||
|
description="Partially update an existing alert correlation rule.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
destroy=extend_schema(
|
||||||
|
summary="Delete alert correlation rule",
|
||||||
|
description="Delete an alert correlation rule.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class AlertCorrelationViewSet(viewsets.ModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for managing alert correlation rules.
|
||||||
|
|
||||||
|
Provides CRUD operations for configuring how alerts are correlated.
|
||||||
|
This serves as the alert_correlations_view endpoint.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = AlertCorrelationRule.objects.all()
|
||||||
|
serializer_class = AlertCorrelationRuleSerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["is_active", "incident_severity"]
|
||||||
|
search_fields = ["rule_name", "rule_description"]
|
||||||
|
ordering_fields = ["rule_name", "created_at"]
|
||||||
|
ordering = ["rule_name"]
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema_view(
|
||||||
|
list=extend_schema(
|
||||||
|
summary="List cleanup job logs",
|
||||||
|
description="Get all cleanup job logs with optional filtering by status.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
retrieve=extend_schema(
|
||||||
|
summary="Get cleanup job log",
|
||||||
|
description="Get details of a specific cleanup job log entry.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
class CleanupJobLogViewSet(viewsets.ReadOnlyModelViewSet):
|
||||||
|
"""
|
||||||
|
ViewSet for viewing cleanup job logs.
|
||||||
|
|
||||||
|
Provides read-only access to cleanup job execution history.
|
||||||
|
"""
|
||||||
|
|
||||||
|
queryset = CleanupJobLog.objects.all()
|
||||||
|
serializer_class = CleanupJobLogSerializer
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||||
|
filterset_fields = ["status", "job_name"]
|
||||||
|
search_fields = ["job_name", "error_message"]
|
||||||
|
ordering_fields = ["executed_at", "duration_ms"]
|
||||||
|
ordering = ["-executed_at"]
|
||||||
|
|
||||||
|
def get_queryset(self):
|
||||||
|
queryset = super().get_queryset()
|
||||||
|
|
||||||
|
# Date range filtering
|
||||||
|
start_date = self.request.query_params.get("start_date")
|
||||||
|
end_date = self.request.query_params.get("end_date")
|
||||||
|
|
||||||
|
if start_date:
|
||||||
|
queryset = queryset.filter(executed_at__gte=start_date)
|
||||||
|
if end_date:
|
||||||
|
queryset = queryset.filter(executed_at__lte=end_date)
|
||||||
|
|
||||||
|
return queryset
|
||||||
|
|
||||||
|
|
||||||
|
@extend_schema(
|
||||||
|
summary="Get data retention stats",
|
||||||
|
description="Get aggregated data retention statistics for monitoring database growth.",
|
||||||
|
tags=["Admin - Observability"],
|
||||||
|
responses={200: DataRetentionStatsSerializer(many=True)},
|
||||||
|
)
|
||||||
|
class DataRetentionStatsView(APIView):
|
||||||
|
"""
|
||||||
|
API view for data retention statistics.
|
||||||
|
|
||||||
|
Returns aggregated statistics about table sizes, record counts,
|
||||||
|
and data age for monitoring data retention and growth.
|
||||||
|
"""
|
||||||
|
|
||||||
|
permission_classes = [IsAdminUser]
|
||||||
|
|
||||||
|
def get(self, request):
|
||||||
|
"""Get data retention statistics for key tables."""
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from django.apps import apps
|
||||||
|
|
||||||
|
now = timezone.now()
|
||||||
|
seven_days_ago = now - timedelta(days=7)
|
||||||
|
thirty_days_ago = now - timedelta(days=30)
|
||||||
|
|
||||||
|
# Tables to report on
|
||||||
|
tables_to_check = [
|
||||||
|
("core", "pipelineerror", "occurred_at"),
|
||||||
|
("core", "applicationerror", "created_at"),
|
||||||
|
("core", "systemalert", "created_at"),
|
||||||
|
("core", "requestmetadata", "created_at"),
|
||||||
|
("core", "anomaly", "detected_at"),
|
||||||
|
("core", "cleanupjoblog", "executed_at"),
|
||||||
|
("moderation", "editsubmission", "created_at"),
|
||||||
|
("moderation", "moderationauditlog", "created_at"),
|
||||||
|
("notifications", "notificationlog", "created_at"),
|
||||||
|
]
|
||||||
|
|
||||||
|
stats = []
|
||||||
|
for app_label, model_name, date_field in tables_to_check:
|
||||||
|
try:
|
||||||
|
model = apps.get_model(app_label, model_name)
|
||||||
|
filter_kwargs_7d = {f"{date_field}__gte": seven_days_ago}
|
||||||
|
filter_kwargs_30d = {f"{date_field}__gte": thirty_days_ago}
|
||||||
|
|
||||||
|
# Get record counts and date ranges
|
||||||
|
qs = model.objects.aggregate(
|
||||||
|
total=Coalesce(Count("id"), 0),
|
||||||
|
last_7_days=Coalesce(Count("id", filter=model.objects.filter(**filter_kwargs_7d).query.where), 0),
|
||||||
|
last_30_days=Coalesce(Count("id", filter=model.objects.filter(**filter_kwargs_30d).query.where), 0),
|
||||||
|
oldest_record=Min(date_field),
|
||||||
|
newest_record=Max(date_field),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get table size from database
|
||||||
|
table_name = model._meta.db_table
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT pg_size_pretty(pg_total_relation_size(%s))
|
||||||
|
""",
|
||||||
|
[table_name],
|
||||||
|
)
|
||||||
|
result = cursor.fetchone()
|
||||||
|
table_size = result[0] if result else "Unknown"
|
||||||
|
|
||||||
|
stats.append(
|
||||||
|
{
|
||||||
|
"table_name": table_name,
|
||||||
|
"total_records": model.objects.count(),
|
||||||
|
"last_7_days": model.objects.filter(**filter_kwargs_7d).count(),
|
||||||
|
"last_30_days": model.objects.filter(**filter_kwargs_30d).count(),
|
||||||
|
"oldest_record": qs.get("oldest_record"),
|
||||||
|
"newest_record": qs.get("newest_record"),
|
||||||
|
"table_size": table_size,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
# Skip tables that don't exist or have errors
|
||||||
|
continue
|
||||||
|
|
||||||
|
serializer = DataRetentionStatsSerializer(stats, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
@@ -15,7 +15,7 @@ Key Components:
|
|||||||
from .base import ChoiceCategory, ChoiceGroup, RichChoice
|
from .base import ChoiceCategory, ChoiceGroup, RichChoice
|
||||||
from .fields import RichChoiceField
|
from .fields import RichChoiceField
|
||||||
from .registry import ChoiceRegistry, register_choices
|
from .registry import ChoiceRegistry, register_choices
|
||||||
from .serializers import RichChoiceOptionSerializer, RichChoiceSerializer
|
from .serializers import RichChoiceOptionSerializer, RichChoiceSerializer, RichChoiceSerializerField
|
||||||
from .utils import get_choice_display, validate_choice_value
|
from .utils import get_choice_display, validate_choice_value
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
@@ -26,6 +26,7 @@ __all__ = [
|
|||||||
"register_choices",
|
"register_choices",
|
||||||
"RichChoiceField",
|
"RichChoiceField",
|
||||||
"RichChoiceSerializer",
|
"RichChoiceSerializer",
|
||||||
|
"RichChoiceSerializerField",
|
||||||
"RichChoiceOptionSerializer",
|
"RichChoiceOptionSerializer",
|
||||||
"validate_choice_value",
|
"validate_choice_value",
|
||||||
"get_choice_display",
|
"get_choice_display",
|
||||||
|
|||||||
@@ -2,7 +2,8 @@
|
|||||||
Core System Rich Choice Objects
|
Core System Rich Choice Objects
|
||||||
|
|
||||||
This module defines all choice objects for core system functionality,
|
This module defines all choice objects for core system functionality,
|
||||||
including health checks, API statuses, and other system-level choices.
|
including health checks, API statuses, severity levels, alert types,
|
||||||
|
and other system-level choices.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from .base import ChoiceCategory, RichChoice
|
from .base import ChoiceCategory, RichChoice
|
||||||
@@ -124,6 +125,584 @@ ENTITY_TYPES = [
|
|||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Severity Levels (used by ApplicationError, SystemAlert, Incident, RequestMetadata)
|
||||||
|
# ============================================================================
|
||||||
|
SEVERITY_LEVELS = [
|
||||||
|
RichChoice(
|
||||||
|
value="critical",
|
||||||
|
label="Critical",
|
||||||
|
description="Critical issue requiring immediate attention",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "alert-octagon",
|
||||||
|
"css_class": "bg-red-100 text-red-800 border-red-300",
|
||||||
|
"sort_order": 1,
|
||||||
|
"priority": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="high",
|
||||||
|
label="High",
|
||||||
|
description="High priority issue",
|
||||||
|
metadata={
|
||||||
|
"color": "orange",
|
||||||
|
"icon": "alert-triangle",
|
||||||
|
"css_class": "bg-orange-100 text-orange-800 border-orange-300",
|
||||||
|
"sort_order": 2,
|
||||||
|
"priority": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="medium",
|
||||||
|
label="Medium",
|
||||||
|
description="Medium priority issue",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "info",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-300",
|
||||||
|
"sort_order": 3,
|
||||||
|
"priority": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="low",
|
||||||
|
label="Low",
|
||||||
|
description="Low priority issue",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "info",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800 border-blue-300",
|
||||||
|
"sort_order": 4,
|
||||||
|
"priority": 4,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Extended severity levels including debug/info/warning/error for RequestMetadata
|
||||||
|
REQUEST_SEVERITY_LEVELS = [
|
||||||
|
RichChoice(
|
||||||
|
value="debug",
|
||||||
|
label="Debug",
|
||||||
|
description="Debug-level information",
|
||||||
|
metadata={
|
||||||
|
"color": "gray",
|
||||||
|
"icon": "bug",
|
||||||
|
"css_class": "bg-gray-100 text-gray-800",
|
||||||
|
"sort_order": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="info",
|
||||||
|
label="Info",
|
||||||
|
description="Informational message",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "info",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800",
|
||||||
|
"sort_order": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="warning",
|
||||||
|
label="Warning",
|
||||||
|
description="Warning condition",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "alert-triangle",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800",
|
||||||
|
"sort_order": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="error",
|
||||||
|
label="Error",
|
||||||
|
description="Error condition",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "x-circle",
|
||||||
|
"css_class": "bg-red-100 text-red-800",
|
||||||
|
"sort_order": 4,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="critical",
|
||||||
|
label="Critical",
|
||||||
|
description="Critical error requiring immediate attention",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "alert-octagon",
|
||||||
|
"css_class": "bg-red-200 text-red-900 font-bold",
|
||||||
|
"sort_order": 5,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Error/Request Sources
|
||||||
|
# ============================================================================
|
||||||
|
ERROR_SOURCES = [
|
||||||
|
RichChoice(
|
||||||
|
value="frontend",
|
||||||
|
label="Frontend",
|
||||||
|
description="Error originated from frontend application",
|
||||||
|
metadata={
|
||||||
|
"color": "purple",
|
||||||
|
"icon": "monitor",
|
||||||
|
"css_class": "bg-purple-100 text-purple-800",
|
||||||
|
"sort_order": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="backend",
|
||||||
|
label="Backend",
|
||||||
|
description="Error originated from backend server",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "server",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800",
|
||||||
|
"sort_order": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="api",
|
||||||
|
label="API",
|
||||||
|
description="Error originated from API layer",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "code",
|
||||||
|
"css_class": "bg-green-100 text-green-800",
|
||||||
|
"sort_order": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# System Alert Types
|
||||||
|
# ============================================================================
|
||||||
|
SYSTEM_ALERT_TYPES = [
|
||||||
|
RichChoice(
|
||||||
|
value="orphaned_images",
|
||||||
|
label="Orphaned Images",
|
||||||
|
description="Images not associated with any entity",
|
||||||
|
metadata={"color": "orange", "icon": "image", "sort_order": 1},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="stale_submissions",
|
||||||
|
label="Stale Submissions",
|
||||||
|
description="Submissions pending for too long",
|
||||||
|
metadata={"color": "yellow", "icon": "clock", "sort_order": 2},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="circular_dependency",
|
||||||
|
label="Circular Dependency",
|
||||||
|
description="Detected circular reference in data",
|
||||||
|
metadata={"color": "red", "icon": "refresh-cw", "sort_order": 3},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="validation_error",
|
||||||
|
label="Validation Error",
|
||||||
|
description="Data validation failure",
|
||||||
|
metadata={"color": "red", "icon": "alert-circle", "sort_order": 4},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="ban_attempt",
|
||||||
|
label="Ban Attempt",
|
||||||
|
description="User ban action was triggered",
|
||||||
|
metadata={"color": "red", "icon": "shield-off", "sort_order": 5},
|
||||||
|
category=ChoiceCategory.SECURITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="upload_timeout",
|
||||||
|
label="Upload Timeout",
|
||||||
|
description="File upload exceeded time limit",
|
||||||
|
metadata={"color": "orange", "icon": "upload-cloud", "sort_order": 6},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="high_error_rate",
|
||||||
|
label="High Error Rate",
|
||||||
|
description="Elevated error rate detected",
|
||||||
|
metadata={"color": "red", "icon": "trending-up", "sort_order": 7},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="database_connection",
|
||||||
|
label="Database Connection",
|
||||||
|
description="Database connectivity issue",
|
||||||
|
metadata={"color": "red", "icon": "database", "sort_order": 8},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="memory_usage",
|
||||||
|
label="Memory Usage",
|
||||||
|
description="High memory consumption detected",
|
||||||
|
metadata={"color": "orange", "icon": "cpu", "sort_order": 9},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="queue_backup",
|
||||||
|
label="Queue Backup",
|
||||||
|
description="Task queue is backing up",
|
||||||
|
metadata={"color": "yellow", "icon": "layers", "sort_order": 10},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Metric Types for Rate Limiting
|
||||||
|
# ============================================================================
|
||||||
|
METRIC_TYPES = [
|
||||||
|
RichChoice(
|
||||||
|
value="block_rate",
|
||||||
|
label="Block Rate",
|
||||||
|
description="Percentage of requests being blocked",
|
||||||
|
metadata={"color": "red", "icon": "shield", "sort_order": 1},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="total_requests",
|
||||||
|
label="Total Requests",
|
||||||
|
description="Total number of requests",
|
||||||
|
metadata={"color": "blue", "icon": "activity", "sort_order": 2},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="unique_ips",
|
||||||
|
label="Unique IPs",
|
||||||
|
description="Number of unique IP addresses",
|
||||||
|
metadata={"color": "purple", "icon": "globe", "sort_order": 3},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="function_specific",
|
||||||
|
label="Function Specific",
|
||||||
|
description="Metrics for a specific function",
|
||||||
|
metadata={"color": "green", "icon": "code", "sort_order": 4},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Incident Statuses
|
||||||
|
# ============================================================================
|
||||||
|
INCIDENT_STATUSES = [
|
||||||
|
RichChoice(
|
||||||
|
value="open",
|
||||||
|
label="Open",
|
||||||
|
description="Incident is open and awaiting investigation",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "alert-circle",
|
||||||
|
"css_class": "bg-red-100 text-red-800",
|
||||||
|
"sort_order": 1,
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="investigating",
|
||||||
|
label="Investigating",
|
||||||
|
description="Incident is being actively investigated",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "search",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800",
|
||||||
|
"sort_order": 2,
|
||||||
|
"is_active": True,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="resolved",
|
||||||
|
label="Resolved",
|
||||||
|
description="Incident has been resolved",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "check-circle",
|
||||||
|
"css_class": "bg-green-100 text-green-800",
|
||||||
|
"sort_order": 3,
|
||||||
|
"is_active": False,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="closed",
|
||||||
|
label="Closed",
|
||||||
|
description="Incident is closed",
|
||||||
|
metadata={
|
||||||
|
"color": "gray",
|
||||||
|
"icon": "x-circle",
|
||||||
|
"css_class": "bg-gray-100 text-gray-800",
|
||||||
|
"sort_order": 4,
|
||||||
|
"is_active": False,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Alert Sources
|
||||||
|
# ============================================================================
|
||||||
|
ALERT_SOURCES = [
|
||||||
|
RichChoice(
|
||||||
|
value="system",
|
||||||
|
label="System Alert",
|
||||||
|
description="Alert from system monitoring",
|
||||||
|
metadata={"color": "blue", "icon": "server", "sort_order": 1},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="rate_limit",
|
||||||
|
label="Rate Limit Alert",
|
||||||
|
description="Alert from rate limiting system",
|
||||||
|
metadata={"color": "orange", "icon": "shield", "sort_order": 2},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Pipeline Error Severities
|
||||||
|
# ============================================================================
|
||||||
|
PIPELINE_ERROR_SEVERITIES = [
|
||||||
|
RichChoice(
|
||||||
|
value="critical",
|
||||||
|
label="Critical",
|
||||||
|
description="Critical pipeline failure requiring immediate attention",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "alert-octagon",
|
||||||
|
"css_class": "bg-red-100 text-red-800 border-red-300",
|
||||||
|
"sort_order": 1,
|
||||||
|
"priority": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="error",
|
||||||
|
label="Error",
|
||||||
|
description="Pipeline error that needs investigation",
|
||||||
|
metadata={
|
||||||
|
"color": "orange",
|
||||||
|
"icon": "alert-triangle",
|
||||||
|
"css_class": "bg-orange-100 text-orange-800 border-orange-300",
|
||||||
|
"sort_order": 2,
|
||||||
|
"priority": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="warning",
|
||||||
|
label="Warning",
|
||||||
|
description="Pipeline warning that may need attention",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "alert-circle",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-300",
|
||||||
|
"sort_order": 3,
|
||||||
|
"priority": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="info",
|
||||||
|
label="Info",
|
||||||
|
description="Informational pipeline event",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "info",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800 border-blue-300",
|
||||||
|
"sort_order": 4,
|
||||||
|
"priority": 4,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.PRIORITY,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Anomaly Types
|
||||||
|
# ============================================================================
|
||||||
|
ANOMALY_TYPES = [
|
||||||
|
RichChoice(
|
||||||
|
value="spike",
|
||||||
|
label="Spike",
|
||||||
|
description="Sudden increase in metric value",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "trending-up",
|
||||||
|
"css_class": "bg-red-100 text-red-800",
|
||||||
|
"sort_order": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="drop",
|
||||||
|
label="Drop",
|
||||||
|
description="Sudden decrease in metric value",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "trending-down",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800",
|
||||||
|
"sort_order": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="trend_change",
|
||||||
|
label="Trend Change",
|
||||||
|
description="Change in the overall trend direction",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "activity",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800",
|
||||||
|
"sort_order": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="outlier",
|
||||||
|
label="Outlier",
|
||||||
|
description="Value outside normal distribution",
|
||||||
|
metadata={
|
||||||
|
"color": "purple",
|
||||||
|
"icon": "git-branch",
|
||||||
|
"css_class": "bg-purple-100 text-purple-800",
|
||||||
|
"sort_order": 4,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="threshold_breach",
|
||||||
|
label="Threshold Breach",
|
||||||
|
description="Value exceeded configured threshold",
|
||||||
|
metadata={
|
||||||
|
"color": "orange",
|
||||||
|
"icon": "alert-triangle",
|
||||||
|
"css_class": "bg-orange-100 text-orange-800",
|
||||||
|
"sort_order": 5,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.TECHNICAL,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Cleanup Job Statuses
|
||||||
|
# ============================================================================
|
||||||
|
CLEANUP_JOB_STATUSES = [
|
||||||
|
RichChoice(
|
||||||
|
value="success",
|
||||||
|
label="Success",
|
||||||
|
description="Cleanup job completed successfully",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "check-circle",
|
||||||
|
"css_class": "bg-green-100 text-green-800",
|
||||||
|
"sort_order": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="failed",
|
||||||
|
label="Failed",
|
||||||
|
description="Cleanup job failed with errors",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "x-circle",
|
||||||
|
"css_class": "bg-red-100 text-red-800",
|
||||||
|
"sort_order": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="partial",
|
||||||
|
label="Partial",
|
||||||
|
description="Cleanup job completed with some failures",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "alert-circle",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800",
|
||||||
|
"sort_order": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="skipped",
|
||||||
|
label="Skipped",
|
||||||
|
description="Cleanup job was skipped",
|
||||||
|
metadata={
|
||||||
|
"color": "gray",
|
||||||
|
"icon": "skip-forward",
|
||||||
|
"css_class": "bg-gray-100 text-gray-800",
|
||||||
|
"sort_order": 4,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.STATUS,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Date Precision (shared across multiple domains)
|
||||||
|
# ============================================================================
|
||||||
|
DATE_PRECISION = [
|
||||||
|
RichChoice(
|
||||||
|
value="exact",
|
||||||
|
label="Exact Date",
|
||||||
|
description="Date is known exactly",
|
||||||
|
metadata={"color": "green", "icon": "calendar", "sort_order": 1, "format": "YYYY-MM-DD"},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="month",
|
||||||
|
label="Month and Year",
|
||||||
|
description="Only month and year are known",
|
||||||
|
metadata={"color": "blue", "icon": "calendar", "sort_order": 2, "format": "YYYY-MM"},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="year",
|
||||||
|
label="Year Only",
|
||||||
|
description="Only the year is known",
|
||||||
|
metadata={"color": "yellow", "icon": "calendar", "sort_order": 3, "format": "YYYY"},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="decade",
|
||||||
|
label="Decade",
|
||||||
|
description="Only the decade is known",
|
||||||
|
metadata={"color": "orange", "icon": "calendar", "sort_order": 4, "format": "YYYYs"},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="century",
|
||||||
|
label="Century",
|
||||||
|
description="Only the century is known",
|
||||||
|
metadata={"color": "gray", "icon": "calendar", "sort_order": 5, "format": "YYc"},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="approximate",
|
||||||
|
label="Approximate",
|
||||||
|
description="Date is approximate/estimated",
|
||||||
|
metadata={"color": "gray", "icon": "help-circle", "sort_order": 6, "format": "~YYYY"},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
def register_core_choices():
|
def register_core_choices():
|
||||||
"""Register all core system choices with the global registry"""
|
"""Register all core system choices with the global registry"""
|
||||||
@@ -152,6 +731,95 @@ def register_core_choices():
|
|||||||
metadata={"domain": "core", "type": "entity_type"},
|
metadata={"domain": "core", "type": "entity_type"},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="severity_levels",
|
||||||
|
choices=SEVERITY_LEVELS,
|
||||||
|
domain="core",
|
||||||
|
description="Severity levels for errors and alerts",
|
||||||
|
metadata={"domain": "core", "type": "severity"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="request_severity_levels",
|
||||||
|
choices=REQUEST_SEVERITY_LEVELS,
|
||||||
|
domain="core",
|
||||||
|
description="Extended severity levels for request metadata",
|
||||||
|
metadata={"domain": "core", "type": "request_severity"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="error_sources",
|
||||||
|
choices=ERROR_SOURCES,
|
||||||
|
domain="core",
|
||||||
|
description="Sources of application errors",
|
||||||
|
metadata={"domain": "core", "type": "error_source"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="system_alert_types",
|
||||||
|
choices=SYSTEM_ALERT_TYPES,
|
||||||
|
domain="core",
|
||||||
|
description="Types of system alerts",
|
||||||
|
metadata={"domain": "core", "type": "alert_type"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="metric_types",
|
||||||
|
choices=METRIC_TYPES,
|
||||||
|
domain="core",
|
||||||
|
description="Types of rate limit metrics",
|
||||||
|
metadata={"domain": "core", "type": "metric_type"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="incident_statuses",
|
||||||
|
choices=INCIDENT_STATUSES,
|
||||||
|
domain="core",
|
||||||
|
description="Incident status options",
|
||||||
|
metadata={"domain": "core", "type": "incident_status"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="alert_sources",
|
||||||
|
choices=ALERT_SOURCES,
|
||||||
|
domain="core",
|
||||||
|
description="Sources of alerts",
|
||||||
|
metadata={"domain": "core", "type": "alert_source"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="pipeline_error_severities",
|
||||||
|
choices=PIPELINE_ERROR_SEVERITIES,
|
||||||
|
domain="core",
|
||||||
|
description="Severity levels for pipeline errors",
|
||||||
|
metadata={"domain": "core", "type": "pipeline_error_severity"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="anomaly_types",
|
||||||
|
choices=ANOMALY_TYPES,
|
||||||
|
domain="core",
|
||||||
|
description="Types of detected anomalies",
|
||||||
|
metadata={"domain": "core", "type": "anomaly_type"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="cleanup_job_statuses",
|
||||||
|
choices=CLEANUP_JOB_STATUSES,
|
||||||
|
domain="core",
|
||||||
|
description="Status options for cleanup jobs",
|
||||||
|
metadata={"domain": "core", "type": "cleanup_job_status"},
|
||||||
|
)
|
||||||
|
|
||||||
|
register_choices(
|
||||||
|
name="date_precision",
|
||||||
|
choices=DATE_PRECISION,
|
||||||
|
domain="core",
|
||||||
|
description="Date precision options",
|
||||||
|
metadata={"domain": "core", "type": "date_precision"},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
# Auto-register choices when module is imported
|
# Auto-register choices when module is imported
|
||||||
register_core_choices()
|
register_core_choices()
|
||||||
|
|
||||||
|
|||||||
133
backend/apps/core/choices/filters.py
Normal file
133
backend/apps/core/choices/filters.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
"""
|
||||||
|
Django-filter Integration for Rich Choices
|
||||||
|
|
||||||
|
This module provides django-filter compatible filter classes that integrate
|
||||||
|
with the RichChoice registry system.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from django_filters import ChoiceFilter, MultipleChoiceFilter
|
||||||
|
|
||||||
|
from .registry import registry
|
||||||
|
|
||||||
|
|
||||||
|
class RichChoiceFilter(ChoiceFilter):
|
||||||
|
"""
|
||||||
|
Django-filter ChoiceFilter that uses the RichChoice registry.
|
||||||
|
|
||||||
|
This is the REQUIRED replacement for ChoiceFilter with inline choices.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
class MyFilterSet(django_filters.FilterSet):
|
||||||
|
status = RichChoiceFilter(
|
||||||
|
choice_group="ticket_statuses",
|
||||||
|
domain="support",
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
choice_group: str,
|
||||||
|
domain: str = "core",
|
||||||
|
allow_deprecated: bool = False,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize the filter.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
choice_group: Name of the choice group in the registry
|
||||||
|
domain: Domain namespace for the choice group
|
||||||
|
allow_deprecated: Whether to include deprecated choices
|
||||||
|
**kwargs: Additional arguments passed to ChoiceFilter
|
||||||
|
"""
|
||||||
|
self.choice_group = choice_group
|
||||||
|
self.domain = domain
|
||||||
|
self.allow_deprecated = allow_deprecated
|
||||||
|
|
||||||
|
# Get choices from registry
|
||||||
|
if allow_deprecated:
|
||||||
|
choices_list = registry.get_choices(choice_group, domain)
|
||||||
|
else:
|
||||||
|
choices_list = registry.get_active_choices(choice_group, domain)
|
||||||
|
|
||||||
|
choices = [(c.value, c.label) for c in choices_list]
|
||||||
|
|
||||||
|
super().__init__(choices=choices, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class RichMultipleChoiceFilter(MultipleChoiceFilter):
|
||||||
|
"""
|
||||||
|
Django-filter MultipleChoiceFilter that uses the RichChoice registry.
|
||||||
|
|
||||||
|
This is the REQUIRED replacement for MultipleChoiceFilter with inline choices.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
class MyFilterSet(django_filters.FilterSet):
|
||||||
|
statuses = RichMultipleChoiceFilter(
|
||||||
|
choice_group="ticket_statuses",
|
||||||
|
domain="support",
|
||||||
|
field_name="status",
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
choice_group: str,
|
||||||
|
domain: str = "core",
|
||||||
|
allow_deprecated: bool = False,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize the filter.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
choice_group: Name of the choice group in the registry
|
||||||
|
domain: Domain namespace for the choice group
|
||||||
|
allow_deprecated: Whether to include deprecated choices
|
||||||
|
**kwargs: Additional arguments passed to MultipleChoiceFilter
|
||||||
|
"""
|
||||||
|
self.choice_group = choice_group
|
||||||
|
self.domain = domain
|
||||||
|
self.allow_deprecated = allow_deprecated
|
||||||
|
|
||||||
|
# Get choices from registry
|
||||||
|
if allow_deprecated:
|
||||||
|
choices_list = registry.get_choices(choice_group, domain)
|
||||||
|
else:
|
||||||
|
choices_list = registry.get_active_choices(choice_group, domain)
|
||||||
|
|
||||||
|
choices = [(c.value, c.label) for c in choices_list]
|
||||||
|
|
||||||
|
super().__init__(choices=choices, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def get_choice_filter_class(
|
||||||
|
choice_group: str,
|
||||||
|
domain: str = "core",
|
||||||
|
allow_deprecated: bool = False,
|
||||||
|
**extra_kwargs: Any
|
||||||
|
) -> type[RichChoiceFilter]:
|
||||||
|
"""
|
||||||
|
Factory function to create a RichChoiceFilter class with preset choices.
|
||||||
|
|
||||||
|
Useful when you need to define the filter class dynamically or
|
||||||
|
when the choice_group/domain aren't available at class definition time.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
StatusFilter = get_choice_filter_class("ticket_statuses", "support")
|
||||||
|
|
||||||
|
class MyFilterSet(django_filters.FilterSet):
|
||||||
|
status = StatusFilter()
|
||||||
|
"""
|
||||||
|
class DynamicRichChoiceFilter(RichChoiceFilter):
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
kwargs.setdefault("choice_group", choice_group)
|
||||||
|
kwargs.setdefault("domain", domain)
|
||||||
|
kwargs.setdefault("allow_deprecated", allow_deprecated)
|
||||||
|
for key, value in extra_kwargs.items():
|
||||||
|
kwargs.setdefault(key, value)
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
|
||||||
|
return DynamicRichChoiceFilter
|
||||||
@@ -265,3 +265,98 @@ def serialize_choice_value(value: str, choice_group: str, domain: str = "core",
|
|||||||
}
|
}
|
||||||
else:
|
else:
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class RichChoiceSerializerField(serializers.ChoiceField):
|
||||||
|
"""
|
||||||
|
DRF serializer field for RichChoice values.
|
||||||
|
|
||||||
|
This field validates input against the RichChoice registry and provides
|
||||||
|
type-safe choice handling with proper error messages. It is the REQUIRED
|
||||||
|
replacement for serializers.ChoiceField with inline choices.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
class MySerializer(serializers.Serializer):
|
||||||
|
status = RichChoiceSerializerField(
|
||||||
|
choice_group="ticket_statuses",
|
||||||
|
domain="support",
|
||||||
|
)
|
||||||
|
|
||||||
|
# With rich metadata in output
|
||||||
|
severity = RichChoiceSerializerField(
|
||||||
|
choice_group="severity_levels",
|
||||||
|
domain="core",
|
||||||
|
include_metadata=True,
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
choice_group: str,
|
||||||
|
domain: str = "core",
|
||||||
|
include_metadata: bool = False,
|
||||||
|
allow_deprecated: bool = False,
|
||||||
|
**kwargs
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize the serializer field.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
choice_group: Name of the choice group in the registry
|
||||||
|
domain: Domain namespace for the choice group
|
||||||
|
include_metadata: Whether to include rich choice metadata in output
|
||||||
|
allow_deprecated: Whether to allow deprecated choices
|
||||||
|
**kwargs: Additional arguments passed to ChoiceField
|
||||||
|
"""
|
||||||
|
self.choice_group = choice_group
|
||||||
|
self.domain = domain
|
||||||
|
self.include_metadata = include_metadata
|
||||||
|
self.allow_deprecated = allow_deprecated
|
||||||
|
|
||||||
|
# Get choices from registry for validation
|
||||||
|
if allow_deprecated:
|
||||||
|
choices_list = registry.get_choices(choice_group, domain)
|
||||||
|
else:
|
||||||
|
choices_list = registry.get_active_choices(choice_group, domain)
|
||||||
|
|
||||||
|
# Build choices tuple for DRF ChoiceField
|
||||||
|
choices = [(c.value, c.label) for c in choices_list]
|
||||||
|
|
||||||
|
# Store valid values for error messages
|
||||||
|
self._valid_values = [c.value for c in choices_list]
|
||||||
|
|
||||||
|
super().__init__(choices=choices, **kwargs)
|
||||||
|
|
||||||
|
def to_representation(self, value: str) -> Any:
|
||||||
|
"""Convert choice value to representation."""
|
||||||
|
if not value:
|
||||||
|
return value
|
||||||
|
|
||||||
|
if self.include_metadata:
|
||||||
|
return serialize_choice_value(
|
||||||
|
value,
|
||||||
|
self.choice_group,
|
||||||
|
self.domain,
|
||||||
|
include_metadata=True
|
||||||
|
)
|
||||||
|
return value
|
||||||
|
|
||||||
|
def to_internal_value(self, data: Any) -> str:
|
||||||
|
"""Convert input data to choice value."""
|
||||||
|
# Handle rich choice object input (value dict)
|
||||||
|
if isinstance(data, dict) and "value" in data:
|
||||||
|
data = data["value"]
|
||||||
|
|
||||||
|
# Validate and return
|
||||||
|
return super().to_internal_value(data)
|
||||||
|
|
||||||
|
def fail(self, key: str, **kwargs: Any) -> None:
|
||||||
|
"""Provide better error messages with valid choices listed."""
|
||||||
|
if key == "invalid_choice":
|
||||||
|
valid_choices = ", ".join(self._valid_values)
|
||||||
|
raise serializers.ValidationError(
|
||||||
|
f"'{kwargs.get('input', '')}' is not a valid choice for {self.choice_group}. "
|
||||||
|
f"Valid choices are: {valid_choices}"
|
||||||
|
)
|
||||||
|
super().fail(key, **kwargs)
|
||||||
|
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ from django.utils import timezone
|
|||||||
|
|
||||||
from apps.parks.models import Park
|
from apps.parks.models import Park
|
||||||
from apps.rides.models import Ride
|
from apps.rides.models import Ride
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -90,7 +91,7 @@ class Command(BaseCommand):
|
|||||||
self.stdout.write(f" {item['name']} ({item['park']}) - opened: {item['date_opened']}")
|
self.stdout.write(f" {item['name']} ({item['park']}) - opened: {item['date_opened']}")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error calculating new content: {e}", exc_info=True)
|
capture_and_log(e, 'Calculate new content', source='management', severity='high')
|
||||||
raise CommandError(f"Failed to calculate new content: {e}") from None
|
raise CommandError(f"Failed to calculate new content: {e}") from None
|
||||||
|
|
||||||
def _get_new_parks(self, cutoff_date: datetime, limit: int) -> list[dict[str, Any]]:
|
def _get_new_parks(self, cutoff_date: datetime, limit: int) -> list[dict[str, Any]]:
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ from django.utils import timezone
|
|||||||
from apps.core.analytics import PageView
|
from apps.core.analytics import PageView
|
||||||
from apps.parks.models import Park
|
from apps.parks.models import Park
|
||||||
from apps.rides.models import Ride
|
from apps.rides.models import Ride
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -99,7 +100,7 @@ class Command(BaseCommand):
|
|||||||
self.stdout.write(f" {item['name']} (score: {item.get('views_change', 'N/A')})")
|
self.stdout.write(f" {item['name']} (score: {item.get('views_change', 'N/A')})")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error calculating trending content: {e}", exc_info=True)
|
capture_and_log(e, 'Calculate trending content', source='management', severity='high')
|
||||||
raise CommandError(f"Failed to calculate trending content: {e}") from None
|
raise CommandError(f"Failed to calculate trending content: {e}") from None
|
||||||
|
|
||||||
def _calculate_trending_parks(
|
def _calculate_trending_parks(
|
||||||
@@ -199,7 +200,7 @@ class Command(BaseCommand):
|
|||||||
return final_score
|
return final_score
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error calculating score for {content_type} {content_obj.id}: {e}")
|
capture_and_log(e, f'Calculate score for {content_type} {content_obj.id}', source='management', severity='medium')
|
||||||
return 0.0
|
return 0.0
|
||||||
|
|
||||||
def _calculate_view_growth_score(
|
def _calculate_view_growth_score(
|
||||||
|
|||||||
@@ -9,6 +9,8 @@ from django.conf import settings
|
|||||||
from django.db import connection
|
from django.db import connection
|
||||||
from django.utils.deprecation import MiddlewareMixin
|
from django.utils.deprecation import MiddlewareMixin
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
performance_logger = logging.getLogger("performance")
|
performance_logger = logging.getLogger("performance")
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -130,12 +132,11 @@ class PerformanceMiddleware(MiddlewareMixin):
|
|||||||
),
|
),
|
||||||
}
|
}
|
||||||
|
|
||||||
performance_logger.error(
|
capture_and_log(
|
||||||
f"Request exception: {request.method} {request.path} - "
|
exception,
|
||||||
f"{duration:.3f}s, {total_queries} queries, {type(exception).__name__}: {
|
f'Request exception: {request.method} {request.path} - {duration:.3f}s, {total_queries} queries',
|
||||||
exception
|
source='middleware',
|
||||||
}",
|
severity='high',
|
||||||
extra=performance_data,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Don't return anything - let the exception propagate normally
|
# Don't return anything - let the exception propagate normally
|
||||||
|
|||||||
@@ -19,6 +19,8 @@ from collections.abc import Callable
|
|||||||
from django.core.cache import cache
|
from django.core.cache import cache
|
||||||
from django.http import HttpRequest, HttpResponse, JsonResponse
|
from django.http import HttpRequest, HttpResponse, JsonResponse
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -37,15 +39,30 @@ class AuthRateLimitMiddleware:
|
|||||||
# Login endpoints
|
# Login endpoints
|
||||||
"/api/v1/auth/login/": {"per_minute": 5, "per_hour": 30, "per_day": 100},
|
"/api/v1/auth/login/": {"per_minute": 5, "per_hour": 30, "per_day": 100},
|
||||||
"/accounts/login/": {"per_minute": 5, "per_hour": 30, "per_day": 100},
|
"/accounts/login/": {"per_minute": 5, "per_hour": 30, "per_day": 100},
|
||||||
|
# MFA verification (strict limits - 6-digit codes have limited entropy)
|
||||||
|
"/api/v1/auth/login/mfa-verify/": {"per_minute": 5, "per_hour": 15, "per_day": 50},
|
||||||
|
"/api/v1/auth/mfa/totp/verify/": {"per_minute": 5, "per_hour": 15, "per_day": 50},
|
||||||
|
"/api/v1/auth/mfa/totp/activate/": {"per_minute": 3, "per_hour": 10, "per_day": 30},
|
||||||
|
"/api/v1/auth/mfa/totp/deactivate/": {"per_minute": 3, "per_hour": 10, "per_day": 20},
|
||||||
|
# Passkey endpoints
|
||||||
|
"/api/v1/auth/passkey/authenticate/": {"per_minute": 10, "per_hour": 30, "per_day": 100},
|
||||||
|
"/api/v1/auth/passkey/register/": {"per_minute": 5, "per_hour": 15, "per_day": 30},
|
||||||
# Signup endpoints
|
# Signup endpoints
|
||||||
"/api/v1/auth/signup/": {"per_minute": 3, "per_hour": 10, "per_day": 20},
|
"/api/v1/auth/signup/": {"per_minute": 3, "per_hour": 10, "per_day": 20},
|
||||||
"/accounts/signup/": {"per_minute": 3, "per_hour": 10, "per_day": 20},
|
"/accounts/signup/": {"per_minute": 3, "per_hour": 10, "per_day": 20},
|
||||||
# Password reset endpoints
|
# Password reset endpoints
|
||||||
"/api/v1/auth/password-reset/": {"per_minute": 2, "per_hour": 5, "per_day": 10},
|
"/api/v1/auth/password-reset/": {"per_minute": 2, "per_hour": 5, "per_day": 10},
|
||||||
"/accounts/password/reset/": {"per_minute": 2, "per_hour": 5, "per_day": 10},
|
"/accounts/password/reset/": {"per_minute": 2, "per_hour": 5, "per_day": 10},
|
||||||
|
# Password change (prevent brute force on current password)
|
||||||
|
"/api/v1/auth/password/change/": {"per_minute": 3, "per_hour": 10, "per_day": 30},
|
||||||
# Token endpoints
|
# Token endpoints
|
||||||
"/api/v1/auth/token/": {"per_minute": 10, "per_hour": 60, "per_day": 200},
|
"/api/v1/auth/token/": {"per_minute": 10, "per_hour": 60, "per_day": 200},
|
||||||
"/api/v1/auth/token/refresh/": {"per_minute": 20, "per_hour": 120, "per_day": 500},
|
"/api/v1/auth/token/refresh/": {"per_minute": 20, "per_hour": 120, "per_day": 500},
|
||||||
|
# Social account management
|
||||||
|
"/api/v1/auth/social/connect/google/": {"per_minute": 5, "per_hour": 15, "per_day": 30},
|
||||||
|
"/api/v1/auth/social/connect/discord/": {"per_minute": 5, "per_hour": 15, "per_day": 30},
|
||||||
|
"/api/v1/auth/social/disconnect/google/": {"per_minute": 5, "per_hour": 15, "per_day": 20},
|
||||||
|
"/api/v1/auth/social/disconnect/discord/": {"per_minute": 5, "per_hour": 15, "per_day": 20},
|
||||||
}
|
}
|
||||||
|
|
||||||
def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
|
def __init__(self, get_response: Callable[[HttpRequest], HttpResponse]):
|
||||||
@@ -215,7 +232,9 @@ class SecurityEventLogger:
|
|||||||
user = getattr(request, "user", None)
|
user = getattr(request, "user", None)
|
||||||
username = user.username if user and user.is_authenticated else "anonymous"
|
username = user.username if user and user.is_authenticated else "anonymous"
|
||||||
|
|
||||||
logger.error(
|
capture_and_log(
|
||||||
f"Suspicious activity detected - Type: {activity_type}, "
|
RuntimeError(f'Suspicious activity detected - Type: {activity_type}'),
|
||||||
f"IP: {client_ip}, User: {username}, Details: {details}"
|
f'Suspicious activity - IP: {client_ip}, User: {username}, Details: {details}',
|
||||||
|
source='security',
|
||||||
|
severity='high',
|
||||||
)
|
)
|
||||||
|
|||||||
76
backend/apps/core/migrations/0006_add_alert_models.py
Normal file
76
backend/apps/core/migrations/0006_add_alert_models.py
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
# Generated by Django 5.2.9 on 2026-01-06 17:00
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('core', '0005_add_application_error'),
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='RateLimitAlertConfig',
|
||||||
|
fields=[
|
||||||
|
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
('metric_type', models.CharField(choices=[('block_rate', 'Block Rate'), ('total_requests', 'Total Requests'), ('unique_ips', 'Unique IPs'), ('function_specific', 'Function Specific')], db_index=True, help_text='Type of metric to monitor', max_length=50)),
|
||||||
|
('threshold_value', models.FloatField(help_text='Threshold value that triggers alert')),
|
||||||
|
('time_window_ms', models.IntegerField(help_text='Time window in milliseconds for measurement')),
|
||||||
|
('function_name', models.CharField(blank=True, help_text='Specific function to monitor (for function_specific metric type)', max_length=100, null=True)),
|
||||||
|
('enabled', models.BooleanField(db_index=True, default=True, help_text='Whether this config is active')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'Rate Limit Alert Config',
|
||||||
|
'verbose_name_plural': 'Rate Limit Alert Configs',
|
||||||
|
'ordering': ['metric_type', '-created_at'],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='RateLimitAlert',
|
||||||
|
fields=[
|
||||||
|
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
('metric_type', models.CharField(help_text='Type of metric', max_length=50)),
|
||||||
|
('metric_value', models.FloatField(help_text='Actual value that triggered the alert')),
|
||||||
|
('threshold_value', models.FloatField(help_text='Threshold that was exceeded')),
|
||||||
|
('time_window_ms', models.IntegerField(help_text='Time window of measurement')),
|
||||||
|
('function_name', models.CharField(blank=True, help_text='Function name if applicable', max_length=100, null=True)),
|
||||||
|
('alert_message', models.TextField(help_text='Descriptive alert message')),
|
||||||
|
('resolved_at', models.DateTimeField(blank=True, db_index=True, help_text='When this alert was resolved', null=True)),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
|
||||||
|
('resolved_by', models.ForeignKey(blank=True, help_text='Admin who resolved this alert', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_rate_limit_alerts', to=settings.AUTH_USER_MODEL)),
|
||||||
|
('config', models.ForeignKey(help_text='Configuration that triggered this alert', on_delete=django.db.models.deletion.CASCADE, related_name='alerts', to='core.ratelimitalertconfig')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'Rate Limit Alert',
|
||||||
|
'verbose_name_plural': 'Rate Limit Alerts',
|
||||||
|
'ordering': ['-created_at'],
|
||||||
|
'indexes': [models.Index(fields=['metric_type', 'created_at'], name='core_rateli_metric__6fd63e_idx'), models.Index(fields=['resolved_at', 'created_at'], name='core_rateli_resolve_98c143_idx')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='SystemAlert',
|
||||||
|
fields=[
|
||||||
|
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
('alert_type', models.CharField(choices=[('orphaned_images', 'Orphaned Images'), ('stale_submissions', 'Stale Submissions'), ('circular_dependency', 'Circular Dependency'), ('validation_error', 'Validation Error'), ('ban_attempt', 'Ban Attempt'), ('upload_timeout', 'Upload Timeout'), ('high_error_rate', 'High Error Rate'), ('database_connection', 'Database Connection'), ('memory_usage', 'Memory Usage'), ('queue_backup', 'Queue Backup')], db_index=True, help_text='Type of system alert', max_length=50)),
|
||||||
|
('severity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, help_text='Alert severity level', max_length=20)),
|
||||||
|
('message', models.TextField(help_text='Human-readable alert message')),
|
||||||
|
('metadata', models.JSONField(blank=True, help_text='Additional context data for this alert', null=True)),
|
||||||
|
('resolved_at', models.DateTimeField(blank=True, db_index=True, help_text='When this alert was resolved', null=True)),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
|
||||||
|
('resolved_by', models.ForeignKey(blank=True, help_text='Admin who resolved this alert', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_system_alerts', to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'System Alert',
|
||||||
|
'verbose_name_plural': 'System Alerts',
|
||||||
|
'ordering': ['-created_at'],
|
||||||
|
'indexes': [models.Index(fields=['severity', 'created_at'], name='core_system_severit_bd3efd_idx'), models.Index(fields=['alert_type', 'created_at'], name='core_system_alert_t_10942e_idx'), models.Index(fields=['resolved_at', 'created_at'], name='core_system_resolve_9da33f_idx')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,72 @@
|
|||||||
|
# Generated by Django 5.2.9 on 2026-01-06 17:43
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('core', '0006_add_alert_models'),
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Incident',
|
||||||
|
fields=[
|
||||||
|
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
('incident_number', models.CharField(db_index=True, help_text='Auto-generated incident number (INC-YYYYMMDD-XXXX)', max_length=20, unique=True)),
|
||||||
|
('title', models.CharField(help_text='Brief description of the incident', max_length=255)),
|
||||||
|
('description', models.TextField(blank=True, help_text='Detailed description', null=True)),
|
||||||
|
('severity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, help_text='Incident severity level', max_length=20)),
|
||||||
|
('status', models.CharField(choices=[('open', 'Open'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('closed', 'Closed')], db_index=True, default='open', help_text='Current incident status', max_length=20)),
|
||||||
|
('detected_at', models.DateTimeField(auto_now_add=True, help_text='When the incident was detected')),
|
||||||
|
('acknowledged_at', models.DateTimeField(blank=True, help_text='When someone started investigating', null=True)),
|
||||||
|
('resolved_at', models.DateTimeField(blank=True, help_text='When the incident was resolved', null=True)),
|
||||||
|
('resolution_notes', models.TextField(blank=True, help_text='Notes about the resolution', null=True)),
|
||||||
|
('alert_count', models.PositiveIntegerField(default=0, help_text='Number of linked alerts')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('acknowledged_by', models.ForeignKey(blank=True, help_text='User who acknowledged the incident', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='acknowledged_incidents', to=settings.AUTH_USER_MODEL)),
|
||||||
|
('resolved_by', models.ForeignKey(blank=True, help_text='User who resolved the incident', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_incidents', to=settings.AUTH_USER_MODEL)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'Incident',
|
||||||
|
'verbose_name_plural': 'Incidents',
|
||||||
|
'ordering': ['-detected_at'],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='IncidentAlert',
|
||||||
|
fields=[
|
||||||
|
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
('alert_source', models.CharField(choices=[('system', 'System Alert'), ('rate_limit', 'Rate Limit Alert')], help_text='Source type of the alert', max_length=20)),
|
||||||
|
('alert_id', models.UUIDField(help_text='ID of the linked alert')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('incident', models.ForeignKey(help_text='The incident this alert is linked to', on_delete=django.db.models.deletion.CASCADE, related_name='linked_alerts', to='core.incident')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'Incident Alert',
|
||||||
|
'verbose_name_plural': 'Incident Alerts',
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name='incident',
|
||||||
|
index=models.Index(fields=['status', 'detected_at'], name='core_incide_status_c17ea4_idx'),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name='incident',
|
||||||
|
index=models.Index(fields=['severity', 'detected_at'], name='core_incide_severit_24b148_idx'),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name='incidentalert',
|
||||||
|
index=models.Index(fields=['alert_source', 'alert_id'], name='core_incide_alert_s_9e655c_idx'),
|
||||||
|
),
|
||||||
|
migrations.AlterUniqueTogether(
|
||||||
|
name='incidentalert',
|
||||||
|
unique_together={('incident', 'alert_source', 'alert_id')},
|
||||||
|
),
|
||||||
|
]
|
||||||
335
backend/apps/core/migrations/0008_add_analytics_models.py
Normal file
335
backend/apps/core/migrations/0008_add_analytics_models.py
Normal file
@@ -0,0 +1,335 @@
|
|||||||
|
# Generated by Django 5.1.6 on 2026-01-06 18:23
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("core", "0007_add_incident_and_report_models"),
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="pageviewevent",
|
||||||
|
name="pgh_obj",
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="pageviewevent",
|
||||||
|
name="content_type",
|
||||||
|
),
|
||||||
|
migrations.RemoveField(
|
||||||
|
model_name="pageviewevent",
|
||||||
|
name="pgh_context",
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="ApprovalTransactionMetric",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
(
|
||||||
|
"submission_id",
|
||||||
|
models.CharField(db_index=True, help_text="ID of the content submission", max_length=255),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"moderator_id",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True, help_text="ID of the moderator who processed the submission", max_length=255
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"submitter_id",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True, help_text="ID of the user who submitted the content", max_length=255
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"request_id",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Correlation request ID", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("success", models.BooleanField(db_index=True, help_text="Whether the approval was successful")),
|
||||||
|
(
|
||||||
|
"duration_ms",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Processing duration in milliseconds", null=True),
|
||||||
|
),
|
||||||
|
("items_count", models.PositiveIntegerField(default=1, help_text="Number of items processed")),
|
||||||
|
(
|
||||||
|
"rollback_triggered",
|
||||||
|
models.BooleanField(default=False, help_text="Whether a rollback was triggered"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"error_code",
|
||||||
|
models.CharField(blank=True, help_text="Error code if failed", max_length=50, null=True),
|
||||||
|
),
|
||||||
|
("error_message", models.TextField(blank=True, help_text="Error message if failed", null=True)),
|
||||||
|
("error_details", models.TextField(blank=True, help_text="Detailed error information", null=True)),
|
||||||
|
(
|
||||||
|
"created_at",
|
||||||
|
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this metric was recorded"),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Approval Transaction Metric",
|
||||||
|
"verbose_name_plural": "Approval Transaction Metrics",
|
||||||
|
"ordering": ["-created_at"],
|
||||||
|
"indexes": [
|
||||||
|
models.Index(fields=["success", "created_at"], name="core_approv_success_9c326b_idx"),
|
||||||
|
models.Index(fields=["moderator_id", "created_at"], name="core_approv_moderat_ec41ba_idx"),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="RequestMetadata",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
(
|
||||||
|
"request_id",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
help_text="Unique request identifier for correlation",
|
||||||
|
max_length=255,
|
||||||
|
unique=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"trace_id",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Distributed tracing ID", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"session_id",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="User session identifier", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"parent_request_id",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, help_text="Parent request ID for nested requests", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"action",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, help_text="Action/operation being performed", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"method",
|
||||||
|
models.CharField(blank=True, help_text="HTTP method (GET, POST, etc.)", max_length=10, null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"endpoint",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="API endpoint or URL path", max_length=500, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"request_method",
|
||||||
|
models.CharField(blank=True, help_text="HTTP request method", max_length=10, null=True),
|
||||||
|
),
|
||||||
|
("request_path", models.CharField(blank=True, help_text="Request URL path", max_length=500, null=True)),
|
||||||
|
(
|
||||||
|
"affected_route",
|
||||||
|
models.CharField(blank=True, help_text="Frontend route affected", max_length=255, null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"http_status",
|
||||||
|
models.PositiveIntegerField(blank=True, db_index=True, help_text="HTTP status code", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"status_code",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Status code (alias for http_status)", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"response_status",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Response status code", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"success",
|
||||||
|
models.BooleanField(
|
||||||
|
blank=True, db_index=True, help_text="Whether the request was successful", null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("started_at", models.DateTimeField(auto_now_add=True, help_text="When the request started")),
|
||||||
|
("completed_at", models.DateTimeField(blank=True, help_text="When the request completed", null=True)),
|
||||||
|
(
|
||||||
|
"duration_ms",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Request duration in milliseconds", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"response_time_ms",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Response time in milliseconds", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"error_type",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Type/class of error", max_length=100, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("error_message", models.TextField(blank=True, help_text="Error message", null=True)),
|
||||||
|
("error_stack", models.TextField(blank=True, help_text="Error stack trace", null=True)),
|
||||||
|
(
|
||||||
|
"error_code",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Application error code", max_length=50, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"error_origin",
|
||||||
|
models.CharField(blank=True, help_text="Where the error originated", max_length=100, null=True),
|
||||||
|
),
|
||||||
|
("component_stack", models.TextField(blank=True, help_text="React component stack trace", null=True)),
|
||||||
|
(
|
||||||
|
"severity",
|
||||||
|
models.CharField(
|
||||||
|
choices=[
|
||||||
|
("debug", "Debug"),
|
||||||
|
("info", "Info"),
|
||||||
|
("warning", "Warning"),
|
||||||
|
("error", "Error"),
|
||||||
|
("critical", "Critical"),
|
||||||
|
],
|
||||||
|
db_index=True,
|
||||||
|
default="info",
|
||||||
|
help_text="Error severity level",
|
||||||
|
max_length=20,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"is_resolved",
|
||||||
|
models.BooleanField(db_index=True, default=False, help_text="Whether this error has been resolved"),
|
||||||
|
),
|
||||||
|
("resolved_at", models.DateTimeField(blank=True, help_text="When the error was resolved", null=True)),
|
||||||
|
("resolution_notes", models.TextField(blank=True, help_text="Notes about resolution", null=True)),
|
||||||
|
("retry_count", models.PositiveIntegerField(default=0, help_text="Number of retry attempts")),
|
||||||
|
(
|
||||||
|
"retry_attempts",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Total retry attempts made", null=True),
|
||||||
|
),
|
||||||
|
("user_agent", models.TextField(blank=True, help_text="User agent string", null=True)),
|
||||||
|
(
|
||||||
|
"ip_address_hash",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Hashed IP address", max_length=64, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"client_version",
|
||||||
|
models.CharField(blank=True, help_text="Client application version", max_length=50, null=True),
|
||||||
|
),
|
||||||
|
("timezone", models.CharField(blank=True, help_text="User timezone", max_length=50, null=True)),
|
||||||
|
("referrer", models.TextField(blank=True, help_text="HTTP referrer", null=True)),
|
||||||
|
(
|
||||||
|
"entity_type",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Type of entity affected", max_length=50, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"entity_id",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="ID of entity affected", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"created_at",
|
||||||
|
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this record was created"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"resolved_by",
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
help_text="User who resolved this error",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="resolved_request_metadata",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
help_text="User who made the request",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="request_metadata",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Request Metadata",
|
||||||
|
"verbose_name_plural": "Request Metadata",
|
||||||
|
"ordering": ["-created_at"],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="RequestBreadcrumb",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
("timestamp", models.DateTimeField(help_text="When this breadcrumb occurred")),
|
||||||
|
(
|
||||||
|
"category",
|
||||||
|
models.CharField(
|
||||||
|
help_text="Breadcrumb category (e.g., 'http', 'navigation', 'console')", max_length=100
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("message", models.TextField(help_text="Breadcrumb message")),
|
||||||
|
(
|
||||||
|
"level",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, help_text="Log level (debug, info, warning, error)", max_length=20, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("sequence_order", models.PositiveIntegerField(default=0, help_text="Order within the request")),
|
||||||
|
(
|
||||||
|
"request_metadata",
|
||||||
|
models.ForeignKey(
|
||||||
|
help_text="Parent request",
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="request_breadcrumbs",
|
||||||
|
to="core.requestmetadata",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Request Breadcrumb",
|
||||||
|
"verbose_name_plural": "Request Breadcrumbs",
|
||||||
|
"ordering": ["sequence_order", "timestamp"],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.DeleteModel(
|
||||||
|
name="PageView",
|
||||||
|
),
|
||||||
|
migrations.DeleteModel(
|
||||||
|
name="PageViewEvent",
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="requestmetadata",
|
||||||
|
index=models.Index(fields=["error_type", "created_at"], name="core_reques_error_t_d384f1_idx"),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="requestmetadata",
|
||||||
|
index=models.Index(fields=["severity", "created_at"], name="core_reques_severit_04b88d_idx"),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="requestmetadata",
|
||||||
|
index=models.Index(fields=["is_resolved", "created_at"], name="core_reques_is_reso_614d34_idx"),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="requestmetadata",
|
||||||
|
index=models.Index(fields=["user", "created_at"], name="core_reques_user_id_db6ee3_idx"),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="requestbreadcrumb",
|
||||||
|
index=models.Index(fields=["request_metadata", "sequence_order"], name="core_reques_request_0e8be4_idx"),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
# Generated by Django 5.2.9 on 2026-01-07 01:23
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import pgtrigger.compiler
|
||||||
|
import pgtrigger.migrations
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('contenttypes', '0002_remove_content_type_name'),
|
||||||
|
('core', '0008_add_analytics_models'),
|
||||||
|
('pghistory', '0006_delete_aggregateevent'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='PageView',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('object_id', models.PositiveIntegerField()),
|
||||||
|
('timestamp', models.DateTimeField(auto_now_add=True, db_index=True)),
|
||||||
|
('ip_address', models.GenericIPAddressField()),
|
||||||
|
('user_agent', models.CharField(blank=True, max_length=512)),
|
||||||
|
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='page_views', to='contenttypes.contenttype')),
|
||||||
|
],
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='PageViewEvent',
|
||||||
|
fields=[
|
||||||
|
('pgh_id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('pgh_created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('pgh_label', models.TextField(help_text='The event label.')),
|
||||||
|
('id', models.BigIntegerField()),
|
||||||
|
('object_id', models.PositiveIntegerField()),
|
||||||
|
('timestamp', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('ip_address', models.GenericIPAddressField()),
|
||||||
|
('user_agent', models.CharField(blank=True, max_length=512)),
|
||||||
|
('content_type', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='contenttypes.contenttype')),
|
||||||
|
('pgh_context', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')),
|
||||||
|
('pgh_obj', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='core.pageview')),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name='pageview',
|
||||||
|
index=models.Index(fields=['timestamp'], name='core_pagevi_timesta_757ebb_idx'),
|
||||||
|
),
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name='pageview',
|
||||||
|
index=models.Index(fields=['content_type', 'object_id'], name='core_pagevi_content_eda7ad_idx'),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name='pageview',
|
||||||
|
trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;', hash='1682d124ea3ba215e630c7cfcde929f7444cf247', operation='INSERT', pgid='pgtrigger_insert_insert_ee1e1', table='core_pageview', when='AFTER')),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name='pageview',
|
||||||
|
trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;', hash='4221b2dd6636cae454f8d69c0c1841c40c47e6a6', operation='UPDATE', pgid='pgtrigger_update_update_3c505', table='core_pageview', when='AFTER')),
|
||||||
|
),
|
||||||
|
]
|
||||||
94
backend/apps/core/migrations/0010_add_milestone_model.py
Normal file
94
backend/apps/core/migrations/0010_add_milestone_model.py
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
# Generated by Django 5.2.9 on 2026-01-08 17:59
|
||||||
|
|
||||||
|
import django.db.models.deletion
|
||||||
|
import pgtrigger.compiler
|
||||||
|
import pgtrigger.migrations
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('core', '0009_pageview_pageviewevent_and_more'),
|
||||||
|
('pghistory', '0007_auto_20250421_0444'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='MilestoneEvent',
|
||||||
|
fields=[
|
||||||
|
('pgh_id', models.AutoField(primary_key=True, serialize=False)),
|
||||||
|
('pgh_created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('pgh_label', models.TextField(help_text='The event label.')),
|
||||||
|
('id', models.BigIntegerField()),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('title', models.CharField(help_text='Title or name of the event', max_length=200)),
|
||||||
|
('description', models.TextField(blank=True, help_text='Detailed description of the event')),
|
||||||
|
('event_type', models.CharField(help_text="Type of event (e.g., 'opening', 'closing', 'name_change', 'status_change')", max_length=50)),
|
||||||
|
('event_date', models.DateField(help_text='Date when the event occurred or will occur')),
|
||||||
|
('event_date_precision', models.CharField(choices=[('exact', 'Exact Date'), ('month', 'Month and Year'), ('year', 'Year Only'), ('decade', 'Decade'), ('century', 'Century'), ('approximate', 'Approximate')], default='exact', help_text='Precision of the event date', max_length=20)),
|
||||||
|
('entity_type', models.CharField(help_text="Type of entity (e.g., 'park', 'ride', 'company')", max_length=50)),
|
||||||
|
('entity_id', models.UUIDField(help_text='UUID of the associated entity')),
|
||||||
|
('is_public', models.BooleanField(default=True, help_text='Whether this milestone is publicly visible')),
|
||||||
|
('display_order', models.IntegerField(default=0, help_text='Order for displaying multiple milestones on the same date')),
|
||||||
|
('from_value', models.CharField(blank=True, help_text='Previous value (for change events)', max_length=200)),
|
||||||
|
('to_value', models.CharField(blank=True, help_text='New value (for change events)', max_length=200)),
|
||||||
|
('from_entity_id', models.UUIDField(blank=True, help_text='Previous entity reference (e.g., old operator)', null=True)),
|
||||||
|
('to_entity_id', models.UUIDField(blank=True, help_text='New entity reference (e.g., new operator)', null=True)),
|
||||||
|
('from_location_id', models.UUIDField(blank=True, help_text='Previous location reference (for relocations)', null=True)),
|
||||||
|
('to_location_id', models.UUIDField(blank=True, help_text='New location reference (for relocations)', null=True)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'abstract': False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name='Milestone',
|
||||||
|
fields=[
|
||||||
|
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||||
|
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||||
|
('updated_at', models.DateTimeField(auto_now=True)),
|
||||||
|
('title', models.CharField(help_text='Title or name of the event', max_length=200)),
|
||||||
|
('description', models.TextField(blank=True, help_text='Detailed description of the event')),
|
||||||
|
('event_type', models.CharField(db_index=True, help_text="Type of event (e.g., 'opening', 'closing', 'name_change', 'status_change')", max_length=50)),
|
||||||
|
('event_date', models.DateField(db_index=True, help_text='Date when the event occurred or will occur')),
|
||||||
|
('event_date_precision', models.CharField(choices=[('exact', 'Exact Date'), ('month', 'Month and Year'), ('year', 'Year Only'), ('decade', 'Decade'), ('century', 'Century'), ('approximate', 'Approximate')], default='exact', help_text='Precision of the event date', max_length=20)),
|
||||||
|
('entity_type', models.CharField(db_index=True, help_text="Type of entity (e.g., 'park', 'ride', 'company')", max_length=50)),
|
||||||
|
('entity_id', models.UUIDField(db_index=True, help_text='UUID of the associated entity')),
|
||||||
|
('is_public', models.BooleanField(default=True, help_text='Whether this milestone is publicly visible')),
|
||||||
|
('display_order', models.IntegerField(default=0, help_text='Order for displaying multiple milestones on the same date')),
|
||||||
|
('from_value', models.CharField(blank=True, help_text='Previous value (for change events)', max_length=200)),
|
||||||
|
('to_value', models.CharField(blank=True, help_text='New value (for change events)', max_length=200)),
|
||||||
|
('from_entity_id', models.UUIDField(blank=True, help_text='Previous entity reference (e.g., old operator)', null=True)),
|
||||||
|
('to_entity_id', models.UUIDField(blank=True, help_text='New entity reference (e.g., new operator)', null=True)),
|
||||||
|
('from_location_id', models.UUIDField(blank=True, help_text='Previous location reference (for relocations)', null=True)),
|
||||||
|
('to_location_id', models.UUIDField(blank=True, help_text='New location reference (for relocations)', null=True)),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
'verbose_name': 'Milestone',
|
||||||
|
'verbose_name_plural': 'Milestones',
|
||||||
|
'ordering': ['-event_date', 'display_order'],
|
||||||
|
'abstract': False,
|
||||||
|
'indexes': [models.Index(fields=['entity_type', 'entity_id'], name='core_milest_entity__effdde_idx'), models.Index(fields=['event_type', 'event_date'], name='core_milest_event_t_0070b8_idx'), models.Index(fields=['is_public', 'event_date'], name='core_milest_is_publ_2ce98c_idx')],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name='milestone',
|
||||||
|
trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "core_milestoneevent" ("created_at", "description", "display_order", "entity_id", "entity_type", "event_date", "event_date_precision", "event_type", "from_entity_id", "from_location_id", "from_value", "id", "is_public", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "title", "to_entity_id", "to_location_id", "to_value", "updated_at") VALUES (NEW."created_at", NEW."description", NEW."display_order", NEW."entity_id", NEW."entity_type", NEW."event_date", NEW."event_date_precision", NEW."event_type", NEW."from_entity_id", NEW."from_location_id", NEW."from_value", NEW."id", NEW."is_public", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."title", NEW."to_entity_id", NEW."to_location_id", NEW."to_value", NEW."updated_at"); RETURN NULL;', hash='6c4386ed0356cf9a3db65c829163401409e79622', operation='INSERT', pgid='pgtrigger_insert_insert_52c81', table='core_milestone', when='AFTER')),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name='milestone',
|
||||||
|
trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "core_milestoneevent" ("created_at", "description", "display_order", "entity_id", "entity_type", "event_date", "event_date_precision", "event_type", "from_entity_id", "from_location_id", "from_value", "id", "is_public", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "title", "to_entity_id", "to_location_id", "to_value", "updated_at") VALUES (NEW."created_at", NEW."description", NEW."display_order", NEW."entity_id", NEW."entity_type", NEW."event_date", NEW."event_date_precision", NEW."event_type", NEW."from_entity_id", NEW."from_location_id", NEW."from_value", NEW."id", NEW."is_public", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."title", NEW."to_entity_id", NEW."to_location_id", NEW."to_value", NEW."updated_at"); RETURN NULL;', hash='fafe30b7266d1d1a0a2b3486f5b7e713a8252f97', operation='UPDATE', pgid='pgtrigger_update_update_0209b', table='core_milestone', when='AFTER')),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='milestoneevent',
|
||||||
|
name='pgh_context',
|
||||||
|
field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name='milestoneevent',
|
||||||
|
name='pgh_obj',
|
||||||
|
field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='core.milestone'),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
# Generated by Django 5.2.10 on 2026-01-11 00:48
|
||||||
|
|
||||||
|
import apps.core.choices.fields
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
('core', '0010_add_milestone_model'),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='applicationerror',
|
||||||
|
name='severity',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='severity_levels', choices=[('critical', 'Critical'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')], db_index=True, default='medium', domain='core', help_text='Error severity level', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='applicationerror',
|
||||||
|
name='source',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='error_sources', choices=[('frontend', 'Frontend'), ('backend', 'Backend'), ('api', 'API')], db_index=True, domain='core', help_text='Where the error originated', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='incident',
|
||||||
|
name='severity',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='severity_levels', choices=[('critical', 'Critical'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')], db_index=True, domain='core', help_text='Incident severity level', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='incident',
|
||||||
|
name='status',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='incident_statuses', choices=[('open', 'Open'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('closed', 'Closed')], db_index=True, default='open', domain='core', help_text='Current incident status', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='incidentalert',
|
||||||
|
name='alert_source',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='alert_sources', choices=[('system', 'System Alert'), ('rate_limit', 'Rate Limit Alert')], domain='core', help_text='Source type of the alert', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='milestone',
|
||||||
|
name='event_date_precision',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='date_precision', choices=[('exact', 'Exact Date'), ('month', 'Month and Year'), ('year', 'Year Only'), ('decade', 'Decade'), ('century', 'Century'), ('approximate', 'Approximate')], default='exact', domain='core', help_text='Precision of the event date', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='milestoneevent',
|
||||||
|
name='event_date_precision',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='date_precision', choices=[('exact', 'Exact Date'), ('month', 'Month and Year'), ('year', 'Year Only'), ('decade', 'Decade'), ('century', 'Century'), ('approximate', 'Approximate')], default='exact', domain='core', help_text='Precision of the event date', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='ratelimitalertconfig',
|
||||||
|
name='metric_type',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='metric_types', choices=[('block_rate', 'Block Rate'), ('total_requests', 'Total Requests'), ('unique_ips', 'Unique IPs'), ('function_specific', 'Function Specific')], db_index=True, domain='core', help_text='Type of metric to monitor', max_length=50),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='requestmetadata',
|
||||||
|
name='severity',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='request_severity_levels', choices=[('debug', 'Debug'), ('info', 'Info'), ('warning', 'Warning'), ('error', 'Error'), ('critical', 'Critical')], db_index=True, default='info', domain='core', help_text='Error severity level', max_length=20),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='systemalert',
|
||||||
|
name='alert_type',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='system_alert_types', choices=[('orphaned_images', 'Orphaned Images'), ('stale_submissions', 'Stale Submissions'), ('circular_dependency', 'Circular Dependency'), ('validation_error', 'Validation Error'), ('ban_attempt', 'Ban Attempt'), ('upload_timeout', 'Upload Timeout'), ('high_error_rate', 'High Error Rate'), ('database_connection', 'Database Connection'), ('memory_usage', 'Memory Usage'), ('queue_backup', 'Queue Backup')], db_index=True, domain='core', help_text='Type of system alert', max_length=50),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name='systemalert',
|
||||||
|
name='severity',
|
||||||
|
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='severity_levels', choices=[('critical', 'Critical'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')], db_index=True, domain='core', help_text='Alert severity level', max_length=20),
|
||||||
|
),
|
||||||
|
]
|
||||||
@@ -0,0 +1,320 @@
|
|||||||
|
# Generated by Django 5.2.10 on 2026-01-11 18:06
|
||||||
|
|
||||||
|
import apps.core.choices.fields
|
||||||
|
import django.db.models.deletion
|
||||||
|
import uuid
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("core", "0011_alter_applicationerror_severity_and_more"),
|
||||||
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="AlertCorrelationRule",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
(
|
||||||
|
"rule_name",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True, help_text="Unique name for this correlation rule", max_length=255, unique=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"rule_description",
|
||||||
|
models.TextField(blank=True, help_text="Description of what this rule correlates"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"min_alerts_required",
|
||||||
|
models.PositiveIntegerField(
|
||||||
|
default=3, help_text="Minimum number of alerts needed to trigger correlation"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"time_window_minutes",
|
||||||
|
models.PositiveIntegerField(default=30, help_text="Time window in minutes for alert correlation"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"incident_severity",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="severity_levels",
|
||||||
|
choices=[("critical", "Critical"), ("high", "High"), ("medium", "Medium"), ("low", "Low")],
|
||||||
|
default="medium",
|
||||||
|
domain="core",
|
||||||
|
help_text="Severity to assign to correlated incidents",
|
||||||
|
max_length=20,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"incident_title_template",
|
||||||
|
models.CharField(
|
||||||
|
help_text="Template for incident title (supports {count}, {rule_name})", max_length=255
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"is_active",
|
||||||
|
models.BooleanField(db_index=True, default=True, help_text="Whether this rule is currently active"),
|
||||||
|
),
|
||||||
|
("created_at", models.DateTimeField(auto_now_add=True, help_text="When this rule was created")),
|
||||||
|
("updated_at", models.DateTimeField(auto_now=True, help_text="When this rule was last updated")),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Alert Correlation Rule",
|
||||||
|
"verbose_name_plural": "Alert Correlation Rules",
|
||||||
|
"ordering": ["rule_name"],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="CleanupJobLog",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
("job_name", models.CharField(db_index=True, help_text="Name of the cleanup job", max_length=255)),
|
||||||
|
(
|
||||||
|
"status",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="cleanup_job_statuses",
|
||||||
|
choices=[
|
||||||
|
("success", "Success"),
|
||||||
|
("failed", "Failed"),
|
||||||
|
("partial", "Partial"),
|
||||||
|
("skipped", "Skipped"),
|
||||||
|
],
|
||||||
|
db_index=True,
|
||||||
|
default="success",
|
||||||
|
domain="core",
|
||||||
|
help_text="Execution status",
|
||||||
|
max_length=20,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("records_processed", models.PositiveIntegerField(default=0, help_text="Number of records processed")),
|
||||||
|
("records_deleted", models.PositiveIntegerField(default=0, help_text="Number of records deleted")),
|
||||||
|
("error_message", models.TextField(blank=True, help_text="Error message if job failed", null=True)),
|
||||||
|
(
|
||||||
|
"duration_ms",
|
||||||
|
models.PositiveIntegerField(blank=True, help_text="Execution duration in milliseconds", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"executed_at",
|
||||||
|
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this job was executed"),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Cleanup Job Log",
|
||||||
|
"verbose_name_plural": "Cleanup Job Logs",
|
||||||
|
"ordering": ["-executed_at"],
|
||||||
|
"indexes": [
|
||||||
|
models.Index(fields=["job_name", "executed_at"], name="core_cleanu_job_nam_4530fd_idx"),
|
||||||
|
models.Index(fields=["status", "executed_at"], name="core_cleanu_status_fa6360_idx"),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="Anomaly",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
(
|
||||||
|
"metric_name",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True, help_text="Name of the metric that exhibited anomalous behavior", max_length=255
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"metric_category",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True,
|
||||||
|
help_text="Category of the metric (e.g., 'performance', 'error_rate', 'traffic')",
|
||||||
|
max_length=100,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"anomaly_type",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="anomaly_types",
|
||||||
|
choices=[
|
||||||
|
("spike", "Spike"),
|
||||||
|
("drop", "Drop"),
|
||||||
|
("trend_change", "Trend Change"),
|
||||||
|
("outlier", "Outlier"),
|
||||||
|
("threshold_breach", "Threshold Breach"),
|
||||||
|
],
|
||||||
|
db_index=True,
|
||||||
|
domain="core",
|
||||||
|
help_text="Type of anomaly detected",
|
||||||
|
max_length=30,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"severity",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="severity_levels",
|
||||||
|
choices=[("critical", "Critical"), ("high", "High"), ("medium", "Medium"), ("low", "Low")],
|
||||||
|
db_index=True,
|
||||||
|
domain="core",
|
||||||
|
help_text="Severity of the anomaly",
|
||||||
|
max_length=20,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"anomaly_value",
|
||||||
|
models.DecimalField(decimal_places=6, help_text="The anomalous value detected", max_digits=20),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"baseline_value",
|
||||||
|
models.DecimalField(decimal_places=6, help_text="The expected baseline value", max_digits=20),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"deviation_score",
|
||||||
|
models.DecimalField(decimal_places=4, help_text="Standard deviations from normal", max_digits=10),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"confidence_score",
|
||||||
|
models.DecimalField(
|
||||||
|
decimal_places=4, help_text="Confidence score of the detection (0-1)", max_digits=5
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("detection_algorithm", models.CharField(help_text="Algorithm used for detection", max_length=100)),
|
||||||
|
("time_window_start", models.DateTimeField(help_text="Start of the detection time window")),
|
||||||
|
("time_window_end", models.DateTimeField(help_text="End of the detection time window")),
|
||||||
|
(
|
||||||
|
"alert_created",
|
||||||
|
models.BooleanField(
|
||||||
|
db_index=True, default=False, help_text="Whether an alert was created for this anomaly"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"detected_at",
|
||||||
|
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this anomaly was detected"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"alert",
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
help_text="Linked system alert if created",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="anomalies",
|
||||||
|
to="core.systemalert",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Anomaly",
|
||||||
|
"verbose_name_plural": "Anomalies",
|
||||||
|
"ordering": ["-detected_at"],
|
||||||
|
"indexes": [
|
||||||
|
models.Index(fields=["metric_name", "detected_at"], name="core_anomal_metric__06c3c9_idx"),
|
||||||
|
models.Index(fields=["severity", "detected_at"], name="core_anomal_severit_ea7a17_idx"),
|
||||||
|
models.Index(fields=["anomaly_type", "detected_at"], name="core_anomal_anomaly_eb45f7_idx"),
|
||||||
|
models.Index(fields=["alert_created", "detected_at"], name="core_anomal_alert_c_5a0c1a_idx"),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="PipelineError",
|
||||||
|
fields=[
|
||||||
|
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||||
|
(
|
||||||
|
"function_name",
|
||||||
|
models.CharField(
|
||||||
|
db_index=True, help_text="Name of the function/pipeline that failed", max_length=255
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("error_message", models.TextField(help_text="Error message describing the failure")),
|
||||||
|
(
|
||||||
|
"error_code",
|
||||||
|
models.CharField(
|
||||||
|
blank=True, db_index=True, help_text="Error code for categorization", max_length=100, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("error_context", models.JSONField(blank=True, help_text="Additional context data as JSON", null=True)),
|
||||||
|
("stack_trace", models.TextField(blank=True, help_text="Full stack trace for debugging", null=True)),
|
||||||
|
(
|
||||||
|
"severity",
|
||||||
|
apps.core.choices.fields.RichChoiceField(
|
||||||
|
allow_deprecated=False,
|
||||||
|
choice_group="pipeline_error_severities",
|
||||||
|
choices=[
|
||||||
|
("critical", "Critical"),
|
||||||
|
("error", "Error"),
|
||||||
|
("warning", "Warning"),
|
||||||
|
("info", "Info"),
|
||||||
|
],
|
||||||
|
db_index=True,
|
||||||
|
default="error",
|
||||||
|
domain="core",
|
||||||
|
help_text="Severity level of the error",
|
||||||
|
max_length=20,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"submission_id",
|
||||||
|
models.UUIDField(
|
||||||
|
blank=True, db_index=True, help_text="ID of related content submission if applicable", null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"item_id",
|
||||||
|
models.CharField(
|
||||||
|
blank=True,
|
||||||
|
db_index=True,
|
||||||
|
help_text="Generic reference to related item",
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"request_id",
|
||||||
|
models.UUIDField(blank=True, db_index=True, help_text="Request ID for correlation", null=True),
|
||||||
|
),
|
||||||
|
("trace_id", models.UUIDField(blank=True, db_index=True, help_text="Distributed trace ID", null=True)),
|
||||||
|
(
|
||||||
|
"resolved",
|
||||||
|
models.BooleanField(db_index=True, default=False, help_text="Whether this error has been resolved"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"resolved_at",
|
||||||
|
models.DateTimeField(
|
||||||
|
blank=True, db_index=True, help_text="When this error was resolved", null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"resolution_notes",
|
||||||
|
models.TextField(blank=True, help_text="Notes about how the error was resolved", null=True),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"occurred_at",
|
||||||
|
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this error occurred"),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"resolved_by",
|
||||||
|
models.ForeignKey(
|
||||||
|
blank=True,
|
||||||
|
help_text="User who resolved this error",
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.SET_NULL,
|
||||||
|
related_name="resolved_pipeline_errors",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"verbose_name": "Pipeline Error",
|
||||||
|
"verbose_name_plural": "Pipeline Errors",
|
||||||
|
"ordering": ["-occurred_at"],
|
||||||
|
"indexes": [
|
||||||
|
models.Index(fields=["severity", "occurred_at"], name="core_pipeli_severit_9c8037_idx"),
|
||||||
|
models.Index(fields=["function_name", "occurred_at"], name="core_pipeli_functio_efb015_idx"),
|
||||||
|
models.Index(fields=["resolved", "occurred_at"], name="core_pipeli_resolve_cd60c5_idx"),
|
||||||
|
],
|
||||||
|
},
|
||||||
|
),
|
||||||
|
]
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -28,3 +28,65 @@ class IsStaffOrReadOnly(permissions.BasePermission):
|
|||||||
if request.method in permissions.SAFE_METHODS:
|
if request.method in permissions.SAFE_METHODS:
|
||||||
return True
|
return True
|
||||||
return request.user and request.user.is_staff
|
return request.user and request.user.is_staff
|
||||||
|
|
||||||
|
|
||||||
|
class IsAdminWithSecondFactor(permissions.BasePermission):
|
||||||
|
"""
|
||||||
|
Requires admin status AND at least one configured second factor.
|
||||||
|
|
||||||
|
Accepts either:
|
||||||
|
- TOTP (MFA/Authenticator app)
|
||||||
|
- WebAuthn (Passkey/Security key)
|
||||||
|
|
||||||
|
This permission ensures that admin users have a second factor configured
|
||||||
|
before they can access sensitive admin endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
message = "Admin access requires MFA or Passkey to be configured."
|
||||||
|
|
||||||
|
def has_permission(self, request, view):
|
||||||
|
user = request.user
|
||||||
|
|
||||||
|
# Must be authenticated
|
||||||
|
if not user or not user.is_authenticated:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Must be admin (staff, superuser, or ADMIN role)
|
||||||
|
if not self._is_admin(user):
|
||||||
|
self.message = "You do not have admin privileges."
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Must have at least one second factor configured
|
||||||
|
if not self._has_second_factor(user):
|
||||||
|
self.message = "Admin access requires MFA or Passkey to be configured."
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _is_admin(self, user) -> bool:
|
||||||
|
"""Check if user has admin privileges."""
|
||||||
|
if user.is_superuser:
|
||||||
|
return True
|
||||||
|
if user.is_staff:
|
||||||
|
return True
|
||||||
|
# Check custom role field if it exists
|
||||||
|
if hasattr(user, "role") and user.role in ("ADMIN", "SUPERUSER"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _has_second_factor(self, user) -> bool:
|
||||||
|
"""Check if user has at least one second factor configured."""
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
|
||||||
|
# Check for TOTP or WebAuthn authenticators
|
||||||
|
return Authenticator.objects.filter(
|
||||||
|
user=user,
|
||||||
|
type__in=[Authenticator.Type.TOTP, Authenticator.Type.WEBAUTHN]
|
||||||
|
).exists()
|
||||||
|
except ImportError:
|
||||||
|
# allauth.mfa not installed
|
||||||
|
return False
|
||||||
|
except Exception:
|
||||||
|
# Any other error, fail closed (deny access)
|
||||||
|
return False
|
||||||
|
|||||||
@@ -130,23 +130,28 @@ class ErrorService:
|
|||||||
# Merge request_context into metadata
|
# Merge request_context into metadata
|
||||||
merged_metadata = {**(metadata or {}), "request_context": request_context}
|
merged_metadata = {**(metadata or {}), "request_context": request_context}
|
||||||
|
|
||||||
|
# Build create kwargs, only including error_id if provided
|
||||||
|
create_kwargs = {
|
||||||
|
"error_type": error_type,
|
||||||
|
"error_message": error_message[:5000], # Limit message length
|
||||||
|
"error_stack": error_stack[:10000], # Limit stack length
|
||||||
|
"error_code": error_code,
|
||||||
|
"severity": severity,
|
||||||
|
"source": source,
|
||||||
|
"endpoint": endpoint,
|
||||||
|
"http_method": http_method,
|
||||||
|
"user_agent": user_agent[:1000],
|
||||||
|
"user": user,
|
||||||
|
"ip_address_hash": ip_address_hash,
|
||||||
|
"metadata": merged_metadata,
|
||||||
|
"environment": environment or {},
|
||||||
|
}
|
||||||
|
# Only include error_id if explicitly provided, else let model default
|
||||||
|
if error_id is not None:
|
||||||
|
create_kwargs["error_id"] = error_id
|
||||||
|
|
||||||
# Create and save error
|
# Create and save error
|
||||||
app_error = ApplicationError.objects.create(
|
app_error = ApplicationError.objects.create(**create_kwargs)
|
||||||
error_id=error_id or None, # Let model generate if not provided
|
|
||||||
error_type=error_type,
|
|
||||||
error_message=error_message[:5000], # Limit message length
|
|
||||||
error_stack=error_stack[:10000], # Limit stack length
|
|
||||||
error_code=error_code,
|
|
||||||
severity=severity,
|
|
||||||
source=source,
|
|
||||||
endpoint=endpoint,
|
|
||||||
http_method=http_method,
|
|
||||||
user_agent=user_agent[:1000],
|
|
||||||
user=user,
|
|
||||||
ip_address_hash=ip_address_hash,
|
|
||||||
metadata=merged_metadata,
|
|
||||||
environment=environment or {},
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Captured error {app_error.short_error_id}: {error_type} from {source}"
|
f"Captured error {app_error.short_error_id}: {error_type} from {source}"
|
||||||
|
|||||||
@@ -14,6 +14,8 @@ from django.conf import settings
|
|||||||
from django.core.files.uploadedfile import UploadedFile
|
from django.core.files.uploadedfile import UploadedFile
|
||||||
from PIL import ExifTags, Image
|
from PIL import ExifTags, Image
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -193,5 +195,5 @@ class MediaService:
|
|||||||
"available_space": "unknown",
|
"available_space": "unknown",
|
||||||
}
|
}
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Failed to get storage stats: {str(e)}")
|
capture_and_log(e, 'Get storage stats', source='service', severity='low')
|
||||||
return {"error": str(e)}
|
return {"error": str(e)}
|
||||||
|
|||||||
@@ -199,7 +199,7 @@ class TrendingService:
|
|||||||
# Get card image URL
|
# Get card image URL
|
||||||
card_image_url = ""
|
card_image_url = ""
|
||||||
if park.card_image and hasattr(park.card_image, "image"):
|
if park.card_image and hasattr(park.card_image, "image"):
|
||||||
card_image_url = park.card_image.image.url if park.card_image.image else ""
|
card_image_url = park.card_image.image.public_url if park.card_image.image else ""
|
||||||
|
|
||||||
# Get primary company (operator)
|
# Get primary company (operator)
|
||||||
primary_company = park.operator.name if park.operator else ""
|
primary_company = park.operator.name if park.operator else ""
|
||||||
@@ -247,7 +247,7 @@ class TrendingService:
|
|||||||
# Get card image URL
|
# Get card image URL
|
||||||
card_image_url = ""
|
card_image_url = ""
|
||||||
if ride.card_image and hasattr(ride.card_image, "image"):
|
if ride.card_image and hasattr(ride.card_image, "image"):
|
||||||
card_image_url = ride.card_image.image.url if ride.card_image.image else ""
|
card_image_url = ride.card_image.image.public_url if ride.card_image.image else ""
|
||||||
|
|
||||||
trending_rides.append(
|
trending_rides.append(
|
||||||
{
|
{
|
||||||
@@ -450,7 +450,7 @@ class TrendingService:
|
|||||||
# Get card image URL
|
# Get card image URL
|
||||||
card_image_url = ""
|
card_image_url = ""
|
||||||
if park.card_image and hasattr(park.card_image, "image"):
|
if park.card_image and hasattr(park.card_image, "image"):
|
||||||
card_image_url = park.card_image.image.url if park.card_image.image else ""
|
card_image_url = park.card_image.image.public_url if park.card_image.image else ""
|
||||||
|
|
||||||
# Get primary company (operator)
|
# Get primary company (operator)
|
||||||
primary_company = park.operator.name if park.operator else ""
|
primary_company = park.operator.name if park.operator else ""
|
||||||
@@ -506,7 +506,7 @@ class TrendingService:
|
|||||||
# Get card image URL
|
# Get card image URL
|
||||||
card_image_url = ""
|
card_image_url = ""
|
||||||
if ride.card_image and hasattr(ride.card_image, "image"):
|
if ride.card_image and hasattr(ride.card_image, "image"):
|
||||||
card_image_url = ride.card_image.image.url if ride.card_image.image else ""
|
card_image_url = ride.card_image.image.public_url if ride.card_image.image else ""
|
||||||
|
|
||||||
results.append(
|
results.append(
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -496,9 +496,10 @@ class TransitionCallbackRegistry:
|
|||||||
failures.append((callback, None))
|
failures.append((callback, None))
|
||||||
overall_success = False
|
overall_success = False
|
||||||
|
|
||||||
if not callback.continue_on_error:
|
if not callback.continue_on_error:
|
||||||
logger.error(
|
logger.error(
|
||||||
f"Aborting callback chain - {callback.name} failed " f"and continue_on_error=False"
|
f"Aborting callback chain - {callback.name} failed "
|
||||||
|
f"and continue_on_error=False"
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -509,7 +510,8 @@ class TransitionCallbackRegistry:
|
|||||||
|
|
||||||
if not callback.continue_on_error:
|
if not callback.continue_on_error:
|
||||||
logger.error(
|
logger.error(
|
||||||
f"Aborting callback chain - {callback.name} raised exception " f"and continue_on_error=False"
|
f"Aborting callback chain - {callback.name} raised exception "
|
||||||
|
f"and continue_on_error=False"
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
|
|
||||||
|
|||||||
@@ -53,13 +53,32 @@ def with_callbacks(
|
|||||||
def wrapper(instance, *args, **kwargs):
|
def wrapper(instance, *args, **kwargs):
|
||||||
# Extract user from kwargs
|
# Extract user from kwargs
|
||||||
user = kwargs.get("user")
|
user = kwargs.get("user")
|
||||||
|
|
||||||
|
# Pass user as 'by' for django-fsm-log's @fsm_log_by decorator
|
||||||
|
# This must be set before calling the inner func so the decorator can capture it
|
||||||
|
if user is not None and 'by' not in kwargs:
|
||||||
|
kwargs['by'] = user
|
||||||
|
|
||||||
# Get source state before transition
|
# Get source state before transition
|
||||||
source_state = getattr(instance, field_name, None)
|
source_state = getattr(instance, field_name, None)
|
||||||
|
|
||||||
# Get target state from the transition decorator
|
# Get target state from the transition decorator
|
||||||
# The @transition decorator sets _django_fsm_target
|
# The @transition decorator sets _django_fsm attribute (may be dict or FSMMeta object)
|
||||||
target_state = getattr(func, "_django_fsm", {}).get("target", None)
|
fsm_meta = getattr(func, "_django_fsm", None)
|
||||||
|
target_state = None
|
||||||
|
if fsm_meta is not None:
|
||||||
|
if isinstance(fsm_meta, dict):
|
||||||
|
target_state = fsm_meta.get("target", None)
|
||||||
|
elif hasattr(fsm_meta, "target"):
|
||||||
|
target_state = fsm_meta.target
|
||||||
|
elif hasattr(fsm_meta, "transitions"):
|
||||||
|
# FSMMeta object - try to get target from first transition
|
||||||
|
try:
|
||||||
|
transitions = list(fsm_meta.transitions.values())
|
||||||
|
if transitions:
|
||||||
|
target_state = transitions[0].target if hasattr(transitions[0], 'target') else None
|
||||||
|
except (AttributeError, TypeError, StopIteration):
|
||||||
|
pass
|
||||||
|
|
||||||
# If we can't determine the target from decorator metadata,
|
# If we can't determine the target from decorator metadata,
|
||||||
# we'll capture it after the transition
|
# we'll capture it after the transition
|
||||||
@@ -84,7 +103,8 @@ def with_callbacks(
|
|||||||
if not pre_success and pre_failures:
|
if not pre_success and pre_failures:
|
||||||
for callback, exc in pre_failures:
|
for callback, exc in pre_failures:
|
||||||
if not callback.continue_on_error:
|
if not callback.continue_on_error:
|
||||||
logger.error(f"Pre-transition callback {callback.name} failed, " f"aborting transition")
|
logger.error(f"Pre-transition callback {callback.name} failed, "
|
||||||
|
f"aborting transition")
|
||||||
if exc:
|
if exc:
|
||||||
raise exc
|
raise exc
|
||||||
raise RuntimeError(f"Pre-transition callback {callback.name} failed")
|
raise RuntimeError(f"Pre-transition callback {callback.name} failed")
|
||||||
@@ -283,7 +303,7 @@ class TransitionMethodFactory:
|
|||||||
def create_approve_method(
|
def create_approve_method(
|
||||||
source: str,
|
source: str,
|
||||||
target: str,
|
target: str,
|
||||||
field_name: str = "status",
|
field=None,
|
||||||
permission_guard: Callable | None = None,
|
permission_guard: Callable | None = None,
|
||||||
enable_callbacks: bool = True,
|
enable_callbacks: bool = True,
|
||||||
emit_signals: bool = True,
|
emit_signals: bool = True,
|
||||||
@@ -294,7 +314,7 @@ class TransitionMethodFactory:
|
|||||||
Args:
|
Args:
|
||||||
source: Source state value(s)
|
source: Source state value(s)
|
||||||
target: Target state value
|
target: Target state value
|
||||||
field_name: Name of the FSM field
|
field: FSM field object (required for django-fsm 3.x)
|
||||||
permission_guard: Optional permission guard
|
permission_guard: Optional permission guard
|
||||||
enable_callbacks: Whether to wrap with callback execution
|
enable_callbacks: Whether to wrap with callback execution
|
||||||
emit_signals: Whether to emit Django signals
|
emit_signals: Whether to emit Django signals
|
||||||
@@ -302,16 +322,21 @@ class TransitionMethodFactory:
|
|||||||
Returns:
|
Returns:
|
||||||
Approval transition method
|
Approval transition method
|
||||||
"""
|
"""
|
||||||
|
# Get field name for callback wrapper
|
||||||
|
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||||
|
|
||||||
@fsm_log_by
|
@fsm_log_by
|
||||||
@transition(
|
@transition(
|
||||||
field=field_name,
|
field=field,
|
||||||
source=source,
|
source=source,
|
||||||
target=target,
|
target=target,
|
||||||
conditions=[permission_guard] if permission_guard else [],
|
permission=permission_guard,
|
||||||
)
|
)
|
||||||
def approve(instance, user=None, comment: str = "", **kwargs):
|
def approve(instance, user=None, comment: str = "", **kwargs):
|
||||||
"""Approve and transition to approved state."""
|
"""Approve and transition to approved state."""
|
||||||
|
# Pass user as 'by' for django-fsm-log's @fsm_log_by decorator
|
||||||
|
if user is not None:
|
||||||
|
kwargs['by'] = user
|
||||||
if hasattr(instance, "approved_by_id"):
|
if hasattr(instance, "approved_by_id"):
|
||||||
instance.approved_by = user
|
instance.approved_by = user
|
||||||
if hasattr(instance, "approval_comment"):
|
if hasattr(instance, "approval_comment"):
|
||||||
@@ -334,7 +359,7 @@ class TransitionMethodFactory:
|
|||||||
def create_reject_method(
|
def create_reject_method(
|
||||||
source: str,
|
source: str,
|
||||||
target: str,
|
target: str,
|
||||||
field_name: str = "status",
|
field=None,
|
||||||
permission_guard: Callable | None = None,
|
permission_guard: Callable | None = None,
|
||||||
enable_callbacks: bool = True,
|
enable_callbacks: bool = True,
|
||||||
emit_signals: bool = True,
|
emit_signals: bool = True,
|
||||||
@@ -345,7 +370,7 @@ class TransitionMethodFactory:
|
|||||||
Args:
|
Args:
|
||||||
source: Source state value(s)
|
source: Source state value(s)
|
||||||
target: Target state value
|
target: Target state value
|
||||||
field_name: Name of the FSM field
|
field: FSM field object (required for django-fsm 3.x)
|
||||||
permission_guard: Optional permission guard
|
permission_guard: Optional permission guard
|
||||||
enable_callbacks: Whether to wrap with callback execution
|
enable_callbacks: Whether to wrap with callback execution
|
||||||
emit_signals: Whether to emit Django signals
|
emit_signals: Whether to emit Django signals
|
||||||
@@ -353,16 +378,21 @@ class TransitionMethodFactory:
|
|||||||
Returns:
|
Returns:
|
||||||
Rejection transition method
|
Rejection transition method
|
||||||
"""
|
"""
|
||||||
|
# Get field name for callback wrapper
|
||||||
|
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||||
|
|
||||||
@fsm_log_by
|
@fsm_log_by
|
||||||
@transition(
|
@transition(
|
||||||
field=field_name,
|
field=field,
|
||||||
source=source,
|
source=source,
|
||||||
target=target,
|
target=target,
|
||||||
conditions=[permission_guard] if permission_guard else [],
|
permission=permission_guard,
|
||||||
)
|
)
|
||||||
def reject(instance, user=None, reason: str = "", **kwargs):
|
def reject(instance, user=None, reason: str = "", **kwargs):
|
||||||
"""Reject and transition to rejected state."""
|
"""Reject and transition to rejected state."""
|
||||||
|
# Pass user as 'by' for django-fsm-log's @fsm_log_by decorator
|
||||||
|
if user is not None:
|
||||||
|
kwargs['by'] = user
|
||||||
if hasattr(instance, "rejected_by_id"):
|
if hasattr(instance, "rejected_by_id"):
|
||||||
instance.rejected_by = user
|
instance.rejected_by = user
|
||||||
if hasattr(instance, "rejection_reason"):
|
if hasattr(instance, "rejection_reason"):
|
||||||
@@ -385,7 +415,7 @@ class TransitionMethodFactory:
|
|||||||
def create_escalate_method(
|
def create_escalate_method(
|
||||||
source: str,
|
source: str,
|
||||||
target: str,
|
target: str,
|
||||||
field_name: str = "status",
|
field=None,
|
||||||
permission_guard: Callable | None = None,
|
permission_guard: Callable | None = None,
|
||||||
enable_callbacks: bool = True,
|
enable_callbacks: bool = True,
|
||||||
emit_signals: bool = True,
|
emit_signals: bool = True,
|
||||||
@@ -396,7 +426,7 @@ class TransitionMethodFactory:
|
|||||||
Args:
|
Args:
|
||||||
source: Source state value(s)
|
source: Source state value(s)
|
||||||
target: Target state value
|
target: Target state value
|
||||||
field_name: Name of the FSM field
|
field: FSM field object (required for django-fsm 3.x)
|
||||||
permission_guard: Optional permission guard
|
permission_guard: Optional permission guard
|
||||||
enable_callbacks: Whether to wrap with callback execution
|
enable_callbacks: Whether to wrap with callback execution
|
||||||
emit_signals: Whether to emit Django signals
|
emit_signals: Whether to emit Django signals
|
||||||
@@ -404,16 +434,21 @@ class TransitionMethodFactory:
|
|||||||
Returns:
|
Returns:
|
||||||
Escalation transition method
|
Escalation transition method
|
||||||
"""
|
"""
|
||||||
|
# Get field name for callback wrapper
|
||||||
|
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||||
|
|
||||||
@fsm_log_by
|
@fsm_log_by
|
||||||
@transition(
|
@transition(
|
||||||
field=field_name,
|
field=field,
|
||||||
source=source,
|
source=source,
|
||||||
target=target,
|
target=target,
|
||||||
conditions=[permission_guard] if permission_guard else [],
|
permission=permission_guard,
|
||||||
)
|
)
|
||||||
def escalate(instance, user=None, reason: str = "", **kwargs):
|
def escalate(instance, user=None, reason: str = "", **kwargs):
|
||||||
"""Escalate to higher authority."""
|
"""Escalate to higher authority."""
|
||||||
|
# Pass user as 'by' for django-fsm-log's @fsm_log_by decorator
|
||||||
|
if user is not None:
|
||||||
|
kwargs['by'] = user
|
||||||
if hasattr(instance, "escalated_by_id"):
|
if hasattr(instance, "escalated_by_id"):
|
||||||
instance.escalated_by = user
|
instance.escalated_by = user
|
||||||
if hasattr(instance, "escalation_reason"):
|
if hasattr(instance, "escalation_reason"):
|
||||||
@@ -437,7 +472,7 @@ class TransitionMethodFactory:
|
|||||||
method_name: str,
|
method_name: str,
|
||||||
source: str,
|
source: str,
|
||||||
target: str,
|
target: str,
|
||||||
field_name: str = "status",
|
field=None,
|
||||||
permission_guard: Callable | None = None,
|
permission_guard: Callable | None = None,
|
||||||
docstring: str | None = None,
|
docstring: str | None = None,
|
||||||
enable_callbacks: bool = True,
|
enable_callbacks: bool = True,
|
||||||
@@ -450,7 +485,7 @@ class TransitionMethodFactory:
|
|||||||
method_name: Name for the method
|
method_name: Name for the method
|
||||||
source: Source state value(s)
|
source: Source state value(s)
|
||||||
target: Target state value
|
target: Target state value
|
||||||
field_name: Name of the FSM field
|
field: FSM field object (required for django-fsm 3.x)
|
||||||
permission_guard: Optional permission guard
|
permission_guard: Optional permission guard
|
||||||
docstring: Optional docstring for the method
|
docstring: Optional docstring for the method
|
||||||
enable_callbacks: Whether to wrap with callback execution
|
enable_callbacks: Whether to wrap with callback execution
|
||||||
@@ -459,32 +494,48 @@ class TransitionMethodFactory:
|
|||||||
Returns:
|
Returns:
|
||||||
Generic transition method
|
Generic transition method
|
||||||
"""
|
"""
|
||||||
|
# Get field name for callback wrapper
|
||||||
|
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||||
|
|
||||||
@fsm_log_by
|
# Create the transition function with the correct name from the start
|
||||||
@transition(
|
# by using exec to define it dynamically. This ensures __name__ is correct
|
||||||
field=field_name,
|
# before decorators are applied, which is critical for django-fsm's
|
||||||
|
# method registration.
|
||||||
|
doc = docstring if docstring else f"Transition from {source} to {target}"
|
||||||
|
|
||||||
|
# Define the function dynamically with the correct name
|
||||||
|
# IMPORTANT: We set kwargs['by'] = user so that @fsm_log_by can capture
|
||||||
|
# who performed the transition. The decorator looks for 'by' in kwargs.
|
||||||
|
func_code = f'''
|
||||||
|
def {method_name}(instance, user=None, **kwargs):
|
||||||
|
"""{doc}"""
|
||||||
|
# Pass user as 'by' for django-fsm-log's @fsm_log_by decorator
|
||||||
|
if user is not None:
|
||||||
|
kwargs['by'] = user
|
||||||
|
pass
|
||||||
|
'''
|
||||||
|
local_namespace: dict = {}
|
||||||
|
exec(func_code, {}, local_namespace)
|
||||||
|
inner_func = local_namespace[method_name]
|
||||||
|
|
||||||
|
# Apply decorators in correct order (innermost first)
|
||||||
|
# @fsm_log_by -> @transition -> inner_func
|
||||||
|
decorated = transition(
|
||||||
|
field=field,
|
||||||
source=source,
|
source=source,
|
||||||
target=target,
|
target=target,
|
||||||
conditions=[permission_guard] if permission_guard else [],
|
permission=permission_guard,
|
||||||
)
|
)(inner_func)
|
||||||
def generic_transition(instance, user=None, **kwargs):
|
decorated = fsm_log_by(decorated)
|
||||||
"""Execute state transition."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
generic_transition.__name__ = method_name
|
|
||||||
if docstring:
|
|
||||||
generic_transition.__doc__ = docstring
|
|
||||||
else:
|
|
||||||
generic_transition.__doc__ = f"Transition from {source} to {target}"
|
|
||||||
|
|
||||||
# Apply callback wrapper if enabled
|
# Apply callback wrapper if enabled
|
||||||
if enable_callbacks:
|
if enable_callbacks:
|
||||||
generic_transition = with_callbacks(
|
decorated = with_callbacks(
|
||||||
field_name=field_name,
|
field_name=field_name,
|
||||||
emit_signals=emit_signals,
|
emit_signals=emit_signals,
|
||||||
)(generic_transition)
|
)(decorated)
|
||||||
|
|
||||||
return generic_transition
|
return decorated
|
||||||
|
|
||||||
|
|
||||||
def with_transition_logging(transition_method: Callable) -> Callable:
|
def with_transition_logging(transition_method: Callable) -> Callable:
|
||||||
|
|||||||
@@ -71,69 +71,79 @@ def generate_transition_methods_for_model(
|
|||||||
choice_group: Choice group name
|
choice_group: Choice group name
|
||||||
domain: Domain namespace
|
domain: Domain namespace
|
||||||
"""
|
"""
|
||||||
|
# Get the actual field from the model class - django-fsm 3.x requires
|
||||||
|
# the field object, not just the string name, when creating methods dynamically
|
||||||
|
field = model_class._meta.get_field(field_name)
|
||||||
|
|
||||||
builder = StateTransitionBuilder(choice_group, domain)
|
builder = StateTransitionBuilder(choice_group, domain)
|
||||||
transition_graph = builder.build_transition_graph()
|
transition_graph = builder.build_transition_graph()
|
||||||
factory = TransitionMethodFactory()
|
factory = TransitionMethodFactory()
|
||||||
|
|
||||||
|
# Group transitions by target to avoid overwriting methods
|
||||||
|
# {target: [source1, source2, ...]}
|
||||||
|
target_to_sources: dict[str, list[str]] = {}
|
||||||
for source, targets in transition_graph.items():
|
for source, targets in transition_graph.items():
|
||||||
source_metadata = builder.get_choice_metadata(source)
|
|
||||||
|
|
||||||
for target in targets:
|
for target in targets:
|
||||||
# Use shared method name determination
|
if target not in target_to_sources:
|
||||||
method_name = determine_method_name_for_transition(source, target)
|
target_to_sources[target] = []
|
||||||
|
target_to_sources[target].append(source)
|
||||||
|
|
||||||
# Get target metadata for combined guards
|
# Create one transition method per target, handling all valid sources
|
||||||
target_metadata = builder.get_choice_metadata(target)
|
for target, sources in target_to_sources.items():
|
||||||
|
# Use shared method name determination (all sources go to same target = same method)
|
||||||
|
method_name = determine_method_name_for_transition(sources[0], target)
|
||||||
|
|
||||||
|
# Get target metadata for guards
|
||||||
|
target_metadata = builder.get_choice_metadata(target)
|
||||||
|
|
||||||
|
# For permission guard, use target metadata only (all sources share the same permission)
|
||||||
|
# Source-specific guards would need to be checked via conditions, but for FSM 3.x
|
||||||
|
# we use permission which gets called with (instance, user)
|
||||||
|
target_guards = extract_guards_from_metadata(target_metadata)
|
||||||
|
|
||||||
|
# Create combined guard if we have multiple guards
|
||||||
|
combined_guard: Callable | None = None
|
||||||
|
if len(target_guards) == 1:
|
||||||
|
combined_guard = target_guards[0]
|
||||||
|
elif len(target_guards) > 1:
|
||||||
|
combined_guard = CompositeGuard(guards=target_guards, operator="AND")
|
||||||
|
|
||||||
# Extract guards from both source and target metadata
|
# Use list of sources for transitions with multiple valid source states
|
||||||
# This ensures metadata flags like requires_assignment, zero_tolerance,
|
source_value = sources if len(sources) > 1 else sources[0]
|
||||||
# required_permissions, and escalation_level are enforced
|
|
||||||
guards = extract_guards_from_metadata(source_metadata)
|
|
||||||
target_guards = extract_guards_from_metadata(target_metadata)
|
|
||||||
|
|
||||||
# Combine all guards
|
# Create appropriate transition method - pass actual field object
|
||||||
all_guards = guards + target_guards
|
if "approve" in method_name or "accept" in method_name:
|
||||||
|
method = factory.create_approve_method(
|
||||||
|
source=source_value,
|
||||||
|
target=target,
|
||||||
|
field=field,
|
||||||
|
permission_guard=combined_guard,
|
||||||
|
)
|
||||||
|
elif "reject" in method_name or "deny" in method_name:
|
||||||
|
method = factory.create_reject_method(
|
||||||
|
source=source_value,
|
||||||
|
target=target,
|
||||||
|
field=field,
|
||||||
|
permission_guard=combined_guard,
|
||||||
|
)
|
||||||
|
elif "escalate" in method_name:
|
||||||
|
method = factory.create_escalate_method(
|
||||||
|
source=source_value,
|
||||||
|
target=target,
|
||||||
|
field=field,
|
||||||
|
permission_guard=combined_guard,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
method = factory.create_generic_transition_method(
|
||||||
|
method_name=method_name,
|
||||||
|
source=source_value,
|
||||||
|
target=target,
|
||||||
|
field=field,
|
||||||
|
permission_guard=combined_guard,
|
||||||
|
)
|
||||||
|
|
||||||
# Create combined guard if we have multiple guards
|
# Attach method to model class
|
||||||
combined_guard: Callable | None = None
|
setattr(model_class, method_name, method)
|
||||||
if len(all_guards) == 1:
|
|
||||||
combined_guard = all_guards[0]
|
|
||||||
elif len(all_guards) > 1:
|
|
||||||
combined_guard = CompositeGuard(guards=all_guards, operator="AND")
|
|
||||||
|
|
||||||
# Create appropriate transition method
|
|
||||||
if "approve" in method_name or "accept" in method_name:
|
|
||||||
method = factory.create_approve_method(
|
|
||||||
source=source,
|
|
||||||
target=target,
|
|
||||||
field_name=field_name,
|
|
||||||
permission_guard=combined_guard,
|
|
||||||
)
|
|
||||||
elif "reject" in method_name or "deny" in method_name:
|
|
||||||
method = factory.create_reject_method(
|
|
||||||
source=source,
|
|
||||||
target=target,
|
|
||||||
field_name=field_name,
|
|
||||||
permission_guard=combined_guard,
|
|
||||||
)
|
|
||||||
elif "escalate" in method_name:
|
|
||||||
method = factory.create_escalate_method(
|
|
||||||
source=source,
|
|
||||||
target=target,
|
|
||||||
field_name=field_name,
|
|
||||||
permission_guard=combined_guard,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
method = factory.create_generic_transition_method(
|
|
||||||
method_name=method_name,
|
|
||||||
source=source,
|
|
||||||
target=target,
|
|
||||||
field_name=field_name,
|
|
||||||
permission_guard=combined_guard,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Attach method to model class
|
|
||||||
setattr(model_class, method_name, method)
|
|
||||||
|
|
||||||
|
|
||||||
class StateMachineModelMixin:
|
class StateMachineModelMixin:
|
||||||
|
|||||||
@@ -83,7 +83,7 @@ class MetadataValidator:
|
|||||||
result.errors.extend(self.validate_transitions())
|
result.errors.extend(self.validate_transitions())
|
||||||
result.errors.extend(self.validate_terminal_states())
|
result.errors.extend(self.validate_terminal_states())
|
||||||
result.errors.extend(self.validate_permission_consistency())
|
result.errors.extend(self.validate_permission_consistency())
|
||||||
result.errors.extend(self.validate_no_cycles())
|
result.warnings.extend(self.validate_no_cycles()) # Cycles are warnings, not errors
|
||||||
result.errors.extend(self.validate_reachability())
|
result.errors.extend(self.validate_reachability())
|
||||||
|
|
||||||
# Set validity based on errors
|
# Set validity based on errors
|
||||||
@@ -197,23 +197,20 @@ class MetadataValidator:
|
|||||||
|
|
||||||
return errors
|
return errors
|
||||||
|
|
||||||
def validate_no_cycles(self) -> list[ValidationError]:
|
def validate_no_cycles(self) -> list[ValidationWarning]:
|
||||||
"""
|
"""
|
||||||
Detect invalid state cycles (excluding self-loops).
|
Detect state cycles (excluding self-loops).
|
||||||
|
|
||||||
|
Note: Cycles are allowed in many FSMs (e.g., status transitions that allow
|
||||||
|
reopening or revival). This method returns warnings, not errors, since
|
||||||
|
cycles are often intentional in operational status FSMs.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
List of validation errors
|
List of validation warnings
|
||||||
"""
|
"""
|
||||||
errors = []
|
warnings = []
|
||||||
graph = self.builder.build_transition_graph()
|
graph = self.builder.build_transition_graph()
|
||||||
|
|
||||||
# Check for self-loops (state transitioning to itself)
|
|
||||||
for state, targets in graph.items():
|
|
||||||
if state in targets:
|
|
||||||
# Self-loops are warnings, not errors
|
|
||||||
# but we can flag them
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Detect cycles using DFS
|
# Detect cycles using DFS
|
||||||
visited: set[str] = set()
|
visited: set[str] = set()
|
||||||
rec_stack: set[str] = set()
|
rec_stack: set[str] = set()
|
||||||
@@ -240,16 +237,16 @@ class MetadataValidator:
|
|||||||
if state not in visited:
|
if state not in visited:
|
||||||
cycle = has_cycle(state, [])
|
cycle = has_cycle(state, [])
|
||||||
if cycle:
|
if cycle:
|
||||||
errors.append(
|
warnings.append(
|
||||||
ValidationError(
|
ValidationWarning(
|
||||||
code="STATE_CYCLE_DETECTED",
|
code="STATE_CYCLE_EXISTS",
|
||||||
message=(f"Cycle detected: {' -> '.join(cycle)}"),
|
message=(f"Cycle exists (may be intentional): {' -> '.join(cycle)}"),
|
||||||
state=cycle[0],
|
state=cycle[0],
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
break # Report first cycle only
|
break # Report first cycle only
|
||||||
|
|
||||||
return errors
|
return warnings
|
||||||
|
|
||||||
def validate_reachability(self) -> list[ValidationError]:
|
def validate_reachability(self) -> list[ValidationError]:
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -3,3 +3,22 @@ Core tasks package for ThrillWiki.
|
|||||||
|
|
||||||
This package contains all Celery tasks for the core application.
|
This package contains all Celery tasks for the core application.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from apps.core.tasks.scheduled import (
|
||||||
|
cleanup_old_versions,
|
||||||
|
cleanup_orphaned_images,
|
||||||
|
data_retention_cleanup,
|
||||||
|
process_closing_entities,
|
||||||
|
process_expired_bans,
|
||||||
|
process_scheduled_deletions,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"process_scheduled_deletions",
|
||||||
|
"process_closing_entities",
|
||||||
|
"process_expired_bans",
|
||||||
|
"cleanup_orphaned_images",
|
||||||
|
"cleanup_old_versions",
|
||||||
|
"data_retention_cleanup",
|
||||||
|
]
|
||||||
|
|
||||||
|
|||||||
417
backend/apps/core/tasks/scheduled.py
Normal file
417
backend/apps/core/tasks/scheduled.py
Normal file
@@ -0,0 +1,417 @@
|
|||||||
|
"""
|
||||||
|
Scheduled Celery tasks for ThrillWiki.
|
||||||
|
|
||||||
|
These tasks are run on a schedule via Celery Beat for maintenance operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from celery import shared_task
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
from django.db import transaction
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
User = get_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.process_scheduled_deletions")
|
||||||
|
def process_scheduled_deletions() -> dict:
|
||||||
|
"""
|
||||||
|
Process scheduled account deletions.
|
||||||
|
|
||||||
|
Users who requested account deletion and whose grace period has expired
|
||||||
|
will have their accounts permanently deleted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts of processed, succeeded, and failed deletions
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import AccountDeletionRequest
|
||||||
|
|
||||||
|
logger.info("Starting scheduled account deletions processing")
|
||||||
|
|
||||||
|
cutoff_time = timezone.now()
|
||||||
|
processed = 0
|
||||||
|
succeeded = 0
|
||||||
|
failed = 0
|
||||||
|
failures = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get deletion requests that are past their scheduled time
|
||||||
|
pending_deletions = AccountDeletionRequest.objects.filter(
|
||||||
|
status="pending",
|
||||||
|
scheduled_deletion_at__lte=cutoff_time,
|
||||||
|
).select_related("user")
|
||||||
|
|
||||||
|
for request in pending_deletions:
|
||||||
|
processed += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
user = request.user
|
||||||
|
username = user.username
|
||||||
|
|
||||||
|
# Mark request as processing
|
||||||
|
request.status = "processing"
|
||||||
|
request.save()
|
||||||
|
|
||||||
|
# Anonymize user data (keep submissions)
|
||||||
|
user.username = f"deleted_{user.id}"
|
||||||
|
user.email = f"deleted_{user.id}@deleted.thrillwiki.com"
|
||||||
|
user.first_name = ""
|
||||||
|
user.last_name = ""
|
||||||
|
user.is_active = False
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
# Mark deletion as complete
|
||||||
|
request.status = "completed"
|
||||||
|
request.completed_at = timezone.now()
|
||||||
|
request.save()
|
||||||
|
|
||||||
|
succeeded += 1
|
||||||
|
logger.info(f"Successfully processed deletion for user {username}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
failed += 1
|
||||||
|
error_msg = f"User {request.user_id}: {str(e)}"
|
||||||
|
failures.append(error_msg)
|
||||||
|
capture_and_log(e, f"Process scheduled deletion for user {request.user_id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process scheduled deletions", source="task")
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"processed": processed,
|
||||||
|
"succeeded": succeeded,
|
||||||
|
"failed": failed,
|
||||||
|
"failures": failures[:10], # Limit failure list
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Completed scheduled deletions: {processed} processed, {succeeded} succeeded, {failed} failed"
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.process_closing_entities")
|
||||||
|
def process_closing_entities() -> dict:
|
||||||
|
"""
|
||||||
|
Process parks and rides that have reached their closing date.
|
||||||
|
|
||||||
|
Entities in CLOSING status with a closing_date in the past will be
|
||||||
|
transitioned to their post_closing_status (typically CLOSED or SBNO).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
from apps.parks.models import Park
|
||||||
|
from apps.rides.models import Ride
|
||||||
|
|
||||||
|
logger.info("Starting closing entities processing")
|
||||||
|
|
||||||
|
today = timezone.now().date()
|
||||||
|
results = {"parks": {"processed": 0, "succeeded": 0, "failed": 0}, "rides": {"processed": 0, "succeeded": 0, "failed": 0}}
|
||||||
|
|
||||||
|
# Get system user for automated transitions
|
||||||
|
try:
|
||||||
|
system_user = User.objects.get(username="system")
|
||||||
|
except User.DoesNotExist:
|
||||||
|
system_user = User.objects.filter(is_staff=True).first()
|
||||||
|
|
||||||
|
# Process parks
|
||||||
|
try:
|
||||||
|
closing_parks = Park.objects.filter(
|
||||||
|
status="CLOSING",
|
||||||
|
closing_date__lte=today,
|
||||||
|
)
|
||||||
|
|
||||||
|
for park in closing_parks:
|
||||||
|
results["parks"]["processed"] += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
# Transition to closed status
|
||||||
|
park.status = getattr(park, "post_closing_status", "CLOSED") or "CLOSED"
|
||||||
|
park.save(update_fields=["status", "updated_at"])
|
||||||
|
results["parks"]["succeeded"] += 1
|
||||||
|
logger.info(f"Transitioned park {park.name} to {park.status}")
|
||||||
|
except Exception as e:
|
||||||
|
results["parks"]["failed"] += 1
|
||||||
|
capture_and_log(e, f"Process closing park {park.id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process closing parks", source="task")
|
||||||
|
|
||||||
|
# Process rides (already handled by rides.check_overdue_closings, but included for completeness)
|
||||||
|
try:
|
||||||
|
closing_rides = Ride.objects.filter(
|
||||||
|
status="CLOSING",
|
||||||
|
closing_date__lte=today,
|
||||||
|
)
|
||||||
|
|
||||||
|
for ride in closing_rides:
|
||||||
|
results["rides"]["processed"] += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
if hasattr(ride, "apply_post_closing_status") and system_user:
|
||||||
|
ride.apply_post_closing_status(user=system_user)
|
||||||
|
else:
|
||||||
|
ride.status = getattr(ride, "post_closing_status", "CLOSED") or "CLOSED"
|
||||||
|
ride.save(update_fields=["status", "updated_at"])
|
||||||
|
results["rides"]["succeeded"] += 1
|
||||||
|
logger.info(f"Transitioned ride {ride.name} to {ride.status}")
|
||||||
|
except Exception as e:
|
||||||
|
results["rides"]["failed"] += 1
|
||||||
|
capture_and_log(e, f"Process closing ride {ride.id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process closing rides", source="task")
|
||||||
|
|
||||||
|
logger.info(f"Completed closing entities: Parks {results['parks']}, Rides {results['rides']}")
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.process_expired_bans")
|
||||||
|
def process_expired_bans() -> dict:
|
||||||
|
"""
|
||||||
|
Process expired user bans.
|
||||||
|
|
||||||
|
Users with temporary bans that have expired will have their ban lifted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import UserBan
|
||||||
|
|
||||||
|
logger.info("Starting expired bans processing")
|
||||||
|
|
||||||
|
now = timezone.now()
|
||||||
|
processed = 0
|
||||||
|
succeeded = 0
|
||||||
|
failed = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
expired_bans = UserBan.objects.filter(
|
||||||
|
is_active=True,
|
||||||
|
expires_at__isnull=False,
|
||||||
|
expires_at__lte=now,
|
||||||
|
).select_related("user")
|
||||||
|
|
||||||
|
for ban in expired_bans:
|
||||||
|
processed += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
ban.is_active = False
|
||||||
|
ban.save(update_fields=["is_active", "updated_at"])
|
||||||
|
|
||||||
|
# Reactivate user if this was their only active ban
|
||||||
|
active_bans = UserBan.objects.filter(user=ban.user, is_active=True).count()
|
||||||
|
if active_bans == 0 and not ban.user.is_active:
|
||||||
|
ban.user.is_active = True
|
||||||
|
ban.user.save(update_fields=["is_active"])
|
||||||
|
|
||||||
|
succeeded += 1
|
||||||
|
logger.info(f"Lifted expired ban for user {ban.user.username}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
failed += 1
|
||||||
|
capture_and_log(e, f"Process expired ban {ban.id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process expired bans", source="task")
|
||||||
|
# Model may not exist yet
|
||||||
|
if "UserBan" in str(e):
|
||||||
|
logger.info("UserBan model not found, skipping expired bans processing")
|
||||||
|
return {"skipped": True, "reason": "UserBan model not found"}
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"processed": processed,
|
||||||
|
"succeeded": succeeded,
|
||||||
|
"failed": failed,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Completed expired bans: {processed} processed, {succeeded} succeeded, {failed} failed")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.cleanup_orphaned_images")
|
||||||
|
def cleanup_orphaned_images() -> dict:
|
||||||
|
"""
|
||||||
|
Clean up orphaned images.
|
||||||
|
|
||||||
|
Images that are not associated with any entity and are older than the
|
||||||
|
retention period will be deleted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
logger.info("Starting orphaned images cleanup")
|
||||||
|
|
||||||
|
# This is a placeholder - actual implementation depends on image storage strategy
|
||||||
|
# For Cloudflare Images, we would need to:
|
||||||
|
# 1. Query all images from Cloudflare
|
||||||
|
# 2. Compare against images referenced in the database
|
||||||
|
# 3. Delete orphaned images
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"processed": 0,
|
||||||
|
"deleted": 0,
|
||||||
|
"skipped": 0,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
"note": "Placeholder implementation - configure based on image storage",
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Completed orphaned images cleanup")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.cleanup_old_versions")
|
||||||
|
def cleanup_old_versions() -> dict:
|
||||||
|
"""
|
||||||
|
Clean up old entity versions from pghistory.
|
||||||
|
|
||||||
|
Keeps the most recent N versions and deletes older ones to manage
|
||||||
|
database size.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
logger.info("Starting old versions cleanup")
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
MAX_VERSIONS_PER_ENTITY = 50
|
||||||
|
MIN_AGE_DAYS = 90 # Only delete versions older than this
|
||||||
|
|
||||||
|
deleted_count = 0
|
||||||
|
cutoff_date = timezone.now() - timedelta(days=MIN_AGE_DAYS)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# pghistory stores events in pgh_* tables
|
||||||
|
# We need to identify which models have history tracking
|
||||||
|
from django.db import connection
|
||||||
|
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
# Get list of pghistory event tables
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT table_name
|
||||||
|
FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name LIKE 'pgh_%event'
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
event_tables = [row[0] for row in cursor.fetchall()]
|
||||||
|
|
||||||
|
for table_name in event_tables:
|
||||||
|
try:
|
||||||
|
# Delete old versions beyond the retention limit
|
||||||
|
# This is a simplified approach - a more sophisticated one
|
||||||
|
# would keep the most recent N per entity
|
||||||
|
cursor.execute(
|
||||||
|
f"""
|
||||||
|
DELETE FROM {table_name}
|
||||||
|
WHERE pgh_created_at < %s
|
||||||
|
AND pgh_id NOT IN (
|
||||||
|
SELECT pgh_id FROM (
|
||||||
|
SELECT pgh_id,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY pgh_obj_id ORDER BY pgh_created_at DESC) as rn
|
||||||
|
FROM {table_name}
|
||||||
|
) ranked
|
||||||
|
WHERE rn <= %s
|
||||||
|
)
|
||||||
|
""",
|
||||||
|
[cutoff_date, MAX_VERSIONS_PER_ENTITY],
|
||||||
|
)
|
||||||
|
deleted_in_table = cursor.rowcount
|
||||||
|
deleted_count += deleted_in_table
|
||||||
|
if deleted_in_table > 0:
|
||||||
|
logger.info(f"Deleted {deleted_in_table} old versions from {table_name}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Error cleaning up {table_name}: {e}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Cleanup old versions", source="task")
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"deleted": deleted_count,
|
||||||
|
"cutoff_date": cutoff_date.isoformat(),
|
||||||
|
"max_versions_per_entity": MAX_VERSIONS_PER_ENTITY,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Completed old versions cleanup: {deleted_count} versions deleted")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.data_retention_cleanup")
|
||||||
|
def data_retention_cleanup() -> dict:
|
||||||
|
"""
|
||||||
|
Clean up data per retention policy (GDPR compliance).
|
||||||
|
|
||||||
|
Handles:
|
||||||
|
- Session cleanup
|
||||||
|
- Expired token cleanup
|
||||||
|
- Old audit log cleanup
|
||||||
|
- Temporary data cleanup
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
logger.info("Starting data retention cleanup")
|
||||||
|
|
||||||
|
results = {
|
||||||
|
"sessions": 0,
|
||||||
|
"tokens": 0,
|
||||||
|
"audit_logs": 0,
|
||||||
|
"temp_data": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
from django.contrib.sessions.models import Session
|
||||||
|
|
||||||
|
# Clean up expired sessions
|
||||||
|
expired_sessions = Session.objects.filter(expire_date__lt=timezone.now())
|
||||||
|
results["sessions"] = expired_sessions.count()
|
||||||
|
expired_sessions.delete()
|
||||||
|
logger.info(f"Deleted {results['sessions']} expired sessions")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Session cleanup error: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
from rest_framework_simplejwt.token_blacklist.models import OutstandingToken
|
||||||
|
|
||||||
|
# Clean up expired tokens (older than 30 days)
|
||||||
|
cutoff = timezone.now() - timedelta(days=30)
|
||||||
|
expired_tokens = OutstandingToken.objects.filter(expires_at__lt=cutoff)
|
||||||
|
results["tokens"] = expired_tokens.count()
|
||||||
|
expired_tokens.delete()
|
||||||
|
logger.info(f"Deleted {results['tokens']} expired tokens")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Token cleanup error: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
from apps.accounts.models import ProfileAuditLog
|
||||||
|
|
||||||
|
# Clean up old audit logs (older than 1 year)
|
||||||
|
cutoff = timezone.now() - timedelta(days=365)
|
||||||
|
old_logs = ProfileAuditLog.objects.filter(created_at__lt=cutoff)
|
||||||
|
results["audit_logs"] = old_logs.count()
|
||||||
|
old_logs.delete()
|
||||||
|
logger.info(f"Deleted {results['audit_logs']} old audit logs")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Audit log cleanup error: {e}")
|
||||||
|
|
||||||
|
result = {
|
||||||
|
**results,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Completed data retention cleanup: {result}")
|
||||||
|
return result
|
||||||
137
backend/apps/core/tests/test_permissions.py
Normal file
137
backend/apps/core/tests/test_permissions.py
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
"""
|
||||||
|
Tests for custom permissions, particularly IsAdminWithSecondFactor.
|
||||||
|
|
||||||
|
Tests that admin users must have MFA or Passkey configured before
|
||||||
|
accessing sensitive admin endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
from django.test import RequestFactory, TestCase
|
||||||
|
|
||||||
|
from apps.core.permissions import IsAdminWithSecondFactor
|
||||||
|
|
||||||
|
User = get_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
class TestIsAdminWithSecondFactor(TestCase):
|
||||||
|
"""Tests for IsAdminWithSecondFactor permission class."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
"""Set up test fixtures."""
|
||||||
|
self.factory = RequestFactory()
|
||||||
|
self.permission = IsAdminWithSecondFactor()
|
||||||
|
|
||||||
|
def _make_request(self, user=None):
|
||||||
|
"""Create a mock request with the given user."""
|
||||||
|
request = self.factory.get("/api/v1/admin/test/")
|
||||||
|
request.user = user if user else MagicMock(is_authenticated=False)
|
||||||
|
return request
|
||||||
|
|
||||||
|
def test_anonymous_user_denied(self):
|
||||||
|
"""Anonymous users should be denied access."""
|
||||||
|
request = self._make_request()
|
||||||
|
request.user.is_authenticated = False
|
||||||
|
|
||||||
|
self.assertFalse(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
def test_non_admin_user_denied(self):
|
||||||
|
"""Non-admin users should be denied access."""
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = False
|
||||||
|
user.is_staff = False
|
||||||
|
user.role = "USER"
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertFalse(self.permission.has_permission(request, None))
|
||||||
|
self.assertIn("admin privileges", self.permission.message)
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_admin_without_mfa_denied(self, mock_has_second_factor):
|
||||||
|
"""Admin without MFA or Passkey should be denied access."""
|
||||||
|
mock_has_second_factor.return_value = False
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = True
|
||||||
|
user.is_staff = True
|
||||||
|
user.role = "ADMIN"
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertFalse(self.permission.has_permission(request, None))
|
||||||
|
self.assertIn("MFA or Passkey", self.permission.message)
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_superuser_with_mfa_allowed(self, mock_has_second_factor):
|
||||||
|
"""Superuser with MFA configured should be allowed access."""
|
||||||
|
mock_has_second_factor.return_value = True
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = True
|
||||||
|
user.is_staff = True
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertTrue(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_staff_with_passkey_allowed(self, mock_has_second_factor):
|
||||||
|
"""Staff user with Passkey configured should be allowed access."""
|
||||||
|
mock_has_second_factor.return_value = True
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = False
|
||||||
|
user.is_staff = True
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertTrue(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_admin_role_with_mfa_allowed(self, mock_has_second_factor):
|
||||||
|
"""User with ADMIN role and MFA should be allowed access."""
|
||||||
|
mock_has_second_factor.return_value = True
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = False
|
||||||
|
user.is_staff = False
|
||||||
|
user.role = "ADMIN"
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertTrue(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
def test_has_second_factor_with_totp(self):
|
||||||
|
"""Test _has_second_factor detects TOTP authenticator."""
|
||||||
|
user = MagicMock()
|
||||||
|
|
||||||
|
with patch("apps.core.permissions.Authenticator") as MockAuth:
|
||||||
|
# Mock the queryset to return True for TOTP
|
||||||
|
mock_qs = MagicMock()
|
||||||
|
mock_qs.filter.return_value.exists.return_value = True
|
||||||
|
MockAuth.objects.filter.return_value = mock_qs
|
||||||
|
MockAuth.Type.TOTP = "totp"
|
||||||
|
MockAuth.Type.WEBAUTHN = "webauthn"
|
||||||
|
|
||||||
|
# Need to patch the import inside the method
|
||||||
|
with patch.dict("sys.modules", {"allauth.mfa.models": MagicMock(Authenticator=MockAuth)}):
|
||||||
|
result = self.permission._has_second_factor(user)
|
||||||
|
# This tests the exception path since import is mocked at module level
|
||||||
|
# The actual integration test would require a full database setup
|
||||||
|
|
||||||
|
def test_has_second_factor_import_error(self):
|
||||||
|
"""Test _has_second_factor handles ImportError gracefully."""
|
||||||
|
user = MagicMock()
|
||||||
|
|
||||||
|
with patch.dict("sys.modules", {"allauth.mfa.models": None}):
|
||||||
|
with patch("builtins.__import__", side_effect=ImportError):
|
||||||
|
# Should return False, not raise exception
|
||||||
|
result = self.permission._has_second_factor(user)
|
||||||
|
self.assertFalse(result)
|
||||||
@@ -55,3 +55,45 @@ def get_direct_upload_url(user_id=None):
|
|||||||
raise e
|
raise e
|
||||||
|
|
||||||
return result.get("result", {})
|
return result.get("result", {})
|
||||||
|
|
||||||
|
|
||||||
|
def delete_cloudflare_image(image_id: str) -> bool:
|
||||||
|
"""
|
||||||
|
Delete an image from Cloudflare Images.
|
||||||
|
|
||||||
|
Used to cleanup orphaned images when submissions are rejected or deleted.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
image_id: The Cloudflare image ID to delete.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if deletion succeeded, False otherwise.
|
||||||
|
"""
|
||||||
|
account_id = getattr(settings, "CLOUDFLARE_IMAGES_ACCOUNT_ID", None)
|
||||||
|
api_token = getattr(settings, "CLOUDFLARE_IMAGES_API_TOKEN", None)
|
||||||
|
|
||||||
|
if not account_id or not api_token:
|
||||||
|
logger.error("Cloudflare settings missing, cannot delete image %s", image_id)
|
||||||
|
return False
|
||||||
|
|
||||||
|
url = f"https://api.cloudflare.com/client/v4/accounts/{account_id}/images/v1/{image_id}"
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
"Authorization": f"Bearer {api_token}",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = requests.delete(url, headers=headers)
|
||||||
|
response.raise_for_status()
|
||||||
|
result = response.json()
|
||||||
|
|
||||||
|
if result.get("success"):
|
||||||
|
logger.info("Successfully deleted Cloudflare image: %s", image_id)
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
error_msg = result.get("errors", [{"message": "Unknown error"}])[0].get("message")
|
||||||
|
logger.warning("Failed to delete Cloudflare image %s: %s", image_id, error_msg)
|
||||||
|
return False
|
||||||
|
except requests.RequestException as e:
|
||||||
|
capture_and_log(e, f"Delete Cloudflare image {image_id}", source="service")
|
||||||
|
return False
|
||||||
|
|||||||
@@ -160,7 +160,7 @@ def error_validation(
|
|||||||
return custom_message
|
return custom_message
|
||||||
if field_name:
|
if field_name:
|
||||||
return f"Please check the {field_name} field and try again."
|
return f"Please check the {field_name} field and try again."
|
||||||
return "Please check the form and correct any errors."
|
return "Validation error. Please check the form and correct any errors."
|
||||||
|
|
||||||
|
|
||||||
def error_permission(
|
def error_permission(
|
||||||
@@ -400,6 +400,42 @@ def info_processing(
|
|||||||
return "Processing..."
|
return "Processing..."
|
||||||
|
|
||||||
|
|
||||||
|
def info_no_changes(
|
||||||
|
custom_message: str | None = None,
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Generate an info message when no changes were detected.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
custom_message: Optional custom message to use instead of default
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Formatted info message
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
>>> info_no_changes()
|
||||||
|
'No changes detected.'
|
||||||
|
"""
|
||||||
|
if custom_message:
|
||||||
|
return custom_message
|
||||||
|
return "No changes detected."
|
||||||
|
|
||||||
|
|
||||||
|
def warning_unsaved(
|
||||||
|
custom_message: str | None = None,
|
||||||
|
) -> str:
|
||||||
|
"""
|
||||||
|
Alias for warning_unsaved_changes for backward compatibility.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
custom_message: Optional custom message to use instead of default
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Formatted warning message
|
||||||
|
"""
|
||||||
|
return warning_unsaved_changes(custom_message)
|
||||||
|
|
||||||
|
|
||||||
def confirm_delete(
|
def confirm_delete(
|
||||||
model_name: str,
|
model_name: str,
|
||||||
object_name: str | None = None,
|
object_name: str | None = None,
|
||||||
|
|||||||
@@ -142,7 +142,7 @@ def get_og_image(
|
|||||||
try:
|
try:
|
||||||
first_photo = instance.photos.first()
|
first_photo = instance.photos.first()
|
||||||
if first_photo and hasattr(first_photo, "image"):
|
if first_photo and hasattr(first_photo, "image"):
|
||||||
return urljoin(base_url, first_photo.image.url)
|
return urljoin(base_url, first_photo.image.public_url)
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -1,50 +1,4 @@
|
|||||||
from django.apps import AppConfig
|
from django.apps import AppConfig
|
||||||
from django.db.models.signals import post_migrate
|
|
||||||
|
|
||||||
|
|
||||||
def create_photo_permissions(sender, **kwargs):
|
|
||||||
"""Create custom permissions for domain-specific photo models"""
|
|
||||||
from django.contrib.auth.models import Permission
|
|
||||||
from django.contrib.contenttypes.models import ContentType
|
|
||||||
|
|
||||||
from apps.parks.models import ParkPhoto
|
|
||||||
from apps.rides.models import RidePhoto
|
|
||||||
|
|
||||||
# Create permissions for ParkPhoto
|
|
||||||
park_photo_content_type = ContentType.objects.get_for_model(ParkPhoto)
|
|
||||||
Permission.objects.get_or_create(
|
|
||||||
codename="add_parkphoto",
|
|
||||||
name="Can add park photo",
|
|
||||||
content_type=park_photo_content_type,
|
|
||||||
)
|
|
||||||
Permission.objects.get_or_create(
|
|
||||||
codename="change_parkphoto",
|
|
||||||
name="Can change park photo",
|
|
||||||
content_type=park_photo_content_type,
|
|
||||||
)
|
|
||||||
Permission.objects.get_or_create(
|
|
||||||
codename="delete_parkphoto",
|
|
||||||
name="Can delete park photo",
|
|
||||||
content_type=park_photo_content_type,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create permissions for RidePhoto
|
|
||||||
ride_photo_content_type = ContentType.objects.get_for_model(RidePhoto)
|
|
||||||
Permission.objects.get_or_create(
|
|
||||||
codename="add_ridephoto",
|
|
||||||
name="Can add ride photo",
|
|
||||||
content_type=ride_photo_content_type,
|
|
||||||
)
|
|
||||||
Permission.objects.get_or_create(
|
|
||||||
codename="change_ridephoto",
|
|
||||||
name="Can change ride photo",
|
|
||||||
content_type=ride_photo_content_type,
|
|
||||||
)
|
|
||||||
Permission.objects.get_or_create(
|
|
||||||
codename="delete_ridephoto",
|
|
||||||
name="Can delete ride photo",
|
|
||||||
content_type=ride_photo_content_type,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MediaConfig(AppConfig):
|
class MediaConfig(AppConfig):
|
||||||
@@ -52,4 +6,7 @@ class MediaConfig(AppConfig):
|
|||||||
name = "apps.media"
|
name = "apps.media"
|
||||||
|
|
||||||
def ready(self):
|
def ready(self):
|
||||||
post_migrate.connect(create_photo_permissions, sender=self)
|
# Note: Django automatically creates add/change/delete/view permissions
|
||||||
|
# for all models, so no custom post_migrate handler is needed.
|
||||||
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
Django admin configuration for the Moderation application.
|
Django admin configuration for the Moderation application.
|
||||||
|
|
||||||
This module provides comprehensive admin interfaces for content moderation
|
This module provides comprehensive admin interfaces for content moderation
|
||||||
including edit submissions, photo submissions, and state transition logs.
|
including edit submissions and state transition logs.
|
||||||
Includes a custom moderation admin site for dedicated moderation workflows.
|
Includes a custom moderation admin site for dedicated moderation workflows.
|
||||||
|
|
||||||
Performance targets:
|
Performance targets:
|
||||||
@@ -18,7 +18,7 @@ from django.utils.html import format_html
|
|||||||
from django.utils.safestring import mark_safe
|
from django.utils.safestring import mark_safe
|
||||||
from django_fsm_log.models import StateLog
|
from django_fsm_log.models import StateLog
|
||||||
|
|
||||||
from .models import EditSubmission, PhotoSubmission
|
from .models import EditSubmission
|
||||||
|
|
||||||
|
|
||||||
class ModerationAdminSite(AdminSite):
|
class ModerationAdminSite(AdminSite):
|
||||||
@@ -52,13 +52,13 @@ class ModerationAdminSite(AdminSite):
|
|||||||
|
|
||||||
# Get pending counts
|
# Get pending counts
|
||||||
extra_context["pending_edits"] = EditSubmission.objects.filter(status="PENDING").count()
|
extra_context["pending_edits"] = EditSubmission.objects.filter(status="PENDING").count()
|
||||||
extra_context["pending_photos"] = PhotoSubmission.objects.filter(status="PENDING").count()
|
extra_context["pending_photos"] = EditSubmission.objects.filter(submission_type="PHOTO", status="PENDING").count()
|
||||||
|
|
||||||
# Get recent activity
|
# Get recent activity
|
||||||
extra_context["recent_edits"] = EditSubmission.objects.select_related("user", "handled_by").order_by(
|
extra_context["recent_edits"] = EditSubmission.objects.select_related("user", "handled_by").order_by(
|
||||||
"-created_at"
|
"-created_at"
|
||||||
)[:5]
|
)[:5]
|
||||||
extra_context["recent_photos"] = PhotoSubmission.objects.select_related("user", "handled_by").order_by(
|
extra_context["recent_photos"] = EditSubmission.objects.filter(submission_type="PHOTO").select_related("user", "handled_by").order_by(
|
||||||
"-created_at"
|
"-created_at"
|
||||||
)[:5]
|
)[:5]
|
||||||
|
|
||||||
@@ -307,198 +307,6 @@ class EditSubmissionAdmin(admin.ModelAdmin):
|
|||||||
return actions
|
return actions
|
||||||
|
|
||||||
|
|
||||||
class PhotoSubmissionAdmin(admin.ModelAdmin):
|
|
||||||
"""
|
|
||||||
Admin interface for photo submission moderation.
|
|
||||||
|
|
||||||
Provides photo submission management with:
|
|
||||||
- Image preview in list view
|
|
||||||
- Bulk approve/reject actions
|
|
||||||
- FSM-aware status handling
|
|
||||||
- User and content linking
|
|
||||||
|
|
||||||
Query optimizations:
|
|
||||||
- select_related: user, content_type, handled_by
|
|
||||||
"""
|
|
||||||
|
|
||||||
list_display = (
|
|
||||||
"id",
|
|
||||||
"user_link",
|
|
||||||
"content_type_display",
|
|
||||||
"content_link",
|
|
||||||
"photo_preview",
|
|
||||||
"status_badge",
|
|
||||||
"created_at",
|
|
||||||
"handled_by_link",
|
|
||||||
)
|
|
||||||
list_filter = ("status", "content_type", "created_at")
|
|
||||||
list_select_related = ["user", "content_type", "handled_by"]
|
|
||||||
search_fields = ("user__username", "caption", "notes", "object_id")
|
|
||||||
readonly_fields = (
|
|
||||||
"user",
|
|
||||||
"content_type",
|
|
||||||
"object_id",
|
|
||||||
"photo_preview",
|
|
||||||
"created_at",
|
|
||||||
)
|
|
||||||
list_per_page = 50
|
|
||||||
show_full_result_count = False
|
|
||||||
ordering = ("-created_at",)
|
|
||||||
date_hierarchy = "created_at"
|
|
||||||
|
|
||||||
fieldsets = (
|
|
||||||
(
|
|
||||||
"Submission Details",
|
|
||||||
{
|
|
||||||
"fields": ("user", "content_type", "object_id"),
|
|
||||||
"description": "Who submitted what.",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"Photo",
|
|
||||||
{
|
|
||||||
"fields": ("photo", "photo_preview", "caption"),
|
|
||||||
"description": "The submitted photo.",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"Status",
|
|
||||||
{
|
|
||||||
"fields": ("status", "handled_by", "notes"),
|
|
||||||
"description": "Current status and moderation notes.",
|
|
||||||
},
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"Metadata",
|
|
||||||
{
|
|
||||||
"fields": ("created_at",),
|
|
||||||
"classes": ("collapse",),
|
|
||||||
},
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
@admin.display(description="User")
|
|
||||||
def user_link(self, obj):
|
|
||||||
"""Display user as clickable link."""
|
|
||||||
if obj.user:
|
|
||||||
try:
|
|
||||||
url = reverse("admin:accounts_customuser_change", args=[obj.user.id])
|
|
||||||
return format_html('<a href="{}">{}</a>', url, obj.user.username)
|
|
||||||
except Exception:
|
|
||||||
return obj.user.username
|
|
||||||
return "-"
|
|
||||||
|
|
||||||
@admin.display(description="Type")
|
|
||||||
def content_type_display(self, obj):
|
|
||||||
"""Display content type in a readable format."""
|
|
||||||
if obj.content_type:
|
|
||||||
return f"{obj.content_type.app_label}.{obj.content_type.model}"
|
|
||||||
return "-"
|
|
||||||
|
|
||||||
@admin.display(description="Content")
|
|
||||||
def content_link(self, obj):
|
|
||||||
"""Display content object as clickable link."""
|
|
||||||
try:
|
|
||||||
content_obj = obj.content_object
|
|
||||||
if content_obj:
|
|
||||||
if hasattr(content_obj, "get_absolute_url"):
|
|
||||||
url = content_obj.get_absolute_url()
|
|
||||||
return format_html('<a href="{}">{}</a>', url, str(content_obj)[:30])
|
|
||||||
return str(content_obj)[:30]
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
return format_html('<span style="color: red;">Not found</span>')
|
|
||||||
|
|
||||||
@admin.display(description="Preview")
|
|
||||||
def photo_preview(self, obj):
|
|
||||||
"""Display photo preview thumbnail."""
|
|
||||||
if obj.photo:
|
|
||||||
return format_html(
|
|
||||||
'<img src="{}" style="max-height: 80px; max-width: 150px; '
|
|
||||||
'border-radius: 4px; object-fit: cover;" loading="lazy" />',
|
|
||||||
obj.photo.url,
|
|
||||||
)
|
|
||||||
return format_html('<span style="color: gray;">No photo</span>')
|
|
||||||
|
|
||||||
@admin.display(description="Status")
|
|
||||||
def status_badge(self, obj):
|
|
||||||
"""Display status with color-coded badge."""
|
|
||||||
colors = {
|
|
||||||
"PENDING": "orange",
|
|
||||||
"APPROVED": "green",
|
|
||||||
"REJECTED": "red",
|
|
||||||
}
|
|
||||||
color = colors.get(obj.status, "gray")
|
|
||||||
return format_html(
|
|
||||||
'<span style="background-color: {}; color: white; padding: 2px 8px; '
|
|
||||||
'border-radius: 4px; font-size: 11px;">{}</span>',
|
|
||||||
color,
|
|
||||||
obj.status,
|
|
||||||
)
|
|
||||||
|
|
||||||
@admin.display(description="Handled By")
|
|
||||||
def handled_by_link(self, obj):
|
|
||||||
"""Display handler as clickable link."""
|
|
||||||
if obj.handled_by:
|
|
||||||
try:
|
|
||||||
url = reverse("admin:accounts_customuser_change", args=[obj.handled_by.id])
|
|
||||||
return format_html('<a href="{}">{}</a>', url, obj.handled_by.username)
|
|
||||||
except Exception:
|
|
||||||
return obj.handled_by.username
|
|
||||||
return "-"
|
|
||||||
|
|
||||||
def save_model(self, request, obj, form, change):
|
|
||||||
"""Handle FSM transitions on status change."""
|
|
||||||
if "status" in form.changed_data:
|
|
||||||
try:
|
|
||||||
if obj.status == "APPROVED":
|
|
||||||
obj.approve(request.user, obj.notes)
|
|
||||||
elif obj.status == "REJECTED":
|
|
||||||
obj.reject(request.user, obj.notes)
|
|
||||||
except Exception as e:
|
|
||||||
messages.error(request, f"Status transition failed: {str(e)}")
|
|
||||||
return
|
|
||||||
super().save_model(request, obj, form, change)
|
|
||||||
|
|
||||||
@admin.action(description="Approve selected photos")
|
|
||||||
def bulk_approve(self, request, queryset):
|
|
||||||
"""Approve all selected pending photo submissions."""
|
|
||||||
count = 0
|
|
||||||
for submission in queryset.filter(status="PENDING"):
|
|
||||||
try:
|
|
||||||
submission.approve(request.user, "Bulk approved")
|
|
||||||
count += 1
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
self.message_user(request, f"Approved {count} photo submissions.")
|
|
||||||
|
|
||||||
@admin.action(description="Reject selected photos")
|
|
||||||
def bulk_reject(self, request, queryset):
|
|
||||||
"""Reject all selected pending photo submissions."""
|
|
||||||
count = 0
|
|
||||||
for submission in queryset.filter(status="PENDING"):
|
|
||||||
try:
|
|
||||||
submission.reject(request.user, "Bulk rejected")
|
|
||||||
count += 1
|
|
||||||
except Exception:
|
|
||||||
pass
|
|
||||||
self.message_user(request, f"Rejected {count} photo submissions.")
|
|
||||||
|
|
||||||
def get_actions(self, request):
|
|
||||||
"""Add moderation actions."""
|
|
||||||
actions = super().get_actions(request)
|
|
||||||
actions["bulk_approve"] = (
|
|
||||||
self.bulk_approve,
|
|
||||||
"bulk_approve",
|
|
||||||
"Approve selected photos",
|
|
||||||
)
|
|
||||||
actions["bulk_reject"] = (
|
|
||||||
self.bulk_reject,
|
|
||||||
"bulk_reject",
|
|
||||||
"Reject selected photos",
|
|
||||||
)
|
|
||||||
return actions
|
|
||||||
|
|
||||||
|
|
||||||
class StateLogAdmin(admin.ModelAdmin):
|
class StateLogAdmin(admin.ModelAdmin):
|
||||||
"""
|
"""
|
||||||
@@ -754,7 +562,6 @@ class HistoryEventAdmin(admin.ModelAdmin):
|
|||||||
|
|
||||||
# Register with moderation site only
|
# Register with moderation site only
|
||||||
moderation_site.register(EditSubmission, EditSubmissionAdmin)
|
moderation_site.register(EditSubmission, EditSubmissionAdmin)
|
||||||
moderation_site.register(PhotoSubmission, PhotoSubmissionAdmin)
|
|
||||||
moderation_site.register(StateLog, StateLogAdmin)
|
moderation_site.register(StateLog, StateLogAdmin)
|
||||||
|
|
||||||
# Note: Concrete pghistory event models would be registered as they are created
|
# Note: Concrete pghistory event models would be registered as they are created
|
||||||
|
|||||||
@@ -25,7 +25,6 @@ class ModerationConfig(AppConfig):
|
|||||||
EditSubmission,
|
EditSubmission,
|
||||||
ModerationQueue,
|
ModerationQueue,
|
||||||
ModerationReport,
|
ModerationReport,
|
||||||
PhotoSubmission,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Apply FSM to all models with their respective choice groups
|
# Apply FSM to all models with their respective choice groups
|
||||||
@@ -53,12 +52,6 @@ class ModerationConfig(AppConfig):
|
|||||||
choice_group="bulk_operation_statuses",
|
choice_group="bulk_operation_statuses",
|
||||||
domain="moderation",
|
domain="moderation",
|
||||||
)
|
)
|
||||||
apply_state_machine(
|
|
||||||
PhotoSubmission,
|
|
||||||
field_name="status",
|
|
||||||
choice_group="photo_submission_statuses",
|
|
||||||
domain="moderation",
|
|
||||||
)
|
|
||||||
|
|
||||||
def _register_callbacks(self):
|
def _register_callbacks(self):
|
||||||
"""Register FSM transition callbacks for moderation models."""
|
"""Register FSM transition callbacks for moderation models."""
|
||||||
@@ -78,7 +71,6 @@ class ModerationConfig(AppConfig):
|
|||||||
EditSubmission,
|
EditSubmission,
|
||||||
ModerationQueue,
|
ModerationQueue,
|
||||||
ModerationReport,
|
ModerationReport,
|
||||||
PhotoSubmission,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# EditSubmission callbacks (transitions from CLAIMED state)
|
# EditSubmission callbacks (transitions from CLAIMED state)
|
||||||
@@ -88,14 +80,6 @@ class ModerationConfig(AppConfig):
|
|||||||
register_callback(EditSubmission, "status", "CLAIMED", "REJECTED", ModerationCacheInvalidation())
|
register_callback(EditSubmission, "status", "CLAIMED", "REJECTED", ModerationCacheInvalidation())
|
||||||
register_callback(EditSubmission, "status", "CLAIMED", "ESCALATED", SubmissionEscalatedNotification())
|
register_callback(EditSubmission, "status", "CLAIMED", "ESCALATED", SubmissionEscalatedNotification())
|
||||||
register_callback(EditSubmission, "status", "CLAIMED", "ESCALATED", ModerationCacheInvalidation())
|
register_callback(EditSubmission, "status", "CLAIMED", "ESCALATED", ModerationCacheInvalidation())
|
||||||
|
|
||||||
# PhotoSubmission callbacks (transitions from CLAIMED state)
|
|
||||||
register_callback(PhotoSubmission, "status", "CLAIMED", "APPROVED", SubmissionApprovedNotification())
|
|
||||||
register_callback(PhotoSubmission, "status", "CLAIMED", "APPROVED", ModerationCacheInvalidation())
|
|
||||||
register_callback(PhotoSubmission, "status", "CLAIMED", "REJECTED", SubmissionRejectedNotification())
|
|
||||||
register_callback(PhotoSubmission, "status", "CLAIMED", "REJECTED", ModerationCacheInvalidation())
|
|
||||||
register_callback(PhotoSubmission, "status", "CLAIMED", "ESCALATED", SubmissionEscalatedNotification())
|
|
||||||
|
|
||||||
# ModerationReport callbacks
|
# ModerationReport callbacks
|
||||||
register_callback(ModerationReport, "status", "*", "*", ModerationNotificationCallback())
|
register_callback(ModerationReport, "status", "*", "*", ModerationNotificationCallback())
|
||||||
register_callback(ModerationReport, "status", "*", "*", ModerationCacheInvalidation())
|
register_callback(ModerationReport, "status", "*", "*", ModerationCacheInvalidation())
|
||||||
|
|||||||
@@ -124,6 +124,20 @@ SUBMISSION_TYPES = [
|
|||||||
},
|
},
|
||||||
category=ChoiceCategory.CLASSIFICATION,
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
),
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="PHOTO",
|
||||||
|
label="Photo Submission",
|
||||||
|
description="Photo upload for existing content",
|
||||||
|
metadata={
|
||||||
|
"color": "purple",
|
||||||
|
"icon": "photograph",
|
||||||
|
"css_class": "bg-purple-100 text-purple-800 border-purple-200",
|
||||||
|
"sort_order": 3,
|
||||||
|
"requires_existing_object": True,
|
||||||
|
"complexity_level": "low",
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
@@ -934,6 +948,122 @@ BULK_OPERATION_TYPES = [
|
|||||||
# PhotoSubmission uses the same STATUS_CHOICES as EditSubmission
|
# PhotoSubmission uses the same STATUS_CHOICES as EditSubmission
|
||||||
PHOTO_SUBMISSION_STATUSES = EDIT_SUBMISSION_STATUSES
|
PHOTO_SUBMISSION_STATUSES = EDIT_SUBMISSION_STATUSES
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# ModerationAuditLog Action Choices
|
||||||
|
# ============================================================================
|
||||||
|
|
||||||
|
MODERATION_AUDIT_ACTIONS = [
|
||||||
|
RichChoice(
|
||||||
|
value="approved",
|
||||||
|
label="Approved",
|
||||||
|
description="Submission was approved by moderator",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "check-circle",
|
||||||
|
"css_class": "bg-green-100 text-green-800",
|
||||||
|
"sort_order": 1,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="rejected",
|
||||||
|
label="Rejected",
|
||||||
|
description="Submission was rejected by moderator",
|
||||||
|
metadata={
|
||||||
|
"color": "red",
|
||||||
|
"icon": "x-circle",
|
||||||
|
"css_class": "bg-red-100 text-red-800",
|
||||||
|
"sort_order": 2,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="claimed",
|
||||||
|
label="Claimed",
|
||||||
|
description="Submission was claimed by moderator",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "user-check",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800",
|
||||||
|
"sort_order": 3,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="unclaimed",
|
||||||
|
label="Unclaimed",
|
||||||
|
description="Submission was released by moderator",
|
||||||
|
metadata={
|
||||||
|
"color": "gray",
|
||||||
|
"icon": "user-minus",
|
||||||
|
"css_class": "bg-gray-100 text-gray-800",
|
||||||
|
"sort_order": 4,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="escalated",
|
||||||
|
label="Escalated",
|
||||||
|
description="Submission was escalated for higher-level review",
|
||||||
|
metadata={
|
||||||
|
"color": "purple",
|
||||||
|
"icon": "arrow-up",
|
||||||
|
"css_class": "bg-purple-100 text-purple-800",
|
||||||
|
"sort_order": 5,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="converted_to_edit",
|
||||||
|
label="Converted to Edit",
|
||||||
|
description="Photo submission was converted to an edit submission",
|
||||||
|
metadata={
|
||||||
|
"color": "indigo",
|
||||||
|
"icon": "refresh",
|
||||||
|
"css_class": "bg-indigo-100 text-indigo-800",
|
||||||
|
"sort_order": 6,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="status_changed",
|
||||||
|
label="Status Changed",
|
||||||
|
description="Submission status was changed",
|
||||||
|
metadata={
|
||||||
|
"color": "yellow",
|
||||||
|
"icon": "refresh-cw",
|
||||||
|
"css_class": "bg-yellow-100 text-yellow-800",
|
||||||
|
"sort_order": 7,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="notes_added",
|
||||||
|
label="Notes Added",
|
||||||
|
description="Moderator notes were added to submission",
|
||||||
|
metadata={
|
||||||
|
"color": "blue",
|
||||||
|
"icon": "edit",
|
||||||
|
"css_class": "bg-blue-100 text-blue-800",
|
||||||
|
"sort_order": 8,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
RichChoice(
|
||||||
|
value="auto_approved",
|
||||||
|
label="Auto Approved",
|
||||||
|
description="Submission was auto-approved by the system",
|
||||||
|
metadata={
|
||||||
|
"color": "green",
|
||||||
|
"icon": "zap",
|
||||||
|
"css_class": "bg-green-100 text-green-800",
|
||||||
|
"sort_order": 9,
|
||||||
|
"is_system_action": True,
|
||||||
|
},
|
||||||
|
category=ChoiceCategory.CLASSIFICATION,
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Choice Registration
|
# Choice Registration
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
@@ -958,3 +1088,6 @@ register_choices("bulk_operation_types", BULK_OPERATION_TYPES, "moderation", "Bu
|
|||||||
register_choices(
|
register_choices(
|
||||||
"photo_submission_statuses", PHOTO_SUBMISSION_STATUSES, "moderation", "Photo submission status options"
|
"photo_submission_statuses", PHOTO_SUBMISSION_STATUSES, "moderation", "Photo submission status options"
|
||||||
)
|
)
|
||||||
|
register_choices(
|
||||||
|
"moderation_audit_actions", MODERATION_AUDIT_ACTIONS, "moderation", "Moderation audit log action types"
|
||||||
|
)
|
||||||
|
|||||||
@@ -27,12 +27,10 @@ User = get_user_model()
|
|||||||
class ModerationReportFilter(django_filters.FilterSet):
|
class ModerationReportFilter(django_filters.FilterSet):
|
||||||
"""Filter for ModerationReport model."""
|
"""Filter for ModerationReport model."""
|
||||||
|
|
||||||
# Status filters
|
# Status filters - use method filter for case-insensitive matching
|
||||||
status = django_filters.ChoiceFilter(
|
status = django_filters.CharFilter(
|
||||||
choices=lambda: [
|
method="filter_status",
|
||||||
(choice.value, choice.label) for choice in get_choices("moderation_report_statuses", "moderation")
|
help_text="Filter by report status (case-insensitive)",
|
||||||
],
|
|
||||||
help_text="Filter by report status",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Priority filters
|
# Priority filters
|
||||||
@@ -144,6 +142,19 @@ class ModerationReportFilter(django_filters.FilterSet):
|
|||||||
return queryset.exclude(resolution_action__isnull=True, resolution_action="")
|
return queryset.exclude(resolution_action__isnull=True, resolution_action="")
|
||||||
return queryset.filter(Q(resolution_action__isnull=True) | Q(resolution_action=""))
|
return queryset.filter(Q(resolution_action__isnull=True) | Q(resolution_action=""))
|
||||||
|
|
||||||
|
def filter_status(self, queryset, name, value):
|
||||||
|
"""Filter by status with case-insensitive matching."""
|
||||||
|
if not value:
|
||||||
|
return queryset
|
||||||
|
# Normalize to uppercase for matching against RichChoice values
|
||||||
|
normalized_value = value.upper()
|
||||||
|
# Validate against registered choices
|
||||||
|
valid_values = {choice.value for choice in get_choices("moderation_report_statuses", "moderation")}
|
||||||
|
if normalized_value in valid_values:
|
||||||
|
return queryset.filter(status=normalized_value)
|
||||||
|
# If not valid, return empty queryset (invalid filter value)
|
||||||
|
return queryset.none()
|
||||||
|
|
||||||
|
|
||||||
class ModerationQueueFilter(django_filters.FilterSet):
|
class ModerationQueueFilter(django_filters.FilterSet):
|
||||||
"""Filter for ModerationQueue model."""
|
"""Filter for ModerationQueue model."""
|
||||||
|
|||||||
@@ -0,0 +1,92 @@
|
|||||||
|
"""
|
||||||
|
Management command to expire stale claims on submissions.
|
||||||
|
|
||||||
|
This command can be run manually or via cron as an alternative to the Celery
|
||||||
|
scheduled task when Celery is not available.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python manage.py expire_stale_claims
|
||||||
|
python manage.py expire_stale_claims --minutes=10 # Custom timeout
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from apps.moderation.tasks import expire_stale_claims, DEFAULT_LOCK_DURATION_MINUTES
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
help = "Release stale claims on submissions that have exceeded the lock timeout"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"--minutes",
|
||||||
|
type=int,
|
||||||
|
default=DEFAULT_LOCK_DURATION_MINUTES,
|
||||||
|
help=f"Minutes after which a claim is considered stale (default: {DEFAULT_LOCK_DURATION_MINUTES})",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--dry-run",
|
||||||
|
action="store_true",
|
||||||
|
help="Show what would be released without actually releasing",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
from datetime import timedelta
|
||||||
|
from django.utils import timezone
|
||||||
|
from apps.moderation.models import EditSubmission
|
||||||
|
|
||||||
|
minutes = options["minutes"]
|
||||||
|
dry_run = options["dry_run"]
|
||||||
|
cutoff_time = timezone.now() - timedelta(minutes=minutes)
|
||||||
|
|
||||||
|
self.stdout.write(f"Looking for claims older than {minutes} minutes...")
|
||||||
|
self.stdout.write(f"Cutoff time: {cutoff_time.isoformat()}")
|
||||||
|
|
||||||
|
# Find stale claims
|
||||||
|
stale_edit = EditSubmission.objects.filter(
|
||||||
|
status="CLAIMED",
|
||||||
|
claimed_at__lt=cutoff_time,
|
||||||
|
).select_related("claimed_by")
|
||||||
|
# Also find PHOTO type EditSubmissions
|
||||||
|
stale_photo = EditSubmission.objects.filter(
|
||||||
|
submission_type="PHOTO",
|
||||||
|
status="CLAIMED",
|
||||||
|
claimed_at__lt=cutoff_time,
|
||||||
|
).select_related("claimed_by")
|
||||||
|
|
||||||
|
stale_edit_count = stale_edit.count()
|
||||||
|
stale_photo_count = stale_photo.count()
|
||||||
|
|
||||||
|
if stale_edit_count == 0 and stale_photo_count == 0:
|
||||||
|
self.stdout.write(self.style.SUCCESS("No stale claims found."))
|
||||||
|
return
|
||||||
|
|
||||||
|
self.stdout.write(f"Found {stale_edit_count} stale EditSubmission claims:")
|
||||||
|
for sub in stale_edit:
|
||||||
|
self.stdout.write(
|
||||||
|
f" - ID {sub.id}: claimed by {sub.claimed_by} at {sub.claimed_at}"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.stdout.write(f"Found {stale_photo_count} stale PHOTO submission claims:")
|
||||||
|
for sub in stale_photo:
|
||||||
|
self.stdout.write(
|
||||||
|
f" - ID {sub.id}: claimed by {sub.claimed_by} at {sub.claimed_at}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if dry_run:
|
||||||
|
self.stdout.write(self.style.WARNING("\n--dry-run: No changes made."))
|
||||||
|
return
|
||||||
|
|
||||||
|
# Run the actual expiration task
|
||||||
|
result = expire_stale_claims(lock_duration_minutes=minutes)
|
||||||
|
|
||||||
|
self.stdout.write(self.style.SUCCESS("\nExpiration complete:"))
|
||||||
|
self.stdout.write(
|
||||||
|
f" EditSubmissions: {result['edit_submissions']['released']} released, "
|
||||||
|
f"{result['edit_submissions']['failed']} failed"
|
||||||
|
)
|
||||||
|
|
||||||
|
if result["failures"]:
|
||||||
|
self.stdout.write(self.style.ERROR("\nFailures:"))
|
||||||
|
for failure in result["failures"]:
|
||||||
|
self.stdout.write(f" - {failure}")
|
||||||
@@ -5,7 +5,7 @@ from django.contrib.contenttypes.models import ContentType
|
|||||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
from apps.moderation.models import EditSubmission, PhotoSubmission
|
from apps.moderation.models import EditSubmission
|
||||||
from apps.parks.models import Park
|
from apps.parks.models import Park
|
||||||
from apps.rides.models import Ride
|
from apps.rides.models import Ride
|
||||||
|
|
||||||
@@ -218,40 +218,38 @@ class Command(BaseCommand):
|
|||||||
status="PENDING",
|
status="PENDING",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create PhotoSubmissions with detailed captions
|
# Create PHOTO submissions using EditSubmission with submission_type=PHOTO
|
||||||
|
|
||||||
# Park photo submission
|
# Park photo submission
|
||||||
image_data = (
|
EditSubmission.objects.create(
|
||||||
b"GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;"
|
|
||||||
)
|
|
||||||
dummy_image = SimpleUploadedFile("park_entrance.gif", image_data, content_type="image/gif")
|
|
||||||
|
|
||||||
PhotoSubmission.objects.create(
|
|
||||||
user=user,
|
user=user,
|
||||||
content_type=park_ct,
|
content_type=park_ct,
|
||||||
object_id=test_park.id,
|
object_id=test_park.id,
|
||||||
photo=dummy_image,
|
submission_type="PHOTO",
|
||||||
|
changes={}, # No field changes for photos
|
||||||
caption=(
|
caption=(
|
||||||
"Main entrance plaza of Test Park showing the newly installed digital display board "
|
"Main entrance plaza of Test Park showing the newly installed digital display board "
|
||||||
"and renovated ticketing area. Photo taken during morning park opening."
|
"and renovated ticketing area. Photo taken during morning park opening."
|
||||||
),
|
),
|
||||||
date_taken=date(2024, 1, 15),
|
date_taken=date(2024, 1, 15),
|
||||||
status="PENDING",
|
status="PENDING",
|
||||||
|
reason="Photo of park entrance",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Ride photo submission
|
# Ride photo submission
|
||||||
dummy_image2 = SimpleUploadedFile("coaster_track.gif", image_data, content_type="image/gif")
|
EditSubmission.objects.create(
|
||||||
PhotoSubmission.objects.create(
|
|
||||||
user=user,
|
user=user,
|
||||||
content_type=ride_ct,
|
content_type=ride_ct,
|
||||||
object_id=test_ride.id,
|
object_id=test_ride.id,
|
||||||
photo=dummy_image2,
|
submission_type="PHOTO",
|
||||||
|
changes={}, # No field changes for photos
|
||||||
caption=(
|
caption=(
|
||||||
"Test Coaster's first drop and loop element showing the new paint scheme. "
|
"Test Coaster's first drop and loop element showing the new paint scheme. "
|
||||||
"Photo taken from the guest pathway near Station Alpha."
|
"Photo taken from the guest pathway near Station Alpha."
|
||||||
),
|
),
|
||||||
date_taken=date(2024, 1, 20),
|
date_taken=date(2024, 1, 20),
|
||||||
status="PENDING",
|
status="PENDING",
|
||||||
|
reason="Photo of ride",
|
||||||
)
|
)
|
||||||
|
|
||||||
self.stdout.write(self.style.SUCCESS("Successfully seeded test submissions"))
|
self.stdout.write(self.style.SUCCESS("Successfully seeded test submissions"))
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ from apps.moderation.models import (
|
|||||||
EditSubmission,
|
EditSubmission,
|
||||||
ModerationQueue,
|
ModerationQueue,
|
||||||
ModerationReport,
|
ModerationReport,
|
||||||
PhotoSubmission,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -28,8 +27,7 @@ class Command(BaseCommand):
|
|||||||
type=str,
|
type=str,
|
||||||
help=(
|
help=(
|
||||||
"Validate only specific model "
|
"Validate only specific model "
|
||||||
"(editsubmission, moderationreport, moderationqueue, "
|
"(editsubmission, moderationreport, moderationqueue, bulkoperation)"
|
||||||
"bulkoperation, photosubmission)"
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
@@ -65,11 +63,7 @@ class Command(BaseCommand):
|
|||||||
"bulk_operation_statuses",
|
"bulk_operation_statuses",
|
||||||
"moderation",
|
"moderation",
|
||||||
),
|
),
|
||||||
"photosubmission": (
|
# Note: PhotoSubmission removed - photos now handled via EditSubmission
|
||||||
PhotoSubmission,
|
|
||||||
"photo_submission_statuses",
|
|
||||||
"moderation",
|
|
||||||
),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Filter by model name if specified
|
# Filter by model name if specified
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user