Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git
Synced 2026-02-05 13:55:19 -05:00

Compare commits: nuxt...28c9ec56da (6 commits)
Commits:
  28c9ec56da
  3ec5a4857d
  4da7e52fb0
  b80654952d
  2b7bb4dfaa
  a801813dcf
@@ -11,7 +11,7 @@ class Migration(migrations.Migration):

     dependencies = [
         ("accounts", "0014_remove_toplist_user_remove_toplistitem_top_list_and_more"),
-        ("pghistory", "0007_auto_20250421_0444"),
+        ("pghistory", "0006_delete_aggregateevent"),
     ]

     operations = [
@@ -0,0 +1,41 @@
# Generated by Django 5.2.9 on 2026-01-07 01:23

import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('accounts', '0015_loginhistory_loginhistoryevent_and_more'),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name='emailverification',
            name='insert_insert',
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name='emailverification',
            name='update_update',
        ),
        migrations.AddField(
            model_name='emailverification',
            name='updated_at',
            field=models.DateTimeField(auto_now=True, help_text='When this verification was last updated'),
        ),
        migrations.AddField(
            model_name='emailverificationevent',
            name='updated_at',
            field=models.DateTimeField(auto_now=True, help_text='When this verification was last updated'),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name='emailverification',
            trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "accounts_emailverificationevent" ("created_at", "id", "last_sent", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "token", "updated_at", "user_id") VALUES (NEW."created_at", NEW."id", NEW."last_sent", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."token", NEW."updated_at", NEW."user_id"); RETURN NULL;', hash='53c568e932b1b55a3c79e79220e6d6f269458003', operation='INSERT', pgid='pgtrigger_insert_insert_53748', table='accounts_emailverification', when='AFTER')),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name='emailverification',
            trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "accounts_emailverificationevent" ("created_at", "id", "last_sent", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "token", "updated_at", "user_id") VALUES (NEW."created_at", NEW."id", NEW."last_sent", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."token", NEW."updated_at", NEW."user_id"); RETURN NULL;', hash='8b45a9a0a1810564cb46c098552ab4ec7920daeb', operation='UPDATE', pgid='pgtrigger_update_update_7a2a8', table='accounts_emailverification', when='AFTER')),
        ),
    ]
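For context, a migration like this is exactly what django-pghistory emits when a tracked model gains a column: the event-table triggers hard-code the column list, so adding updated_at forces a drop-and-recreate of both triggers. A minimal sketch of the model side, assuming pghistory 3.x-style event classes (the model definition is not part of this diff; field names are inferred from the trigger SQL above):

import pghistory
from django.conf import settings
from django.db import models


@pghistory.track(pghistory.InsertEvent(), pghistory.UpdateEvent())
class EmailVerification(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    token = models.CharField(max_length=64)
    last_sent = models.DateTimeField(null=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    # The new column: once tracked, the snapshot INSERT inside each trigger
    # must list it, hence the RemoveTrigger + AddTrigger pairs above.
    updated_at = models.DateTimeField(auto_now=True, help_text="When this verification was last updated")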
@@ -110,13 +110,20 @@ urlpatterns = [
     path("profile/avatar/upload/", views.upload_avatar, name="upload_avatar"),
     path("profile/avatar/save/", views.save_avatar_image, name="save_avatar_image"),
     path("profile/avatar/delete/", views.delete_avatar, name="delete_avatar"),
+    # User permissions endpoint
+    path("permissions/", views.get_user_permissions, name="get_user_permissions"),
     # Login history endpoint
     path("login-history/", views.get_login_history, name="get_login_history"),
+    # Email change cancellation endpoint
+    path("email-change/cancel/", views.cancel_email_change, name="cancel_email_change"),
     # Magic Link (Login by Code) endpoints
     path("magic-link/request/", views_magic_link.request_magic_link, name="request_magic_link"),
     path("magic-link/verify/", views_magic_link.verify_magic_link, name="verify_magic_link"),
     # Public Profile
     path("profiles/<str:username>/", views.get_public_user_profile, name="get_public_user_profile"),
+    # Bulk lookup endpoints
+    path("profiles/bulk/", views.bulk_get_profiles, name="bulk_get_profiles"),
+    path("users/bulk/", views.get_users_with_emails, name="get_users_with_emails"),
     # ViewSet routes
     path("", include(router.urls)),
 ]
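The new routes are easiest to see from the client side. A rough sketch of how a frontend might call them, assuming this urlconf is mounted at /api/v1/auth/ and bearer-token auth (neither the mount point nor the host is visible in this diff):

import requests

BASE = "https://thrillwiki.example/api/v1/auth"  # hypothetical mount point
headers = {"Authorization": "Bearer <access-token>"}

# New: fetch the caller's management permissions
perms = requests.get(f"{BASE}/permissions/", headers=headers).json()

# New: cancel a pending email change (POST, idempotent per the view below)
cancel = requests.post(f"{BASE}/email-change/cancel/", headers=headers).json()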
@@ -826,6 +826,63 @@ def check_user_deletion_eligibility(request, user_id):
 # === USER PROFILE ENDPOINTS ===


+@extend_schema(
+    operation_id="get_user_permissions",
+    summary="Get current user's management permissions",
+    description="Get the authenticated user's management permissions including role information.",
+    responses={
+        200: {
+            "description": "User permissions",
+            "example": {
+                "user_id": "uuid",
+                "is_superuser": True,
+                "is_staff": True,
+                "is_moderator": False,
+                "roles": ["admin"],
+                "permissions": ["can_moderate", "can_manage_users"],
+            },
+        },
+        401: {
+            "description": "Authentication required",
+            "example": {"detail": "Authentication credentials were not provided."},
+        },
+    },
+    tags=["User Profile"],
+)
+@api_view(["GET"])
+@permission_classes([IsAuthenticated])
+def get_user_permissions(request):
+    """Get the authenticated user's management permissions."""
+    user = request.user
+    profile = getattr(user, "profile", None)
+
+    # Get roles from profile if it exists
+    roles = []
+    if profile:
+        if hasattr(profile, "role") and profile.role:
+            roles.append(profile.role)
+    if user.is_superuser:
+        roles.append("admin")
+    if user.is_staff:
+        roles.append("staff")
+
+    # Build permissions list based on flags
+    permissions = []
+    if user.is_superuser or user.is_staff:
+        permissions.extend(["can_moderate", "can_manage_users", "can_view_admin"])
+    elif profile and getattr(profile, "is_moderator", False):
+        permissions.append("can_moderate")
+
+    return Response({
+        "user_id": str(user.id),
+        "is_superuser": user.is_superuser,
+        "is_staff": user.is_staff,
+        "is_moderator": getattr(profile, "is_moderator", False) if profile else False,
+        "roles": list(set(roles)),  # Deduplicate
+        "permissions": list(set(permissions)),  # Deduplicate
+    }, status=status.HTTP_200_OK)
+
+
 @extend_schema(
     operation_id="get_user_profile",
     summary="Get current user's complete profile",
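A minimal pytest-style sketch of the flag logic, using DRF's test client. The URL path is taken from the urls.py hunk above but its prefix is assumed; django_user_model is the pytest-django fixture, and the project's custom user model may require extra fields:

import pytest
from rest_framework.test import APIClient


@pytest.mark.django_db
def test_staff_user_gets_admin_permissions(django_user_model):
    user = django_user_model.objects.create_user(
        username="mod", password="x", is_staff=True
    )
    client = APIClient()
    client.force_authenticate(user=user)

    body = client.get("/api/v1/auth/permissions/").json()  # mount point assumed

    assert body["is_staff"] is True
    assert "staff" in body["roles"]
    assert "can_manage_users" in body["permissions"]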
@@ -935,8 +992,8 @@ def get_user_preferences(request):
         "allow_messages": user.allow_messages,
     }

-    serializer = UserPreferencesSerializer(data=data)
-    return Response(serializer.data, status=status.HTTP_200_OK)
+    # Return the data directly - no validation needed for GET response
+    return Response(data, status=status.HTTP_200_OK)


 @extend_schema(
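This hunk and the four that follow remove the same anti-pattern: constructing a serializer with data=... and immediately reading .data. DRF raises an AssertionError if .data is accessed before .is_valid() on a serializer built that way, so each of these GET endpoints was failing with HTTP 500. A minimal repro, with a stand-in serializer definition:

from rest_framework import serializers


class UserPreferencesSerializer(serializers.Serializer):  # stand-in, not the project's class
    allow_messages = serializers.BooleanField()


ser = UserPreferencesSerializer(data={"allow_messages": True})
try:
    ser.data  # AssertionError: must call .is_valid() before accessing .data
except AssertionError as exc:
    print(exc)

# Returning the plain dict (the new code) sidesteps serialization entirely,
# which is safe here because the dict is assembled from trusted model fields.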
@@ -1056,8 +1113,8 @@ def get_notification_settings(request):
         },
     }

-    serializer = NotificationSettingsSerializer(data=data)
-    return Response(serializer.data, status=status.HTTP_200_OK)
+    # Return the data directly - no validation needed for GET response
+    return Response(data, status=status.HTTP_200_OK)


 @extend_schema(
@@ -1131,8 +1188,8 @@ def get_privacy_settings(request):
         "allow_messages": user.allow_messages,
     }

-    serializer = PrivacySettingsSerializer(data=data)
-    return Response(serializer.data, status=status.HTTP_200_OK)
+    # Return the data directly - no validation needed for GET response
+    return Response(data, status=status.HTTP_200_OK)


 @extend_schema(
@@ -1198,8 +1255,8 @@ def get_security_settings(request):
         "active_sessions": getattr(user, "active_sessions", 1),
     }

-    serializer = SecuritySettingsSerializer(data=data)
-    return Response(serializer.data, status=status.HTTP_200_OK)
+    # Return the data directly - no validation needed for GET response
+    return Response(data, status=status.HTTP_200_OK)


 @extend_schema(
@@ -1273,8 +1330,8 @@ def get_user_statistics(request):
         "last_activity": user.last_login,
     }

-    serializer = UserStatisticsSerializer(data=data)
-    return Response(serializer.data, status=status.HTTP_200_OK)
+    # Return the data directly - no validation needed for GET response
+    return Response(data, status=status.HTTP_200_OK)


 # === TOP LISTS ENDPOINTS ===
@@ -1640,3 +1697,227 @@ def get_login_history(request):
         "count": len(results),
     }
 )
+
+
+@extend_schema(
+    operation_id="cancel_email_change",
+    summary="Cancel pending email change",
+    description=(
+        "Cancel a pending email change request. This will clear the new_email field "
+        "and prevent the email change from being completed."
+    ),
+    responses={
+        200: {
+            "description": "Email change cancelled or no pending change found",
+            "example": {
+                "detail": "Email change cancelled",
+                "had_pending_change": True,
+                "cancelled_email": "newemail@example.com",
+            },
+        },
+        401: {
+            "description": "Authentication required",
+            "example": {"detail": "Authentication required"},
+        },
+    },
+    tags=["Account Management"],
+)
+@api_view(["POST"])
+@permission_classes([IsAuthenticated])
+def cancel_email_change(request):
+    """
+    Cancel a pending email change request.
+
+    This endpoint allows users to cancel their pending email change
+    if they change their mind before completing the verification.
+
+    **Authentication Required**: User must be logged in.
+    """
+    try:
+        user = request.user
+
+        # Check if user has a pending email change
+        pending_email = user.pending_email
+
+        if pending_email:
+            # Clear the pending email
+            user.pending_email = None
+            user.save(update_fields=["pending_email"])
+
+            logger.info(
+                f"User {user.username} cancelled email change to {pending_email}",
+                extra={
+                    "user": user.username,
+                    "user_id": user.user_id,
+                    "cancelled_email": pending_email,
+                    "action": "email_change_cancelled",
+                },
+            )
+
+            return Response(
+                {
+                    "success": True,
+                    "detail": "Email change cancelled",
+                    "had_pending_change": True,
+                    "cancelled_email": pending_email,
+                },
+                status=status.HTTP_200_OK,
+            )
+
+        # No pending change, but still success (idempotent)
+        return Response(
+            {
+                "success": True,
+                "detail": "No pending email change found",
+                "had_pending_change": False,
+                "cancelled_email": None,
+            },
+            status=status.HTTP_200_OK,
+        )
+
+    except Exception as e:
+        capture_and_log(
+            e,
+            f"Cancel email change for user {request.user.username}",
+            source="api",
+            request=request,
+        )
+        return Response(
+            {
+                "success": False,
+                "error": f"Error cancelling email change: {str(e)}",
+            },
+            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        )
+
+
+@extend_schema(
+    operation_id="bulk_get_profiles",
+    summary="Get multiple user profiles by user IDs",
+    description="Fetch profile information for multiple users at once. Useful for displaying user info in lists.",
+    parameters=[
+        OpenApiParameter(
+            name="user_ids",
+            type=OpenApiTypes.STR,
+            location=OpenApiParameter.QUERY,
+            description="Comma-separated list of user IDs",
+            required=True,
+        ),
+    ],
+    responses={
+        200: {
+            "description": "List of user profiles",
+            "example": [
+                {
+                    "user_id": "123",
+                    "username": "john_doe",
+                    "display_name": "John Doe",
+                    "avatar_url": "https://example.com/avatar.jpg",
+                }
+            ],
+        },
+    },
+    tags=["User Profile"],
+)
+@api_view(["GET"])
+@permission_classes([IsAuthenticated])
+def bulk_get_profiles(request):
+    """Get multiple user profiles by IDs for efficient bulk lookups."""
+    user_ids_param = request.query_params.get("user_ids", "")
+
+    if not user_ids_param:
+        return Response([], status=status.HTTP_200_OK)
+
+    user_ids = [uid.strip() for uid in user_ids_param.split(",") if uid.strip()]
+
+    if not user_ids:
+        return Response([], status=status.HTTP_200_OK)
+
+    # Limit to prevent abuse
+    if len(user_ids) > 100:
+        user_ids = user_ids[:100]
+
+    profiles = UserProfile.objects.filter(user__user_id__in=user_ids).select_related("user", "avatar")
+
+    result = []
+    for profile in profiles:
+        result.append({
+            "user_id": str(profile.user.user_id),
+            "username": profile.user.username,
+            "display_name": profile.display_name,
+            "avatar_url": profile.get_avatar_url() if hasattr(profile, "get_avatar_url") else None,
+        })
+
+    return Response(result, status=status.HTTP_200_OK)
+
+
+@extend_schema(
+    operation_id="get_users_with_emails",
+    summary="Get users with email addresses (admin/moderator only)",
+    description="Fetch user information including emails. Restricted to admins and moderators.",
+    parameters=[
+        OpenApiParameter(
+            name="user_ids",
+            type=OpenApiTypes.STR,
+            location=OpenApiParameter.QUERY,
+            description="Comma-separated list of user IDs",
+            required=True,
+        ),
+    ],
+    responses={
+        200: {
+            "description": "List of users with emails",
+            "example": [
+                {
+                    "user_id": "123",
+                    "username": "john_doe",
+                    "email": "john@example.com",
+                    "display_name": "John Doe",
+                }
+            ],
+        },
+        403: {"description": "Not authorized - admin or moderator access required"},
+    },
+    tags=["User Management"],
+)
+@api_view(["GET"])
+@permission_classes([IsAuthenticated])
+def get_users_with_emails(request):
+    """Get users with email addresses - restricted to admins and moderators."""
+    user = request.user
+
+    # Check if user is admin or moderator
+    if not (user.is_staff or user.is_superuser or getattr(user, "role", "") in ["ADMIN", "MODERATOR"]):
+        return Response(
+            {"detail": "Admin or moderator access required"},
+            status=status.HTTP_403_FORBIDDEN,
+        )
+
+    user_ids_param = request.query_params.get("user_ids", "")
+
+    if not user_ids_param:
+        return Response([], status=status.HTTP_200_OK)
+
+    user_ids = [uid.strip() for uid in user_ids_param.split(",") if uid.strip()]
+
+    if not user_ids:
+        return Response([], status=status.HTTP_200_OK)
+
+    # Limit to prevent abuse
+    if len(user_ids) > 100:
+        user_ids = user_ids[:100]
+
+    users = User.objects.filter(user_id__in=user_ids).select_related("profile")
+
+    result = []
+    for u in users:
+        profile = getattr(u, "profile", None)
+        result.append({
+            "user_id": str(u.user_id),
+            "username": u.username,
+            "email": u.email,
+            "display_name": profile.display_name if profile else None,
+        })
+
+    return Response(result, status=status.HTTP_200_OK)
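The bulk endpoints above share a deliberate contract: missing input yields an empty list rather than an error, and oversized requests are silently capped. A short sketch of that contract as a check, assuming a pre-authenticated DRF test client and the assumed /api/v1/auth/ mount point:

from rest_framework.test import APIClient


def check_bulk_contract(authed_client: APIClient) -> None:
    # Missing or empty user_ids returns an empty list, not an error
    assert authed_client.get("/api/v1/auth/profiles/bulk/").json() == []

    # Requests are silently capped at 100 IDs to prevent abuse
    ids = ",".join(str(i) for i in range(150))
    resp = authed_client.get("/api/v1/auth/profiles/bulk/", {"user_ids": ids})
    assert resp.status_code == 200
    assert len(resp.json()) <= 100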
backend/apps/api/v1/admin/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# Admin API module
backend/apps/api/v1/admin/urls.py (new file, 79 lines)
@@ -0,0 +1,79 @@
"""
Admin API URL configuration.
Provides endpoints for admin dashboard functionality.
"""

from django.urls import include, path
from rest_framework.routers import DefaultRouter

from apps.core.api.alert_views import (
    RateLimitAlertConfigViewSet,
    RateLimitAlertViewSet,
    SystemAlertViewSet,
)
from apps.core.api.incident_views import IncidentViewSet

from . import views

app_name = "admin_api"

# Router for admin ViewSets
router = DefaultRouter()
router.register(r"system-alerts", SystemAlertViewSet, basename="system-alert")
router.register(r"rate-limit-alerts", RateLimitAlertViewSet, basename="rate-limit-alert")
router.register(r"rate-limit-config", RateLimitAlertConfigViewSet, basename="rate-limit-config")
router.register(r"incidents", IncidentViewSet, basename="incident")


urlpatterns = [
    # Alert ViewSets (via router)
    path("", include(router.urls)),
    # OSM Cache Stats
    path(
        "osm-usage-stats/",
        views.OSMUsageStatsView.as_view(),
        name="osm_usage_stats",
    ),
    # Rate Limit Metrics
    path(
        "rate-limit-metrics/",
        views.RateLimitMetricsView.as_view(),
        name="rate_limit_metrics",
    ),
    # Database Manager (admin CRUD operations)
    path(
        "database-manager/",
        views.DatabaseManagerView.as_view(),
        name="database_manager",
    ),
    # Celery Task Status (read-only)
    path(
        "tasks/status/",
        views.CeleryTaskStatusView.as_view(),
        name="task_status",
    ),
    # Anomaly Detection
    path(
        "anomalies/detect/",
        views.DetectAnomaliesView.as_view(),
        name="detect_anomalies",
    ),
    # Metrics Collection
    path(
        "metrics/collect/",
        views.CollectMetricsView.as_view(),
        name="collect_metrics",
    ),
    # Pipeline Integrity Scan
    path(
        "pipeline/integrity-scan/",
        views.PipelineIntegrityScanView.as_view(),
        name="pipeline_integrity_scan",
    ),
    # Admin Settings (key-value store for preferences)
    path(
        "settings/",
        views.AdminSettingsView.as_view(),
        name="admin_settings",
    ),
]
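The companion views.py diff is suppressed below, so the class-based views this urlconf references are not visible. Purely as a hypothetical stub of what one of them might look like, consistent with the names urls.py expects (not the actual implementation):

from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView


class OSMUsageStatsView(APIView):
    """Read-only OSM cache/usage statistics for the admin dashboard."""

    permission_classes = [IsAdminUser]  # assumption: admin-only, like the rest of this module

    def get(self, request):
        # The real view presumably aggregates cached OSM request counters;
        # placeholder values shown here.
        return Response({"requests_today": 0, "cache_hit_rate": None})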
backend/apps/api/v1/admin/views.py (new file, 1350 lines)
File diff suppressed because it is too large
backend/apps/api/v1/auth/account_management.py (new file, 418 lines)
@@ -0,0 +1,418 @@
"""
Account Management Views for ThrillWiki API v1.

Handles email changes, account deletion, and session management.
"""

import logging
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.utils import timezone
from drf_spectacular.utils import extend_schema, extend_schema_view
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

logger = logging.getLogger(__name__)
UserModel = get_user_model()


# ============== EMAIL CHANGE ENDPOINTS ==============

@extend_schema(
    operation_id="request_email_change",
    summary="Request email change",
    description="Initiates an email change request. Sends verification to new email.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "new_email": {"type": "string", "format": "email"},
                "password": {"type": "string", "description": "Current password for verification"},
            },
            "required": ["new_email", "password"],
        }
    },
    responses={
        200: {"description": "Email change requested"},
        400: {"description": "Invalid request"},
    },
    tags=["Account"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def request_email_change(request):
    """Request to change email address."""
    user = request.user
    new_email = request.data.get("new_email", "").strip().lower()
    password = request.data.get("password", "")

    if not new_email:
        return Response(
            {"detail": "New email is required"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    if not user.check_password(password):
        return Response(
            {"detail": "Invalid password"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Check if email already in use
    if UserModel.objects.filter(email=new_email).exclude(pk=user.pk).exists():
        return Response(
            {"detail": "This email is already in use"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Store pending email change in cache
    cache_key = f"email_change:{user.pk}"
    cache.set(
        cache_key,
        {
            "new_email": new_email,
            "requested_at": timezone.now().isoformat(),
        },
        timeout=86400,  # 24 hours
    )

    # TODO: Send verification email to new_email
    # For now, just store the pending change

    return Response({
        "detail": "Email change requested. Please check your new email for verification.",
        "new_email": new_email,
    })


@extend_schema(
    operation_id="get_email_change_status",
    summary="Get pending email change status",
    responses={
        200: {
            "description": "Email change status",
            "example": {
                "has_pending_change": True,
                "new_email": "new@example.com",
                "requested_at": "2026-01-06T12:00:00Z",
            },
        },
    },
    tags=["Account"],
)
@api_view(["GET"])
@permission_classes([IsAuthenticated])
def get_email_change_status(request):
    """Get status of pending email change."""
    user = request.user
    cache_key = f"email_change:{user.pk}"
    pending = cache.get(cache_key)

    if not pending:
        return Response({
            "has_pending_change": False,
            "new_email": None,
            "requested_at": None,
        })

    return Response({
        "has_pending_change": True,
        "new_email": pending.get("new_email"),
        "requested_at": pending.get("requested_at"),
    })


@extend_schema(
    operation_id="cancel_email_change",
    summary="Cancel pending email change",
    responses={
        200: {"description": "Email change cancelled"},
    },
    tags=["Account"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def cancel_email_change(request):
    """Cancel a pending email change request."""
    user = request.user
    cache_key = f"email_change:{user.pk}"
    cache.delete(cache_key)

    return Response({"detail": "Email change cancelled"})


# ============== ACCOUNT DELETION ENDPOINTS ==============

@extend_schema(
    operation_id="request_account_deletion",
    summary="Request account deletion",
    description="Initiates account deletion. Requires password confirmation.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "password": {"type": "string"},
                "reason": {"type": "string", "description": "Optional reason for leaving"},
            },
            "required": ["password"],
        }
    },
    responses={
        200: {"description": "Deletion requested"},
        400: {"description": "Invalid password"},
    },
    tags=["Account"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def request_account_deletion(request):
    """Request account deletion."""
    user = request.user
    password = request.data.get("password", "")
    reason = request.data.get("reason", "")

    if not user.check_password(password):
        return Response(
            {"detail": "Invalid password"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Store deletion request in cache (will be processed by background task)
    cache_key = f"account_deletion:{user.pk}"
    deletion_date = timezone.now() + timezone.timedelta(days=30)

    cache.set(
        cache_key,
        {
            "requested_at": timezone.now().isoformat(),
            "scheduled_deletion": deletion_date.isoformat(),
            "reason": reason,
        },
        timeout=2592000,  # 30 days
    )

    # Also update user profile if it exists
    try:
        from apps.accounts.models import Profile
        profile = Profile.objects.filter(user=user).first()
        if profile:
            profile.deletion_requested_at = timezone.now()
            profile.scheduled_deletion_date = deletion_date
            profile.save(update_fields=["deletion_requested_at", "scheduled_deletion_date"])
    except Exception as e:
        logger.warning(f"Could not update profile for deletion: {e}")

    return Response({
        "detail": "Account deletion scheduled",
        "scheduled_deletion": deletion_date.isoformat(),
    })


@extend_schema(
    operation_id="get_deletion_status",
    summary="Get account deletion status",
    responses={
        200: {
            "description": "Deletion status",
            "example": {
                "deletion_pending": True,
                "scheduled_deletion": "2026-02-06T12:00:00Z",
            },
        },
    },
    tags=["Account"],
)
@api_view(["GET"])
@permission_classes([IsAuthenticated])
def get_deletion_status(request):
    """Get status of pending account deletion."""
    user = request.user
    cache_key = f"account_deletion:{user.pk}"
    pending = cache.get(cache_key)

    if not pending:
        # Also check profile
        try:
            from apps.accounts.models import Profile
            profile = Profile.objects.filter(user=user).first()
            if profile and profile.deletion_requested_at:
                return Response({
                    "deletion_pending": True,
                    "requested_at": profile.deletion_requested_at.isoformat(),
                    "scheduled_deletion": profile.scheduled_deletion_date.isoformat() if profile.scheduled_deletion_date else None,
                })
        except Exception:
            pass

        return Response({
            "deletion_pending": False,
            "scheduled_deletion": None,
        })

    return Response({
        "deletion_pending": True,
        "requested_at": pending.get("requested_at"),
        "scheduled_deletion": pending.get("scheduled_deletion"),
    })


@extend_schema(
    operation_id="cancel_account_deletion",
    summary="Cancel account deletion",
    responses={
        200: {"description": "Deletion cancelled"},
    },
    tags=["Account"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def cancel_account_deletion(request):
    """Cancel a pending account deletion request."""
    user = request.user
    cache_key = f"account_deletion:{user.pk}"
    cache.delete(cache_key)

    # Also clear from profile
    try:
        from apps.accounts.models import Profile
        Profile.objects.filter(user=user).update(
            deletion_requested_at=None,
            scheduled_deletion_date=None,
        )
    except Exception as e:
        logger.warning(f"Could not clear deletion from profile: {e}")

    return Response({"detail": "Account deletion cancelled"})


# ============== SESSION MANAGEMENT ENDPOINTS ==============

@extend_schema(
    operation_id="list_sessions",
    summary="List active sessions",
    description="Returns list of active sessions for the current user.",
    responses={
        200: {
            "description": "List of sessions",
            "example": {
                "sessions": [
                    {
                        "id": "session_123",
                        "created_at": "2026-01-06T12:00:00Z",
                        "last_activity": "2026-01-06T14:00:00Z",
                        "ip_address": "192.168.1.1",
                        "user_agent": "Mozilla/5.0...",
                        "is_current": True,
                    }
                ]
            },
        },
    },
    tags=["Account"],
)
@api_view(["GET"])
@permission_classes([IsAuthenticated])
def list_sessions(request):
    """List all active sessions for the user."""
    # For JWT-based auth, we track sessions differently
    # This is a simplified implementation - in production you'd track tokens
    # For now, return the current session info

    current_session = {
        "id": "current",
        "created_at": timezone.now().isoformat(),
        "last_activity": timezone.now().isoformat(),
        "ip_address": request.META.get("REMOTE_ADDR", "unknown"),
        "user_agent": request.META.get("HTTP_USER_AGENT", "unknown"),
        "is_current": True,
    }

    return Response({
        "sessions": [current_session],
        "count": 1,
    })


@extend_schema(
    operation_id="revoke_session",
    summary="Revoke a session",
    description="Revokes a specific session. If revoking current session, user will be logged out.",
    responses={
        200: {"description": "Session revoked"},
        404: {"description": "Session not found"},
    },
    tags=["Account"],
)
@api_view(["DELETE"])
@permission_classes([IsAuthenticated])
def revoke_session(request, session_id):
    """Revoke a specific session."""
    # For JWT auth, we'd need to implement token blacklisting
    # This is a placeholder that returns success

    if session_id == "current":
        # Blacklist the current refresh token if using SimpleJWT
        try:
            from rest_framework_simplejwt.token_blacklist.models import BlacklistedToken
            from rest_framework_simplejwt.tokens import RefreshToken

            # Get refresh token from request if available
            refresh_token = request.data.get("refresh_token")
            if refresh_token:
                token = RefreshToken(refresh_token)
                token.blacklist()
        except Exception as e:
            logger.warning(f"Could not blacklist token: {e}")

    return Response({"detail": "Session revoked"})


# ============== PASSWORD CHANGE ENDPOINT ==============

@extend_schema(
    operation_id="change_password",
    summary="Change password",
    description="Changes the user's password. Requires current password.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "current_password": {"type": "string"},
                "new_password": {"type": "string"},
            },
            "required": ["current_password", "new_password"],
        }
    },
    responses={
        200: {"description": "Password changed"},
        400: {"description": "Invalid current password or weak new password"},
    },
    tags=["Account"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def change_password(request):
    """Change user password."""
    user = request.user
    current_password = request.data.get("current_password", "")
    new_password = request.data.get("new_password", "")

    if not user.check_password(current_password):
        return Response(
            {"detail": "Current password is incorrect"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    if len(new_password) < 8:
        return Response(
            {"detail": "New password must be at least 8 characters"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    user.set_password(new_password)
    user.save()

    return Response({"detail": "Password changed successfully"})
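One operational caveat in this module: both the pending email change and the scheduled account deletion live only under per-user cache keys (24-hour and 30-day timeouts), so a cache flush or early eviction silently forgets them; only the optional Profile fields survive. A small sketch for inspecting that state, using the exact keys from the file above:

from django.core.cache import cache


def debug_pending_account_state(user) -> dict:
    """Inspect the cache keys used by account_management.py."""
    return {
        "email_change": cache.get(f"email_change:{user.pk}"),
        "account_deletion": cache.get(f"account_deletion:{user.pk}"),
    }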
@@ -51,6 +51,10 @@ def get_mfa_status(request):
     totp_enabled = authenticators.filter(type=Authenticator.Type.TOTP).exists()
     recovery_enabled = authenticators.filter(type=Authenticator.Type.RECOVERY_CODES).exists()

+    # Check for WebAuthn/Passkey authenticators
+    passkey_enabled = authenticators.filter(type=Authenticator.Type.WEBAUTHN).exists()
+    passkey_count = authenticators.filter(type=Authenticator.Type.WEBAUTHN).count()
+
     # Count recovery codes if any
     recovery_count = 0
     if recovery_enabled:
@@ -60,12 +64,18 @@ def get_mfa_status(request):
         except Authenticator.DoesNotExist:
             pass

+    # has_second_factor is True if user has either TOTP or Passkey configured
+    has_second_factor = totp_enabled or passkey_enabled
+
     return Response(
         {
-            "mfa_enabled": totp_enabled,
+            "mfa_enabled": totp_enabled,  # Backward compatibility
             "totp_enabled": totp_enabled,
+            "passkey_enabled": passkey_enabled,
+            "passkey_count": passkey_count,
             "recovery_codes_enabled": recovery_enabled,
             "recovery_codes_count": recovery_count,
+            "has_second_factor": has_second_factor,
         }
     )

@@ -156,7 +166,7 @@ def setup_totp(request):
 def activate_totp(request):
     """Verify TOTP code and activate MFA."""
     from allauth.mfa.models import Authenticator
-    from allauth.mfa.recovery_codes.internal import auth as recovery_auth
+    from allauth.mfa.recovery_codes.internal.auth import RecoveryCodes
     from allauth.mfa.totp.internal import auth as totp_auth

     user = request.user
@@ -168,8 +178,9 @@ def activate_totp(request):
             status=status.HTTP_400_BAD_REQUEST,
         )

-    # Get pending secret from session
-    secret = request.session.get("pending_totp_secret")
+    # Get pending secret from session OR from request body
+    # (request body is used as fallback for JWT auth where sessions may not persist)
+    secret = request.session.get("pending_totp_secret") or request.data.get("secret", "").strip()
     if not secret:
         return Response(
             {"detail": "No pending TOTP setup. Please start setup again."},
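A sketch of the JWT-friendly fallback this hunk enables: the client keeps the secret from the setup step and posts it back alongside the current code. The URL path and the "code" field name are assumptions (they are not shown in this diff), and pyotp is used only as a convenient RFC 6238 implementation:

import pyotp
from rest_framework.test import APIClient


def activate_totp_via_body(client: APIClient, secret: str) -> None:
    code = pyotp.TOTP(secret).now()  # current 6-digit TOTP code
    resp = client.post(
        "/api/v1/auth/mfa/totp/activate/",  # path assumed, not in this diff
        {"code": code, "secret": secret},   # body "secret" = the session fallback
        format="json",
    )
    assert resp.status_code == 200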
@@ -197,16 +208,13 @@ def activate_totp(request):
         data={"secret": secret},
     )

-    # Generate recovery codes
-    codes = recovery_auth.generate_recovery_codes()
-    Authenticator.objects.create(
-        user=user,
-        type=Authenticator.Type.RECOVERY_CODES,
-        data={"codes": codes},
-    )
+    # Generate recovery codes using allauth's RecoveryCodes API
+    recovery_instance = RecoveryCodes.activate(user)
+    codes = recovery_instance.get_unused_codes()

-    # Clear session
-    del request.session["pending_totp_secret"]
+    # Clear session (only if it exists - won't exist with JWT auth + secret from body)
+    if "pending_totp_secret" in request.session:
+        del request.session["pending_totp_secret"]

     return Response(
         {
@@ -351,7 +359,7 @@ def verify_totp(request):
 def regenerate_recovery_codes(request):
     """Regenerate recovery codes."""
     from allauth.mfa.models import Authenticator
-    from allauth.mfa.recovery_codes.internal import auth as recovery_auth
+    from allauth.mfa.recovery_codes.internal.auth import RecoveryCodes

     user = request.user
     password = request.data.get("password", "")
@@ -370,15 +378,14 @@ def regenerate_recovery_codes(request):
             status=status.HTTP_400_BAD_REQUEST,
         )

-    # Generate new codes
-    codes = recovery_auth.generate_recovery_codes()
+    # Delete existing recovery codes first (so activate creates new ones)
+    Authenticator.objects.filter(
+        user=user, type=Authenticator.Type.RECOVERY_CODES
+    ).delete()

-    # Update or create recovery codes authenticator
-    authenticator, created = Authenticator.objects.update_or_create(
-        user=user,
-        type=Authenticator.Type.RECOVERY_CODES,
-        defaults={"data": {"codes": codes}},
-    )
+    # Generate new recovery codes using allauth's RecoveryCodes API
+    recovery_instance = RecoveryCodes.activate(user)
+    codes = recovery_instance.get_unused_codes()

     return Response(
         {
backend/apps/api/v1/auth/passkey.py (new file, 536 lines)
@@ -0,0 +1,536 @@
"""
Passkey (WebAuthn) API Views

Provides REST API endpoints for WebAuthn/Passkey operations using django-allauth's
mfa.webauthn module. Supports passkey registration, authentication, and management.
"""

import logging

from drf_spectacular.utils import extend_schema
from rest_framework import status
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response

logger = logging.getLogger(__name__)


@extend_schema(
    operation_id="get_passkey_status",
    summary="Get passkey status for current user",
    description="Returns whether passkeys are enabled and lists registered passkeys.",
    responses={
        200: {
            "description": "Passkey status",
            "example": {
                "passkey_enabled": True,
                "passkeys": [
                    {"id": "abc123", "name": "MacBook Pro", "created_at": "2026-01-06T12:00:00Z"}
                ],
            },
        },
    },
    tags=["Passkey"],
)
@api_view(["GET"])
@permission_classes([IsAuthenticated])
def get_passkey_status(request):
    """Get passkey status for current user."""
    try:
        from allauth.mfa.models import Authenticator

        user = request.user
        passkeys = Authenticator.objects.filter(
            user=user, type=Authenticator.Type.WEBAUTHN
        )

        passkey_list = []
        for pk in passkeys:
            passkey_data = pk.data or {}
            passkey_list.append({
                "id": str(pk.id),
                "name": passkey_data.get("name", "Passkey"),
                "created_at": pk.created_at.isoformat() if hasattr(pk, "created_at") else None,
            })

        return Response({
            "passkey_enabled": passkeys.exists(),
            "passkey_count": passkeys.count(),
            "passkeys": passkey_list,
        })
    except ImportError:
        return Response({
            "passkey_enabled": False,
            "passkey_count": 0,
            "passkeys": [],
            "error": "WebAuthn module not available",
        })
    except Exception as e:
        logger.error(f"Error getting passkey status: {e}")
        return Response(
            {"detail": "Failed to get passkey status"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="get_registration_options",
    summary="Get WebAuthn registration options",
    description="Returns options for registering a new passkey. Start the registration flow.",
    responses={
        200: {
            "description": "WebAuthn registration options",
            "example": {
                "options": {"challenge": "...", "rp": {"name": "ThrillWiki"}},
            },
        },
    },
    tags=["Passkey"],
)
@api_view(["GET"])
@permission_classes([IsAuthenticated])
def get_registration_options(request):
    """Get WebAuthn registration options for passkey setup."""
    try:
        from allauth.mfa.webauthn.internal import auth as webauthn_auth

        # Use the correct allauth API: begin_registration
        creation_options, state = webauthn_auth.begin_registration(request)

        # Store state in session for verification
        webauthn_auth.set_state(request, state)

        return Response({
            "options": creation_options,
        })
    except ImportError as e:
        logger.error(f"WebAuthn module import error: {e}")
        return Response(
            {"detail": "WebAuthn module not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        logger.error(f"Error getting registration options: {e}")
        return Response(
            {"detail": f"Failed to get registration options: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="register_passkey",
    summary="Complete passkey registration",
    description="Verifies the WebAuthn response and registers the new passkey.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "credential": {"type": "object", "description": "WebAuthn credential response"},
                "name": {"type": "string", "description": "Name for this passkey"},
            },
            "required": ["credential"],
        }
    },
    responses={
        200: {"description": "Passkey registered successfully"},
        400: {"description": "Invalid credential or registration failed"},
    },
    tags=["Passkey"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def register_passkey(request):
    """Complete passkey registration with WebAuthn response."""
    try:
        from allauth.mfa.webauthn.internal import auth as webauthn_auth

        credential = request.data.get("credential")
        name = request.data.get("name", "Passkey")

        if not credential:
            return Response(
                {"detail": "Credential is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get stored state from session
        state = webauthn_auth.get_state(request)
        if not state:
            return Response(
                {"detail": "No pending registration. Please start registration again."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Use the correct allauth API: complete_registration
        try:
            # Parse the credential response
            credential_data = webauthn_auth.parse_registration_response(credential)

            # Complete registration - this creates the Authenticator
            authenticator = webauthn_auth.complete_registration(
                request,
                credential_data,
                state,
                name=name,
            )

            # Clear session state
            webauthn_auth.clear_state(request)

            return Response({
                "detail": "Passkey registered successfully",
                "name": name,
                "id": str(authenticator.id) if authenticator else None,
            })
        except Exception as e:
            logger.error(f"WebAuthn registration failed: {e}")
            return Response(
                {"detail": f"Registration failed: {str(e)}"},
                status=status.HTTP_400_BAD_REQUEST,
            )
    except ImportError as e:
        logger.error(f"WebAuthn module import error: {e}")
        return Response(
            {"detail": "WebAuthn module not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        logger.error(f"Error registering passkey: {e}")
        return Response(
            {"detail": f"Failed to register passkey: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="get_authentication_options",
    summary="Get WebAuthn authentication options",
    description="Returns options for authenticating with a passkey.",
    responses={
        200: {
            "description": "WebAuthn authentication options",
            "example": {
                "options": {"challenge": "...", "allowCredentials": []},
            },
        },
    },
    tags=["Passkey"],
)
@api_view(["GET"])
@permission_classes([IsAuthenticated])
def get_authentication_options(request):
    """Get WebAuthn authentication options for passkey verification."""
    try:
        from allauth.mfa.webauthn.internal import auth as webauthn_auth

        # Use the correct allauth API: begin_authentication
        request_options, state = webauthn_auth.begin_authentication(request)

        # Store state in session for verification
        webauthn_auth.set_state(request, state)

        return Response({
            "options": request_options,
        })
    except ImportError as e:
        logger.error(f"WebAuthn module import error: {e}")
        return Response(
            {"detail": "WebAuthn module not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        logger.error(f"Error getting authentication options: {e}")
        return Response(
            {"detail": f"Failed to get authentication options: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="authenticate_passkey",
    summary="Authenticate with passkey",
    description="Verifies the WebAuthn response for authentication.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "credential": {"type": "object", "description": "WebAuthn credential response"},
            },
            "required": ["credential"],
        }
    },
    responses={
        200: {"description": "Authentication successful"},
        400: {"description": "Invalid credential or authentication failed"},
    },
    tags=["Passkey"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def authenticate_passkey(request):
    """Verify passkey authentication."""
    try:
        from allauth.mfa.webauthn.internal import auth as webauthn_auth

        credential = request.data.get("credential")

        if not credential:
            return Response(
                {"detail": "Credential is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get stored state from session
        state = webauthn_auth.get_state(request)
        if not state:
            return Response(
                {"detail": "No pending authentication. Please start authentication again."},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Use the correct allauth API: complete_authentication
        try:
            # Parse the credential response
            credential_data = webauthn_auth.parse_authentication_response(credential)

            # Complete authentication
            webauthn_auth.complete_authentication(request, credential_data, state)

            # Clear session state
            webauthn_auth.clear_state(request)

            return Response({"success": True})
        except Exception as e:
            logger.error(f"WebAuthn authentication failed: {e}")
            return Response(
                {"detail": f"Authentication failed: {str(e)}"},
                status=status.HTTP_400_BAD_REQUEST,
            )
    except ImportError as e:
        logger.error(f"WebAuthn module import error: {e}")
        return Response(
            {"detail": "WebAuthn module not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        logger.error(f"Error authenticating passkey: {e}")
        return Response(
            {"detail": f"Failed to authenticate: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="delete_passkey",
    summary="Delete a passkey",
    description="Removes a registered passkey from the user's account.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "password": {"type": "string", "description": "Current password for confirmation"},
            },
            "required": ["password"],
        }
    },
    responses={
        200: {"description": "Passkey deleted successfully"},
        400: {"description": "Invalid password or passkey not found"},
    },
    tags=["Passkey"],
)
@api_view(["DELETE"])
@permission_classes([IsAuthenticated])
def delete_passkey(request, passkey_id):
    """Delete a passkey."""
    try:
        from allauth.mfa.models import Authenticator

        user = request.user
        password = request.data.get("password", "")

        # Verify password
        if not user.check_password(password):
            return Response(
                {"detail": "Invalid password"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Find and delete the passkey
        try:
            authenticator = Authenticator.objects.get(
                id=passkey_id,
                user=user,
                type=Authenticator.Type.WEBAUTHN,
            )
            authenticator.delete()
        except Authenticator.DoesNotExist:
            return Response(
                {"detail": "Passkey not found"},
                status=status.HTTP_404_NOT_FOUND,
            )

        return Response({"detail": "Passkey deleted successfully"})
    except ImportError:
        return Response(
            {"detail": "WebAuthn module not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        logger.error(f"Error deleting passkey: {e}")
        return Response(
            {"detail": f"Failed to delete passkey: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="rename_passkey",
    summary="Rename a passkey",
    description="Updates the name of a registered passkey.",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "name": {"type": "string", "description": "New name for the passkey"},
            },
            "required": ["name"],
        }
    },
    responses={
        200: {"description": "Passkey renamed successfully"},
        404: {"description": "Passkey not found"},
    },
    tags=["Passkey"],
)
@api_view(["PATCH"])
@permission_classes([IsAuthenticated])
def rename_passkey(request, passkey_id):
    """Rename a passkey."""
    try:
        from allauth.mfa.models import Authenticator

        user = request.user
        new_name = request.data.get("name", "").strip()

        if not new_name:
            return Response(
                {"detail": "Name is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            authenticator = Authenticator.objects.get(
                id=passkey_id, user=user, type=Authenticator.Type.WEBAUTHN,
            )
            data = authenticator.data or {}
            data["name"] = new_name
            authenticator.data = data
            authenticator.save()
        except Authenticator.DoesNotExist:
            return Response(
                {"detail": "Passkey not found"},
                status=status.HTTP_404_NOT_FOUND,
            )

        return Response({"detail": "Passkey renamed successfully", "name": new_name})
    except ImportError:
        return Response(
            {"detail": "WebAuthn module not available"},
            status=status.HTTP_400_BAD_REQUEST,
        )
    except Exception as e:
        logger.error(f"Error renaming passkey: {e}")
        return Response(
            {"detail": f"Failed to rename passkey: {str(e)}"},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


@extend_schema(
    operation_id="get_login_passkey_options",
    summary="Get WebAuthn options for MFA login",
    description="Returns passkey auth options using MFA token (unauthenticated).",
    request={
        "application/json": {
            "type": "object",
            "properties": {
                "mfa_token": {"type": "string", "description": "MFA token from login"},
            },
            "required": ["mfa_token"],
        }
    },
    responses={
        200: {"description": "WebAuthn authentication options"},
        400: {"description": "Invalid or expired MFA token"},
    },
    tags=["Passkey"],
)
@api_view(["POST"])
def get_login_passkey_options(request):
    """Get WebAuthn authentication options for MFA login flow (unauthenticated)."""
    from django.core.cache import cache
    from django.contrib.auth import get_user_model

    User = get_user_model()
    mfa_token = request.data.get("mfa_token")
|
if not mfa_token:
|
||||||
|
return Response(
|
||||||
|
{"detail": "MFA token is required"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
cache_key = f"mfa_login:{mfa_token}"
|
||||||
|
cached_data = cache.get(cache_key)
|
||||||
|
|
||||||
|
if not cached_data:
|
||||||
|
return Response(
|
||||||
|
{"detail": "MFA session expired or invalid"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
user_id = cached_data.get("user_id")
|
||||||
|
|
||||||
|
try:
|
||||||
|
user = User.objects.get(pk=user_id)
|
||||||
|
except User.DoesNotExist:
|
||||||
|
return Response({"detail": "User not found"}, status=status.HTTP_400_BAD_REQUEST)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from allauth.mfa.models import Authenticator
|
||||||
|
from allauth.mfa.webauthn.internal import auth as webauthn_auth
|
||||||
|
|
||||||
|
passkeys = Authenticator.objects.filter(
|
||||||
|
user=user, type=Authenticator.Type.WEBAUTHN
|
||||||
|
)
|
||||||
|
|
||||||
|
if not passkeys.exists():
|
||||||
|
return Response(
|
||||||
|
{"detail": "No passkeys registered"}, status=status.HTTP_400_BAD_REQUEST
|
||||||
|
)
|
||||||
|
|
||||||
|
original_user = getattr(request, "user", None)
|
||||||
|
request.user = user
|
||||||
|
|
||||||
|
try:
|
||||||
|
request_options, state = webauthn_auth.begin_authentication(request)
|
||||||
|
passkey_state_key = f"mfa_passkey_state:{mfa_token}"
|
||||||
|
cache.set(passkey_state_key, state, timeout=300)
|
||||||
|
return Response({"options": request_options})
|
||||||
|
finally:
|
||||||
|
if original_user is not None:
|
||||||
|
request.user = original_user
|
||||||
|
|
||||||
|
except ImportError as e:
|
||||||
|
logger.error(f"WebAuthn module import error: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": "WebAuthn module not available"},
|
||||||
|
status=status.HTTP_400_BAD_REQUEST,
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting login passkey options: {e}")
|
||||||
|
return Response(
|
||||||
|
{"detail": f"Failed to get passkey options: {str(e)}"},
|
||||||
|
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
|
)
|
||||||
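For reference, a minimal client-side sketch of the unauthenticated passkey login flow exposed above (the base URL and /api/v1/auth/ prefix are assumptions about the deployment; httpx stands in for the frontend):

import httpx

BASE = "http://localhost:8000/api/v1/auth"  # assumed prefix; adjust to the deployment

def fetch_login_passkey_options(mfa_token: str) -> dict:
    # Exchanges the temporary MFA token from a password login for WebAuthn
    # request options, mirroring get_login_passkey_options above.
    resp = httpx.post(f"{BASE}/passkey/login-options/", json={"mfa_token": mfa_token})
    resp.raise_for_status()
    return resp.json()["options"]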
@@ -105,19 +105,36 @@ class UserOutputSerializer(serializers.ModelSerializer):


class LoginInputSerializer(serializers.Serializer):
-    """Input serializer for user login."""
+    """Input serializer for user login.
+
+    Accepts either 'email' or 'username' field for backward compatibility.
+    The view will use whichever is provided.
+    """

-    username = serializers.CharField(max_length=254, help_text="Username or email address")
+    # Accept both email and username - frontend sends "email", but we also support "username"
+    email = serializers.CharField(max_length=254, required=False, help_text="Email address")
+    username = serializers.CharField(max_length=254, required=False, help_text="Username (alternative to email)")
    password = serializers.CharField(max_length=128, style={"input_type": "password"}, trim_whitespace=False)

    def validate(self, attrs):
+        email = attrs.get("email")
        username = attrs.get("username")
        password = attrs.get("password")

-        if username and password:
-            return attrs
-
-        raise serializers.ValidationError("Must include username/email and password.")
+        # Use email if provided, fallback to username
+        identifier = email or username
+
+        if not identifier:
+            raise serializers.ValidationError("Either email or username is required.")
+
+        if not password:
+            raise serializers.ValidationError("Password is required.")
+
+        # Store the identifier in a standard field for the view to consume
+        attrs["username"] = identifier
+        return attrs


class LoginOutputSerializer(serializers.Serializer):
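A quick illustration of the fallback behaviour the rewritten validate() implements; the import path is hypothetical and Django settings are assumed to be configured:

# Hypothetical module path; point it at wherever this serializers file lives.
from apps.api.v1.auth.serializers import LoginInputSerializer

s = LoginInputSerializer(data={"email": "rider@example.com", "password": "s3cret"})
assert s.is_valid(), s.errors
# validate() copies the chosen identifier into attrs["username"] for the view:
assert s.validated_data["username"] == "rider@example.com"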
@@ -129,6 +146,53 @@ class LoginOutputSerializer(serializers.Serializer):
    message = serializers.CharField()


+class MFARequiredOutputSerializer(serializers.Serializer):
+    """Output serializer when MFA verification is required after password auth."""
+
+    mfa_required = serializers.BooleanField(default=True)
+    mfa_token = serializers.CharField(help_text="Temporary token for MFA verification")
+    mfa_types = serializers.ListField(
+        child=serializers.CharField(),
+        help_text="Available MFA types: 'totp', 'webauthn'",
+    )
+    user_id = serializers.IntegerField(help_text="User ID for reference")
+    message = serializers.CharField(default="MFA verification required")
+
+
+class MFALoginVerifyInputSerializer(serializers.Serializer):
+    """Input serializer for MFA login verification."""
+
+    mfa_token = serializers.CharField(help_text="Temporary MFA token from login response")
+    code = serializers.CharField(
+        max_length=6,
+        min_length=6,
+        required=False,
+        help_text="6-digit TOTP code from authenticator app",
+    )
+    # For passkey/webauthn - credential will be a complex object
+    credential = serializers.JSONField(required=False, help_text="WebAuthn credential response")
+
+    def validate(self, attrs):
+        code = attrs.get("code")
+        credential = attrs.get("credential")
+
+        if not code and not credential:
+            raise serializers.ValidationError(
+                "Either 'code' (TOTP) or 'credential' (passkey) is required."
+            )
+
+        return attrs
+
+
+class MFALoginVerifyOutputSerializer(serializers.Serializer):
+    """Output serializer for successful MFA verification."""
+
+    access = serializers.CharField()
+    refresh = serializers.CharField()
+    user = UserOutputSerializer()
+    message = serializers.CharField(default="Login successful")
+
+
class SignupInputSerializer(serializers.ModelSerializer):
    """Input serializer for user registration."""

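The either/or contract of MFALoginVerifyInputSerializer, sketched under the same assumptions (hypothetical import path, configured Django settings):

from apps.api.v1.auth.serializers import MFALoginVerifyInputSerializer  # hypothetical path

# A TOTP submission passes field validation...
ok = MFALoginVerifyInputSerializer(data={"mfa_token": "abc", "code": "123456"})
assert ok.is_valid(), ok.errors
# ...while omitting both code and credential is rejected by validate().
bad = MFALoginVerifyInputSerializer(data={"mfa_token": "abc"})
assert not bad.is_valid()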
@@ -9,6 +9,8 @@ from django.urls import include, path
from rest_framework_simplejwt.views import TokenRefreshView

from . import mfa as mfa_views
+from . import passkey as passkey_views
+from . import account_management as account_views
from .views import (
    AuthStatusAPIView,
    # Social provider management views
@@ -22,8 +24,10 @@ from .views import (
    # Main auth views
    LoginAPIView,
    LogoutAPIView,
+    MFALoginVerifyAPIView,
    PasswordChangeAPIView,
    PasswordResetAPIView,
+    ProcessOAuthProfileAPIView,
    ResendVerificationAPIView,
    SignupAPIView,
    SocialAuthStatusAPIView,
@@ -33,13 +37,13 @@ from .views import (
urlpatterns = [
    # Core authentication endpoints
    path("login/", LoginAPIView.as_view(), name="auth-login"),
+    path("login/mfa-verify/", MFALoginVerifyAPIView.as_view(), name="auth-login-mfa-verify"),
    path("signup/", SignupAPIView.as_view(), name="auth-signup"),
    path("logout/", LogoutAPIView.as_view(), name="auth-logout"),
    path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
    # JWT token management
    path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
-    # Social authentication endpoints (dj-rest-auth)
-    path("social/", include("dj_rest_auth.registration.urls")),
+    # Note: dj_rest_auth removed - using custom social auth views below
    path(
        "password/reset/",
        PasswordResetAPIView.as_view(),
@@ -81,6 +85,11 @@ urlpatterns = [
        SocialAuthStatusAPIView.as_view(),
        name="auth-social-status",
    ),
+    path(
+        "social/process-profile/",
+        ProcessOAuthProfileAPIView.as_view(),
+        name="auth-social-process-profile",
+    ),
    path("status/", AuthStatusAPIView.as_view(), name="auth-status"),
    # Email verification endpoints
    path(
@@ -100,6 +109,25 @@ urlpatterns = [
    path("mfa/totp/deactivate/", mfa_views.deactivate_totp, name="auth-mfa-totp-deactivate"),
    path("mfa/totp/verify/", mfa_views.verify_totp, name="auth-mfa-totp-verify"),
    path("mfa/recovery-codes/regenerate/", mfa_views.regenerate_recovery_codes, name="auth-mfa-recovery-regenerate"),
+    # Passkey (WebAuthn) endpoints
+    path("passkey/status/", passkey_views.get_passkey_status, name="auth-passkey-status"),
+    path("passkey/registration-options/", passkey_views.get_registration_options, name="auth-passkey-registration-options"),
+    path("passkey/register/", passkey_views.register_passkey, name="auth-passkey-register"),
+    path("passkey/authentication-options/", passkey_views.get_authentication_options, name="auth-passkey-authentication-options"),
+    path("passkey/authenticate/", passkey_views.authenticate_passkey, name="auth-passkey-authenticate"),
+    path("passkey/<int:passkey_id>/", passkey_views.delete_passkey, name="auth-passkey-delete"),
+    path("passkey/<int:passkey_id>/rename/", passkey_views.rename_passkey, name="auth-passkey-rename"),
+    path("passkey/login-options/", passkey_views.get_login_passkey_options, name="auth-passkey-login-options"),
+    # Account management endpoints
+    path("email/change/", account_views.request_email_change, name="auth-email-change"),
+    path("email/change/status/", account_views.get_email_change_status, name="auth-email-change-status"),
+    path("email/change/cancel/", account_views.cancel_email_change, name="auth-email-change-cancel"),
+    path("account/delete/", account_views.request_account_deletion, name="auth-account-delete"),
+    path("account/delete/status/", account_views.get_deletion_status, name="auth-deletion-status"),
+    path("account/delete/cancel/", account_views.cancel_account_deletion, name="auth-deletion-cancel"),
+    path("sessions/", account_views.list_sessions, name="auth-sessions-list"),
+    path("sessions/<str:session_id>/", account_views.revoke_session, name="auth-session-revoke"),
+    path("password/change/", account_views.change_password, name="auth-password-change-v2"),
]

# Note: User profiles and top lists functionality is now handled by the accounts app
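Since every route above is named, views and tests can resolve them with Django's reverse() rather than hard-coding paths, e.g.:

from django.urls import reverse

url = reverse("auth-passkey-rename", kwargs={"passkey_id": 7})
# -> ".../passkey/7/rename/" under whatever prefix this urlconf is mounted at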
@@ -6,6 +6,8 @@ login, signup, logout, password management, social authentication,
user profiles, and top lists.
"""

+import logging
+
from typing import cast  # added 'cast'

from django.contrib.auth import authenticate, get_user_model, login, logout
@@ -71,6 +73,7 @@ except Exception:
    TurnstileMixin = FallbackTurnstileMixin

UserModel = get_user_model()
+logger = logging.getLogger(__name__)

# Helper: safely obtain underlying HttpRequest (used by Django auth)
@@ -175,6 +178,37 @@ class LoginAPIView(APIView):

        if user:
            if getattr(user, "is_active", False):
+                # Check if user has MFA enabled
+                mfa_info = self._check_user_mfa(user)
+
+                if mfa_info["has_mfa"]:
+                    # MFA required - generate temp token and return mfa_required response
+                    from django.utils.crypto import get_random_string
+                    from django.core.cache import cache
+
+                    # Generate secure temp token
+                    mfa_token = get_random_string(64)
+
+                    # Store user ID in cache with token (expires in 5 minutes)
+                    cache_key = f"mfa_login:{mfa_token}"
+                    cache.set(cache_key, {
+                        "user_id": user.pk,
+                        "username": user.username,
+                    }, timeout=300)  # 5 minutes
+
+                    from .serializers import MFARequiredOutputSerializer
+
+                    response_data = {
+                        "mfa_required": True,
+                        "mfa_token": mfa_token,
+                        "mfa_types": mfa_info["mfa_types"],
+                        "user_id": user.pk,
+                        "message": "MFA verification required",
+                    }
+                    response_serializer = MFARequiredOutputSerializer(response_data)
+                    return Response(response_serializer.data)
+
+                # No MFA - proceed with normal login
                # pass a real HttpRequest to Django login with backend specified
                login(_get_underlying_request(request), user, backend="django.contrib.auth.backends.ModelBackend")

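The temporary-token handoff above is a plain cache round trip; a minimal sketch of the same mechanism in isolation (real Django APIs, illustrative values):

from django.core.cache import cache
from django.utils.crypto import get_random_string

mfa_token = get_random_string(64)  # opaque, unguessable handle
cache.set(f"mfa_login:{mfa_token}", {"user_id": 42, "username": "coasterfan"}, timeout=300)
# The verify endpoint later does the reverse lookup, then deletes the key:
entry = cache.get(f"mfa_login:{mfa_token}")  # {"user_id": 42, ...}, or None once expired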
@@ -210,6 +244,210 @@ class LoginAPIView(APIView):

        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

+    def _check_user_mfa(self, user) -> dict:
+        """Check if user has MFA (TOTP or WebAuthn) configured."""
+        try:
+            from allauth.mfa.models import Authenticator
+
+            authenticators = Authenticator.objects.filter(user=user)
+
+            has_totp = authenticators.filter(type=Authenticator.Type.TOTP).exists()
+            has_webauthn = authenticators.filter(type=Authenticator.Type.WEBAUTHN).exists()
+
+            mfa_types = []
+            if has_totp:
+                mfa_types.append("totp")
+            if has_webauthn:
+                mfa_types.append("webauthn")
+
+            return {
+                "has_mfa": has_totp or has_webauthn,
+                "has_totp": has_totp,
+                "has_webauthn": has_webauthn,
+                "mfa_types": mfa_types,
+            }
+        except ImportError:
+            return {"has_mfa": False, "has_totp": False, "has_webauthn": False, "mfa_types": []}
+        except Exception:
+            return {"has_mfa": False, "has_totp": False, "has_webauthn": False, "mfa_types": []}
+
+
+@extend_schema_view(
+    post=extend_schema(
+        summary="Verify MFA for login",
+        description="Complete MFA verification after password authentication. Submit TOTP code to receive JWT tokens.",
+        request={"application/json": {
+            "type": "object",
+            "properties": {
+                "mfa_token": {"type": "string", "description": "Temporary token from login response"},
+                "code": {"type": "string", "description": "6-digit TOTP code"},
+            },
+            "required": ["mfa_token", "code"],
+        }},
+        responses={
+            200: LoginOutputSerializer,
+            400: "Bad Request - Invalid code or expired token",
+        },
+        tags=["Authentication"],
+    ),
+)
+class MFALoginVerifyAPIView(APIView):
+    """API endpoint to verify MFA code and complete login."""
+
+    permission_classes = [AllowAny]
+    authentication_classes = []
+
+    def post(self, request: Request) -> Response:
+        from django.core.cache import cache
+        from .serializers import MFALoginVerifyInputSerializer
+
+        serializer = MFALoginVerifyInputSerializer(data=request.data)
+        if not serializer.is_valid():
+            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+        validated = serializer.validated_data
+        mfa_token = validated.get("mfa_token")
+        totp_code = validated.get("code")
+        credential = validated.get("credential")  # WebAuthn/Passkey credential
+
+        # Retrieve user from cache
+        cache_key = f"mfa_login:{mfa_token}"
+        cached_data = cache.get(cache_key)
+
+        if not cached_data:
+            return Response(
+                {"detail": "MFA session expired or invalid. Please login again."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        user_id = cached_data.get("user_id")
+
+        try:
+            user = UserModel.objects.get(pk=user_id)
+        except UserModel.DoesNotExist:
+            return Response(
+                {"detail": "User not found"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Verify MFA - either TOTP or Passkey
+        if totp_code:
+            if not self._verify_totp(user, totp_code):
+                return Response(
+                    {"detail": "Invalid verification code"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+        elif credential:
+            # Verify passkey/WebAuthn credential
+            passkey_result = self._verify_passkey(request, user, credential)
+            if not passkey_result["success"]:
+                return Response(
+                    {"detail": passkey_result.get("error", "Passkey verification failed")},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+        else:
+            return Response(
+                {"detail": "Either TOTP code or passkey credential is required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Clear the MFA token from cache
+        cache.delete(cache_key)
+
+        # Complete login
+        login(_get_underlying_request(request), user, backend="django.contrib.auth.backends.ModelBackend")
+
+        # Generate JWT tokens
+        from rest_framework_simplejwt.tokens import RefreshToken
+
+        refresh = RefreshToken.for_user(user)
+        access_token = refresh.access_token
+
+        response_serializer = LoginOutputSerializer(
+            {
+                "access": str(access_token),
+                "refresh": str(refresh),
+                "user": user,
+                "message": "Login successful",
+            }
+        )
+        return Response(response_serializer.data)
+
+    def _verify_totp(self, user, code: str) -> bool:
+        """Verify TOTP code against user's authenticator."""
+        try:
+            from allauth.mfa.models import Authenticator
+            from allauth.mfa.totp.internal import auth as totp_auth
+
+            try:
+                authenticator = Authenticator.objects.get(
+                    user=user,
+                    type=Authenticator.Type.TOTP,
+                )
+            except Authenticator.DoesNotExist:
+                return False
+
+            # Get the secret from authenticator data and verify
+            secret = authenticator.data.get("secret")
+            if not secret:
+                return False
+
+            return totp_auth.validate_totp_code(secret, code)
+
+        except ImportError:
+            logger.error("allauth.mfa not available for TOTP verification")
+            return False
+        except Exception as e:
+            logger.error(f"TOTP verification error: {e}")
+            return False
+
+    def _verify_passkey(self, request, user, credential: dict) -> dict:
+        """Verify WebAuthn/Passkey credential."""
+        try:
+            from allauth.mfa.models import Authenticator
+            from allauth.mfa.webauthn.internal import auth as webauthn_auth
+
+            # Check if user has any WebAuthn authenticators
+            has_passkey = Authenticator.objects.filter(
+                user=user,
+                type=Authenticator.Type.WEBAUTHN,
+            ).exists()
+
+            if not has_passkey:
+                return {"success": False, "error": "No passkey registered for this user"}
+
+            try:
+                # Parse the authentication response
+                credential_data = webauthn_auth.parse_authentication_response(credential)
+
+                # Get or create authentication state
+                # For login flow, we need to set up the state first
+                state = webauthn_auth.get_state(request)
+
+                if not state:
+                    # If no state, generate one for this user
+                    _, state = webauthn_auth.begin_authentication(request)
+                    webauthn_auth.set_state(request, state)
+
+                # Complete authentication
+                webauthn_auth.complete_authentication(request, credential_data, state)
+
+                # Clear the state
+                webauthn_auth.clear_state(request)
+
+                return {"success": True}
+
+            except Exception as e:
+                logger.error(f"WebAuthn authentication failed: {e}")
+                return {"success": False, "error": str(e)}
+
+        except ImportError as e:
+            logger.error(f"WebAuthn module not available: {e}")
+            return {"success": False, "error": "Passkey authentication not available"}
+        except Exception as e:
+            logger.error(f"Passkey verification error: {e}")
+            return {"success": False, "error": "Passkey verification failed"}
+
+
@extend_schema_view(
    post=extend_schema(
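End to end, the two-step login this view completes looks roughly like the following from a client's point of view (base URL and prefix are assumptions; httpx for illustration):

import httpx

BASE = "http://localhost:8000/api/v1/auth"  # assumed prefix

# Step 1: password login; an MFA-enabled account gets mfa_required instead of tokens.
first = httpx.post(f"{BASE}/login/", json={"email": "rider@example.com", "password": "s3cret"}).json()
if first.get("mfa_required"):
    # Step 2: trade the temporary token plus a TOTP code for real JWTs.
    tokens = httpx.post(
        f"{BASE}/login/mfa-verify/",
        json={"mfa_token": first["mfa_token"], "code": "123456"},
    ).json()  # {"access": ..., "refresh": ..., "user": ..., "message": "Login successful"}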
@@ -831,7 +1069,529 @@ The ThrillWiki Team
        # Don't reveal whether email exists
        return Response({"detail": "If the email exists, a verification email has been sent", "success": True})


# Note: User Profile, Top List, and Top List Item ViewSets are now handled
# by the dedicated accounts app at backend/apps/api/v1/accounts/views.py
# to avoid duplication and maintain clean separation of concerns.
+
+
+@extend_schema_view(
+    post=extend_schema(
+        summary="Process OAuth profile",
+        description="Process OAuth profile data during social authentication flow.",
+        request={
+            "type": "object",
+            "properties": {
+                "provider": {"type": "string", "description": "OAuth provider (e.g., google, discord)"},
+                "profile": {
+                    "type": "object",
+                    "description": "Profile data from OAuth provider",
+                    "properties": {
+                        "id": {"type": "string"},
+                        "email": {"type": "string", "format": "email"},
+                        "name": {"type": "string"},
+                        "avatar_url": {"type": "string", "format": "uri"},
+                    },
+                },
+                "access_token": {"type": "string", "description": "OAuth access token"},
+            },
+            "required": ["provider", "profile"],
+        },
+        responses={
+            200: {
+                "type": "object",
+                "properties": {
+                    "success": {"type": "boolean"},
+                    "action": {"type": "string", "enum": ["created", "updated", "linked"]},
+                    "user": {"type": "object"},
+                    "profile_synced": {"type": "boolean"},
+                },
+            },
+            400: "Bad Request",
+            401: "Unauthorized",
+            403: "Account suspended",
+        },
+        tags=["Social Authentication"],
+    ),
+)
+class ProcessOAuthProfileAPIView(APIView):
+    """
+    API endpoint to process OAuth profile data.
+
+    This endpoint is called AFTER the OAuth flow is complete to:
+    1. Check if user is banned (SECURITY CRITICAL)
+    2. Extract avatar from OAuth provider
+    3. Download and upload avatar to Cloudflare Images
+    4. Sync display name from OAuth provider
+    5. Update username if it's a generic UUID-based username
+
+    Called with an empty body - uses the authenticated session.
+
+    Full parity with Supabase Edge Function: process-oauth-profile
+
+    BULLETPROOFED: Comprehensive validation, sanitization, and error handling.
+    """
+
+    permission_classes = [IsAuthenticated]
+
+    # Security constants
+    MAX_AVATAR_SIZE = 10 * 1024 * 1024  # 10MB
+    AVATAR_DOWNLOAD_TIMEOUT = 10.0  # seconds
+    AVATAR_UPLOAD_TIMEOUT = 30.0  # seconds
+    MAX_USERNAME_LENGTH = 150
+    MIN_USERNAME_LENGTH = 3
+    ALLOWED_USERNAME_CHARS = set("abcdefghijklmnopqrstuvwxyz0123456789_")
+
+    # Rate limiting for avatar uploads (prevent abuse)
+    AVATAR_UPLOAD_COOLDOWN = 60  # seconds between uploads
+
+    def post(self, request: Request) -> Response:
+        import re
+        import httpx
+        from django.db import transaction
+        from django.core.cache import cache
+
+        try:
+            user = request.user
+
+            # ================================================================
+            # STEP 0: Validate user object exists and is valid
+            # ================================================================
+            if not user or not hasattr(user, 'user_id'):
+                logger.error("ProcessOAuthProfile called with invalid user object")
+                return Response({
+                    "success": False,
+                    "error": "Invalid user session",
+                }, status=status.HTTP_401_UNAUTHORIZED)
+
+            user_id_str = str(user.user_id)
+
+            # ================================================================
+            # STEP 1: CRITICAL - Check ban status FIRST
+            # ================================================================
+            is_banned = getattr(user, 'is_banned', False)
+
+            # Also check via profile if applicable
+            if not is_banned:
+                try:
+                    from apps.accounts.models import UserProfile
+                    profile_check = UserProfile.objects.filter(user=user).first()
+                    if profile_check and getattr(profile_check, 'is_banned', False):
+                        is_banned = True
+                except Exception:
+                    pass
+
+            if is_banned:
+                ban_reason = getattr(user, 'ban_reason', None) or "Policy violation"
+                # Sanitize ban reason for response
+                safe_ban_reason = str(ban_reason)[:200] if ban_reason else None
+
+                logger.warning(
+                    f"Banned user attempted OAuth profile update",
+                    extra={"user_id": user_id_str, "ban_reason": safe_ban_reason}
+                )
+
+                return Response({
+                    "error": "Account suspended",
+                    "message": (
+                        f"Your account has been suspended. Reason: {safe_ban_reason}"
+                        if safe_ban_reason
+                        else "Your account has been suspended. Contact support for assistance."
+                    ),
+                    "ban_reason": safe_ban_reason,
+                }, status=status.HTTP_403_FORBIDDEN)
+
+            # ================================================================
+            # STEP 2: Check rate limiting for avatar uploads
+            # ================================================================
+            rate_limit_key = f"oauth_profile:avatar:{user_id_str}"
+            if cache.get(rate_limit_key):
+                return Response({
+                    "success": True,
+                    "action": "rate_limited",
+                    "message": "Please wait before updating your profile again",
+                    "avatar_uploaded": False,
+                    "profile_updated": False,
+                })
+
+            # ================================================================
+            # STEP 3: Get OAuth provider info from social accounts
+            # ================================================================
+            try:
+                from allauth.socialaccount.models import SocialAccount
+            except ImportError:
+                logger.error("django-allauth not installed")
+                return Response({
+                    "success": False,
+                    "error": "Social authentication not configured",
+                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+            social_accounts = SocialAccount.objects.filter(user=user)
+
+            if not social_accounts.exists():
+                return Response({
+                    "success": True,
+                    "action": "skipped",
+                    "message": "No OAuth accounts linked",
+                })
+
+            # Get the most recent social account
+            social_account = social_accounts.order_by("-date_joined").first()
+            if not social_account:
+                return Response({
+                    "success": True,
+                    "action": "skipped",
+                    "message": "No valid OAuth account found",
+                })
+
+            provider = social_account.provider or "unknown"
+            extra_data = social_account.extra_data or {}
+
+            # Validate extra_data is a dict
+            if not isinstance(extra_data, dict):
+                logger.warning(f"Invalid extra_data type for user {user_id_str}: {type(extra_data)}")
+                extra_data = {}
+
+            # ================================================================
+            # STEP 4: Extract profile data based on provider (with sanitization)
+            # ================================================================
+            avatar_url = None
+            display_name = None
+            username_base = None
+
+            if provider == "google":
+                avatar_url = self._sanitize_url(extra_data.get("picture"))
+                display_name = self._sanitize_display_name(extra_data.get("name"))
+                email = extra_data.get("email", "")
+                if email and isinstance(email, str):
+                    username_base = self._sanitize_username(email.split("@")[0])
+
+            elif provider == "discord":
+                discord_data = extra_data
+                discord_id = discord_data.get("id") or discord_data.get("sub")
+
+                display_name = self._sanitize_display_name(
+                    discord_data.get("global_name")
+                    or discord_data.get("full_name")
+                    or discord_data.get("name")
+                )
+
+                # Discord avatar URL construction with validation
+                avatar_hash = discord_data.get("avatar")
+                if discord_id and avatar_hash and isinstance(discord_id, str) and isinstance(avatar_hash, str):
+                    # Validate discord_id is numeric
+                    if discord_id.isdigit():
+                        # Validate avatar_hash is alphanumeric
+                        if re.match(r'^[a-zA-Z0-9_]+$', avatar_hash):
+                            avatar_url = f"https://cdn.discordapp.com/avatars/{discord_id}/{avatar_hash}.png?size=256"
+
+                if not avatar_url:
+                    avatar_url = self._sanitize_url(
+                        discord_data.get("avatar_url") or discord_data.get("picture")
+                    )
+
+                raw_username = discord_data.get("username") or discord_data.get("name", "")
+                if raw_username and isinstance(raw_username, str):
+                    username_base = self._sanitize_username(raw_username.split("#")[0])
+                if not username_base and discord_id:
+                    username_base = f"discord_{str(discord_id)[:8]}"
+
+            else:
+                # Generic provider handling
+                avatar_url = self._sanitize_url(
+                    extra_data.get("picture")
+                    or extra_data.get("avatar_url")
+                    or extra_data.get("avatar")
+                )
+                display_name = self._sanitize_display_name(
+                    extra_data.get("name") or extra_data.get("display_name")
+                )
+
+            # ================================================================
+            # STEP 5: Get or create user profile (with transaction)
+            # ================================================================
+            from apps.accounts.models import UserProfile
+
+            with transaction.atomic():
+                profile, profile_created = UserProfile.objects.select_for_update().get_or_create(
+                    user=user
+                )
+
+                # Check if profile already has an avatar
+                if profile.avatar_id:
+                    return Response({
+                        "success": True,
+                        "action": "skipped",
+                        "message": "Avatar already exists",
+                        "avatar_uploaded": False,
+                        "profile_updated": False,
+                    })
+
+            # ================================================================
+            # STEP 6: Download and upload avatar to Cloudflare (outside transaction)
+            # ================================================================
+            avatar_uploaded = False
+
+            if avatar_url:
+                try:
+                    # Validate URL scheme
+                    if not avatar_url.startswith(('https://', 'http://')):
+                        logger.warning(f"Invalid avatar URL scheme: {avatar_url[:50]}")
+                    else:
+                        # Download avatar from provider
+                        download_response = httpx.get(
+                            avatar_url,
+                            timeout=self.AVATAR_DOWNLOAD_TIMEOUT,
+                            follow_redirects=True,
+                            headers={
+                                "User-Agent": "ThrillWiki/1.0",
+                                "Accept": "image/*",
+                            },
+                        )
+
+                        if download_response.status_code == 200:
+                            image_data = download_response.content
+                            content_type = download_response.headers.get("content-type", "")
+
+                            # Validate content type
+                            if not content_type.startswith("image/"):
+                                logger.warning(f"Invalid content type for avatar: {content_type}")
+                            # Validate file size
+                            elif len(image_data) > self.MAX_AVATAR_SIZE:
+                                logger.warning(
+                                    f"Avatar too large for user {user_id_str}: {len(image_data)} bytes"
+                                )
+                            # Validate minimum size (avoid empty images)
+                            elif len(image_data) < 100:
+                                logger.warning(f"Avatar too small for user {user_id_str}")
+                            else:
+                                avatar_uploaded = self._upload_to_cloudflare(
+                                    image_data, user_id_str, provider, profile
+                                )
+                        else:
+                            logger.warning(
+                                f"Avatar download failed: {download_response.status_code}",
+                                extra={"user_id": user_id_str, "provider": provider}
+                            )
+
+                except httpx.TimeoutException:
+                    logger.warning(f"Avatar download timeout for user {user_id_str}")
+                except httpx.HTTPError as download_error:
+                    logger.warning(f"Failed to download avatar: {download_error}")
+                except Exception as e:
+                    logger.warning(f"Unexpected avatar error: {e}")
+
+            # Set rate limit after successful processing
+            if avatar_uploaded:
+                cache.set(rate_limit_key, True, self.AVATAR_UPLOAD_COOLDOWN)
+
+            # ================================================================
+            # STEP 7: Update display name if not set (with validation)
+            # ================================================================
+            profile_updated = False
+
+            if display_name and not getattr(user, "display_name", None):
+                try:
+                    user.display_name = display_name
+                    user.save(update_fields=["display_name"])
+                    profile_updated = True
+                except Exception as e:
+                    logger.warning(f"Failed to update display name: {e}")
+
+            # ================================================================
+            # STEP 8: Update username if it's a generic UUID-based username
+            # ================================================================
+            current_username = getattr(user, "username", "") or ""
+            if username_base and current_username.startswith("user_"):
+                try:
+                    new_username = self._ensure_unique_username(username_base, user.user_id)
+                    if new_username and new_username != current_username:
+                        user.username = new_username
+                        user.save(update_fields=["username"])
+                        profile_updated = True
+                        logger.info(
+                            f"Username updated from {current_username} to {new_username}",
+                            extra={"user_id": user_id_str}
+                        )
+                except Exception as e:
+                    logger.warning(f"Failed to update username: {e}")
+
+            return Response({
+                "success": True,
+                "action": "processed",
+                "provider": provider,
+                "avatar_uploaded": avatar_uploaded,
+                "profile_updated": profile_updated,
+                "message": "OAuth profile processed successfully",
+            })
+
+        except Exception as e:
+            capture_and_log(e, "Process OAuth profile", source="api", request=request)
+            return Response({
+                "success": False,
+                "error": "Failed to process OAuth profile",
+            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    def _sanitize_url(self, url) -> str | None:
+        """Sanitize and validate URL."""
+        if not url or not isinstance(url, str):
+            return None
+
+        url = url.strip()[:2000]  # Limit length
+
+        # Basic URL validation
+        if not url.startswith(('https://', 'http://')):
+            return None
+
+        # Block obviously malicious patterns
+        dangerous_patterns = ['javascript:', 'data:', 'file:', '<script', 'onclick']
+        for pattern in dangerous_patterns:
+            if pattern.lower() in url.lower():
+                return None
+
+        return url
+
+    def _sanitize_display_name(self, name) -> str | None:
+        """Sanitize display name."""
+        if not name or not isinstance(name, str):
+            return None
+
+        import re
+
+        # Strip and limit length
+        name = name.strip()[:100]
+
+        # Remove control characters
+        name = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', name)
+
+        # Remove excessive whitespace
+        name = ' '.join(name.split())
+
+        # Must have at least 1 character
+        if len(name) < 1:
+            return None
+
+        return name
+
+    def _sanitize_username(self, username) -> str | None:
+        """Sanitize username for use."""
+        if not username or not isinstance(username, str):
+            return None
+
+        import re
+
+        # Lowercase and remove non-allowed characters
+        username = username.lower().strip()
+        username = re.sub(r'[^a-z0-9_]', '', username)
+
+        # Enforce length limits
+        if len(username) < self.MIN_USERNAME_LENGTH:
+            return None
+
+        username = username[:self.MAX_USERNAME_LENGTH]
+
+        return username
+
+    def _upload_to_cloudflare(self, image_data: bytes, user_id: str, provider: str, profile) -> bool:
+        """Upload image to Cloudflare Images with error handling."""
+        import httpx
+        from django.db import transaction
+
+        try:
+            from django_cloudflareimages_toolkit.models import CloudflareImage
+            from django_cloudflareimages_toolkit.services import CloudflareImagesService
+
+            cf_service = CloudflareImagesService()
+
+            # Request direct upload URL
+            upload_result = cf_service.get_direct_upload_url(
+                metadata={
+                    "type": "avatar",
+                    "user_id": user_id,
+                    "provider": provider,
+                }
+            )
+
+            if not upload_result or "upload_url" not in upload_result:
+                logger.warning("Failed to get Cloudflare upload URL")
+                return False
+
+            upload_url = upload_result["upload_url"]
+            cloudflare_id = upload_result.get("id") or upload_result.get("cloudflare_id")
+
+            if not cloudflare_id:
+                logger.warning("No Cloudflare ID in upload result")
+                return False
+
+            # Upload image to Cloudflare
+            files = {"file": ("avatar.png", image_data, "image/png")}
+            upload_response = httpx.post(
+                upload_url,
+                files=files,
+                timeout=self.AVATAR_UPLOAD_TIMEOUT,
+            )
+
+            if upload_response.status_code not in [200, 201]:
+                logger.warning(f"Cloudflare upload failed: {upload_response.status_code}")
+                return False
+
+            # Create CloudflareImage record and link to profile
+            with transaction.atomic():
+                cf_image = CloudflareImage.objects.create(
+                    cloudflare_id=cloudflare_id,
+                    is_uploaded=True,
+                    metadata={
+                        "type": "avatar",
+                        "user_id": user_id,
+                        "provider": provider,
+                    }
+                )
+
+                profile.avatar = cf_image
+                profile.save(update_fields=["avatar"])
+
+            logger.info(
+                f"Avatar uploaded successfully",
+                extra={"user_id": user_id, "provider": provider, "cloudflare_id": cloudflare_id}
+            )
+            return True
+
+        except ImportError:
+            logger.warning("django-cloudflareimages-toolkit not available")
+            return False
+        except Exception as cf_error:
+            logger.warning(f"Cloudflare upload error: {cf_error}")
+            return False
+
+    def _ensure_unique_username(self, base_username: str, user_id: str, max_attempts: int = 10) -> str | None:
+        """
+        Ensure username is unique by appending numbers if needed.
+
+        Returns None if no valid username can be generated.
+        """
+        if not base_username:
+            return None
+
+        username = base_username.lower()[:self.MAX_USERNAME_LENGTH]
+
+        # Validate characters
+        if not all(c in self.ALLOWED_USERNAME_CHARS for c in username):
+            return None
+
+        attempt = 0
+
+        while attempt < max_attempts:
+            try:
+                existing = UserModel.objects.filter(username=username).exclude(user_id=user_id).exists()
+                if not existing:
+                    return username
+            except Exception:
+                break
+
+            attempt += 1
+            # Ensure we don't exceed max length with suffix
+            suffix = f"_{attempt}"
+            max_base = self.MAX_USERNAME_LENGTH - len(suffix)
+            username = f"{base_username.lower()[:max_base]}{suffix}"
+
+        # Fallback to UUID-based username
+        return f"user_{str(user_id)[:8]}"
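The username rules in _sanitize_username reduce to a few lines of plain Python; a standalone sketch for clarity (constants mirror the class defaults):

import re

def sanitize_username(raw: str, min_len: int = 3, max_len: int = 150) -> str | None:
    # Mirror of _sanitize_username above: lowercase, strip, keep [a-z0-9_] only.
    username = re.sub(r"[^a-z0-9_]", "", raw.lower().strip())
    return username[:max_len] if len(username) >= min_len else None

assert sanitize_username("Coaster.Fan#1234") == "coasterfan1234"
assert sanitize_username("??") is None  # nothing usable survives the filter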
@@ -24,4 +24,10 @@ urlpatterns = [
        views.QuickEntitySuggestionView.as_view(),
        name="entity_suggestions",
    ),
+    # Telemetry endpoint for frontend logging
+    path(
+        "telemetry/",
+        views.TelemetryView.as_view(),
+        name="telemetry",
+    ),
]
@@ -22,6 +22,108 @@ from apps.core.services.entity_fuzzy_matching import (
    entity_fuzzy_matcher,
)

+import logging
+
+logger = logging.getLogger(__name__)
+
+
+class TelemetryView(APIView):
+    """
+    Handle frontend telemetry and request metadata logging.
+
+    This endpoint accepts telemetry data from the frontend for logging and
+    analytics purposes. When error data is present, it persists the error
+    to the database for monitoring.
+
+    Note: This endpoint bypasses authentication entirely to ensure errors
+    can be logged even when user tokens are expired or invalid.
+    """
+
+    authentication_classes = []  # Bypass JWT auth to allow error logging with expired tokens
+    permission_classes = [AllowAny]
+
+    @extend_schema(
+        tags=["Core"],
+        summary="Log request metadata",
+        description="Log frontend telemetry and request metadata",
+    )
+    def post(self, request):
+        """Accept telemetry data from frontend."""
+        data = request.data
+
+        # If this is an error report, persist it to the database
+        if data.get('p_error_type') or data.get('p_error_message') or data.get('error_type') or data.get('error_message'):
+            from apps.core.services import ErrorService
+
+            # Handle both p_ prefixed params (from log_request_metadata RPC) and direct params
+            error_message = data.get('p_error_message') or data.get('error_message') or 'Unknown error'
+            error_type = data.get('p_error_type') or data.get('error_type') or 'Error'
+            severity = data.get('p_severity') or data.get('severity') or 'medium'
+            error_stack = data.get('p_error_stack') or data.get('error_stack') or ''
+            error_code = data.get('p_error_code') or data.get('error_code') or ''
+
+            # Build metadata from available fields
+            metadata = {
+                'action': data.get('p_action') or data.get('action'),
+                'breadcrumbs': data.get('p_breadcrumbs'),
+                'duration_ms': data.get('p_duration_ms'),
+                'retry_attempts': data.get('p_retry_attempts'),
+                'affected_route': data.get('p_affected_route'),
+                'request_id': data.get('p_request_id') or data.get('request_id'),
+            }
+            # Remove None values
+            metadata = {k: v for k, v in metadata.items() if v is not None}
+
+            # Build environment from available fields
+            environment = data.get('p_environment_context') or data.get('environment') or {}
+            if isinstance(environment, str):
+                import json
+                try:
+                    environment = json.loads(environment)
+                except json.JSONDecodeError:
+                    environment = {}
+
+            try:
+                error = ErrorService.capture_error(
+                    error=error_message,
+                    source='frontend',
+                    request=request,
+                    severity=severity,
+                    metadata=metadata,
+                    environment=environment,
+                )
+                # Update additional fields
+                error.error_type = error_type
+                error.error_stack = error_stack[:10000] if error_stack else ''
+                error.error_code = error_code
+                error.endpoint = data.get('p_affected_route') or ''
+                error.http_status = data.get('p_http_status')
+                error.save(update_fields=['error_type', 'error_stack', 'error_code', 'endpoint', 'http_status'])
+
+                logger.info(f"Frontend error captured: {error.short_error_id}")
+                return Response(
+                    {"success": True, "error_id": str(error.error_id)},
+                    status=status.HTTP_201_CREATED,
+                )
+            except Exception as e:
+                logger.error(f"Failed to capture frontend error: {e}")
+                # Fall through to regular telemetry logging
+
+        # Non-error telemetry - just log and acknowledge
+        logger.debug(
+            "Telemetry received",
+            extra={
+                "data": data,
+                "user_id": getattr(request.user, "id", None),
+            },
+        )
+        return Response(
+            {"success": True, "message": "Telemetry logged"},
+            status=status.HTTP_200_OK,
+        )
+
+
class EntityFuzzySearchView(APIView):
    """
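A frontend error report against this endpoint would look roughly like this; the exact mount point of the core urlconf is an assumption:

import httpx

# Assumed mount point; adjust to where apps.core's urls are included.
url = "http://localhost:8000/api/v1/core/telemetry/"

payload = {
    "p_error_type": "TypeError",
    "p_error_message": "x is undefined",
    "p_severity": "medium",
    "p_affected_route": "/parks/42",
}
resp = httpx.post(url, json=payload)  # no Authorization header needed by design
print(resp.status_code, resp.json())  # expect 201 with {"success": true, "error_id": "..."}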
@@ -1,7 +1,11 @@
from django.urls import path

-from .views import GenerateUploadURLView
+from . import views
+
+app_name = "images"

urlpatterns = [
-    path("generate-upload-url/", GenerateUploadURLView.as_view(), name="generate-upload-url"),
+    path("generate-upload-url/", views.GenerateUploadURLView.as_view(), name="generate_upload_url"),
+    path("delete/", views.DeleteImageView.as_view(), name="delete_image"),
+    path("og-image/", views.GenerateOGImageView.as_view(), name="og_image"),
]
@@ -1,6 +1,7 @@
 import logging

 import requests
+from django.conf import settings
 from django.core.exceptions import ImproperlyConfigured
 from rest_framework import status
 from rest_framework.permissions import IsAuthenticated
@@ -30,3 +31,109 @@ class GenerateUploadURLView(APIView):
         except Exception as e:
             capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
             return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+class DeleteImageView(APIView):
+    """
+    POST /images/delete/
+    Delete an image from Cloudflare Images.
+    """
+
+    permission_classes = [IsAuthenticated]
+
+    def post(self, request):
+        image_id = request.data.get("image_id")
+
+        if not image_id:
+            return Response(
+                {"detail": "image_id is required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        try:
+            # Get Cloudflare credentials
+            account_id = getattr(settings, "CLOUDFLARE_IMAGES_ACCOUNT_ID", None)
+            api_token = getattr(settings, "CLOUDFLARE_IMAGES_API_TOKEN", None)
+
+            if not account_id or not api_token:
+                logger.warning("Cloudflare Images not configured, mock deleting image")
+                return Response({"success": True, "mock": True})
+
+            # Delete from Cloudflare
+            url = f"https://api.cloudflare.com/client/v4/accounts/{account_id}/images/v1/{image_id}"
+            response = requests.delete(
+                url,
+                headers={"Authorization": f"Bearer {api_token}"},
+                timeout=10,
+            )
+
+            if response.status_code in (200, 404):  # 404 = already deleted
+                return Response({"success": True})
+            else:
+                logger.error(f"Cloudflare delete failed: {response.text}")
+                return Response(
+                    {"detail": "Failed to delete image"},
+                    status=status.HTTP_502_BAD_GATEWAY,
+                )
+
+        except requests.RequestException as e:
+            capture_and_log(e, "Delete image - Cloudflare API error", source="api")
+            return Response(
+                {"detail": "Failed to delete image"},
+                status=status.HTTP_502_BAD_GATEWAY,
+            )
+        except Exception as e:
+            capture_and_log(e, "Delete image - unexpected error", source="api")
+            return Response(
+                {"detail": "An unexpected error occurred"},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
+
+
+class GenerateOGImageView(APIView):
+    """
+    POST /images/og-image/
+    Generate an Open Graph image for social sharing.
+    """
+
+    permission_classes = []  # Public endpoint
+
+    def post(self, request):
+        title = request.data.get("title", "")
+        description = request.data.get("description", "")
+        entity_type = request.data.get("entity_type", "")
+        image_url = request.data.get("image_url", "")
+
+        if not title:
+            return Response(
+                {"detail": "title is required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        try:
+            # This is a placeholder for OG image generation
+            # In production, you would:
+            # 1. Use an image generation service (Cloudinary, imgix, etc.)
+            # 2. Or use a headless browser service (Puppeteer, Playwright)
+            # 3. Or use a dedicated OG image service
+
+            # For now, return a template URL or placeholder
+            base_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")
+            og_image_url = f"{base_url}/api/v1/images/og-preview/?title={title[:100]}"
+
+            return Response({
+                "success": True,
+                "og_image_url": og_image_url,
+                "title": title,
+                "description": description[:200] if description else "",
+                "entity_type": entity_type,
+                "note": "Placeholder - configure OG image service for production",
+            })
+
+        except Exception as e:
+            capture_and_log(e, "Generate OG image", source="api")
+            return Response(
+                {"detail": str(e)},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
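A hedged sketch of exercising DeleteImageView's mock branch (assumes DRF's APIClient, an authenticated user fixture, and no Cloudflare credentials configured; the URL prefix is an assumption):

from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=some_user)  # some_user: assumed test fixture
resp = client.post("/api/v1/images/delete/", {"image_id": "abc123"}, format="json")
# With CLOUDFLARE_IMAGES_ACCOUNT_ID / CLOUDFLARE_IMAGES_API_TOKEN unset,
# the view short-circuits and reports a mock deletion:
assert resp.status_code == 200
assert resp.json() == {"success": True, "mock": True}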
@@ -30,4 +30,8 @@ urlpatterns = [
         views.MapCacheAPIView.as_view(),
         name="map_cache_invalidate",
     ),
+    # Location detection and enrichment
+    path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
+    path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
+    path("search-location/", views.SearchLocationView.as_view(), name="search_location"),
 ]
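The three new routes can be exercised like any other POST endpoint; a sketch using httpx, with the /api/v1/maps/ mount point assumed:

import httpx

resp = httpx.post(
    "https://thrillwiki.com/api/v1/maps/search-location/",
    json={"query": "cedar point", "limit": 3},
)
print(resp.headers.get("X-Cache"))  # "MISS" on first call, "HIT" on a repeat
print(resp.json())                  # raw Nominatim-style result array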
@@ -999,3 +999,630 @@ MapSearchView = MapSearchAPIView
 MapBoundsView = MapBoundsAPIView
 MapStatsView = MapStatsAPIView
 MapCacheView = MapCacheAPIView
+
+
+# =============================================================================
+# Location Detection / Enrichment Endpoints
+# =============================================================================
+
+
+@extend_schema_view(
+    post=extend_schema(
+        summary="Detect user location from IP",
+        description="Detect the user's approximate location based on their IP address.",
+        request={
+            "application/json": {
+                "type": "object",
+                "properties": {
+                    "ip_address": {
+                        "type": "string",
+                        "description": "IP address to geolocate. If not provided, uses request IP.",
+                    }
+                },
+            }
+        },
+        responses={
+            200: {
+                "type": "object",
+                "properties": {
+                    "latitude": {"type": "number"},
+                    "longitude": {"type": "number"},
+                    "city": {"type": "string"},
+                    "region": {"type": "string"},
+                    "country": {"type": "string"},
+                    "timezone": {"type": "string"},
+                },
+            }
+        },
+        tags=["Maps"],
+    ),
+)
+class DetectLocationView(APIView):
+    """
+    POST /maps/detect-location/
+    Detect user's location based on IP address using a geolocation service.
+    """
+
+    permission_classes = [AllowAny]
+
+    def post(self, request):
+        try:
+            # Get IP address from request or payload
+            ip_address = request.data.get("ip_address")
+            if not ip_address:
+                # Get client IP from request
+                x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
+                if x_forwarded_for:
+                    ip_address = x_forwarded_for.split(",")[0].strip()
+                else:
+                    ip_address = request.META.get("REMOTE_ADDR", "")
+
+            # For localhost/development, return a default location
+            if ip_address in ("127.0.0.1", "::1", "localhost") or ip_address.startswith("192.168."):
+                return Response(
+                    {
+                        "latitude": 40.7128,
+                        "longitude": -74.006,
+                        "city": "New York",
+                        "region": "New York",
+                        "country": "US",
+                        "country_name": "United States",
+                        "timezone": "America/New_York",
+                        "detected": False,
+                        "reason": "localhost_fallback",
+                    }
+                )
+
+            # Use IP geolocation service (ipapi.co, ipinfo.io, etc.)
+            import httpx
+
+            try:
+                response = httpx.get(
+                    f"https://ipapi.co/{ip_address}/json/",
+                    timeout=5.0,
+                    headers={"User-Agent": "ThrillWiki/1.0"},
+                )
+                if response.status_code == 200:
+                    data = response.json()
+                    return Response(
+                        {
+                            "latitude": data.get("latitude"),
+                            "longitude": data.get("longitude"),
+                            "city": data.get("city", ""),
+                            "region": data.get("region", ""),
+                            "country": data.get("country_code", ""),
+                            "country_name": data.get("country_name", ""),
+                            "timezone": data.get("timezone", ""),
+                            "detected": True,
+                        }
+                    )
+            except httpx.HTTPError as e:
+                logger.warning(f"IP geolocation failed: {e}")
+
+            # Fallback response
+            return Response(
+                {
+                    "latitude": None,
+                    "longitude": None,
+                    "city": "",
+                    "region": "",
+                    "country": "",
+                    "country_name": "",
+                    "timezone": "",
+                    "detected": False,
+                    "reason": "geolocation_failed",
+                }
+            )
+
+        except Exception as e:
+            capture_and_log(e, "Detect location from IP", source="api")
+            return Response(
+                {"detail": str(e)},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
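The localhost fallback above is deterministic, which makes it easy to check in a test; a hedged sketch (APIClient, mount point assumed):

from rest_framework.test import APIClient

client = APIClient()
resp = client.post("/api/v1/maps/detect-location/", {"ip_address": "127.0.0.1"}, format="json")
body = resp.json()
assert body["detected"] is False
assert body["reason"] == "localhost_fallback"
assert body["city"] == "New York"  # the hard-coded development default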
+
+
+@extend_schema_view(
+    post=extend_schema(
+        summary="Enrich location with geocoding",
+        description="Enrich location data with reverse geocoding (coordinates to address).",
+        request={
+            "application/json": {
+                "type": "object",
+                "properties": {
+                    "latitude": {"type": "number", "required": True},
+                    "longitude": {"type": "number", "required": True},
+                },
+            }
+        },
+        responses={
+            200: {
+                "type": "object",
+                "properties": {
+                    "formatted_address": {"type": "string"},
+                    "street_address": {"type": "string"},
+                    "city": {"type": "string"},
+                    "state": {"type": "string"},
+                    "postal_code": {"type": "string"},
+                    "country": {"type": "string"},
+                },
+            }
+        },
+        tags=["Maps"],
+    ),
+)
+class EnrichLocationView(APIView):
+    """
+    POST /maps/enrich-location/
+    Enrich location with reverse geocoding (coordinates to address).
+    """
+
+    permission_classes = [AllowAny]
+
+    def post(self, request):
+        try:
+            latitude = request.data.get("latitude")
+            longitude = request.data.get("longitude")
+
+            if latitude is None or longitude is None:
+                return Response(
+                    {"detail": "latitude and longitude are required"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            try:
+                lat = float(latitude)
+                lng = float(longitude)
+            except (TypeError, ValueError):
+                return Response(
+                    {"detail": "Invalid latitude or longitude"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # Use reverse geocoding service
+            import httpx
+
+            try:
+                # Using Nominatim (OpenStreetMap) - free, no API key required
+                response = httpx.get(
+                    "https://nominatim.openstreetmap.org/reverse",
+                    params={
+                        "lat": lat,
+                        "lon": lng,
+                        "format": "json",
+                        "addressdetails": 1,
+                    },
+                    timeout=5.0,
+                    headers={"User-Agent": "ThrillWiki/1.0"},
+                )
+                if response.status_code == 200:
+                    data = response.json()
+                    address = data.get("address", {})
+                    return Response(
+                        {
+                            "formatted_address": data.get("display_name", ""),
+                            "street_address": address.get("road", ""),
+                            "house_number": address.get("house_number", ""),
+                            "city": (
+                                address.get("city")
+                                or address.get("town")
+                                or address.get("village")
+                                or ""
+                            ),
+                            "state": address.get("state", ""),
+                            "postal_code": address.get("postcode", ""),
+                            "country": address.get("country", ""),
+                            "country_code": address.get("country_code", "").upper(),
+                            "enriched": True,
+                        }
+                    )
+            except httpx.HTTPError as e:
+                logger.warning(f"Reverse geocoding failed: {e}")
+
+            # Fallback response
+            return Response(
+                {
+                    "formatted_address": "",
+                    "street_address": "",
+                    "city": "",
+                    "state": "",
+                    "postal_code": "",
+                    "country": "",
+                    "country_code": "",
+                    "enriched": False,
+                    "reason": "geocoding_failed",
+                }
+            )
+
+        except Exception as e:
+            capture_and_log(e, "Enrich location", source="api")
+            return Response(
+                {"detail": str(e)},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
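Calling the enrichment endpoint, sketched (mount point assumed; coordinates are illustrative):

import httpx

resp = httpx.post(
    "https://thrillwiki.com/api/v1/maps/enrich-location/",
    json={"latitude": 41.4822, "longitude": -82.6835},
)
data = resp.json()
if data.get("enriched"):
    print(data["formatted_address"], data["country_code"])  # resolved via Nominatim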
+
+
+@extend_schema_view(
+    post=extend_schema(
+        summary="Search for a location by text",
+        description="Forward geocoding - convert a text query (address, city name, etc.) to coordinates.",
+        request={
+            "application/json": {
+                "type": "object",
+                "properties": {
+                    "query": {
+                        "type": "string",
+                        "description": "Location search query (address, city, place name, etc.)",
+                    },
+                    "limit": {
+                        "type": "integer",
+                        "description": "Maximum number of results to return (default: 5)",
+                    },
+                    "country": {
+                        "type": "string",
+                        "description": "ISO 3166-1 alpha-2 country code to restrict search",
+                    },
+                },
+                "required": ["query"],
+            }
+        },
+        responses={
+            200: {
+                "type": "object",
+                "properties": {
+                    "results": {
+                        "type": "array",
+                        "items": {
+                            "type": "object",
+                            "properties": {
+                                "latitude": {"type": "number"},
+                                "longitude": {"type": "number"},
+                                "formatted_address": {"type": "string"},
+                                "city": {"type": "string"},
+                                "state": {"type": "string"},
+                                "country": {"type": "string"},
+                                "importance": {"type": "number"},
+                            },
+                        },
+                    },
+                    "query": {"type": "string"},
+                    "count": {"type": "integer"},
+                },
+            },
+            400: {"description": "Missing or invalid query parameter"},
+        },
+        tags=["Maps"],
+    ),
+)
+class SearchLocationView(APIView):
+    """
+    POST /maps/search-location/
+    Forward geocoding - search for locations by text query.
+
+    Full parity with Supabase Edge Function: search-location
+
+    Features:
+    - Query caching with SHA-256 hash (7-day expiration)
+    - Rate limiting (30 requests per minute per IP)
+    - Usage logging for monitoring
+    - Cache headers (X-Cache: HIT/MISS)
+    """
+
+    permission_classes = [AllowAny]
+
+    # Rate limit settings matching original
+    RATE_LIMIT_REQUESTS = 30
+    RATE_LIMIT_PERIOD = 60  # 1 minute
+    CACHE_EXPIRATION = 7 * 24 * 60 * 60  # 7 days in seconds
+
+    def _hash_query(self, query: str) -> str:
+        """Hash query for cache lookup (matching original SHA-256)."""
+        import hashlib
+        normalized = query.strip().lower()
+        return hashlib.sha256(normalized.encode()).hexdigest()
+
+    def _get_client_ip(self, request) -> str:
+        """Get client IP from request headers."""
+        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
+        if x_forwarded_for:
+            return x_forwarded_for.split(',')[0].strip()
+        return request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR') or 'unknown'
+
+    def _check_rate_limit(self, client_ip: str) -> tuple[bool, int]:
+        """
+        Check if client is rate limited.
+        Returns (is_allowed, current_count).
+        """
+        from django.core.cache import cache
+
+        rate_limit_key = f"search_location:rate:{client_ip}"
+        current_count = cache.get(rate_limit_key, 0)
+
+        if current_count >= self.RATE_LIMIT_REQUESTS:
+            return False, current_count
+
+        # Increment counter with TTL
+        cache.set(rate_limit_key, current_count + 1, self.RATE_LIMIT_PERIOD)
+        return True, current_count + 1
+
+    def _get_cached_result(self, query_hash: str):
+        """Get cached result if available."""
+        from django.core.cache import cache
+
+        cache_key = f"search_location:query:{query_hash}"
+        cached_data = cache.get(cache_key)
+
+        if cached_data:
+            # Update access count in a separate key
+            access_key = f"search_location:access:{query_hash}"
+            access_count = cache.get(access_key, 0)
+            cache.set(access_key, access_count + 1, self.CACHE_EXPIRATION)
+
+        return cached_data
+
+    def _set_cached_result(self, query: str, query_hash: str, results: list):
+        """Cache the results."""
+        from django.core.cache import cache
+
+        cache_key = f"search_location:query:{query_hash}"
+        cache_data = {
+            "query": query,
+            "results": results,
+            "result_count": len(results),
+        }
+        cache.set(cache_key, cache_data, self.CACHE_EXPIRATION)
+
+        # Initialize access count
+        access_key = f"search_location:access:{query_hash}"
+        cache.set(access_key, 1, self.CACHE_EXPIRATION)
+
+    def _log_usage(self, query: str, cache_hit: bool, api_called: bool,
+                   response_time_ms: int = None, result_count: int = None,
+                   client_ip: str = None, user_id: str = None,
+                   error: str = None, status_code: int = None):
+        """Log API usage for monitoring."""
+        # Log to structured logger for now (can be enhanced to write to DB)
+        logger.info(
+            "OpenStreetMap API usage",
+            extra={
+                "query": query[:100],
+                "cache_hit": cache_hit,
+                "api_called": api_called,
+                "response_time_ms": response_time_ms,
+                "result_count": result_count,
+                "client_ip": client_ip,
+                "user_id": user_id,
+                "error": error,
+                "status_code": status_code,
+            }
+        )
+
+    def post(self, request):
+        import time
+        import re
+        start_time = time.time()
+
+        client_ip = self._get_client_ip(request)
+        user_id = None
+
+        try:
+            # Safely get user ID
+            if request.user and request.user.is_authenticated:
+                user_id = str(getattr(request.user, 'user_id', request.user.id))
+        except Exception:
+            pass
+
+        try:
+            # ================================================================
+            # STEP 0: Sanitize and validate input
+            # ================================================================
+            raw_query = request.data.get("query", "")
+            if not isinstance(raw_query, str):
+                raw_query = str(raw_query) if raw_query else ""
+
+            # Sanitize query: strip, limit length, remove control characters
+            query = raw_query.strip()[:500]
+            query = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', query)
+
+            # Validate limit
+            try:
+                limit = min(int(request.data.get("limit", 5)), 10)
+                limit = max(limit, 1)  # At least 1
+            except (ValueError, TypeError):
+                limit = 5
+
+            # Sanitize country code (2-letter ISO code)
+            raw_country = request.data.get("country", "")
+            country_code = ""
+            if raw_country and isinstance(raw_country, str):
+                country_code = re.sub(r'[^a-zA-Z]', '', raw_country)[:2].lower()
+
+            # ================================================================
+            # STEP 1: Validate query (original: min 3 characters)
+            # ================================================================
+            if not query:
+                response_time = int((time.time() - start_time) * 1000)
+                self._log_usage(
+                    query="",
+                    cache_hit=False,
+                    api_called=False,
+                    response_time_ms=response_time,
+                    client_ip=client_ip,
+                    user_id=user_id,
+                    error="Query is required",
+                    status_code=400
+                )
+                return Response(
+                    {"error": "Query is required"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            if len(query) < 3:  # Match original: min 3 characters
+                response_time = int((time.time() - start_time) * 1000)
+                self._log_usage(
+                    query=query,
+                    cache_hit=False,
+                    api_called=False,
+                    response_time_ms=response_time,
+                    client_ip=client_ip,
+                    user_id=user_id,
+                    error="Query must be at least 3 characters",
+                    status_code=400
+                )
+                return Response(
+                    {"error": "Query must be at least 3 characters"},
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # ================================================================
+            # STEP 2: Check rate limit (30 req/min per IP)
+            # ================================================================
+            is_allowed, current_count = self._check_rate_limit(client_ip)
+            if not is_allowed:
+                response_time = int((time.time() - start_time) * 1000)
+                self._log_usage(
+                    query=query,
+                    cache_hit=False,
+                    api_called=False,
+                    response_time_ms=response_time,
+                    client_ip=client_ip,
+                    user_id=user_id,
+                    error="Rate limit exceeded",
+                    status_code=429
+                )
+                return Response(
+                    {"error": "Rate limit exceeded. Please try again later."},
+                    status=status.HTTP_429_TOO_MANY_REQUESTS,
+                    headers={
+                        "Retry-After": str(self.RATE_LIMIT_PERIOD),
+                        "X-RateLimit-Limit": str(self.RATE_LIMIT_REQUESTS),
+                        "X-RateLimit-Remaining": "0",
+                    }
+                )
+
+            # ================================================================
+            # STEP 3: Check cache
+            # ================================================================
+            query_hash = self._hash_query(query)
+            cached = self._get_cached_result(query_hash)
+
+            if cached:
+                response_time = int((time.time() - start_time) * 1000)
+                results = cached.get("results", [])
+
+                self._log_usage(
+                    query=query,
+                    cache_hit=True,
+                    api_called=False,
+                    response_time_ms=response_time,
+                    result_count=len(results),
+                    client_ip=client_ip,
+                    user_id=user_id,
+                    status_code=200
+                )
+
+                # Return raw array like original (frontend handles both formats)
+                response = Response(
+                    results,
+                    status=status.HTTP_200_OK,
+                )
+                response["X-Cache"] = "HIT"
+                response["Cache-Control"] = "public, max-age=3600"
+                return response
+
+            # ================================================================
+            # STEP 4: Cache miss - call Nominatim API
+            # ================================================================
+            import httpx
+
+            try:
+                params = {
+                    "q": query,
+                    "format": "json",
+                    "addressdetails": 1,
+                    "limit": limit,
+                }
+                if country_code:
+                    params["countrycodes"] = country_code.lower()
+
+                api_response = httpx.get(
+                    "https://nominatim.openstreetmap.org/search",
+                    params=params,
+                    timeout=10.0,
+                    headers={"User-Agent": "ThrillWiki/1.0 (https://thrillwiki.com)"},
+                )
+
+                if api_response.status_code != 200:
+                    logger.warning(
+                        f"Nominatim API error: {api_response.status_code}",
+                        extra={"status": api_response.status_code}
+                    )
+                    return Response(
+                        {"error": "Location search failed", "status": api_response.status_code},
+                        status=api_response.status_code,
+                    )
+
+                data = api_response.json()
+                response_time = int((time.time() - start_time) * 1000)
+
+                # ============================================================
+                # STEP 5: Cache the results (background-like, but sync in Django)
+                # ============================================================
+                try:
+                    self._set_cached_result(query, query_hash, data)
+                except Exception as cache_error:
+                    logger.warning(f"Failed to cache result: {cache_error}")
+
+                # Log usage
+                self._log_usage(
+                    query=query,
+                    cache_hit=False,
+                    api_called=True,
+                    response_time_ms=response_time,
+                    result_count=len(data) if isinstance(data, list) else 0,
+                    client_ip=client_ip,
+                    user_id=user_id,
+                    status_code=200
+                )
+
+                # Return raw array like original Nominatim response
+                response = Response(
+                    data,
+                    status=status.HTTP_200_OK,
+                )
+                response["X-Cache"] = "MISS"
+                response["Cache-Control"] = "public, max-age=3600"
+                return response
+
+            except httpx.HTTPError as e:
+                logger.warning(f"Forward geocoding failed: {e}")
+                response_time = int((time.time() - start_time) * 1000)
+
+                self._log_usage(
+                    query=query,
+                    cache_hit=False,
+                    api_called=True,
+                    response_time_ms=response_time,
+                    client_ip=client_ip,
+                    user_id=user_id,
+                    error=str(e),
+                    status_code=500
+                )
+
+                return Response(
+                    {"error": "Failed to fetch location data"},
+                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                )
+
+        except ValueError as e:
+            return Response(
+                {"error": f"Invalid parameter: {str(e)}"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+        except Exception as e:
+            capture_and_log(e, "Search location", source="api")
+            return Response(
+                {"error": str(e)},
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
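For reference, the cache key used above is a plain SHA-256 digest of the normalized query, so queries that differ only in case or surrounding whitespace share one cache entry; a standalone sketch:

import hashlib

def hash_query(query: str) -> str:
    # Same normalization as SearchLocationView._hash_query:
    # strip, lowercase, then hex-encoded SHA-256.
    return hashlib.sha256(query.strip().lower().encode()).hexdigest()

assert hash_query("  Cedar Point ") == hash_query("cedar point")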
@@ -333,6 +333,11 @@ class ParkListCreateAPIView(APIView):

     def _apply_park_attribute_filters(self, qs: QuerySet, params: dict) -> QuerySet:
         """Apply park attribute filtering to the queryset."""
+        # Slug filter - exact match for single park lookup
+        slug = params.get("slug")
+        if slug:
+            qs = qs.filter(slug=slug)
+
         park_type = params.get("park_type")
         if park_type:
             qs = qs.filter(park_type=park_type)
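The new slug filter lets the list endpoint serve single-park lookups; a hypothetical request (path assumed):

# GET /api/v1/parks/?slug=cedar-point
# -> queryset narrowed to exactly the park with that slug, on top of
#    any other attribute filters in the same querystring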
@@ -56,36 +56,26 @@ class CompanyDetailOutputSerializer(serializers.Serializer):
     name = serializers.CharField()
     slug = serializers.CharField()
     roles = serializers.ListField(child=serializers.CharField())
-    description = serializers.CharField()
-    website = serializers.URLField(required=False, allow_blank=True)
+    description = serializers.CharField(allow_blank=True)
+    website = serializers.URLField(required=False, allow_blank=True, allow_null=True)

-    # Entity type and status (ported from legacy)
-    person_type = serializers.CharField(required=False, allow_blank=True)
-    status = serializers.CharField()
-
     # Founding information
-    founded_year = serializers.IntegerField(allow_null=True)
-    founded_date = serializers.DateField(allow_null=True)
-    founded_date_precision = serializers.CharField(required=False, allow_blank=True)
+    founded_date = serializers.DateField(allow_null=True, required=False)

-    # Image URLs
-    logo_url = serializers.URLField(required=False, allow_blank=True)
-    banner_image_url = serializers.URLField(required=False, allow_blank=True)
-    card_image_url = serializers.URLField(required=False, allow_blank=True)
+    # Counts (from model)
+    rides_count = serializers.IntegerField(required=False, default=0)
+    coasters_count = serializers.IntegerField(required=False, default=0)

-    # Rating and review aggregates
-    average_rating = serializers.DecimalField(max_digits=3, decimal_places=2, allow_null=True)
-    review_count = serializers.IntegerField()
-
-    # Counts
-    parks_count = serializers.IntegerField()
-    rides_count = serializers.IntegerField()
+    # Frontend URL
+    url = serializers.URLField(required=False, allow_blank=True, allow_null=True)

     # Metadata
     created_at = serializers.DateTimeField()
     updated_at = serializers.DateTimeField()


 class CompanyCreateInputSerializer(serializers.Serializer):
     """Input serializer for creating companies."""
@@ -27,12 +27,23 @@ from .views.reviews import LatestReviewsAPIView
 from .views.stats import StatsAPIView, StatsRecalculateAPIView
 from .viewsets_rankings import RideRankingViewSet, TriggerRankingCalculationView

+# Import analytics views
+from apps.core.api.analytics_views import (
+    ApprovalTransactionMetricViewSet,
+    ErrorSummaryView,
+    RequestMetadataViewSet,
+)
+
 # Create the main API router
 router = DefaultRouter()

 # Register ranking endpoints
 router.register(r"rankings", RideRankingViewSet, basename="ranking")

+# Register analytics endpoints
+router.register(r"request_metadata", RequestMetadataViewSet, basename="request_metadata")
+router.register(r"approval_transaction_metrics", ApprovalTransactionMetricViewSet, basename="approval_transaction_metrics")
+
 app_name = "api_v1"

 urlpatterns = [
@@ -40,6 +51,8 @@ urlpatterns = [
     # See backend/thrillwiki/urls.py for documentation endpoints
     # Authentication endpoints
     path("auth/", include("apps.api.v1.auth.urls")),
+    # Analytics endpoints (error_summary is a view, not a viewset)
+    path("error_summary/", ErrorSummaryView.as_view(), name="error-summary"),
     # Health check endpoints
     path("health/", HealthCheckAPIView.as_view(), name="health-check"),
     path("health/simple/", SimpleHealthAPIView.as_view(), name="simple-health"),
@@ -106,8 +119,11 @@ urlpatterns = [
     path("media/", include("apps.media.urls")),
     path("blog/", include("apps.blog.urls")),
     path("support/", include("apps.support.urls")),
+    path("notifications/", include("apps.notifications.urls")),
     path("errors/", include("apps.core.urls.errors")),
     path("images/", include("apps.api.v1.images.urls")),
+    # Admin dashboard API endpoints
+    path("admin/", include("apps.api.v1.admin.urls")),
     # Cloudflare Images Toolkit API endpoints
     path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
     # Include router URLs (for rankings and any other router-registered endpoints)
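The DefaultRouter registrations above expand to the usual list/detail routes; roughly (relative to the api_v1 mount point, which is assumed here):

# GET/POST  /request_metadata/              -> RequestMetadataViewSet list/create
# GET       /request_metadata/{pk}/         -> retrieve (plus PUT/PATCH/DELETE)
# POST      /request_metadata/{pk}/resolve/ -> resolve action (admin only)
# GET       /approval_transaction_metrics/       -> read-only list
# GET       /approval_transaction_metrics/{pk}/  -> read-only retrieve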
@@ -7,7 +7,7 @@ entity completeness, and system health.

 from drf_spectacular.utils import extend_schema
 from rest_framework import status
-from rest_framework.permissions import IsAdminUser
+from apps.core.permissions import IsAdminWithSecondFactor
 from rest_framework.response import Response
 from rest_framework.views import APIView

@@ -89,7 +89,7 @@ class DataCompletenessAPIView(APIView):
     companies, and ride models.
     """

-    permission_classes = [IsAdminUser]
+    permission_classes = [IsAdminWithSecondFactor]

     @extend_schema(
         tags=["Admin"],
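IsAdminWithSecondFactor itself is not shown in this diff; presumably it tightens IsAdminUser with a second-factor check. A purely hypothetical shape, for orientation only:

from rest_framework.permissions import BasePermission

class IsAdminWithSecondFactor(BasePermission):  # hypothetical sketch, not the project's class
    def has_permission(self, request, view):
        user = request.user
        # Assumed flag set by the auth flow after 2FA verification.
        return bool(user and user.is_authenticated and user.is_staff
                    and getattr(user, "has_verified_second_factor", False))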
backend/apps/core/api/alert_serializers.py (new file, 89 lines)
@@ -0,0 +1,89 @@
+"""
+Serializers for admin alert API endpoints.
+
+Provides serializers for SystemAlert, RateLimitAlert, and RateLimitAlertConfig models.
+"""
+
+from rest_framework import serializers
+
+from apps.core.models import RateLimitAlert, RateLimitAlertConfig, SystemAlert
+
+
+class SystemAlertSerializer(serializers.ModelSerializer):
+    """Serializer for system alerts."""
+
+    is_resolved = serializers.BooleanField(read_only=True)
+    resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
+
+    class Meta:
+        model = SystemAlert
+        fields = [
+            "id",
+            "alert_type",
+            "severity",
+            "message",
+            "metadata",
+            "resolved_at",
+            "resolved_by",
+            "resolved_by_username",
+            "created_at",
+            "is_resolved",
+        ]
+        read_only_fields = ["id", "created_at", "is_resolved", "resolved_by_username"]
+
+
+class SystemAlertResolveSerializer(serializers.Serializer):
+    """Serializer for resolving system alerts."""
+
+    notes = serializers.CharField(required=False, allow_blank=True)
+
+
+class RateLimitAlertConfigSerializer(serializers.ModelSerializer):
+    """Serializer for rate limit alert configurations."""
+
+    class Meta:
+        model = RateLimitAlertConfig
+        fields = [
+            "id",
+            "metric_type",
+            "threshold_value",
+            "time_window_ms",
+            "function_name",
+            "enabled",
+            "created_at",
+            "updated_at",
+        ]
+        read_only_fields = ["id", "created_at", "updated_at"]
+
+
+class RateLimitAlertSerializer(serializers.ModelSerializer):
+    """Serializer for rate limit alerts."""
+
+    is_resolved = serializers.BooleanField(read_only=True)
+    config_id = serializers.UUIDField(source="config.id", read_only=True)
+    resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
+
+    class Meta:
+        model = RateLimitAlert
+        fields = [
+            "id",
+            "config_id",
+            "metric_type",
+            "metric_value",
+            "threshold_value",
+            "time_window_ms",
+            "function_name",
+            "alert_message",
+            "resolved_at",
+            "resolved_by",
+            "resolved_by_username",
+            "created_at",
+            "is_resolved",
+        ]
+        read_only_fields = ["id", "created_at", "is_resolved", "config_id", "resolved_by_username"]
+
+
+class RateLimitAlertResolveSerializer(serializers.Serializer):
+    """Serializer for resolving rate limit alerts."""
+
+    notes = serializers.CharField(required=False, allow_blank=True)
backend/apps/core/api/alert_views.py (new file, 226 lines)
@@ -0,0 +1,226 @@
+"""
+ViewSets for admin alert API endpoints.
+
+Provides CRUD operations for SystemAlert, RateLimitAlert, and RateLimitAlertConfig.
+"""
+
+from django.utils import timezone
+from django_filters.rest_framework import DjangoFilterBackend
+from drf_spectacular.utils import extend_schema, extend_schema_view
+from rest_framework import status, viewsets
+from rest_framework.decorators import action
+from rest_framework.filters import OrderingFilter, SearchFilter
+from rest_framework.permissions import IsAdminUser
+from rest_framework.response import Response
+
+from apps.core.models import RateLimitAlert, RateLimitAlertConfig, SystemAlert
+
+from .alert_serializers import (
+    RateLimitAlertConfigSerializer,
+    RateLimitAlertResolveSerializer,
+    RateLimitAlertSerializer,
+    SystemAlertResolveSerializer,
+    SystemAlertSerializer,
+)
+
+
+@extend_schema_view(
+    list=extend_schema(
+        summary="List system alerts",
+        description="Get all system alerts, optionally filtered by severity or resolved status.",
+        tags=["Admin - Alerts"],
+    ),
+    retrieve=extend_schema(
+        summary="Get system alert",
+        description="Get details of a specific system alert.",
+        tags=["Admin - Alerts"],
+    ),
+    create=extend_schema(
+        summary="Create system alert",
+        description="Create a new system alert.",
+        tags=["Admin - Alerts"],
+    ),
+    update=extend_schema(
+        summary="Update system alert",
+        description="Update an existing system alert.",
+        tags=["Admin - Alerts"],
+    ),
+    partial_update=extend_schema(
+        summary="Partial update system alert",
+        description="Partially update an existing system alert.",
+        tags=["Admin - Alerts"],
+    ),
+    destroy=extend_schema(
+        summary="Delete system alert",
+        description="Delete a system alert.",
+        tags=["Admin - Alerts"],
+    ),
+)
+class SystemAlertViewSet(viewsets.ModelViewSet):
+    """
+    ViewSet for managing system alerts.
+
+    Provides CRUD operations plus a resolve action for marking alerts as resolved.
+    """
+
+    queryset = SystemAlert.objects.all()
+    serializer_class = SystemAlertSerializer
+    permission_classes = [IsAdminUser]
+    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
+    filterset_fields = ["severity", "alert_type"]
+    search_fields = ["message"]
+    ordering_fields = ["created_at", "severity"]
+    ordering = ["-created_at"]
+
+    def get_queryset(self):
+        queryset = super().get_queryset()
+
+        # Filter by resolved status
+        resolved = self.request.query_params.get("resolved")
+        if resolved is not None:
+            if resolved.lower() == "true":
+                queryset = queryset.exclude(resolved_at__isnull=True)
+            elif resolved.lower() == "false":
+                queryset = queryset.filter(resolved_at__isnull=True)
+
+        return queryset
+
+    @extend_schema(
+        summary="Resolve system alert",
+        description="Mark a system alert as resolved.",
+        request=SystemAlertResolveSerializer,
+        responses={200: SystemAlertSerializer},
+        tags=["Admin - Alerts"],
+    )
+    @action(detail=True, methods=["post"])
+    def resolve(self, request, pk=None):
+        """Mark an alert as resolved."""
+        alert = self.get_object()
+
+        if alert.resolved_at:
+            return Response(
+                {"detail": "Alert is already resolved"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        alert.resolved_at = timezone.now()
+        alert.resolved_by = request.user
+        alert.save()
+
+        serializer = self.get_serializer(alert)
+        return Response(serializer.data)
+
+
+@extend_schema_view(
+    list=extend_schema(
+        summary="List rate limit alert configs",
+        description="Get all rate limit alert configurations.",
+        tags=["Admin - Alerts"],
+    ),
+    retrieve=extend_schema(
+        summary="Get rate limit alert config",
+        description="Get details of a specific rate limit alert configuration.",
+        tags=["Admin - Alerts"],
+    ),
+    create=extend_schema(
+        summary="Create rate limit alert config",
+        description="Create a new rate limit alert configuration.",
+        tags=["Admin - Alerts"],
+    ),
+    update=extend_schema(
+        summary="Update rate limit alert config",
+        description="Update an existing rate limit alert configuration.",
+        tags=["Admin - Alerts"],
+    ),
+    partial_update=extend_schema(
+        summary="Partial update rate limit alert config",
+        description="Partially update an existing rate limit alert configuration.",
+        tags=["Admin - Alerts"],
+    ),
+    destroy=extend_schema(
+        summary="Delete rate limit alert config",
+        description="Delete a rate limit alert configuration.",
+        tags=["Admin - Alerts"],
+    ),
+)
+class RateLimitAlertConfigViewSet(viewsets.ModelViewSet):
+    """
+    ViewSet for managing rate limit alert configurations.
+
+    Provides CRUD operations for alert thresholds.
+    """
+
+    queryset = RateLimitAlertConfig.objects.all()
+    serializer_class = RateLimitAlertConfigSerializer
+    permission_classes = [IsAdminUser]
+    filter_backends = [DjangoFilterBackend, OrderingFilter]
+    filterset_fields = ["metric_type", "enabled"]
+    ordering_fields = ["created_at", "metric_type", "threshold_value"]
+    ordering = ["metric_type", "-created_at"]
+
+
+@extend_schema_view(
+    list=extend_schema(
+        summary="List rate limit alerts",
+        description="Get all rate limit alerts, optionally filtered by resolved status.",
+        tags=["Admin - Alerts"],
+    ),
+    retrieve=extend_schema(
+        summary="Get rate limit alert",
+        description="Get details of a specific rate limit alert.",
+        tags=["Admin - Alerts"],
+    ),
+)
+class RateLimitAlertViewSet(viewsets.ReadOnlyModelViewSet):
+    """
+    ViewSet for viewing rate limit alerts.
+
+    Provides read-only access and a resolve action.
+    """
+
+    queryset = RateLimitAlert.objects.select_related("config").all()
+    serializer_class = RateLimitAlertSerializer
+    permission_classes = [IsAdminUser]
+    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
+    filterset_fields = ["metric_type"]
+    search_fields = ["alert_message", "function_name"]
+    ordering_fields = ["created_at", "metric_value"]
+    ordering = ["-created_at"]
+
+    def get_queryset(self):
+        queryset = super().get_queryset()
+
+        # Filter by resolved status
+        resolved = self.request.query_params.get("resolved")
+        if resolved is not None:
+            if resolved.lower() == "true":
+                queryset = queryset.exclude(resolved_at__isnull=True)
+            elif resolved.lower() == "false":
+                queryset = queryset.filter(resolved_at__isnull=True)
+
+        return queryset
+
+    @extend_schema(
+        summary="Resolve rate limit alert",
+        description="Mark a rate limit alert as resolved.",
+        request=RateLimitAlertResolveSerializer,
+        responses={200: RateLimitAlertSerializer},
+        tags=["Admin - Alerts"],
+    )
+    @action(detail=True, methods=["post"])
+    def resolve(self, request, pk=None):
+        """Mark an alert as resolved."""
+        alert = self.get_object()
+
+        if alert.resolved_at:
+            return Response(
+                {"detail": "Alert is already resolved"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        alert.resolved_at = timezone.now()
+        alert.resolved_by = request.user
+        alert.save()
+
+        serializer = self.get_serializer(alert)
+        return Response(serializer.data)
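Driving the resolve action, sketched (these viewsets are not registered with a router in this diff, so the URL here is an assumption; admin_user and alert_id are assumed fixtures):

from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)
resp = client.post(f"/api/v1/system_alerts/{alert_id}/resolve/", {"notes": "fixed"}, format="json")
# First call: 200 with the serialized alert; a repeat call returns
# 400 {"detail": "Alert is already resolved"}.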
backend/apps/core/api/analytics_serializers.py (new file, 204 lines)
@@ -0,0 +1,204 @@
+"""
+Serializers for admin analytics endpoints.
+
+Provides serialization for RequestMetadata, RequestBreadcrumb,
+ApprovalTransactionMetric, and ErrorSummary aggregation.
+"""
+
+from rest_framework import serializers
+
+from apps.core.models import (
+    ApprovalTransactionMetric,
+    RequestBreadcrumb,
+    RequestMetadata,
+)
+
+
+class RequestBreadcrumbSerializer(serializers.ModelSerializer):
+    """Serializer for request breadcrumb data."""
+
+    class Meta:
+        model = RequestBreadcrumb
+        fields = [
+            "timestamp",
+            "category",
+            "message",
+            "level",
+            "sequence_order",
+        ]
+
+
+class RequestMetadataSerializer(serializers.ModelSerializer):
+    """
+    Serializer for request metadata with nested breadcrumbs.
+
+    Supports the expand=request_breadcrumbs query parameter
+    to include breadcrumb data in the response.
+    """
+
+    request_breadcrumbs = RequestBreadcrumbSerializer(many=True, read_only=True)
+    user_id = serializers.CharField(read_only=True, allow_null=True)
+
+    class Meta:
+        model = RequestMetadata
+        fields = [
+            "id",
+            "request_id",
+            "trace_id",
+            "session_id",
+            "parent_request_id",
+            "action",
+            "method",
+            "endpoint",
+            "request_method",
+            "request_path",
+            "affected_route",
+            "http_status",
+            "status_code",
+            "response_status",
+            "success",
+            "started_at",
+            "completed_at",
+            "duration_ms",
+            "response_time_ms",
+            "error_type",
+            "error_message",
+            "error_stack",
+            "error_code",
+            "error_origin",
+            "component_stack",
+            "severity",
+            "is_resolved",
+            "resolved_at",
+            "resolved_by",
+            "resolution_notes",
+            "retry_count",
+            "retry_attempts",
+            "user_id",
+            "user_agent",
+            "ip_address_hash",
+            "client_version",
+            "timezone",
+            "referrer",
+            "entity_type",
+            "entity_id",
+            "created_at",
+            "request_breadcrumbs",
+        ]
+        read_only_fields = ["id", "created_at"]
+
+    def to_representation(self, instance):
+        """Conditionally include breadcrumbs based on expand parameter."""
+        data = super().to_representation(instance)
+        request = self.context.get("request")
+
+        # Only include breadcrumbs if explicitly expanded
+        if request:
+            expand = request.query_params.get("expand", "")
+            if "request_breadcrumbs" not in expand:
+                data.pop("request_breadcrumbs", None)
+
+        return data
+
+
+class RequestMetadataCreateSerializer(serializers.ModelSerializer):
+    """Serializer for creating request metadata (log_request_metadata RPC)."""
+
+    breadcrumbs = RequestBreadcrumbSerializer(many=True, required=False)
+
+    class Meta:
+        model = RequestMetadata
+        fields = [
+            "request_id",
+            "trace_id",
+            "session_id",
+            "parent_request_id",
+            "action",
+            "method",
+            "endpoint",
+            "request_method",
+            "request_path",
+            "affected_route",
+            "http_status",
+            "status_code",
+            "response_status",
+            "success",
+            "completed_at",
+            "duration_ms",
+            "response_time_ms",
+            "error_type",
+            "error_message",
+            "error_stack",
+            "error_code",
+            "error_origin",
+            "component_stack",
+            "severity",
+            "retry_count",
+            "retry_attempts",
+            "user_agent",
+            "ip_address_hash",
+            "client_version",
+            "timezone",
+            "referrer",
+            "entity_type",
+            "entity_id",
+            "breadcrumbs",
+        ]
+
+    def create(self, validated_data):
+        breadcrumbs_data = validated_data.pop("breadcrumbs", [])
+        request_metadata = RequestMetadata.objects.create(**validated_data)
+
+        for i, breadcrumb_data in enumerate(breadcrumbs_data):
+            RequestBreadcrumb.objects.create(
+                request_metadata=request_metadata,
+                sequence_order=breadcrumb_data.get("sequence_order", i),
+                **{k: v for k, v in breadcrumb_data.items() if k != "sequence_order"}
+            )
+
+        return request_metadata
+
+
+class RequestMetadataResolveSerializer(serializers.Serializer):
+    """Serializer for resolving request metadata errors."""
+
+    resolution_notes = serializers.CharField(required=False, allow_blank=True)
+
+
+class ApprovalTransactionMetricSerializer(serializers.ModelSerializer):
+    """Serializer for approval transaction metrics."""
+
+    class Meta:
+        model = ApprovalTransactionMetric
+        fields = [
+            "id",
+            "submission_id",
+            "moderator_id",
+            "submitter_id",
+            "request_id",
+            "success",
+            "duration_ms",
+            "items_count",
+            "rollback_triggered",
+            "error_code",
+            "error_message",
+            "error_details",
+            "created_at",
+        ]
+        read_only_fields = ["id", "created_at"]
+
+
+class ErrorSummarySerializer(serializers.Serializer):
+    """
+    Read-only serializer for error summary aggregation.
+
+    Aggregates error data from RequestMetadata for dashboard display.
+    """
+
+    date = serializers.DateField(read_only=True)
+    error_type = serializers.CharField(read_only=True)
+    severity = serializers.CharField(read_only=True)
+    error_count = serializers.IntegerField(read_only=True)
+    resolved_count = serializers.IntegerField(read_only=True)
+    affected_users = serializers.IntegerField(read_only=True)
+    avg_resolution_minutes = serializers.FloatField(read_only=True, allow_null=True)
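Because to_representation() pops the nested key unless requested, breadcrumbs are opt-in; sketched (mount point assumed):

# GET /request_metadata/?expand=request_breadcrumbs
# -> each record carries its nested "request_breadcrumbs" array;
#    without the parameter the key is stripped from the payload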
backend/apps/core/api/analytics_views.py (new file, 184 lines)
@@ -0,0 +1,184 @@
+"""
+ViewSets for admin analytics endpoints.
+
+Provides read/write access to RequestMetadata, ApprovalTransactionMetric,
+and a read-only aggregation endpoint for ErrorSummary.
+"""
+
+from datetime import timedelta
+
+from django.db.models import Avg, Count, F, Q
+from django.db.models.functions import TruncDate
+from django.utils import timezone
+from django_filters import rest_framework as filters
+from rest_framework import status, viewsets
+from rest_framework.decorators import action
+from rest_framework.permissions import IsAdminUser, IsAuthenticated
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from apps.core.models import ApprovalTransactionMetric, RequestMetadata
+
+from .analytics_serializers import (
+    ApprovalTransactionMetricSerializer,
+    ErrorSummarySerializer,
+    RequestMetadataCreateSerializer,
+    RequestMetadataResolveSerializer,
+    RequestMetadataSerializer,
+)
+
+
+class RequestMetadataFilter(filters.FilterSet):
+    """Filter for RequestMetadata queries."""
+
+    error_type__ne = filters.CharFilter(field_name="error_type", method="filter_not_equal")
+    created_at__gte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="gte")
+    created_at__lte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="lte")
+
+    class Meta:
+        model = RequestMetadata
+        fields = {
+            "error_type": ["exact", "isnull"],
+            "severity": ["exact"],
+            "is_resolved": ["exact"],
+            "success": ["exact"],
+            "http_status": ["exact", "gte", "lte"],
+            "user": ["exact"],
+            "endpoint": ["exact", "icontains"],
+        }
+
+    def filter_not_equal(self, queryset, name, value):
+        """Handle the error_type__ne filter for non-null error types."""
+        # The frontend sends a JSON object for 'not null' filter
+        # We interpret this as 'error_type is not null'
+        if value:
+            return queryset.exclude(error_type__isnull=True)
+        return queryset
+
+
+class RequestMetadataViewSet(viewsets.ModelViewSet):
+    """
+    ViewSet for request metadata CRUD operations.
+
+    Supports filtering by error_type, severity, date range, etc.
+    Use the expand=request_breadcrumbs query parameter to include breadcrumbs.
+    """
+
+    queryset = RequestMetadata.objects.all()
+    permission_classes = [IsAuthenticated]
+    filterset_class = RequestMetadataFilter
+    ordering_fields = ["created_at", "severity", "error_type"]
+    ordering = ["-created_at"]
+
+    def get_serializer_class(self):
+        if self.action == "create":
+            return RequestMetadataCreateSerializer
+        return RequestMetadataSerializer
+
+    def get_queryset(self):
+        """Optimize queryset with prefetch for breadcrumbs if expanded."""
+        queryset = super().get_queryset()
+        expand = self.request.query_params.get("expand", "")
+
+        if "request_breadcrumbs" in expand:
+            queryset = queryset.prefetch_related("request_breadcrumbs")
+
+        return queryset
+
+    def perform_create(self, serializer):
+        """Associate request metadata with current user if authenticated."""
+        user = self.request.user if self.request.user.is_authenticated else None
+        serializer.save(user=user)
+
+    @action(detail=True, methods=["post"], permission_classes=[IsAdminUser])
+    def resolve(self, request, pk=None):
+        """Mark a request metadata entry as resolved."""
+        instance = self.get_object()
+        serializer = RequestMetadataResolveSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+
+        instance.is_resolved = True
+        instance.resolved_at = timezone.now()
+        instance.resolved_by = request.user
+        instance.resolution_notes = serializer.validated_data.get("resolution_notes", "")
+        instance.save(update_fields=["is_resolved", "resolved_at", "resolved_by", "resolution_notes"])
+
+        return Response(RequestMetadataSerializer(instance).data)
+
+
+class ApprovalTransactionMetricFilter(filters.FilterSet):
+    """Filter for ApprovalTransactionMetric queries."""
+
+    created_at__gte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="gte")
+    created_at__lte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="lte")
+
+    class Meta:
+        model = ApprovalTransactionMetric
+        fields = {
+            "success": ["exact"],
+            "moderator_id": ["exact"],
+            "submitter_id": ["exact"],
+            "submission_id": ["exact"],
+        }
+
+
+class ApprovalTransactionMetricViewSet(viewsets.ReadOnlyModelViewSet):
+    """
+    Read-only ViewSet for approval transaction metrics.
+
+    Provides analytics data about moderation approval operations.
+    """
+
+    queryset = ApprovalTransactionMetric.objects.all()
+    serializer_class = ApprovalTransactionMetricSerializer
+    permission_classes = [IsAuthenticated]
+    filterset_class = ApprovalTransactionMetricFilter
+    ordering_fields = ["created_at", "duration_ms", "success"]
+    ordering = ["-created_at"]
+
+
+class ErrorSummaryView(APIView):
+    """
+    Aggregation endpoint for error summary statistics.
+
+    Returns daily error counts grouped by error_type and severity,
+    similar to the Supabase error_summary view.
+    """
+
+    permission_classes = [IsAuthenticated]
+
+    def get(self, request):
+        """Get aggregated error summary data."""
+        # Default to last 30 days
+        days = int(request.query_params.get("days", 30))
+        since = timezone.now() - timedelta(days=days)
+
+        # Aggregate error data by date, error_type, and severity
+        summary = (
+            RequestMetadata.objects.filter(
+                created_at__gte=since,
+                error_type__isnull=False,
+            )
+            .annotate(date=TruncDate("created_at"))
+            .values("date", "error_type", "severity")
+            .annotate(
+                error_count=Count("id"),
+                resolved_count=Count("id", filter=Q(is_resolved=True)),
+                affected_users=Count("user", distinct=True),
+                avg_resolution_minutes=Avg(
+                    (F("resolved_at") - F("created_at")),
+                    filter=Q(is_resolved=True, resolved_at__isnull=False),
+                ),
+            )
+            .order_by("-date", "-error_count")
+        )
+
+        # Convert timedelta to minutes for avg_resolution_minutes
+        results = []
+        for item in summary:
+            if item["avg_resolution_minutes"]:
+                item["avg_resolution_minutes"] = item["avg_resolution_minutes"].total_seconds() / 60
|
results.append(item)
|
||||||
|
|
||||||
|
serializer = ErrorSummarySerializer(results, many=True)
|
||||||
|
return Response(serializer.data)
|
||||||
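
As a quick illustration of the endpoints above (not part of the changeset), the views could be exercised with DRF's test client; the /api/analytics/... paths and the admin_user fixture here are assumptions, since the URL routing is not shown in this diff:

from rest_framework.test import APIClient

def fetch_error_dashboard_data(admin_user):
    client = APIClient()
    client.force_authenticate(user=admin_user)

    # Unresolved errors with breadcrumbs expanded, newest first
    errors = client.get(
        "/api/analytics/request-metadata/",
        {"is_resolved": "false", "expand": "request_breadcrumbs"},
    )

    # Daily aggregates for the last 7 days from ErrorSummaryView
    summary = client.get("/api/analytics/error-summary/", {"days": 7})
    return errors.json(), summary.json()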
backend/apps/core/api/incident_serializers.py (new file, 162 lines)
@@ -0,0 +1,162 @@
"""
Serializers for Incident management API endpoints.
"""

from rest_framework import serializers

from apps.core.models import Incident, IncidentAlert


class IncidentAlertSerializer(serializers.ModelSerializer):
    """Serializer for linked alerts within an incident."""

    class Meta:
        model = IncidentAlert
        fields = [
            "id",
            "alert_source",
            "alert_id",
            "created_at",
        ]
        read_only_fields = ["id", "created_at"]


class IncidentSerializer(serializers.ModelSerializer):
    """Serializer for Incident model."""

    acknowledged_by_username = serializers.CharField(
        source="acknowledged_by.username", read_only=True, allow_null=True
    )
    resolved_by_username = serializers.CharField(
        source="resolved_by.username", read_only=True, allow_null=True
    )
    status_display = serializers.CharField(source="get_status_display", read_only=True)
    severity_display = serializers.CharField(source="get_severity_display", read_only=True)
    linked_alerts = IncidentAlertSerializer(many=True, read_only=True)

    class Meta:
        model = Incident
        fields = [
            "id",
            "incident_number",
            "title",
            "description",
            "severity",
            "severity_display",
            "status",
            "status_display",
            "detected_at",
            "acknowledged_at",
            "acknowledged_by",
            "acknowledged_by_username",
            "resolved_at",
            "resolved_by",
            "resolved_by_username",
            "resolution_notes",
            "alert_count",
            "linked_alerts",
            "created_at",
            "updated_at",
        ]
        read_only_fields = [
            "id",
            "incident_number",
            "detected_at",
            "acknowledged_at",
            "acknowledged_by",
            "resolved_at",
            "resolved_by",
            "alert_count",
            "created_at",
            "updated_at",
        ]


class IncidentCreateSerializer(serializers.ModelSerializer):
    """Serializer for creating incidents with linked alerts."""

    alert_ids = serializers.ListField(
        child=serializers.UUIDField(),
        write_only=True,
        required=False,
        help_text="List of alert IDs to link to this incident",
    )
    alert_sources = serializers.ListField(
        child=serializers.ChoiceField(choices=["system", "rate_limit"]),
        write_only=True,
        required=False,
        help_text="Source types for each alert (must match alert_ids length)",
    )

    class Meta:
        model = Incident
        fields = [
            "title",
            "description",
            "severity",
            "alert_ids",
            "alert_sources",
        ]

    def validate(self, data):
        alert_ids = data.get("alert_ids", [])
        alert_sources = data.get("alert_sources", [])

        if alert_ids and len(alert_ids) != len(alert_sources):
            raise serializers.ValidationError(
                {"alert_sources": "Must provide one source per alert_id"}
            )

        return data

    def create(self, validated_data):
        alert_ids = validated_data.pop("alert_ids", [])
        alert_sources = validated_data.pop("alert_sources", [])

        incident = Incident.objects.create(**validated_data)

        # Create linked alerts
        for alert_id, source in zip(alert_ids, alert_sources):
            IncidentAlert.objects.create(
                incident=incident,
                alert_id=alert_id,
                alert_source=source,
            )

        return incident


class IncidentAcknowledgeSerializer(serializers.Serializer):
    """Serializer for acknowledging an incident."""

    pass  # No additional data needed


class IncidentResolveSerializer(serializers.Serializer):
    """Serializer for resolving an incident."""

    resolution_notes = serializers.CharField(required=False, allow_blank=True)
    resolve_alerts = serializers.BooleanField(
        default=True,
        help_text="Whether to also resolve all linked alerts",
    )


class LinkAlertsSerializer(serializers.Serializer):
    """Serializer for linking alerts to an incident."""

    alert_ids = serializers.ListField(
        child=serializers.UUIDField(),
        help_text="List of alert IDs to link",
    )
    alert_sources = serializers.ListField(
        child=serializers.ChoiceField(choices=["system", "rate_limit"]),
        help_text="Source types for each alert",
    )

    def validate(self, data):
        if len(data["alert_ids"]) != len(data["alert_sources"]):
            raise serializers.ValidationError(
                {"alert_sources": "Must provide one source per alert_id"}
            )
        return data
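
A minimal sketch of how the paired-list validation in IncidentCreateSerializer behaves (illustrative only; the UUIDs are made up and the import path assumes the backend/apps layout shown in this diff):

from apps.core.api.incident_serializers import IncidentCreateSerializer

payload = {
    "title": "Spike in upload timeouts",
    "severity": "high",
    "alert_ids": [
        "11111111-1111-1111-1111-111111111111",
        "22222222-2222-2222-2222-222222222222",
    ],
    "alert_sources": ["system"],  # 2 ids but 1 source -> rejected
}
serializer = IncidentCreateSerializer(data=payload)
assert not serializer.is_valid()
assert "alert_sources" in serializer.errors

payload["alert_sources"] = ["system", "rate_limit"]
assert IncidentCreateSerializer(data=payload).is_valid()
# .save() would then create the Incident plus two IncidentAlert links.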
backend/apps/core/api/incident_views.py (new file, 201 lines)
@@ -0,0 +1,201 @@
"""
ViewSets for Incident management API endpoints.
"""

from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import extend_schema, extend_schema_view
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response

from apps.core.models import Incident, IncidentAlert, RateLimitAlert, SystemAlert

from .incident_serializers import (
    IncidentAcknowledgeSerializer,
    IncidentAlertSerializer,
    IncidentCreateSerializer,
    IncidentResolveSerializer,
    IncidentSerializer,
    LinkAlertsSerializer,
)


@extend_schema_view(
    list=extend_schema(
        summary="List incidents",
        description="Get all incidents, optionally filtered by status or severity.",
        tags=["Admin - Incidents"],
    ),
    retrieve=extend_schema(
        summary="Get incident",
        description="Get details of a specific incident including linked alerts.",
        tags=["Admin - Incidents"],
    ),
    create=extend_schema(
        summary="Create incident",
        description="Create a new incident and optionally link alerts.",
        tags=["Admin - Incidents"],
    ),
    update=extend_schema(
        summary="Update incident",
        description="Update an existing incident.",
        tags=["Admin - Incidents"],
    ),
    partial_update=extend_schema(
        summary="Partial update incident",
        description="Partially update an existing incident.",
        tags=["Admin - Incidents"],
    ),
    destroy=extend_schema(
        summary="Delete incident",
        description="Delete an incident.",
        tags=["Admin - Incidents"],
    ),
)
class IncidentViewSet(viewsets.ModelViewSet):
    """
    ViewSet for managing incidents.

    Provides CRUD operations plus acknowledge, resolve, and alert linking actions.
    """

    queryset = Incident.objects.prefetch_related("linked_alerts").all()
    permission_classes = [IsAdminUser]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_fields = ["status", "severity"]
    search_fields = ["title", "description", "incident_number"]
    ordering_fields = ["detected_at", "severity", "status", "alert_count"]
    ordering = ["-detected_at"]

    def get_serializer_class(self):
        if self.action == "create":
            return IncidentCreateSerializer
        if self.action == "acknowledge":
            return IncidentAcknowledgeSerializer
        if self.action == "resolve":
            return IncidentResolveSerializer
        if self.action == "link_alerts":
            return LinkAlertsSerializer
        if self.action == "alerts":
            return IncidentAlertSerializer
        return IncidentSerializer

    @extend_schema(
        summary="Acknowledge incident",
        description="Mark an incident as being investigated.",
        request=IncidentAcknowledgeSerializer,
        responses={200: IncidentSerializer},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["post"])
    def acknowledge(self, request, pk=None):
        """Mark an incident as being investigated."""
        incident = self.get_object()

        if incident.status != Incident.Status.OPEN:
            return Response(
                {"detail": f"Cannot acknowledge incident in '{incident.status}' status"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        incident.status = Incident.Status.INVESTIGATING
        incident.acknowledged_at = timezone.now()
        incident.acknowledged_by = request.user
        incident.save()

        return Response(IncidentSerializer(incident).data)

    @extend_schema(
        summary="Resolve incident",
        description="Mark an incident as resolved, optionally resolving all linked alerts.",
        request=IncidentResolveSerializer,
        responses={200: IncidentSerializer},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["post"])
    def resolve(self, request, pk=None):
        """Mark an incident as resolved."""
        incident = self.get_object()

        if incident.status in (Incident.Status.RESOLVED, Incident.Status.CLOSED):
            return Response(
                {"detail": "Incident is already resolved or closed"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        serializer = IncidentResolveSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        incident.status = Incident.Status.RESOLVED
        incident.resolved_at = timezone.now()
        incident.resolved_by = request.user
        incident.resolution_notes = serializer.validated_data.get("resolution_notes", "")
        incident.save()

        # Optionally resolve all linked alerts
        if serializer.validated_data.get("resolve_alerts", True):
            now = timezone.now()
            for link in incident.linked_alerts.all():
                if link.alert_source == "system":
                    SystemAlert.objects.filter(
                        id=link.alert_id, resolved_at__isnull=True
                    ).update(resolved_at=now, resolved_by=request.user)
                elif link.alert_source == "rate_limit":
                    RateLimitAlert.objects.filter(
                        id=link.alert_id, resolved_at__isnull=True
                    ).update(resolved_at=now, resolved_by=request.user)

        return Response(IncidentSerializer(incident).data)

    @extend_schema(
        summary="Get linked alerts",
        description="Get all alerts linked to this incident.",
        responses={200: IncidentAlertSerializer(many=True)},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["get"])
    def alerts(self, request, pk=None):
        """Get all alerts linked to this incident."""
        incident = self.get_object()
        alerts = incident.linked_alerts.all()
        serializer = IncidentAlertSerializer(alerts, many=True)
        return Response(serializer.data)

    @extend_schema(
        summary="Link alerts to incident",
        description="Link additional alerts to an existing incident.",
        request=LinkAlertsSerializer,
        responses={200: IncidentSerializer},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["post"], url_path="link-alerts")
    def link_alerts(self, request, pk=None):
        """Link additional alerts to an incident."""
        incident = self.get_object()

        serializer = LinkAlertsSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        alert_ids = serializer.validated_data["alert_ids"]
        alert_sources = serializer.validated_data["alert_sources"]

        created = 0
        for alert_id, source in zip(alert_ids, alert_sources):
            _, was_created = IncidentAlert.objects.get_or_create(
                incident=incident,
                alert_id=alert_id,
                alert_source=source,
            )
            if was_created:
                created += 1

        # Refresh to get updated alert_count
        incident.refresh_from_db()

        return Response({
            "detail": f"Linked {created} new alerts to incident",
            "incident": IncidentSerializer(incident).data,
        })
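
Putting the custom actions together, a typical incident lifecycle against this ViewSet might look like the following (a sketch: the /api/incidents/ router prefix and the admin_user fixture are assumptions, not shown in this diff):

from rest_framework.test import APIClient

def run_incident_lifecycle(admin_user, incident_id):
    client = APIClient()
    client.force_authenticate(user=admin_user)  # IsAdminUser is enforced

    # open -> investigating
    client.post(f"/api/incidents/{incident_id}/acknowledge/")

    # attach another alert after the fact (note the url_path="link-alerts")
    client.post(
        f"/api/incidents/{incident_id}/link-alerts/",
        {
            "alert_ids": ["33333333-3333-3333-3333-333333333333"],
            "alert_sources": ["rate_limit"],
        },
        format="json",
    )

    # investigating -> resolved, cascading resolution to linked alerts
    return client.post(
        f"/api/incidents/{incident_id}/resolve/",
        {"resolution_notes": "Scaled workers; queue drained", "resolve_alerts": True},
        format="json",
    )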
backend/apps/core/migrations/0006_add_alert_models.py (new file, 76 lines)
@@ -0,0 +1,76 @@
# Generated by Django 5.2.9 on 2026-01-06 17:00

import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0005_add_application_error'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='RateLimitAlertConfig',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('metric_type', models.CharField(choices=[('block_rate', 'Block Rate'), ('total_requests', 'Total Requests'), ('unique_ips', 'Unique IPs'), ('function_specific', 'Function Specific')], db_index=True, help_text='Type of metric to monitor', max_length=50)),
                ('threshold_value', models.FloatField(help_text='Threshold value that triggers alert')),
                ('time_window_ms', models.IntegerField(help_text='Time window in milliseconds for measurement')),
                ('function_name', models.CharField(blank=True, help_text='Specific function to monitor (for function_specific metric type)', max_length=100, null=True)),
                ('enabled', models.BooleanField(db_index=True, default=True, help_text='Whether this config is active')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'Rate Limit Alert Config',
                'verbose_name_plural': 'Rate Limit Alert Configs',
                'ordering': ['metric_type', '-created_at'],
            },
        ),
        migrations.CreateModel(
            name='RateLimitAlert',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('metric_type', models.CharField(help_text='Type of metric', max_length=50)),
                ('metric_value', models.FloatField(help_text='Actual value that triggered the alert')),
                ('threshold_value', models.FloatField(help_text='Threshold that was exceeded')),
                ('time_window_ms', models.IntegerField(help_text='Time window of measurement')),
                ('function_name', models.CharField(blank=True, help_text='Function name if applicable', max_length=100, null=True)),
                ('alert_message', models.TextField(help_text='Descriptive alert message')),
                ('resolved_at', models.DateTimeField(blank=True, db_index=True, help_text='When this alert was resolved', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='Admin who resolved this alert', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_rate_limit_alerts', to=settings.AUTH_USER_MODEL)),
                ('config', models.ForeignKey(help_text='Configuration that triggered this alert', on_delete=django.db.models.deletion.CASCADE, related_name='alerts', to='core.ratelimitalertconfig')),
            ],
            options={
                'verbose_name': 'Rate Limit Alert',
                'verbose_name_plural': 'Rate Limit Alerts',
                'ordering': ['-created_at'],
                'indexes': [models.Index(fields=['metric_type', 'created_at'], name='core_rateli_metric__6fd63e_idx'), models.Index(fields=['resolved_at', 'created_at'], name='core_rateli_resolve_98c143_idx')],
            },
        ),
        migrations.CreateModel(
            name='SystemAlert',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('alert_type', models.CharField(choices=[('orphaned_images', 'Orphaned Images'), ('stale_submissions', 'Stale Submissions'), ('circular_dependency', 'Circular Dependency'), ('validation_error', 'Validation Error'), ('ban_attempt', 'Ban Attempt'), ('upload_timeout', 'Upload Timeout'), ('high_error_rate', 'High Error Rate'), ('database_connection', 'Database Connection'), ('memory_usage', 'Memory Usage'), ('queue_backup', 'Queue Backup')], db_index=True, help_text='Type of system alert', max_length=50)),
                ('severity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, help_text='Alert severity level', max_length=20)),
                ('message', models.TextField(help_text='Human-readable alert message')),
                ('metadata', models.JSONField(blank=True, help_text='Additional context data for this alert', null=True)),
                ('resolved_at', models.DateTimeField(blank=True, db_index=True, help_text='When this alert was resolved', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='Admin who resolved this alert', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_system_alerts', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'System Alert',
                'verbose_name_plural': 'System Alerts',
                'ordering': ['-created_at'],
                'indexes': [models.Index(fields=['severity', 'created_at'], name='core_system_severit_bd3efd_idx'), models.Index(fields=['alert_type', 'created_at'], name='core_system_alert_t_10942e_idx'), models.Index(fields=['resolved_at', 'created_at'], name='core_system_resolve_9da33f_idx')],
            },
        ),
    ]
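
The migration above is auto-generated from the models added later in this diff; to review the exact DDL (tables, FKs, and the named indexes) before applying it, Django's sqlmigrate command prints the SQL without running it:

python manage.py sqlmigrate core 0006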
backend/apps/core/migrations/0007_add_incident_and_report_models.py (new file, 72 lines)
@@ -0,0 +1,72 @@
# Generated by Django 5.2.9 on 2026-01-06 17:43

import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0006_add_alert_models'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Incident',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('incident_number', models.CharField(db_index=True, help_text='Auto-generated incident number (INC-YYYYMMDD-XXXX)', max_length=20, unique=True)),
                ('title', models.CharField(help_text='Brief description of the incident', max_length=255)),
                ('description', models.TextField(blank=True, help_text='Detailed description', null=True)),
                ('severity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, help_text='Incident severity level', max_length=20)),
                ('status', models.CharField(choices=[('open', 'Open'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('closed', 'Closed')], db_index=True, default='open', help_text='Current incident status', max_length=20)),
                ('detected_at', models.DateTimeField(auto_now_add=True, help_text='When the incident was detected')),
                ('acknowledged_at', models.DateTimeField(blank=True, help_text='When someone started investigating', null=True)),
                ('resolved_at', models.DateTimeField(blank=True, help_text='When the incident was resolved', null=True)),
                ('resolution_notes', models.TextField(blank=True, help_text='Notes about the resolution', null=True)),
                ('alert_count', models.PositiveIntegerField(default=0, help_text='Number of linked alerts')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('acknowledged_by', models.ForeignKey(blank=True, help_text='User who acknowledged the incident', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='acknowledged_incidents', to=settings.AUTH_USER_MODEL)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='User who resolved the incident', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_incidents', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Incident',
                'verbose_name_plural': 'Incidents',
                'ordering': ['-detected_at'],
            },
        ),
        migrations.CreateModel(
            name='IncidentAlert',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('alert_source', models.CharField(choices=[('system', 'System Alert'), ('rate_limit', 'Rate Limit Alert')], help_text='Source type of the alert', max_length=20)),
                ('alert_id', models.UUIDField(help_text='ID of the linked alert')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('incident', models.ForeignKey(help_text='The incident this alert is linked to', on_delete=django.db.models.deletion.CASCADE, related_name='linked_alerts', to='core.incident')),
            ],
            options={
                'verbose_name': 'Incident Alert',
                'verbose_name_plural': 'Incident Alerts',
            },
        ),
        migrations.AddIndex(
            model_name='incident',
            index=models.Index(fields=['status', 'detected_at'], name='core_incide_status_c17ea4_idx'),
        ),
        migrations.AddIndex(
            model_name='incident',
            index=models.Index(fields=['severity', 'detected_at'], name='core_incide_severit_24b148_idx'),
        ),
        migrations.AddIndex(
            model_name='incidentalert',
            index=models.Index(fields=['alert_source', 'alert_id'], name='core_incide_alert_s_9e655c_idx'),
        ),
        migrations.AlterUniqueTogether(
            name='incidentalert',
            unique_together={('incident', 'alert_source', 'alert_id')},
        ),
    ]
backend/apps/core/migrations/0008_add_analytics_models.py (new file, 335 lines)
@@ -0,0 +1,335 @@
# Generated by Django 5.1.6 on 2026-01-06 18:23

import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("core", "0007_add_incident_and_report_models"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.RemoveField(
            model_name="pageviewevent",
            name="pgh_obj",
        ),
        migrations.RemoveField(
            model_name="pageviewevent",
            name="content_type",
        ),
        migrations.RemoveField(
            model_name="pageviewevent",
            name="pgh_context",
        ),
        migrations.CreateModel(
            name="ApprovalTransactionMetric",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                (
                    "submission_id",
                    models.CharField(db_index=True, help_text="ID of the content submission", max_length=255),
                ),
                (
                    "moderator_id",
                    models.CharField(
                        db_index=True, help_text="ID of the moderator who processed the submission", max_length=255
                    ),
                ),
                (
                    "submitter_id",
                    models.CharField(
                        db_index=True, help_text="ID of the user who submitted the content", max_length=255
                    ),
                ),
                (
                    "request_id",
                    models.CharField(
                        blank=True, db_index=True, help_text="Correlation request ID", max_length=255, null=True
                    ),
                ),
                ("success", models.BooleanField(db_index=True, help_text="Whether the approval was successful")),
                (
                    "duration_ms",
                    models.PositiveIntegerField(blank=True, help_text="Processing duration in milliseconds", null=True),
                ),
                ("items_count", models.PositiveIntegerField(default=1, help_text="Number of items processed")),
                (
                    "rollback_triggered",
                    models.BooleanField(default=False, help_text="Whether a rollback was triggered"),
                ),
                (
                    "error_code",
                    models.CharField(blank=True, help_text="Error code if failed", max_length=50, null=True),
                ),
                ("error_message", models.TextField(blank=True, help_text="Error message if failed", null=True)),
                ("error_details", models.TextField(blank=True, help_text="Detailed error information", null=True)),
                (
                    "created_at",
                    models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this metric was recorded"),
                ),
            ],
            options={
                "verbose_name": "Approval Transaction Metric",
                "verbose_name_plural": "Approval Transaction Metrics",
                "ordering": ["-created_at"],
                "indexes": [
                    models.Index(fields=["success", "created_at"], name="core_approv_success_9c326b_idx"),
                    models.Index(fields=["moderator_id", "created_at"], name="core_approv_moderat_ec41ba_idx"),
                ],
            },
        ),
        migrations.CreateModel(
            name="RequestMetadata",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                (
                    "request_id",
                    models.CharField(
                        db_index=True,
                        help_text="Unique request identifier for correlation",
                        max_length=255,
                        unique=True,
                    ),
                ),
                (
                    "trace_id",
                    models.CharField(
                        blank=True, db_index=True, help_text="Distributed tracing ID", max_length=255, null=True
                    ),
                ),
                (
                    "session_id",
                    models.CharField(
                        blank=True, db_index=True, help_text="User session identifier", max_length=255, null=True
                    ),
                ),
                (
                    "parent_request_id",
                    models.CharField(
                        blank=True, help_text="Parent request ID for nested requests", max_length=255, null=True
                    ),
                ),
                (
                    "action",
                    models.CharField(
                        blank=True, help_text="Action/operation being performed", max_length=255, null=True
                    ),
                ),
                (
                    "method",
                    models.CharField(blank=True, help_text="HTTP method (GET, POST, etc.)", max_length=10, null=True),
                ),
                (
                    "endpoint",
                    models.CharField(
                        blank=True, db_index=True, help_text="API endpoint or URL path", max_length=500, null=True
                    ),
                ),
                (
                    "request_method",
                    models.CharField(blank=True, help_text="HTTP request method", max_length=10, null=True),
                ),
                ("request_path", models.CharField(blank=True, help_text="Request URL path", max_length=500, null=True)),
                (
                    "affected_route",
                    models.CharField(blank=True, help_text="Frontend route affected", max_length=255, null=True),
                ),
                (
                    "http_status",
                    models.PositiveIntegerField(blank=True, db_index=True, help_text="HTTP status code", null=True),
                ),
                (
                    "status_code",
                    models.PositiveIntegerField(blank=True, help_text="Status code (alias for http_status)", null=True),
                ),
                (
                    "response_status",
                    models.PositiveIntegerField(blank=True, help_text="Response status code", null=True),
                ),
                (
                    "success",
                    models.BooleanField(
                        blank=True, db_index=True, help_text="Whether the request was successful", null=True
                    ),
                ),
                ("started_at", models.DateTimeField(auto_now_add=True, help_text="When the request started")),
                ("completed_at", models.DateTimeField(blank=True, help_text="When the request completed", null=True)),
                (
                    "duration_ms",
                    models.PositiveIntegerField(blank=True, help_text="Request duration in milliseconds", null=True),
                ),
                (
                    "response_time_ms",
                    models.PositiveIntegerField(blank=True, help_text="Response time in milliseconds", null=True),
                ),
                (
                    "error_type",
                    models.CharField(
                        blank=True, db_index=True, help_text="Type/class of error", max_length=100, null=True
                    ),
                ),
                ("error_message", models.TextField(blank=True, help_text="Error message", null=True)),
                ("error_stack", models.TextField(blank=True, help_text="Error stack trace", null=True)),
                (
                    "error_code",
                    models.CharField(
                        blank=True, db_index=True, help_text="Application error code", max_length=50, null=True
                    ),
                ),
                (
                    "error_origin",
                    models.CharField(blank=True, help_text="Where the error originated", max_length=100, null=True),
                ),
                ("component_stack", models.TextField(blank=True, help_text="React component stack trace", null=True)),
                (
                    "severity",
                    models.CharField(
                        choices=[
                            ("debug", "Debug"),
                            ("info", "Info"),
                            ("warning", "Warning"),
                            ("error", "Error"),
                            ("critical", "Critical"),
                        ],
                        db_index=True,
                        default="info",
                        help_text="Error severity level",
                        max_length=20,
                    ),
                ),
                (
                    "is_resolved",
                    models.BooleanField(db_index=True, default=False, help_text="Whether this error has been resolved"),
                ),
                ("resolved_at", models.DateTimeField(blank=True, help_text="When the error was resolved", null=True)),
                ("resolution_notes", models.TextField(blank=True, help_text="Notes about resolution", null=True)),
                ("retry_count", models.PositiveIntegerField(default=0, help_text="Number of retry attempts")),
                (
                    "retry_attempts",
                    models.PositiveIntegerField(blank=True, help_text="Total retry attempts made", null=True),
                ),
                ("user_agent", models.TextField(blank=True, help_text="User agent string", null=True)),
                (
                    "ip_address_hash",
                    models.CharField(
                        blank=True, db_index=True, help_text="Hashed IP address", max_length=64, null=True
                    ),
                ),
                (
                    "client_version",
                    models.CharField(blank=True, help_text="Client application version", max_length=50, null=True),
                ),
                ("timezone", models.CharField(blank=True, help_text="User timezone", max_length=50, null=True)),
                ("referrer", models.TextField(blank=True, help_text="HTTP referrer", null=True)),
                (
                    "entity_type",
                    models.CharField(
                        blank=True, db_index=True, help_text="Type of entity affected", max_length=50, null=True
                    ),
                ),
                (
                    "entity_id",
                    models.CharField(
                        blank=True, db_index=True, help_text="ID of entity affected", max_length=255, null=True
                    ),
                ),
                (
                    "created_at",
                    models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this record was created"),
                ),
                (
                    "resolved_by",
                    models.ForeignKey(
                        blank=True,
                        help_text="User who resolved this error",
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="resolved_request_metadata",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        help_text="User who made the request",
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="request_metadata",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Request Metadata",
                "verbose_name_plural": "Request Metadata",
                "ordering": ["-created_at"],
            },
        ),
        migrations.CreateModel(
            name="RequestBreadcrumb",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("timestamp", models.DateTimeField(help_text="When this breadcrumb occurred")),
                (
                    "category",
                    models.CharField(
                        help_text="Breadcrumb category (e.g., 'http', 'navigation', 'console')", max_length=100
                    ),
                ),
                ("message", models.TextField(help_text="Breadcrumb message")),
                (
                    "level",
                    models.CharField(
                        blank=True, help_text="Log level (debug, info, warning, error)", max_length=20, null=True
                    ),
                ),
                ("sequence_order", models.PositiveIntegerField(default=0, help_text="Order within the request")),
                (
                    "request_metadata",
                    models.ForeignKey(
                        help_text="Parent request",
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="request_breadcrumbs",
                        to="core.requestmetadata",
                    ),
                ),
            ],
            options={
                "verbose_name": "Request Breadcrumb",
                "verbose_name_plural": "Request Breadcrumbs",
                "ordering": ["sequence_order", "timestamp"],
            },
        ),
        migrations.DeleteModel(
            name="PageView",
        ),
        migrations.DeleteModel(
            name="PageViewEvent",
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["error_type", "created_at"], name="core_reques_error_t_d384f1_idx"),
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["severity", "created_at"], name="core_reques_severit_04b88d_idx"),
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["is_resolved", "created_at"], name="core_reques_is_reso_614d34_idx"),
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["user", "created_at"], name="core_reques_user_id_db6ee3_idx"),
        ),
        migrations.AddIndex(
            model_name="requestbreadcrumb",
            index=models.Index(fields=["request_metadata", "sequence_order"], name="core_reques_request_0e8be4_idx"),
        ),
    ]
@@ -0,0 +1,64 @@
# Generated by Django 5.2.9 on 2026-01-07 01:23

import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('core', '0008_add_analytics_models'),
        ('pghistory', '0006_delete_aggregateevent'),
    ]

    operations = [
        migrations.CreateModel(
            name='PageView',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('object_id', models.PositiveIntegerField()),
                ('timestamp', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('ip_address', models.GenericIPAddressField()),
                ('user_agent', models.CharField(blank=True, max_length=512)),
                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='page_views', to='contenttypes.contenttype')),
            ],
        ),
        migrations.CreateModel(
            name='PageViewEvent',
            fields=[
                ('pgh_id', models.AutoField(primary_key=True, serialize=False)),
                ('pgh_created_at', models.DateTimeField(auto_now_add=True)),
                ('pgh_label', models.TextField(help_text='The event label.')),
                ('id', models.BigIntegerField()),
                ('object_id', models.PositiveIntegerField()),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('ip_address', models.GenericIPAddressField()),
                ('user_agent', models.CharField(blank=True, max_length=512)),
                ('content_type', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='contenttypes.contenttype')),
                ('pgh_context', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')),
                ('pgh_obj', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='core.pageview')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddIndex(
            model_name='pageview',
            index=models.Index(fields=['timestamp'], name='core_pagevi_timesta_757ebb_idx'),
        ),
        migrations.AddIndex(
            model_name='pageview',
            index=models.Index(fields=['content_type', 'object_id'], name='core_pagevi_content_eda7ad_idx'),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name='pageview',
            trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;', hash='1682d124ea3ba215e630c7cfcde929f7444cf247', operation='INSERT', pgid='pgtrigger_insert_insert_ee1e1', table='core_pageview', when='AFTER')),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name='pageview',
            trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;', hash='4221b2dd6636cae454f8d69c0c1841c40c47e6a6', operation='UPDATE', pgid='pgtrigger_update_update_3c505', table='core_pageview', when='AFTER')),
        ),
    ]
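
The net effect of the triggers above: every INSERT or UPDATE on core_pageview writes a matching row into core_pageviewevent at the SQL level, so history accrues with no Python-side signal handlers. An illustrative shell session (ct and the values are made up):

pv = PageView.objects.create(content_type=ct, object_id=42, ip_address="203.0.113.7")
pv.events.count()   # 1 -- the 'insert' event (pgh_obj has related_name='events')
pv.user_agent = "Mozilla/5.0"
pv.save()
pv.events.count()   # 2 -- the update trigger fired as well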
backend/apps/core/models.py
@@ -298,3 +298,754 @@ class ApplicationError(models.Model):
    def short_error_id(self) -> str:
        """Return first 8 characters of error_id for display."""
        return str(self.error_id)[:8]


class SystemAlert(models.Model):
    """
    System-level alerts for monitoring application health.

    Alert types include orphaned images, stale submissions, circular dependencies,
    validation errors, ban attempts, upload timeouts, and high error rates.
    """

    class AlertType(models.TextChoices):
        ORPHANED_IMAGES = "orphaned_images", "Orphaned Images"
        STALE_SUBMISSIONS = "stale_submissions", "Stale Submissions"
        CIRCULAR_DEPENDENCY = "circular_dependency", "Circular Dependency"
        VALIDATION_ERROR = "validation_error", "Validation Error"
        BAN_ATTEMPT = "ban_attempt", "Ban Attempt"
        UPLOAD_TIMEOUT = "upload_timeout", "Upload Timeout"
        HIGH_ERROR_RATE = "high_error_rate", "High Error Rate"
        DATABASE_CONNECTION = "database_connection", "Database Connection"
        MEMORY_USAGE = "memory_usage", "Memory Usage"
        QUEUE_BACKUP = "queue_backup", "Queue Backup"

    class Severity(models.TextChoices):
        LOW = "low", "Low"
        MEDIUM = "medium", "Medium"
        HIGH = "high", "High"
        CRITICAL = "critical", "Critical"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    alert_type = models.CharField(
        max_length=50,
        choices=AlertType.choices,
        db_index=True,
        help_text="Type of system alert",
    )
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        db_index=True,
        help_text="Alert severity level",
    )
    message = models.TextField(help_text="Human-readable alert message")
    metadata = models.JSONField(
        null=True,
        blank=True,
        help_text="Additional context data for this alert",
    )
    resolved_at = models.DateTimeField(
        null=True,
        blank=True,
        db_index=True,
        help_text="When this alert was resolved",
    )
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="resolved_system_alerts",
        help_text="Admin who resolved this alert",
    )
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "System Alert"
        verbose_name_plural = "System Alerts"
        indexes = [
            models.Index(fields=["severity", "created_at"]),
            models.Index(fields=["alert_type", "created_at"]),
            models.Index(fields=["resolved_at", "created_at"]),
        ]

    def __str__(self) -> str:
        return f"[{self.get_severity_display()}] {self.get_alert_type_display()}: {self.message[:50]}"

    @property
    def is_resolved(self) -> bool:
        return self.resolved_at is not None
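
# Illustrative lifecycle (not part of this changeset): an alert is raised by a
# monitoring job with free-form metadata, then cleared by an admin; timezone
# and admin_user here are assumed to be in scope.
#
#   alert = SystemAlert.objects.create(
#       alert_type=SystemAlert.AlertType.HIGH_ERROR_RATE,
#       severity=SystemAlert.Severity.CRITICAL,
#       message="5xx rate above 2% for 10 minutes",
#       metadata={"error_rate": 0.034, "window_minutes": 10},
#   )
#   alert.is_resolved            # False
#   alert.resolved_at = timezone.now()
#   alert.resolved_by = admin_user
#   alert.save(update_fields=["resolved_at", "resolved_by"])
#   alert.is_resolved            # True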


class RateLimitAlertConfig(models.Model):
    """
    Configuration for rate limit alert thresholds.

    Defines thresholds that trigger alerts when exceeded.
    """

    class MetricType(models.TextChoices):
        BLOCK_RATE = "block_rate", "Block Rate"
        TOTAL_REQUESTS = "total_requests", "Total Requests"
        UNIQUE_IPS = "unique_ips", "Unique IPs"
        FUNCTION_SPECIFIC = "function_specific", "Function Specific"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    metric_type = models.CharField(
        max_length=50,
        choices=MetricType.choices,
        db_index=True,
        help_text="Type of metric to monitor",
    )
    threshold_value = models.FloatField(help_text="Threshold value that triggers alert")
    time_window_ms = models.IntegerField(help_text="Time window in milliseconds for measurement")
    function_name = models.CharField(
        max_length=100,
        null=True,
        blank=True,
        help_text="Specific function to monitor (for function_specific metric type)",
    )
    enabled = models.BooleanField(default=True, db_index=True, help_text="Whether this config is active")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["metric_type", "-created_at"]
        verbose_name = "Rate Limit Alert Config"
        verbose_name_plural = "Rate Limit Alert Configs"

    def __str__(self) -> str:
        return f"{self.get_metric_type_display()}: threshold={self.threshold_value}"


class RateLimitAlert(models.Model):
    """
    Alerts triggered when rate limit thresholds are exceeded.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    config = models.ForeignKey(
        RateLimitAlertConfig,
        on_delete=models.CASCADE,
        related_name="alerts",
        help_text="Configuration that triggered this alert",
    )
    metric_type = models.CharField(max_length=50, help_text="Type of metric")
    metric_value = models.FloatField(help_text="Actual value that triggered the alert")
    threshold_value = models.FloatField(help_text="Threshold that was exceeded")
    time_window_ms = models.IntegerField(help_text="Time window of measurement")
    function_name = models.CharField(
        max_length=100,
        null=True,
        blank=True,
        help_text="Function name if applicable",
    )
    alert_message = models.TextField(help_text="Descriptive alert message")
    resolved_at = models.DateTimeField(
        null=True,
        blank=True,
        db_index=True,
        help_text="When this alert was resolved",
    )
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="resolved_rate_limit_alerts",
        help_text="Admin who resolved this alert",
    )
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Rate Limit Alert"
        verbose_name_plural = "Rate Limit Alerts"
        indexes = [
            models.Index(fields=["metric_type", "created_at"]),
            models.Index(fields=["resolved_at", "created_at"]),
        ]

    def __str__(self) -> str:
        return f"{self.metric_type}: {self.metric_value} > {self.threshold_value}"

    @property
    def is_resolved(self) -> bool:
        return self.resolved_at is not None
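
# Hypothetical evaluation helper (not in this changeset) sketching how the two
# rate-limit models are meant to interact: a config defines the threshold, and
# a breach writes an alert row pointing back at that config.
def _check_rate_limit_metric(config: RateLimitAlertConfig, measured_value: float):
    """Create a RateLimitAlert if `measured_value` breaches `config`."""
    if not config.enabled or measured_value <= config.threshold_value:
        return None
    return RateLimitAlert.objects.create(
        config=config,
        metric_type=config.metric_type,
        metric_value=measured_value,
        threshold_value=config.threshold_value,
        time_window_ms=config.time_window_ms,
        function_name=config.function_name,
        alert_message=(
            f"{config.get_metric_type_display()} reached {measured_value} "
            f"(threshold {config.threshold_value}) within {config.time_window_ms} ms"
        ),
    )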


class Incident(models.Model):
    """
    Groups related alerts for coordinated investigation.

    Incidents provide a higher-level view of system issues,
    allowing teams to track and resolve related alerts together.
    """

    class Status(models.TextChoices):
        OPEN = "open", "Open"
        INVESTIGATING = "investigating", "Investigating"
        RESOLVED = "resolved", "Resolved"
        CLOSED = "closed", "Closed"

    class Severity(models.TextChoices):
        LOW = "low", "Low"
        MEDIUM = "medium", "Medium"
        HIGH = "high", "High"
        CRITICAL = "critical", "Critical"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    incident_number = models.CharField(
        max_length=20,
        unique=True,
        db_index=True,
        help_text="Auto-generated incident number (INC-YYYYMMDD-XXXX)",
    )
    title = models.CharField(max_length=255, help_text="Brief description of the incident")
    description = models.TextField(null=True, blank=True, help_text="Detailed description")
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        db_index=True,
        help_text="Incident severity level",
    )
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.OPEN,
        db_index=True,
        help_text="Current incident status",
    )

    # Timestamps
    detected_at = models.DateTimeField(auto_now_add=True, help_text="When the incident was detected")
    acknowledged_at = models.DateTimeField(null=True, blank=True, help_text="When someone started investigating")
    acknowledged_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="acknowledged_incidents",
        help_text="User who acknowledged the incident",
    )
    resolved_at = models.DateTimeField(null=True, blank=True, help_text="When the incident was resolved")
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="resolved_incidents",
        help_text="User who resolved the incident",
    )
    resolution_notes = models.TextField(null=True, blank=True, help_text="Notes about the resolution")

    # Computed field (denormalized for performance)
    alert_count = models.PositiveIntegerField(default=0, help_text="Number of linked alerts")

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-detected_at"]
        verbose_name = "Incident"
        verbose_name_plural = "Incidents"
        indexes = [
            models.Index(fields=["status", "detected_at"]),
            models.Index(fields=["severity", "detected_at"]),
        ]

    def __str__(self) -> str:
        return f"{self.incident_number}: {self.title}"

    def save(self, *args, **kwargs):
        if not self.incident_number:
            # Auto-generate incident number: INC-YYYYMMDD-XXXX
            from django.utils import timezone

            today = timezone.now().strftime("%Y%m%d")
            count = Incident.objects.filter(incident_number__startswith=f"INC-{today}").count() + 1
            self.incident_number = f"INC-{today}-{count:04d}"
        super().save(*args, **kwargs)

    def update_alert_count(self):
        """Update the denormalized alert_count field."""
        self.alert_count = self.linked_alerts.count()
        self.save(update_fields=["alert_count"])
|
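A minimal usage sketch for the auto-numbering in Incident.save() above; the import path is an assumption, and the count-based suffix assumes low write concurrency (two saves on the same day could race on the unique constraint):

from apps.core.models import Incident  # hypothetical import path

incident = Incident.objects.create(
    title="Elevated 5xx rate on API",
    severity=Incident.Severity.HIGH,
)
# incident_number was generated in save(), e.g. "INC-20260107-0001"
assert incident.incident_number.startswith("INC-")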
class IncidentAlert(models.Model):
    """
    Links alerts to incidents (many-to-many through table).

    Supports linking both system alerts and rate limit alerts.
    """

    class AlertSource(models.TextChoices):
        SYSTEM = "system", "System Alert"
        RATE_LIMIT = "rate_limit", "Rate Limit Alert"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    incident = models.ForeignKey(
        Incident,
        on_delete=models.CASCADE,
        related_name="linked_alerts",
        help_text="The incident this alert is linked to",
    )
    alert_source = models.CharField(
        max_length=20,
        choices=AlertSource.choices,
        help_text="Source type of the alert",
    )
    alert_id = models.UUIDField(help_text="ID of the linked alert")
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name = "Incident Alert"
        verbose_name_plural = "Incident Alerts"
        unique_together = ["incident", "alert_source", "alert_id"]
        indexes = [
            models.Index(fields=["alert_source", "alert_id"]),
        ]

    def __str__(self) -> str:
        return f"{self.incident.incident_number} <- {self.alert_source}:{self.alert_id}"

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        # Update the incident's alert count
        self.incident.update_alert_count()

    def delete(self, *args, **kwargs):
        incident = self.incident
        super().delete(*args, **kwargs)
        # Update the incident's alert count
        incident.update_alert_count()
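A short sketch of how the denormalized count stays in sync. Note that bulk operations (QuerySet.delete(), bulk_create()) bypass these overridden methods, so alert_count can drift if links are created or removed that way. Identifiers are illustrative only:

incident = Incident.objects.create(title="API outage", severity=Incident.Severity.HIGH)
link = IncidentAlert.objects.create(
    incident=incident,
    alert_source=IncidentAlert.AlertSource.SYSTEM,
    alert_id=some_alert.id,  # assumed UUID of an existing alert
)
incident.refresh_from_db()
assert incident.alert_count == 1

link.delete()  # per-instance delete keeps the counter in sync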
class RequestMetadata(models.Model):
    """
    Comprehensive request tracking for monitoring and debugging.

    Stores detailed information about API requests, including timing,
    errors, user context, and resolution status. Used by the admin
    dashboard for error monitoring and analytics.
    """

    class Severity(models.TextChoices):
        DEBUG = "debug", "Debug"
        INFO = "info", "Info"
        WARNING = "warning", "Warning"
        ERROR = "error", "Error"
        CRITICAL = "critical", "Critical"

    # Identity & Correlation
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    request_id = models.CharField(
        max_length=255,
        unique=True,
        db_index=True,
        help_text="Unique request identifier for correlation",
    )
    trace_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="Distributed tracing ID",
    )
    session_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="User session identifier",
    )
    parent_request_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Parent request ID for nested requests",
    )

    # Request Information
    action = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Action/operation being performed",
    )
    method = models.CharField(
        max_length=10,
        blank=True,
        null=True,
        help_text="HTTP method (GET, POST, etc.)",
    )
    endpoint = models.CharField(
        max_length=500,
        blank=True,
        null=True,
        db_index=True,
        help_text="API endpoint or URL path",
    )
    request_method = models.CharField(
        max_length=10,
        blank=True,
        null=True,
        help_text="HTTP request method",
    )
    request_path = models.CharField(
        max_length=500,
        blank=True,
        null=True,
        help_text="Request URL path",
    )
    affected_route = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Frontend route affected",
    )

    # Response Information
    http_status = models.PositiveIntegerField(
        blank=True,
        null=True,
        db_index=True,
        help_text="HTTP status code",
    )
    status_code = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Status code (alias for http_status)",
    )
    response_status = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Response status code",
    )
    success = models.BooleanField(
        blank=True,
        null=True,
        db_index=True,
        help_text="Whether the request was successful",
    )

    # Timing
    started_at = models.DateTimeField(
        auto_now_add=True,
        help_text="When the request started",
    )
    completed_at = models.DateTimeField(
        blank=True,
        null=True,
        help_text="When the request completed",
    )
    duration_ms = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Request duration in milliseconds",
    )
    response_time_ms = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Response time in milliseconds",
    )

    # Error Information
    error_type = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        db_index=True,
        help_text="Type/class of error",
    )
    error_message = models.TextField(
        blank=True,
        null=True,
        help_text="Error message",
    )
    error_stack = models.TextField(
        blank=True,
        null=True,
        help_text="Error stack trace",
    )
    error_code = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        db_index=True,
        help_text="Application error code",
    )
    error_origin = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        help_text="Where the error originated",
    )
    component_stack = models.TextField(
        blank=True,
        null=True,
        help_text="React component stack trace",
    )
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        default=Severity.INFO,
        db_index=True,
        help_text="Error severity level",
    )

    # Resolution
    is_resolved = models.BooleanField(
        default=False,
        db_index=True,
        help_text="Whether this error has been resolved",
    )
    resolved_at = models.DateTimeField(
        blank=True,
        null=True,
        help_text="When the error was resolved",
    )
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="resolved_request_metadata",
        help_text="User who resolved this error",
    )
    resolution_notes = models.TextField(
        blank=True,
        null=True,
        help_text="Notes about resolution",
    )

    # Retry Information
    retry_count = models.PositiveIntegerField(
        default=0,
        help_text="Number of retry attempts",
    )
    retry_attempts = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Total retry attempts made",
    )

    # User Context
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="request_metadata",
        help_text="User who made the request",
    )
    user_agent = models.TextField(
        blank=True,
        null=True,
        help_text="User agent string",
    )
    ip_address_hash = models.CharField(
        max_length=64,
        blank=True,
        null=True,
        db_index=True,
        help_text="Hashed IP address",
    )
    client_version = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="Client application version",
    )
    timezone = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="User timezone",
    )
    referrer = models.TextField(
        blank=True,
        null=True,
        help_text="HTTP referrer",
    )

    # Entity Context
    entity_type = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        db_index=True,
        help_text="Type of entity affected",
    )
    entity_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="ID of entity affected",
    )

    # Timestamps
    created_at = models.DateTimeField(
        auto_now_add=True,
        db_index=True,
        help_text="When this record was created",
    )

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Request Metadata"
        verbose_name_plural = "Request Metadata"
        indexes = [
            models.Index(fields=["error_type", "created_at"]),
            models.Index(fields=["severity", "created_at"]),
            models.Index(fields=["is_resolved", "created_at"]),
            models.Index(fields=["user", "created_at"]),
        ]

    def __str__(self) -> str:
        return f"{self.request_id} - {self.endpoint or 'unknown'}"
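A minimal sketch of how a middleware might populate this model; the middleware itself is an assumption (not part of this diff), and only a few of the fields are filled in:

import time
import uuid

class RequestMetadataMiddleware:
    """Records one RequestMetadata row per request (illustrative only)."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        start = time.monotonic()
        response = self.get_response(request)
        RequestMetadata.objects.create(
            request_id=str(uuid.uuid4()),
            endpoint=request.path,
            method=request.method,
            http_status=response.status_code,
            success=response.status_code < 400,
            duration_ms=int((time.monotonic() - start) * 1000),
            user=request.user if request.user.is_authenticated else None,
        )
        return response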
class RequestBreadcrumb(models.Model):
    """
    Breadcrumb trail for request tracing.

    Stores individual breadcrumb events that occurred during a request,
    useful for debugging and understanding request flow.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    request_metadata = models.ForeignKey(
        RequestMetadata,
        on_delete=models.CASCADE,
        related_name="request_breadcrumbs",
        help_text="Parent request",
    )
    timestamp = models.DateTimeField(
        help_text="When this breadcrumb occurred",
    )
    category = models.CharField(
        max_length=100,
        help_text="Breadcrumb category (e.g., 'http', 'navigation', 'console')",
    )
    message = models.TextField(
        help_text="Breadcrumb message",
    )
    level = models.CharField(
        max_length=20,
        blank=True,
        null=True,
        help_text="Log level (debug, info, warning, error)",
    )
    sequence_order = models.PositiveIntegerField(
        default=0,
        help_text="Order within the request",
    )

    class Meta:
        ordering = ["sequence_order", "timestamp"]
        verbose_name = "Request Breadcrumb"
        verbose_name_plural = "Request Breadcrumbs"
        indexes = [
            models.Index(fields=["request_metadata", "sequence_order"]),
        ]

    def __str__(self) -> str:
        return f"[{self.category}] {self.message[:50]}"
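A small sketch of attaching an ordered trail to a request record; field values are illustrative and "meta" is assumed to be an existing RequestMetadata instance:

from django.utils import timezone

for i, msg in enumerate(["fetch /api/v1/parks/", "render list", "open detail"]):
    RequestBreadcrumb.objects.create(
        request_metadata=meta,  # an existing RequestMetadata instance (assumed)
        timestamp=timezone.now(),
        category="navigation",
        message=msg,
        sequence_order=i,
    )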
class ApprovalTransactionMetric(models.Model):
    """
    Metrics for content approval transactions.

    Tracks performance and success/failure of moderation approval
    operations for analytics and debugging.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    # References
    submission_id = models.CharField(
        max_length=255,
        db_index=True,
        help_text="ID of the content submission",
    )
    moderator_id = models.CharField(
        max_length=255,
        db_index=True,
        help_text="ID of the moderator who processed the submission",
    )
    submitter_id = models.CharField(
        max_length=255,
        db_index=True,
        help_text="ID of the user who submitted the content",
    )
    request_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="Correlation request ID",
    )

    # Metrics
    success = models.BooleanField(
        db_index=True,
        help_text="Whether the approval was successful",
    )
    duration_ms = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Processing duration in milliseconds",
    )
    items_count = models.PositiveIntegerField(
        default=1,
        help_text="Number of items processed",
    )
    rollback_triggered = models.BooleanField(
        default=False,
        help_text="Whether a rollback was triggered",
    )

    # Error Information
    error_code = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="Error code if failed",
    )
    error_message = models.TextField(
        blank=True,
        null=True,
        help_text="Error message if failed",
    )
    error_details = models.TextField(
        blank=True,
        null=True,
        help_text="Detailed error information",
    )

    # Timestamps
    created_at = models.DateTimeField(
        auto_now_add=True,
        db_index=True,
        help_text="When this metric was recorded",
    )

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Approval Transaction Metric"
        verbose_name_plural = "Approval Transaction Metrics"
        indexes = [
            models.Index(fields=["success", "created_at"]),
            models.Index(fields=["moderator_id", "created_at"]),
        ]

    def __str__(self) -> str:
        status = "✓" if self.success else "✗"
        return f"{status} Submission {self.submission_id[:8]} by {self.moderator_id[:8]}"
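A quick sketch of the kind of analytics query this model supports; the aggregation itself is illustrative, not part of this diff:

from django.db.models import Avg, Count

stats = ApprovalTransactionMetric.objects.values("success").annotate(
    n=Count("id"),
    avg_ms=Avg("duration_ms"),
)
# e.g. [{'success': True, 'n': 120, 'avg_ms': 84.2}, {'success': False, ...}]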
@@ -28,3 +28,65 @@ class IsStaffOrReadOnly(permissions.BasePermission):
         if request.method in permissions.SAFE_METHODS:
             return True
         return request.user and request.user.is_staff
+
+
+class IsAdminWithSecondFactor(permissions.BasePermission):
+    """
+    Requires admin status AND at least one configured second factor.
+
+    Accepts either:
+    - TOTP (MFA/Authenticator app)
+    - WebAuthn (Passkey/Security key)
+
+    This permission ensures that admin users have a second factor configured
+    before they can access sensitive admin endpoints.
+    """
+
+    message = "Admin access requires MFA or Passkey to be configured."
+
+    def has_permission(self, request, view):
+        user = request.user
+
+        # Must be authenticated
+        if not user or not user.is_authenticated:
+            return False
+
+        # Must be admin (staff, superuser, or ADMIN role)
+        if not self._is_admin(user):
+            self.message = "You do not have admin privileges."
+            return False
+
+        # Must have at least one second factor configured
+        if not self._has_second_factor(user):
+            self.message = "Admin access requires MFA or Passkey to be configured."
+            return False
+
+        return True
+
+    def _is_admin(self, user) -> bool:
+        """Check if user has admin privileges."""
+        if user.is_superuser:
+            return True
+        if user.is_staff:
+            return True
+        # Check custom role field if it exists
+        if hasattr(user, "role") and user.role in ("ADMIN", "SUPERUSER"):
+            return True
+        return False
+
+    def _has_second_factor(self, user) -> bool:
+        """Check if user has at least one second factor configured."""
+        try:
+            from allauth.mfa.models import Authenticator
+
+            # Check for TOTP or WebAuthn authenticators
+            return Authenticator.objects.filter(
+                user=user,
+                type__in=[Authenticator.Type.TOTP, Authenticator.Type.WEBAUTHN]
+            ).exists()
+        except ImportError:
+            # allauth.mfa not installed
+            return False
+        except Exception:
+            # Any other error, fail closed (deny access)
+            return False
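A minimal sketch of wiring the new permission into a DRF view; the view and URL are illustrative, not part of this diff:

from rest_framework.response import Response
from rest_framework.views import APIView

from apps.core.permissions import IsAdminWithSecondFactor

class AdminStatsView(APIView):
    permission_classes = [IsAdminWithSecondFactor]

    def get(self, request):
        # Only reachable by staff/superuser/ADMIN-role users with TOTP or WebAuthn
        return Response({"ok": True})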
@@ -58,8 +58,22 @@ def with_callbacks(
     source_state = getattr(instance, field_name, None)

     # Get target state from the transition decorator
-    # The @transition decorator sets _django_fsm_target
-    target_state = getattr(func, "_django_fsm", {}).get("target", None)
+    # The @transition decorator sets _django_fsm attribute (may be dict or FSMMeta object)
+    fsm_meta = getattr(func, "_django_fsm", None)
+    target_state = None
+    if fsm_meta is not None:
+        if isinstance(fsm_meta, dict):
+            target_state = fsm_meta.get("target", None)
+        elif hasattr(fsm_meta, "target"):
+            target_state = fsm_meta.target
+        elif hasattr(fsm_meta, "transitions"):
+            # FSMMeta object - try to get target from first transition
+            try:
+                transitions = list(fsm_meta.transitions.values())
+                if transitions:
+                    target_state = transitions[0].target if hasattr(transitions[0], 'target') else None
+            except (AttributeError, TypeError, StopIteration):
+                pass

     # If we can't determine the target from decorator metadata,
     # we'll capture it after the transition
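For context, a rough self-contained illustration of the two metadata shapes the probing above defends against; the FSMMeta attribute names mirror the code's own assumptions rather than a documented django-fsm API:

class _FakeTransition:
    def __init__(self, target):
        self.target = target

class _FakeFSMMeta:
    def __init__(self):
        self.transitions = {"approve": _FakeTransition("approved")}

for meta in ({"target": "approved"}, _FakeFSMMeta()):
    if isinstance(meta, dict):
        assert meta.get("target") == "approved"       # old dict shape
    else:
        assert list(meta.transitions.values())[0].target == "approved"  # FSMMeta shape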
@@ -284,7 +298,7 @@ class TransitionMethodFactory:
     def create_approve_method(
         source: str,
         target: str,
-        field_name: str = "status",
+        field=None,
         permission_guard: Callable | None = None,
         enable_callbacks: bool = True,
         emit_signals: bool = True,
@@ -295,7 +309,7 @@ class TransitionMethodFactory:
         Args:
             source: Source state value(s)
             target: Target state value
-            field_name: Name of the FSM field
+            field: FSM field object (required for django-fsm 3.x)
             permission_guard: Optional permission guard
             enable_callbacks: Whether to wrap with callback execution
             emit_signals: Whether to emit Django signals
@@ -303,13 +317,15 @@ class TransitionMethodFactory:
         Returns:
             Approval transition method
         """
+        # Get field name for callback wrapper
+        field_name = field.name if hasattr(field, 'name') else 'status'
+
         @fsm_log_by
         @transition(
-            field=field_name,
+            field=field,
             source=source,
             target=target,
-            conditions=[permission_guard] if permission_guard else [],
+            permission=permission_guard,
         )
         def approve(instance, user=None, comment: str = "", **kwargs):
             """Approve and transition to approved state."""
@@ -335,7 +351,7 @@ class TransitionMethodFactory:
     def create_reject_method(
         source: str,
         target: str,
-        field_name: str = "status",
+        field=None,
         permission_guard: Callable | None = None,
         enable_callbacks: bool = True,
         emit_signals: bool = True,
@@ -346,7 +362,7 @@ class TransitionMethodFactory:
         Args:
             source: Source state value(s)
             target: Target state value
-            field_name: Name of the FSM field
+            field: FSM field object (required for django-fsm 3.x)
             permission_guard: Optional permission guard
             enable_callbacks: Whether to wrap with callback execution
             emit_signals: Whether to emit Django signals
@@ -354,13 +370,15 @@ class TransitionMethodFactory:
         Returns:
             Rejection transition method
         """
+        # Get field name for callback wrapper
+        field_name = field.name if hasattr(field, 'name') else 'status'
+
         @fsm_log_by
         @transition(
-            field=field_name,
+            field=field,
             source=source,
             target=target,
-            conditions=[permission_guard] if permission_guard else [],
+            permission=permission_guard,
         )
         def reject(instance, user=None, reason: str = "", **kwargs):
             """Reject and transition to rejected state."""
@@ -386,7 +404,7 @@ class TransitionMethodFactory:
     def create_escalate_method(
         source: str,
         target: str,
-        field_name: str = "status",
+        field=None,
         permission_guard: Callable | None = None,
         enable_callbacks: bool = True,
         emit_signals: bool = True,
@@ -397,7 +415,7 @@ class TransitionMethodFactory:
         Args:
             source: Source state value(s)
             target: Target state value
-            field_name: Name of the FSM field
+            field: FSM field object (required for django-fsm 3.x)
             permission_guard: Optional permission guard
             enable_callbacks: Whether to wrap with callback execution
             emit_signals: Whether to emit Django signals
@@ -405,13 +423,15 @@ class TransitionMethodFactory:
         Returns:
             Escalation transition method
         """
+        # Get field name for callback wrapper
+        field_name = field.name if hasattr(field, 'name') else 'status'
+
         @fsm_log_by
         @transition(
-            field=field_name,
+            field=field,
             source=source,
             target=target,
-            conditions=[permission_guard] if permission_guard else [],
+            permission=permission_guard,
         )
         def escalate(instance, user=None, reason: str = "", **kwargs):
             """Escalate to higher authority."""
@@ -438,7 +458,7 @@ class TransitionMethodFactory:
         method_name: str,
         source: str,
         target: str,
-        field_name: str = "status",
+        field=None,
         permission_guard: Callable | None = None,
         docstring: str | None = None,
         enable_callbacks: bool = True,
@@ -451,7 +471,7 @@ class TransitionMethodFactory:
         method_name: Name for the method
         source: Source state value(s)
         target: Target state value
-        field_name: Name of the FSM field
+        field: FSM field object (required for django-fsm 3.x)
         permission_guard: Optional permission guard
         docstring: Optional docstring for the method
         enable_callbacks: Whether to wrap with callback execution
@@ -460,13 +480,15 @@ class TransitionMethodFactory:
         Returns:
             Generic transition method
         """
+        # Get field name for callback wrapper
+        field_name = field.name if hasattr(field, 'name') else 'status'
+
         @fsm_log_by
         @transition(
-            field=field_name,
+            field=field,
             source=source,
             target=target,
-            conditions=[permission_guard] if permission_guard else [],
+            permission=permission_guard,
        )
         def generic_transition(instance, user=None, **kwargs):
             """Execute state transition."""
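A hand-wired sketch of the updated factory API; Submission and its state values are hypothetical stand-ins for a real tracked model:

status_field = Submission._meta.get_field("status")  # django-fsm 3.x wants the field object

factory = TransitionMethodFactory()
approve = factory.create_approve_method(
    source="PENDING",
    target="APPROVED",
    field=status_field,
    permission_guard=None,
)
setattr(Submission, "approve", approve)  # mirrors what the generator below does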
@@ -71,69 +71,79 @@ def generate_transition_methods_for_model(
         choice_group: Choice group name
         domain: Domain namespace
     """
+    # Get the actual field from the model class - django-fsm 3.x requires
+    # the field object, not just the string name, when creating methods dynamically
+    field = model_class._meta.get_field(field_name)
+
     builder = StateTransitionBuilder(choice_group, domain)
     transition_graph = builder.build_transition_graph()
     factory = TransitionMethodFactory()
+
+    # Group transitions by target to avoid overwriting methods
+    # {target: [source1, source2, ...]}
+    target_to_sources: dict[str, list[str]] = {}
     for source, targets in transition_graph.items():
-        source_metadata = builder.get_choice_metadata(source)
-
         for target in targets:
-            # Use shared method name determination
-            method_name = determine_method_name_for_transition(source, target)
+            if target not in target_to_sources:
+                target_to_sources[target] = []
+            target_to_sources[target].append(source)

-            # Get target metadata for combined guards
-            target_metadata = builder.get_choice_metadata(target)
+    # Create one transition method per target, handling all valid sources
+    for target, sources in target_to_sources.items():
+        # Use shared method name determination (all sources go to same target = same method)
+        method_name = determine_method_name_for_transition(sources[0], target)

-            # Extract guards from both source and target metadata
-            # This ensures metadata flags like requires_assignment, zero_tolerance,
-            # required_permissions, and escalation_level are enforced
-            guards = extract_guards_from_metadata(source_metadata)
-            target_guards = extract_guards_from_metadata(target_metadata)
+        # Get target metadata for guards
+        target_metadata = builder.get_choice_metadata(target)

-            # Combine all guards
-            all_guards = guards + target_guards
+        # For permission guard, use target metadata only (all sources share the same permission)
+        # Source-specific guards would need to be checked via conditions, but for FSM 3.x
+        # we use permission which gets called with (instance, user)
+        target_guards = extract_guards_from_metadata(target_metadata)

-            # Create combined guard if we have multiple guards
-            combined_guard: Callable | None = None
-            if len(all_guards) == 1:
-                combined_guard = all_guards[0]
-            elif len(all_guards) > 1:
-                combined_guard = CompositeGuard(guards=all_guards, operator="AND")
+        # Create combined guard if we have multiple guards
+        combined_guard: Callable | None = None
+        if len(target_guards) == 1:
+            combined_guard = target_guards[0]
+        elif len(target_guards) > 1:
+            combined_guard = CompositeGuard(guards=target_guards, operator="AND")

-            # Create appropriate transition method
-            if "approve" in method_name or "accept" in method_name:
-                method = factory.create_approve_method(
-                    source=source,
-                    target=target,
-                    field_name=field_name,
-                    permission_guard=combined_guard,
-                )
-            elif "reject" in method_name or "deny" in method_name:
-                method = factory.create_reject_method(
-                    source=source,
-                    target=target,
-                    field_name=field_name,
-                    permission_guard=combined_guard,
-                )
-            elif "escalate" in method_name:
-                method = factory.create_escalate_method(
-                    source=source,
-                    target=target,
-                    field_name=field_name,
-                    permission_guard=combined_guard,
-                )
-            else:
-                method = factory.create_generic_transition_method(
-                    method_name=method_name,
-                    source=source,
-                    target=target,
-                    field_name=field_name,
-                    permission_guard=combined_guard,
-                )
+        # Use list of sources for transitions with multiple valid source states
+        source_value = sources if len(sources) > 1 else sources[0]

-            # Attach method to model class
-            setattr(model_class, method_name, method)
+        # Create appropriate transition method - pass actual field object
+        if "approve" in method_name or "accept" in method_name:
+            method = factory.create_approve_method(
+                source=source_value,
+                target=target,
+                field=field,
+                permission_guard=combined_guard,
+            )
+        elif "reject" in method_name or "deny" in method_name:
+            method = factory.create_reject_method(
+                source=source_value,
+                target=target,
+                field=field,
+                permission_guard=combined_guard,
+            )
+        elif "escalate" in method_name:
+            method = factory.create_escalate_method(
+                source=source_value,
+                target=target,
+                field=field,
+                permission_guard=combined_guard,
+            )
+        else:
+            method = factory.create_generic_transition_method(
+                method_name=method_name,
+                source=source_value,
+                target=target,
+                field=field,
+                permission_guard=combined_guard,
+            )
+
+        # Attach method to model class
+        setattr(model_class, method_name, method)


 class StateMachineModelMixin:
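To make the regrouping concrete, a hypothetical transition graph and what the new code builds from it:

transition_graph = {"PENDING": ["APPROVED"], "ESCALATED": ["APPROVED"]}
# Grouping yields: target_to_sources == {"APPROVED": ["PENDING", "ESCALATED"]}
# so a single approve method is created with source=["PENDING", "ESCALATED"],
# where the old per-(source, target) loop built two identically named methods
# and the second setattr silently overwrote the first.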
@@ -3,3 +3,22 @@ Core tasks package for ThrillWiki.

 This package contains all Celery tasks for the core application.
 """
+
+from apps.core.tasks.scheduled import (
+    cleanup_old_versions,
+    cleanup_orphaned_images,
+    data_retention_cleanup,
+    process_closing_entities,
+    process_expired_bans,
+    process_scheduled_deletions,
+)
+
+__all__ = [
+    "process_scheduled_deletions",
+    "process_closing_entities",
+    "process_expired_bans",
+    "cleanup_orphaned_images",
+    "cleanup_old_versions",
+    "data_retention_cleanup",
+]
backend/apps/core/tasks/scheduled.py (new file, 417 lines)
@@ -0,0 +1,417 @@
"""
|
||||||
|
Scheduled Celery tasks for ThrillWiki.
|
||||||
|
|
||||||
|
These tasks are run on a schedule via Celery Beat for maintenance operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from celery import shared_task
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
from django.db import transaction
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from apps.core.utils import capture_and_log
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
User = get_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.process_scheduled_deletions")
|
||||||
|
def process_scheduled_deletions() -> dict:
|
||||||
|
"""
|
||||||
|
Process scheduled account deletions.
|
||||||
|
|
||||||
|
Users who requested account deletion and whose grace period has expired
|
||||||
|
will have their accounts permanently deleted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts of processed, succeeded, and failed deletions
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import AccountDeletionRequest
|
||||||
|
|
||||||
|
logger.info("Starting scheduled account deletions processing")
|
||||||
|
|
||||||
|
cutoff_time = timezone.now()
|
||||||
|
processed = 0
|
||||||
|
succeeded = 0
|
||||||
|
failed = 0
|
||||||
|
failures = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get deletion requests that are past their scheduled time
|
||||||
|
pending_deletions = AccountDeletionRequest.objects.filter(
|
||||||
|
status="pending",
|
||||||
|
scheduled_deletion_at__lte=cutoff_time,
|
||||||
|
).select_related("user")
|
||||||
|
|
||||||
|
for request in pending_deletions:
|
||||||
|
processed += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
user = request.user
|
||||||
|
username = user.username
|
||||||
|
|
||||||
|
# Mark request as processing
|
||||||
|
request.status = "processing"
|
||||||
|
request.save()
|
||||||
|
|
||||||
|
# Anonymize user data (keep submissions)
|
||||||
|
user.username = f"deleted_{user.id}"
|
||||||
|
user.email = f"deleted_{user.id}@deleted.thrillwiki.com"
|
||||||
|
user.first_name = ""
|
||||||
|
user.last_name = ""
|
||||||
|
user.is_active = False
|
||||||
|
user.save()
|
||||||
|
|
||||||
|
# Mark deletion as complete
|
||||||
|
request.status = "completed"
|
||||||
|
request.completed_at = timezone.now()
|
||||||
|
request.save()
|
||||||
|
|
||||||
|
succeeded += 1
|
||||||
|
logger.info(f"Successfully processed deletion for user {username}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
failed += 1
|
||||||
|
error_msg = f"User {request.user_id}: {str(e)}"
|
||||||
|
failures.append(error_msg)
|
||||||
|
capture_and_log(e, f"Process scheduled deletion for user {request.user_id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process scheduled deletions", source="task")
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"processed": processed,
|
||||||
|
"succeeded": succeeded,
|
||||||
|
"failed": failed,
|
||||||
|
"failures": failures[:10], # Limit failure list
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Completed scheduled deletions: {processed} processed, {succeeded} succeeded, {failed} failed"
|
||||||
|
)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.process_closing_entities")
|
||||||
|
def process_closing_entities() -> dict:
|
||||||
|
"""
|
||||||
|
Process parks and rides that have reached their closing date.
|
||||||
|
|
||||||
|
Entities in CLOSING status with a closing_date in the past will be
|
||||||
|
transitioned to their post_closing_status (typically CLOSED or SBNO).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
from apps.parks.models import Park
|
||||||
|
from apps.rides.models import Ride
|
||||||
|
|
||||||
|
logger.info("Starting closing entities processing")
|
||||||
|
|
||||||
|
today = timezone.now().date()
|
||||||
|
results = {"parks": {"processed": 0, "succeeded": 0, "failed": 0}, "rides": {"processed": 0, "succeeded": 0, "failed": 0}}
|
||||||
|
|
||||||
|
# Get system user for automated transitions
|
||||||
|
try:
|
||||||
|
system_user = User.objects.get(username="system")
|
||||||
|
except User.DoesNotExist:
|
||||||
|
system_user = User.objects.filter(is_staff=True).first()
|
||||||
|
|
||||||
|
# Process parks
|
||||||
|
try:
|
||||||
|
closing_parks = Park.objects.filter(
|
||||||
|
status="CLOSING",
|
||||||
|
closing_date__lte=today,
|
||||||
|
)
|
||||||
|
|
||||||
|
for park in closing_parks:
|
||||||
|
results["parks"]["processed"] += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
# Transition to closed status
|
||||||
|
park.status = getattr(park, "post_closing_status", "CLOSED") or "CLOSED"
|
||||||
|
park.save(update_fields=["status", "updated_at"])
|
||||||
|
results["parks"]["succeeded"] += 1
|
||||||
|
logger.info(f"Transitioned park {park.name} to {park.status}")
|
||||||
|
except Exception as e:
|
||||||
|
results["parks"]["failed"] += 1
|
||||||
|
capture_and_log(e, f"Process closing park {park.id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process closing parks", source="task")
|
||||||
|
|
||||||
|
# Process rides (already handled by rides.check_overdue_closings, but included for completeness)
|
||||||
|
try:
|
||||||
|
closing_rides = Ride.objects.filter(
|
||||||
|
status="CLOSING",
|
||||||
|
closing_date__lte=today,
|
||||||
|
)
|
||||||
|
|
||||||
|
for ride in closing_rides:
|
||||||
|
results["rides"]["processed"] += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
if hasattr(ride, "apply_post_closing_status") and system_user:
|
||||||
|
ride.apply_post_closing_status(user=system_user)
|
||||||
|
else:
|
||||||
|
ride.status = getattr(ride, "post_closing_status", "CLOSED") or "CLOSED"
|
||||||
|
ride.save(update_fields=["status", "updated_at"])
|
||||||
|
results["rides"]["succeeded"] += 1
|
||||||
|
logger.info(f"Transitioned ride {ride.name} to {ride.status}")
|
||||||
|
except Exception as e:
|
||||||
|
results["rides"]["failed"] += 1
|
||||||
|
capture_and_log(e, f"Process closing ride {ride.id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process closing rides", source="task")
|
||||||
|
|
||||||
|
logger.info(f"Completed closing entities: Parks {results['parks']}, Rides {results['rides']}")
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.process_expired_bans")
|
||||||
|
def process_expired_bans() -> dict:
|
||||||
|
"""
|
||||||
|
Process expired user bans.
|
||||||
|
|
||||||
|
Users with temporary bans that have expired will have their ban lifted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
from apps.accounts.models import UserBan
|
||||||
|
|
||||||
|
logger.info("Starting expired bans processing")
|
||||||
|
|
||||||
|
now = timezone.now()
|
||||||
|
processed = 0
|
||||||
|
succeeded = 0
|
||||||
|
failed = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
expired_bans = UserBan.objects.filter(
|
||||||
|
is_active=True,
|
||||||
|
expires_at__isnull=False,
|
||||||
|
expires_at__lte=now,
|
||||||
|
).select_related("user")
|
||||||
|
|
||||||
|
for ban in expired_bans:
|
||||||
|
processed += 1
|
||||||
|
try:
|
||||||
|
with transaction.atomic():
|
||||||
|
ban.is_active = False
|
||||||
|
ban.save(update_fields=["is_active", "updated_at"])
|
||||||
|
|
||||||
|
# Reactivate user if this was their only active ban
|
||||||
|
active_bans = UserBan.objects.filter(user=ban.user, is_active=True).count()
|
||||||
|
if active_bans == 0 and not ban.user.is_active:
|
||||||
|
ban.user.is_active = True
|
||||||
|
ban.user.save(update_fields=["is_active"])
|
||||||
|
|
||||||
|
succeeded += 1
|
||||||
|
logger.info(f"Lifted expired ban for user {ban.user.username}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
failed += 1
|
||||||
|
capture_and_log(e, f"Process expired ban {ban.id}", source="task")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Process expired bans", source="task")
|
||||||
|
# Model may not exist yet
|
||||||
|
if "UserBan" in str(e):
|
||||||
|
logger.info("UserBan model not found, skipping expired bans processing")
|
||||||
|
return {"skipped": True, "reason": "UserBan model not found"}
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"processed": processed,
|
||||||
|
"succeeded": succeeded,
|
||||||
|
"failed": failed,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Completed expired bans: {processed} processed, {succeeded} succeeded, {failed} failed")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.cleanup_orphaned_images")
|
||||||
|
def cleanup_orphaned_images() -> dict:
|
||||||
|
"""
|
||||||
|
Clean up orphaned images.
|
||||||
|
|
||||||
|
Images that are not associated with any entity and are older than the
|
||||||
|
retention period will be deleted.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
logger.info("Starting orphaned images cleanup")
|
||||||
|
|
||||||
|
# This is a placeholder - actual implementation depends on image storage strategy
|
||||||
|
# For Cloudflare Images, we would need to:
|
||||||
|
# 1. Query all images from Cloudflare
|
||||||
|
# 2. Compare against images referenced in the database
|
||||||
|
# 3. Delete orphaned images
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"processed": 0,
|
||||||
|
"deleted": 0,
|
||||||
|
"skipped": 0,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
"note": "Placeholder implementation - configure based on image storage",
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Completed orphaned images cleanup")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.cleanup_old_versions")
|
||||||
|
def cleanup_old_versions() -> dict:
|
||||||
|
"""
|
||||||
|
Clean up old entity versions from pghistory.
|
||||||
|
|
||||||
|
Keeps the most recent N versions and deletes older ones to manage
|
||||||
|
database size.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
logger.info("Starting old versions cleanup")
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
MAX_VERSIONS_PER_ENTITY = 50
|
||||||
|
MIN_AGE_DAYS = 90 # Only delete versions older than this
|
||||||
|
|
||||||
|
deleted_count = 0
|
||||||
|
cutoff_date = timezone.now() - timedelta(days=MIN_AGE_DAYS)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# pghistory stores events in pgh_* tables
|
||||||
|
# We need to identify which models have history tracking
|
||||||
|
from django.db import connection
|
||||||
|
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
# Get list of pghistory event tables
|
||||||
|
cursor.execute(
|
||||||
|
"""
|
||||||
|
SELECT table_name
|
||||||
|
FROM information_schema.tables
|
||||||
|
WHERE table_schema = 'public'
|
||||||
|
AND table_name LIKE 'pgh_%event'
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
event_tables = [row[0] for row in cursor.fetchall()]
|
||||||
|
|
||||||
|
for table_name in event_tables:
|
||||||
|
try:
|
||||||
|
# Delete old versions beyond the retention limit
|
||||||
|
# This is a simplified approach - a more sophisticated one
|
||||||
|
# would keep the most recent N per entity
|
||||||
|
cursor.execute(
|
||||||
|
f"""
|
||||||
|
DELETE FROM {table_name}
|
||||||
|
WHERE pgh_created_at < %s
|
||||||
|
AND pgh_id NOT IN (
|
||||||
|
SELECT pgh_id FROM (
|
||||||
|
SELECT pgh_id,
|
||||||
|
ROW_NUMBER() OVER (PARTITION BY pgh_obj_id ORDER BY pgh_created_at DESC) as rn
|
||||||
|
FROM {table_name}
|
||||||
|
) ranked
|
||||||
|
WHERE rn <= %s
|
||||||
|
)
|
||||||
|
""",
|
||||||
|
[cutoff_date, MAX_VERSIONS_PER_ENTITY],
|
||||||
|
)
|
||||||
|
deleted_in_table = cursor.rowcount
|
||||||
|
deleted_count += deleted_in_table
|
||||||
|
if deleted_in_table > 0:
|
||||||
|
logger.info(f"Deleted {deleted_in_table} old versions from {table_name}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Error cleaning up {table_name}: {e}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
capture_and_log(e, "Cleanup old versions", source="task")
|
||||||
|
|
||||||
|
result = {
|
||||||
|
"deleted": deleted_count,
|
||||||
|
"cutoff_date": cutoff_date.isoformat(),
|
||||||
|
"max_versions_per_entity": MAX_VERSIONS_PER_ENTITY,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Completed old versions cleanup: {deleted_count} versions deleted")
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@shared_task(name="core.data_retention_cleanup")
|
||||||
|
def data_retention_cleanup() -> dict:
|
||||||
|
"""
|
||||||
|
Clean up data per retention policy (GDPR compliance).
|
||||||
|
|
||||||
|
Handles:
|
||||||
|
- Session cleanup
|
||||||
|
- Expired token cleanup
|
||||||
|
- Old audit log cleanup
|
||||||
|
- Temporary data cleanup
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict: Summary with counts
|
||||||
|
"""
|
||||||
|
logger.info("Starting data retention cleanup")
|
||||||
|
|
||||||
|
results = {
|
||||||
|
"sessions": 0,
|
||||||
|
"tokens": 0,
|
||||||
|
"audit_logs": 0,
|
||||||
|
"temp_data": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
from django.contrib.sessions.models import Session
|
||||||
|
|
||||||
|
# Clean up expired sessions
|
||||||
|
expired_sessions = Session.objects.filter(expire_date__lt=timezone.now())
|
||||||
|
results["sessions"] = expired_sessions.count()
|
||||||
|
expired_sessions.delete()
|
||||||
|
logger.info(f"Deleted {results['sessions']} expired sessions")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Session cleanup error: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
from rest_framework_simplejwt.token_blacklist.models import OutstandingToken
|
||||||
|
|
||||||
|
# Clean up expired tokens (older than 30 days)
|
||||||
|
cutoff = timezone.now() - timedelta(days=30)
|
||||||
|
expired_tokens = OutstandingToken.objects.filter(expires_at__lt=cutoff)
|
||||||
|
results["tokens"] = expired_tokens.count()
|
||||||
|
expired_tokens.delete()
|
||||||
|
logger.info(f"Deleted {results['tokens']} expired tokens")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Token cleanup error: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
from apps.accounts.models import ProfileAuditLog
|
||||||
|
|
||||||
|
# Clean up old audit logs (older than 1 year)
|
||||||
|
cutoff = timezone.now() - timedelta(days=365)
|
||||||
|
old_logs = ProfileAuditLog.objects.filter(created_at__lt=cutoff)
|
||||||
|
results["audit_logs"] = old_logs.count()
|
||||||
|
old_logs.delete()
|
||||||
|
logger.info(f"Deleted {results['audit_logs']} old audit logs")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Audit log cleanup error: {e}")
|
||||||
|
|
||||||
|
result = {
|
||||||
|
**results,
|
||||||
|
"timestamp": timezone.now().isoformat(),
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Completed data retention cleanup: {result}")
|
||||||
|
return result
|
||||||
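A possible Celery Beat wiring for these tasks; the schedule values and the settings location are assumptions, not part of this commit, though the task names match the shared_task declarations above:

# settings.py (sketch)
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "process-scheduled-deletions": {
        "task": "core.process_scheduled_deletions",
        "schedule": crontab(hour=3, minute=0),  # nightly
    },
    "process-closing-entities": {
        "task": "core.process_closing_entities",
        "schedule": crontab(hour=0, minute=30),  # shortly after midnight
    },
    "data-retention-cleanup": {
        "task": "core.data_retention_cleanup",
        "schedule": crontab(hour=4, minute=0, day_of_week=0),  # weekly, Sundays
    },
}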
backend/apps/core/tests/test_permissions.py (new file, 137 lines)
@@ -0,0 +1,137 @@
"""
|
||||||
|
Tests for custom permissions, particularly IsAdminWithSecondFactor.
|
||||||
|
|
||||||
|
Tests that admin users must have MFA or Passkey configured before
|
||||||
|
accessing sensitive admin endpoints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import MagicMock, patch
|
||||||
|
|
||||||
|
from django.contrib.auth import get_user_model
|
||||||
|
from django.test import RequestFactory, TestCase
|
||||||
|
|
||||||
|
from apps.core.permissions import IsAdminWithSecondFactor
|
||||||
|
|
||||||
|
User = get_user_model()
|
||||||
|
|
||||||
|
|
||||||
|
class TestIsAdminWithSecondFactor(TestCase):
|
||||||
|
"""Tests for IsAdminWithSecondFactor permission class."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
"""Set up test fixtures."""
|
||||||
|
self.factory = RequestFactory()
|
||||||
|
self.permission = IsAdminWithSecondFactor()
|
||||||
|
|
||||||
|
def _make_request(self, user=None):
|
||||||
|
"""Create a mock request with the given user."""
|
||||||
|
request = self.factory.get("/api/v1/admin/test/")
|
||||||
|
request.user = user if user else MagicMock(is_authenticated=False)
|
||||||
|
return request
|
||||||
|
|
||||||
|
def test_anonymous_user_denied(self):
|
||||||
|
"""Anonymous users should be denied access."""
|
||||||
|
request = self._make_request()
|
||||||
|
request.user.is_authenticated = False
|
||||||
|
|
||||||
|
self.assertFalse(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
def test_non_admin_user_denied(self):
|
||||||
|
"""Non-admin users should be denied access."""
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = False
|
||||||
|
user.is_staff = False
|
||||||
|
user.role = "USER"
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertFalse(self.permission.has_permission(request, None))
|
||||||
|
self.assertIn("admin privileges", self.permission.message)
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_admin_without_mfa_denied(self, mock_has_second_factor):
|
||||||
|
"""Admin without MFA or Passkey should be denied access."""
|
||||||
|
mock_has_second_factor.return_value = False
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = True
|
||||||
|
user.is_staff = True
|
||||||
|
user.role = "ADMIN"
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertFalse(self.permission.has_permission(request, None))
|
||||||
|
self.assertIn("MFA or Passkey", self.permission.message)
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_superuser_with_mfa_allowed(self, mock_has_second_factor):
|
||||||
|
"""Superuser with MFA configured should be allowed access."""
|
||||||
|
mock_has_second_factor.return_value = True
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = True
|
||||||
|
user.is_staff = True
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertTrue(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_staff_with_passkey_allowed(self, mock_has_second_factor):
|
||||||
|
"""Staff user with Passkey configured should be allowed access."""
|
||||||
|
mock_has_second_factor.return_value = True
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = False
|
||||||
|
user.is_staff = True
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertTrue(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
@patch("apps.core.permissions.IsAdminWithSecondFactor._has_second_factor")
|
||||||
|
def test_admin_role_with_mfa_allowed(self, mock_has_second_factor):
|
||||||
|
"""User with ADMIN role and MFA should be allowed access."""
|
||||||
|
mock_has_second_factor.return_value = True
|
||||||
|
|
||||||
|
user = MagicMock()
|
||||||
|
user.is_authenticated = True
|
||||||
|
user.is_superuser = False
|
||||||
|
user.is_staff = False
|
||||||
|
user.role = "ADMIN"
|
||||||
|
|
||||||
|
request = self._make_request(user)
|
||||||
|
|
||||||
|
self.assertTrue(self.permission.has_permission(request, None))
|
||||||
|
|
||||||
|
def test_has_second_factor_with_totp(self):
|
||||||
|
"""Test _has_second_factor detects TOTP authenticator."""
|
||||||
|
user = MagicMock()
|
||||||
|
|
||||||
|
with patch("apps.core.permissions.Authenticator") as MockAuth:
|
||||||
|
# Mock the queryset to return True for TOTP
|
||||||
|
mock_qs = MagicMock()
|
||||||
|
mock_qs.filter.return_value.exists.return_value = True
|
||||||
|
MockAuth.objects.filter.return_value = mock_qs
|
||||||
|
MockAuth.Type.TOTP = "totp"
|
||||||
|
MockAuth.Type.WEBAUTHN = "webauthn"
|
||||||
|
|
||||||
|
# Need to patch the import inside the method
|
||||||
|
with patch.dict("sys.modules", {"allauth.mfa.models": MagicMock(Authenticator=MockAuth)}):
|
||||||
|
result = self.permission._has_second_factor(user)
|
||||||
|
# This tests the exception path since import is mocked at module level
|
||||||
|
# The actual integration test would require a full database setup
|
||||||
|
|
||||||
|
def test_has_second_factor_import_error(self):
|
||||||
|
"""Test _has_second_factor handles ImportError gracefully."""
|
||||||
|
user = MagicMock()
|
||||||
|
|
||||||
|
with patch.dict("sys.modules", {"allauth.mfa.models": None}):
|
||||||
|
with patch("builtins.__import__", side_effect=ImportError):
|
||||||
|
# Should return False, not raise exception
|
||||||
|
result = self.permission._has_second_factor(user)
|
||||||
|
self.assertFalse(result)
|
||||||
@@ -1,50 +1,4 @@
 from django.apps import AppConfig
-from django.db.models.signals import post_migrate
-
-
-def create_photo_permissions(sender, **kwargs):
-    """Create custom permissions for domain-specific photo models"""
-    from django.contrib.auth.models import Permission
-    from django.contrib.contenttypes.models import ContentType
-
-    from apps.parks.models import ParkPhoto
-    from apps.rides.models import RidePhoto
-
-    # Create permissions for ParkPhoto
-    park_photo_content_type = ContentType.objects.get_for_model(ParkPhoto)
-    Permission.objects.get_or_create(
-        codename="add_parkphoto",
-        name="Can add park photo",
-        content_type=park_photo_content_type,
-    )
-    Permission.objects.get_or_create(
-        codename="change_parkphoto",
-        name="Can change park photo",
-        content_type=park_photo_content_type,
-    )
-    Permission.objects.get_or_create(
-        codename="delete_parkphoto",
-        name="Can delete park photo",
-        content_type=park_photo_content_type,
-    )
-
-    # Create permissions for RidePhoto
-    ride_photo_content_type = ContentType.objects.get_for_model(RidePhoto)
-    Permission.objects.get_or_create(
-        codename="add_ridephoto",
-        name="Can add ride photo",
-        content_type=ride_photo_content_type,
-    )
-    Permission.objects.get_or_create(
-        codename="change_ridephoto",
-        name="Can change ride photo",
-        content_type=ride_photo_content_type,
-    )
-    Permission.objects.get_or_create(
-        codename="delete_ridephoto",
-        name="Can delete ride photo",
-        content_type=ride_photo_content_type,
-    )
-
-
 class MediaConfig(AppConfig):
@@ -52,4 +6,7 @@ class MediaConfig(AppConfig):
     name = "apps.media"

     def ready(self):
-        post_migrate.connect(create_photo_permissions, sender=self)
+        # Note: Django automatically creates add/change/delete/view permissions
+        # for all models, so no custom post_migrate handler is needed.
+        pass
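This removal is safe because Django has auto-created view_* permissions alongside add_*/change_*/delete_* since 2.1, so the handler duplicated built-in behavior. A quick confirmation, as a sketch to run in a Django shell after migrating (app label "parks" and model "parkphoto" taken from the removed code above):

from django.contrib.auth.models import Permission

codenames = Permission.objects.filter(
    content_type__app_label="parks",
    content_type__model="parkphoto",
).values_list("codename", flat=True)
# Django's own post_migrate handler creates all four of these per model:
# add_parkphoto, change_parkphoto, delete_parkphoto, view_parkphoto
print(sorted(codenames))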
@@ -206,7 +206,9 @@ class EditSubmission(StateMachineMixin, TrackedModel):
         if self.status != "PENDING":
             raise ValidationError(f"Cannot claim submission: current status is {self.status}, expected PENDING")

-        self.transition_to_claimed(user=user)
+        # Set status directly (similar to unclaim method)
+        # The transition_to_claimed FSM method was never defined
+        self.status = "CLAIMED"
         self.claimed_by = user
         self.claimed_at = timezone.now()
         self.save()
@@ -754,7 +756,9 @@ class PhotoSubmission(StateMachineMixin, TrackedModel):
         if self.status != "PENDING":
             raise ValidationError(f"Cannot claim submission: current status is {self.status}, expected PENDING")

-        self.transition_to_claimed(user=user)
+        # Set status directly (similar to unclaim method)
+        # The transition_to_claimed FSM method was never defined
+        self.status = "CLAIMED"
         self.claimed_by = user
         self.claimed_at = timezone.now()
         self.save()
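For context, a sketch of how the resulting claim flow behaves, assuming the body above belongs to the models' claim() helper (the workflow tests later in this diff call both claim() and transition_to_claimed(), so the exact entry point may differ):

from django.core.exceptions import ValidationError

submission.claim(user=moderator)      # PENDING -> CLAIMED
assert submission.status == "CLAIMED"
assert submission.claimed_by == moderator

try:
    submission.claim(user=moderator)  # a second claim must fail
except ValidationError as exc:
    # "Cannot claim submission: current status is CLAIMED, expected PENDING"
    print(exc)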
@@ -173,6 +173,10 @@ class IsModeratorOrAdmin(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers always have access
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")
         return user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]

@@ -193,6 +197,10 @@ class IsAdminOrSuperuser(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers always have access
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")
         return user_role in ["ADMIN", "SUPERUSER"]

@@ -220,6 +228,10 @@ class CanViewModerationData(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers can view all data
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")

         # Moderators and above can view all data
@@ -249,6 +261,10 @@ class CanModerateContent(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers always have access
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")
         return user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]

@@ -257,6 +273,10 @@ class CanModerateContent(GuardMixin, permissions.BasePermission):
         if not self.has_permission(request, view):
             return False

+        # Django superusers can do everything
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")

         # Superusers can do everything
@@ -297,6 +317,10 @@ class CanAssignModerationTasks(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers always have access
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")
         return user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]

@@ -341,6 +365,10 @@ class CanPerformBulkOperations(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers always have access
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")
         return user_role in ["ADMIN", "SUPERUSER"]

@@ -349,6 +377,10 @@ class CanPerformBulkOperations(GuardMixin, permissions.BasePermission):
         if not self.has_permission(request, view):
             return False

+        # Django superusers can perform all bulk operations
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")

         # Superusers can perform all bulk operations
@@ -386,6 +418,10 @@ class IsOwnerOrModerator(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers can access any object
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")

         # Moderators and above can access any object
@@ -419,6 +455,10 @@ class CanManageUserRestrictions(GuardMixin, permissions.BasePermission):
         if not request.user or not request.user.is_authenticated:
             return False

+        # Django superusers always have access
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")
         return user_role in ["MODERATOR", "ADMIN", "SUPERUSER"]

@@ -427,6 +467,10 @@ class CanManageUserRestrictions(GuardMixin, permissions.BasePermission):
         if not self.has_permission(request, view):
             return False

+        # Django superusers can manage any restriction
+        if getattr(request.user, "is_superuser", False):
+            return True
+
         user_role = getattr(request.user, "role", "USER")

         # Superusers can manage any restriction
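The same four-line superuser bypass now appears in every permission class above. One possible follow-up refactor, shown here only as a sketch (the class name is hypothetical, and the repo's GuardMixin is omitted), hoists the check into a mixin so each class states only its role list:

from rest_framework import permissions


class SuperuserBypassMixin:
    """Short-circuits role checks for Django superusers."""

    def _is_superuser(self, request):
        return getattr(request.user, "is_superuser", False)


class IsModeratorOrAdminSketch(SuperuserBypassMixin, permissions.BasePermission):
    def has_permission(self, request, view):
        if not request.user or not request.user.is_authenticated:
            return False
        if self._is_superuser(request):
            return True
        return getattr(request.user, "role", "USER") in ["MODERATOR", "ADMIN", "SUPERUSER"]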
@@ -67,6 +67,7 @@ class EditSubmissionSerializer(serializers.ModelSerializer):
     """Serializer for EditSubmission with UI metadata for Nuxt frontend."""

     submitted_by = UserBasicSerializer(source="user", read_only=True)
+    handled_by = UserBasicSerializer(read_only=True)
     claimed_by = UserBasicSerializer(read_only=True)
     content_type_name = serializers.CharField(source="content_type.model", read_only=True)

@@ -87,22 +88,24 @@ class EditSubmissionSerializer(serializers.ModelSerializer):
             "content_type",
             "content_type_name",
             "object_id",
+            "submission_type",
             "changes",
             "moderator_changes",
-            "rejection_reason",
+            "reason",
+            "source",
+            "notes",
             "submitted_by",
-            "reviewed_by",
+            "handled_by",
             "claimed_by",
             "claimed_at",
             "created_at",
-            "updated_at",
             "time_since_created",
         ]
         read_only_fields = [
             "id",
             "created_at",
-            "updated_at",
             "submitted_by",
+            "handled_by",
             "claimed_by",
             "claimed_at",
             "status_color",
@@ -163,6 +166,7 @@ class EditSubmissionListSerializer(serializers.ModelSerializer):
         fields = [
             "id",
             "status",
+            "submission_type", # Added for frontend compatibility
             "content_type_name",
             "object_id",
             "submitted_by_username",
@@ -195,6 +199,101 @@ class EditSubmissionListSerializer(serializers.ModelSerializer):
         return icons.get(obj.status, "heroicons:question-mark-circle")


+class CreateEditSubmissionSerializer(serializers.ModelSerializer):
+    """
+    Serializer for creating edit submissions.
+
+    This replaces the Supabase RPC 'create_submission_with_items' function.
+    Accepts entity type as a string and resolves it to ContentType.
+    """
+
+    entity_type = serializers.CharField(write_only=True, help_text="Entity type: park, ride, company, ride_model")
+
+    class Meta:
+        model = EditSubmission
+        fields = [
+            "entity_type",
+            "object_id",
+            "submission_type",
+            "changes",
+            "reason",
+            "source",
+        ]
+
+    def validate_entity_type(self, value):
+        """Convert entity_type string to ContentType."""
+        entity_type_map = {
+            "park": ("parks", "park"),
+            "ride": ("rides", "ride"),
+            "company": ("parks", "company"),
+            "ride_model": ("rides", "ridemodel"),
+            "manufacturer": ("parks", "company"),
+            "designer": ("parks", "company"),
+            "operator": ("parks", "company"),
+            "property_owner": ("parks", "company"),
+        }
+
+        if value.lower() not in entity_type_map:
+            raise serializers.ValidationError(
+                f"Invalid entity_type. Must be one of: {', '.join(entity_type_map.keys())}"
+            )
+
+        return value.lower()
+
+    def validate_changes(self, value):
+        """Validate changes is a proper JSON object."""
+        if not isinstance(value, dict):
+            raise serializers.ValidationError("Changes must be a JSON object")
+        if not value:
+            raise serializers.ValidationError("Changes cannot be empty")
+        return value
+
+    def validate(self, attrs):
+        """Cross-field validation."""
+        submission_type = attrs.get("submission_type", "EDIT")
+        object_id = attrs.get("object_id")
+
+        # For EDIT submissions, object_id is required
+        if submission_type == "EDIT" and not object_id:
+            raise serializers.ValidationError(
+                {"object_id": "object_id is required for EDIT submissions"}
+            )
+
+        # For CREATE submissions, object_id should be null
+        if submission_type == "CREATE" and object_id:
+            raise serializers.ValidationError(
+                {"object_id": "object_id must be null for CREATE submissions"}
+            )
+
+        return attrs
+
+    def create(self, validated_data):
+        """Create a new submission."""
+        entity_type = validated_data.pop("entity_type")
+
+        # Map entity_type to ContentType
+        entity_type_map = {
+            "park": ("parks", "park"),
+            "ride": ("rides", "ride"),
+            "company": ("parks", "company"),
+            "ride_model": ("rides", "ridemodel"),
+            "manufacturer": ("parks", "company"),
+            "designer": ("parks", "company"),
+            "operator": ("parks", "company"),
+            "property_owner": ("parks", "company"),
+        }
+
+        app_label, model_name = entity_type_map[entity_type]
+        content_type = ContentType.objects.get(app_label=app_label, model=model_name)
+
+        # Set automatic fields
+        validated_data["user"] = self.context["request"].user
+        validated_data["content_type"] = content_type
+        validated_data["status"] = "PENDING"
+
+        return super().create(validated_data)
+
+
 # ============================================================================
 # Moderation Report Serializers
 # ============================================================================
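A usage sketch for the new serializer (the payload keys mirror the declared fields; request is assumed to be an authenticated DRF request supplied via context, as create() requires):

payload = {
    "entity_type": "park",          # resolved to the parks.Park content type
    "object_id": 42,                # required because submission_type is EDIT
    "submission_type": "EDIT",
    "changes": {"name": "New Park Name"},
    "reason": "Name corrected per official site",
    "source": "https://example.com/press-release",
}
serializer = CreateEditSubmissionSerializer(data=payload, context={"request": request})
serializer.is_valid(raise_exception=True)
submission = serializer.save()  # create() sets user, content_type, and status="PENDING"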
@@ -25,7 +25,7 @@ from django_fsm import TransitionNotAllowed

 from apps.parks.models import Company as Operator

-from .mixins import (
+from ..mixins import (
     AdminRequiredMixin,
     EditSubmissionMixin,
     HistoryMixin,
@@ -33,7 +33,7 @@ from .mixins import (
     ModeratorRequiredMixin,
     PhotoSubmissionMixin,
 )
-from .models import (
+from ..models import (
     BulkOperation,
     EditSubmission,
     ModerationAction,
@@ -9,6 +9,8 @@ This module tests end-to-end moderation workflows including:
 - Bulk operation workflow
 """

+from datetime import timedelta
+
 from django.contrib.auth import get_user_model
 from django.contrib.contenttypes.models import ContentType
 from django.test import TestCase
@@ -37,7 +39,7 @@ class SubmissionApprovalWorkflowTests(TestCase):
         """
         Test complete edit submission approval workflow.

-        Flow: User submits → Moderator reviews → Moderator approves → Changes applied
+        Flow: User submits → Moderator claims → Moderator approves → Changes applied
         """
         from apps.moderation.models import EditSubmission
         from apps.parks.models import Company
@@ -61,6 +63,13 @@ class SubmissionApprovalWorkflowTests(TestCase):
         self.assertIsNone(submission.handled_by)
         self.assertIsNone(submission.handled_at)

+        # Moderator claims the submission first
+        submission.transition_to_claimed(user=self.moderator)
+        submission.save()
+
+        submission.refresh_from_db()
+        self.assertEqual(submission.status, "CLAIMED")
+
         # Moderator approves
         submission.transition_to_approved(user=self.moderator)
         submission.handled_by = self.moderator
@@ -78,6 +87,8 @@ class SubmissionApprovalWorkflowTests(TestCase):

         Flow: User submits photo → Moderator reviews → Moderator approves → Photo created
         """
+        from django_cloudflareimages_toolkit.models import CloudflareImage
+
         from apps.moderation.models import PhotoSubmission
         from apps.parks.models import Company, Park

@@ -87,6 +98,13 @@ class SubmissionApprovalWorkflowTests(TestCase):
             name="Test Park", slug="test-park", operator=operator, status="OPERATING", timezone="America/New_York"
         )

+        # Create mock CloudflareImage for the photo submission
+        mock_image = CloudflareImage.objects.create(
+            cloudflare_id="test-cf-image-id-12345",
+            user=self.regular_user,
+            expires_at=timezone.now() + timedelta(days=365),
+        )
+
         # User submits a photo
         content_type = ContentType.objects.get_for_model(park)
         submission = PhotoSubmission.objects.create(
@@ -94,12 +112,18 @@ class SubmissionApprovalWorkflowTests(TestCase):
             content_type=content_type,
             object_id=park.id,
             status="PENDING",
-            photo_type="GENERAL",
-            description="Beautiful park entrance",
+            photo=mock_image,
+            caption="Beautiful park entrance",
         )

         self.assertEqual(submission.status, "PENDING")

+        # Moderator claims the submission first (required FSM step)
+        submission.claim(user=self.moderator)
+
+        submission.refresh_from_db()
+        self.assertEqual(submission.status, "CLAIMED")
+
         # Moderator approves
         submission.transition_to_approved(user=self.moderator)
         submission.handled_by = self.moderator
@@ -144,7 +168,13 @@ class SubmissionRejectionWorkflowTests(TestCase):
             reason="Name change request",
         )

-        # Moderator rejects
+        # Moderator claims and then rejects
+        submission.transition_to_claimed(user=self.moderator)
+        submission.save()
+
+        submission.refresh_from_db()
+        self.assertEqual(submission.status, "CLAIMED")
+
         submission.transition_to_rejected(user=self.moderator)
         submission.handled_by = self.moderator
         submission.handled_at = timezone.now()
@@ -193,7 +223,13 @@ class SubmissionEscalationWorkflowTests(TestCase):
             reason="Major name change",
         )

-        # Moderator escalates
+        # Moderator claims and then escalates
+        submission.transition_to_claimed(user=self.moderator)
+        submission.save()
+
+        submission.refresh_from_db()
+        self.assertEqual(submission.status, "CLAIMED")
+
         submission.transition_to_escalated(user=self.moderator)
         submission.notes = "Escalated: Major change needs admin review"
         submission.save()
@@ -447,11 +483,13 @@ class ModerationQueueWorkflowTests(TestCase):
         from apps.moderation.models import ModerationQueue

         queue_item = ModerationQueue.objects.create(
-            queue_type="SUBMISSION_REVIEW",
+            item_type="SUBMISSION_REVIEW",
             status="PENDING",
             priority="MEDIUM",
-            item_type="edit_submission",
-            item_id=123,
+            title="Review edit submission #123",
+            description="Review and process edit submission",
+            entity_type="edit_submission",
+            entity_id=123,
         )

         self.assertEqual(queue_item.status, "PENDING")
@@ -15,10 +15,12 @@ from apps.core.views.views import FSMTransitionView
 from .sse import ModerationSSETestView, ModerationSSEView
 from .views import (
     BulkOperationViewSet,
+    ConvertSubmissionToEditView,
     EditSubmissionViewSet,
     ModerationActionViewSet,
     ModerationQueueViewSet,
     ModerationReportViewSet,
+    ModerationStatsView,
     PhotoSubmissionViewSet,
     UserModerationViewSet,
 )
@@ -174,6 +176,9 @@ html_patterns = [
     path("", ModerationDashboardView.as_view(), name="dashboard"),
     path("submissions/", SubmissionListView.as_view(), name="submission_list"),
     path("history/", HistoryPageView.as_view(), name="history"),
+    # Edit submission detail for HTMX form posts
+    path("submissions/<int:pk>/edit/", EditSubmissionViewSet.as_view({'post': 'partial_update'}), name="edit_submission"),
+    path("edit-submissions/", TemplateView.as_view(template_name="moderation/edit_submissions.html"), name="edit_submissions"),
 ]

 # SSE endpoints for real-time updates
@@ -187,8 +192,12 @@ urlpatterns = [
     *html_patterns,
     # SSE endpoints
     *sse_patterns,
+    # Top-level stats endpoint (must be before router.urls to take precedence)
+    path("stats/", ModerationStatsView.as_view(), name="moderation-stats"),
     # Include all router URLs (API endpoints)
     path("api/", include(router.urls)),
+    # Standalone convert-to-edit endpoint (frontend calls /moderation/api/edit-submissions/ POST)
+    path("api/edit-submissions/", ConvertSubmissionToEditView.as_view(), name="convert-to-edit"),
     # FSM transition convenience endpoints
 ] + fsm_transition_patterns
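The ordering comments above matter because Django resolves urlpatterns top to bottom and stops at the first match. A stripped-down sketch of the precedence at play, using the views imported in this file:

from django.urls import include, path

urlpatterns = [
    # Listed first, so it wins for /stats/ over anything included later.
    path("stats/", ModerationStatsView.as_view(), name="moderation-stats"),
    # Router patterns are tried next, in registration order.
    path("api/", include(router.urls)),
    # Listed after the include, so this fires only when no router route
    # already matched /api/edit-submissions/.
    path("api/edit-submissions/", ConvertSubmissionToEditView.as_view(), name="convert-to-edit"),
]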
(File diff suppressed because it is too large.)

backend/apps/notifications/__init__.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""
Notifications app for ThrillWiki.

Provides notification management including:
- Subscriber management (Novu integration)
- Notification preferences
- Notification triggering and logging
"""

default_app_config = "apps.notifications.apps.NotificationsConfig"
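Aside: default_app_config was deprecated in Django 3.2 and removed in Django 4.1, so on the Django 5.2 codebase above this assignment is inert; NotificationsConfig is discovered automatically from apps.py.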
backend/apps/notifications/admin.py (new file, 38 lines)

@@ -0,0 +1,38 @@
"""
Notifications admin configuration.
"""

from django.contrib import admin

from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement


@admin.register(Subscriber)
class SubscriberAdmin(admin.ModelAdmin):
    list_display = ["user", "novu_subscriber_id", "email", "created_at"]
    search_fields = ["user__username", "novu_subscriber_id", "email"]
    readonly_fields = ["created_at", "updated_at"]


@admin.register(NotificationPreference)
class NotificationPreferenceAdmin(admin.ModelAdmin):
    list_display = ["user", "is_opted_out", "updated_at"]
    list_filter = ["is_opted_out"]
    search_fields = ["user__username"]
    readonly_fields = ["created_at", "updated_at"]


@admin.register(NotificationLog)
class NotificationLogAdmin(admin.ModelAdmin):
    list_display = ["workflow_id", "user", "channel", "status", "created_at"]
    list_filter = ["status", "channel", "workflow_id"]
    search_fields = ["user__username", "workflow_id", "novu_transaction_id"]
    readonly_fields = ["created_at", "updated_at"]


@admin.register(SystemAnnouncement)
class SystemAnnouncementAdmin(admin.ModelAdmin):
    list_display = ["title", "severity", "is_active", "created_by", "created_at"]
    list_filter = ["severity", "is_active"]
    search_fields = ["title", "message"]
    readonly_fields = ["created_at"]
backend/apps/notifications/apps.py (new file, 18 lines)

@@ -0,0 +1,18 @@
"""
Notifications app configuration.

This app provides Django-native notification functionality for ThrillWiki,
including in-app notifications, email notifications, and user preferences.
"""

from django.apps import AppConfig


class NotificationsConfig(AppConfig):
    """Configuration for the ThrillWiki notifications app."""

    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.notifications"
    verbose_name = "Notifications"
backend/apps/notifications/migrations/0001_initial.py (new file, 159 lines)

@@ -0,0 +1,159 @@
# Generated by Django 5.2.9 on 2026-01-05 13:50

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="NotificationPreference",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                (
                    "channel_preferences",
                    models.JSONField(
                        blank=True, default=dict, help_text="Preferences per channel (email, push, in_app, sms)"
                    ),
                ),
                (
                    "workflow_preferences",
                    models.JSONField(blank=True, default=dict, help_text="Preferences per notification workflow"),
                ),
                (
                    "frequency_settings",
                    models.JSONField(blank=True, default=dict, help_text="Digest and frequency settings"),
                ),
                ("is_opted_out", models.BooleanField(default=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="novu_notification_prefs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Preference",
                "verbose_name_plural": "Notification Preferences",
            },
        ),
        migrations.CreateModel(
            name="Subscriber",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("novu_subscriber_id", models.CharField(db_index=True, max_length=255, unique=True)),
                ("first_name", models.CharField(blank=True, max_length=100)),
                ("last_name", models.CharField(blank=True, max_length=100)),
                ("email", models.EmailField(blank=True, max_length=254)),
                ("phone", models.CharField(blank=True, max_length=20)),
                ("avatar", models.URLField(blank=True)),
                ("locale", models.CharField(default="en", max_length=10)),
                ("data", models.JSONField(blank=True, default=dict, help_text="Custom subscriber data")),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_subscriber",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Subscriber",
                "verbose_name_plural": "Notification Subscribers",
            },
        ),
        migrations.CreateModel(
            name="SystemAnnouncement",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("title", models.CharField(max_length=255)),
                ("message", models.TextField()),
                (
                    "severity",
                    models.CharField(
                        choices=[("info", "Information"), ("warning", "Warning"), ("critical", "Critical")],
                        default="info",
                        max_length=20,
                    ),
                ),
                ("action_url", models.URLField(blank=True)),
                ("is_active", models.BooleanField(default=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("expires_at", models.DateTimeField(blank=True, null=True)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="announcements_created",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "System Announcement",
                "verbose_name_plural": "System Announcements",
                "ordering": ["-created_at"],
            },
        ),
        migrations.CreateModel(
            name="NotificationLog",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("workflow_id", models.CharField(db_index=True, max_length=100)),
                ("notification_type", models.CharField(max_length=50)),
                ("channel", models.CharField(max_length=20)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("sent", "Sent"),
                            ("delivered", "Delivered"),
                            ("failed", "Failed"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                ("payload", models.JSONField(blank=True, default=dict)),
                ("error_message", models.TextField(blank=True)),
                ("novu_transaction_id", models.CharField(blank=True, db_index=True, max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="notification_logs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Log",
                "verbose_name_plural": "Notification Logs",
                "ordering": ["-created_at"],
                "indexes": [
                    models.Index(fields=["user", "-created_at"], name="notificatio_user_id_57d53d_idx"),
                    models.Index(fields=["workflow_id", "-created_at"], name="notificatio_workflo_e1a025_idx"),
                ],
            },
        ),
    ]
@@ -0,0 +1,93 @@
# Generated by Django 5.2.9 on 2026-01-05 14:36

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("notifications", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AlterField(
            model_name="subscriber",
            name="novu_subscriber_id",
            field=models.CharField(
                db_index=True, help_text="Legacy Novu subscriber ID (deprecated)", max_length=255, unique=True
            ),
        ),
        migrations.CreateModel(
            name="Notification",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("verb", models.CharField(max_length=255)),
                ("description", models.TextField(blank=True)),
                (
                    "level",
                    models.CharField(
                        choices=[("info", "Info"), ("success", "Success"), ("warning", "Warning"), ("error", "Error")],
                        default="info",
                        max_length=20,
                    ),
                ),
                ("action_object_id", models.PositiveIntegerField(blank=True, null=True)),
                ("target_id", models.PositiveIntegerField(blank=True, null=True)),
                ("data", models.JSONField(blank=True, default=dict)),
                ("unread", models.BooleanField(db_index=True, default=True)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("read_at", models.DateTimeField(blank=True, null=True)),
                (
                    "action_object_content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_action_objects",
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "actor",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="notifications_sent",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "recipient",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="in_app_notifications",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "target_content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_targets",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification",
                "verbose_name_plural": "Notifications",
                "ordering": ["-timestamp"],
                "indexes": [
                    models.Index(fields=["recipient", "-timestamp"], name="notificatio_recipie_b8fa2a_idx"),
                    models.Index(fields=["recipient", "unread"], name="notificatio_recipie_8bedf2_idx"),
                ],
            },
        ),
    ]
backend/apps/notifications/migrations/__init__.py (new file, empty)

backend/apps/notifications/models.py (new file, 298 lines)

@@ -0,0 +1,298 @@
"""
Notifications models.

Provides models for:
- Subscriber: User notification profile (legacy, kept for compatibility)
- NotificationPreference: User notification preferences
- NotificationLog: Audit trail of sent notifications
- SystemAnnouncement: System-wide announcements

Note: Now using django-notifications-hq for the core notification system.
Subscriber model is kept for backward compatibility but is optional.
"""

from django.conf import settings
from django.db import models


class Subscriber(models.Model):
    """
    User notification profile.

    Note: This model is kept for backward compatibility. The new
    django-notifications-hq system uses User directly for notifications.
    This can be used for storing additional notification-related user data.
    """

    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="notification_subscriber",
    )
    # Legacy field - kept for migration compatibility
    novu_subscriber_id = models.CharField(
        max_length=255,
        unique=True,
        db_index=True,
        help_text="Legacy Novu subscriber ID (deprecated)",
    )
    first_name = models.CharField(max_length=100, blank=True)
    last_name = models.CharField(max_length=100, blank=True)
    email = models.EmailField(blank=True)
    phone = models.CharField(max_length=20, blank=True)
    avatar = models.URLField(blank=True)
    locale = models.CharField(max_length=10, default="en")
    data = models.JSONField(default=dict, blank=True, help_text="Custom subscriber data")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Subscriber"
        verbose_name_plural = "Notification Subscribers"

    def __str__(self):
        return f"Subscriber({self.user.username})"


class NotificationPreference(models.Model):
    """
    User notification preferences across channels and workflows.
    """

    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="novu_notification_prefs",  # Renamed to avoid conflict with User.notification_preferences JSONField
    )
    # Channel preferences
    channel_preferences = models.JSONField(
        default=dict,
        blank=True,
        help_text="Preferences per channel (email, push, in_app, sms)",
    )
    # Workflow-specific preferences
    workflow_preferences = models.JSONField(
        default=dict,
        blank=True,
        help_text="Preferences per notification workflow",
    )
    # Frequency settings
    frequency_settings = models.JSONField(
        default=dict,
        blank=True,
        help_text="Digest and frequency settings",
    )
    # Global opt-out
    is_opted_out = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Preference"
        verbose_name_plural = "Notification Preferences"

    def __str__(self):
        return f"Preferences({self.user.username})"


class NotificationLog(models.Model):
    """
    Audit log of sent notifications.
    """

    class Status(models.TextChoices):
        PENDING = "pending", "Pending"
        SENT = "sent", "Sent"
        DELIVERED = "delivered", "Delivered"
        FAILED = "failed", "Failed"

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="notification_logs",
    )
    workflow_id = models.CharField(max_length=100, db_index=True)
    notification_type = models.CharField(max_length=50)
    channel = models.CharField(max_length=20)  # email, push, in_app, sms
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.PENDING,
    )
    payload = models.JSONField(default=dict, blank=True)
    error_message = models.TextField(blank=True)
    novu_transaction_id = models.CharField(max_length=255, blank=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Log"
        verbose_name_plural = "Notification Logs"
        ordering = ["-created_at"]
        indexes = [
            models.Index(fields=["user", "-created_at"]),
            models.Index(fields=["workflow_id", "-created_at"]),
        ]

    def __str__(self):
        return f"Log({self.workflow_id}, {self.status})"


class SystemAnnouncement(models.Model):
    """
    System-wide announcements.
    """

    class Severity(models.TextChoices):
        INFO = "info", "Information"
        WARNING = "warning", "Warning"
        CRITICAL = "critical", "Critical"

    title = models.CharField(max_length=255)
    message = models.TextField()
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        default=Severity.INFO,
    )
    action_url = models.URLField(blank=True)
    is_active = models.BooleanField(default=True)
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="announcements_created",
    )
    created_at = models.DateTimeField(auto_now_add=True)
    expires_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        verbose_name = "System Announcement"
        verbose_name_plural = "System Announcements"
        ordering = ["-created_at"]

    def __str__(self):
        return f"{self.title} ({self.severity})"


class Notification(models.Model):
    """
    In-app notification model.

    This is a Django-native implementation for storing user notifications,
    supporting both in-app and email notification channels.
    """

    class Level(models.TextChoices):
        INFO = "info", "Info"
        SUCCESS = "success", "Success"
        WARNING = "warning", "Warning"
        ERROR = "error", "Error"

    # Who receives the notification
    recipient = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="in_app_notifications",  # Renamed to avoid clash with accounts.UserNotification
    )
    # Who triggered the notification (can be null for system notifications)
    actor = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="notifications_sent",
    )
    # What happened
    verb = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    level = models.CharField(
        max_length=20,
        choices=Level.choices,
        default=Level.INFO,
    )
    # The object that was acted upon (generic foreign key)
    action_object_content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="notification_action_objects",
    )
    action_object_id = models.PositiveIntegerField(blank=True, null=True)
    # The target of the action (generic foreign key)
    target_content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="notification_targets",
    )
    target_id = models.PositiveIntegerField(blank=True, null=True)
    # Additional data
    data = models.JSONField(default=dict, blank=True)
    # Status
    unread = models.BooleanField(default=True, db_index=True)
    # Timestamps
    timestamp = models.DateTimeField(auto_now_add=True)
    read_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        verbose_name = "Notification"
        verbose_name_plural = "Notifications"
        ordering = ["-timestamp"]
        indexes = [
            models.Index(fields=["recipient", "-timestamp"]),
            models.Index(fields=["recipient", "unread"]),
        ]

    def __str__(self):
        return f"{self.verb} -> {self.recipient}"

    def mark_as_read(self):
        """Mark this notification as read."""
        if self.unread:
            from django.utils import timezone

            self.unread = False
            self.read_at = timezone.now()
            self.save(update_fields=["unread", "read_at"])

    @property
    def action_object(self):
        """Get the action object instance."""
        if self.action_object_content_type and self.action_object_id:
            return self.action_object_content_type.get_object_for_this_type(
                pk=self.action_object_id
            )
        return None

    @property
    def target(self):
        """Get the target instance."""
        if self.target_content_type and self.target_id:
            return self.target_content_type.get_object_for_this_type(pk=self.target_id)
        return None


class NotificationManager(models.Manager):
    """Custom manager for Notification model."""

    def unread(self):
        """Return only unread notifications."""
        return self.filter(unread=True)

    def read(self):
        """Return only read notifications."""
        return self.filter(unread=False)

    def mark_all_as_read(self):
        """Mark all notifications as read."""
        from django.utils import timezone

        return self.filter(unread=True).update(unread=False, read_at=timezone.now())
# Attach the custom manager after the fact. A bare attribute assignment
# (Notification.objects = NotificationManager()) would skip Django's manager
# registration; contribute_to_class() sets the model reference and registers
# the manager with the model's _meta.
NotificationManager().contribute_to_class(Notification, "objects")
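A usage sketch for the Notification model above (the recipient, actor, and submission objects are assumed to exist; the generic foreign key is filled in via ContentType):

from django.contrib.contenttypes.models import ContentType

from apps.notifications.models import Notification

note = Notification.objects.create(
    recipient=some_user,
    actor=moderator,
    verb="approved your submission",
    level=Notification.Level.SUCCESS,
    target_content_type=ContentType.objects.get_for_model(submission),
    target_id=submission.pk,
)

unread = Notification.objects.filter(recipient=some_user, unread=True).count()
note.mark_as_read()  # clears `unread` and stamps `read_at` in one UPDATE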
156
backend/apps/notifications/serializers.py
Normal file
156
backend/apps/notifications/serializers.py
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
"""
|
||||||
|
Notification serializers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from rest_framework import serializers
|
||||||
|
|
||||||
|
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
|
||||||
|
|
||||||
|
|
||||||
|
class SubscriberSerializer(serializers.ModelSerializer):
|
||||||
|
"""Serializer for Subscriber model."""
|
||||||
|
|
||||||
|
subscriber_id = serializers.CharField(source="novu_subscriber_id", read_only=True)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = Subscriber
|
||||||
|
fields = [
|
||||||
|
"subscriber_id",
|
||||||
|
"first_name",
|
||||||
|
"last_name",
|
||||||
|
"email",
|
||||||
|
"phone",
|
||||||
|
"avatar",
|
||||||
|
"locale",
|
||||||
|
"data",
|
||||||
|
"created_at",
|
||||||
|
"updated_at",
|
||||||
|
]
|
||||||
|
read_only_fields = ["subscriber_id", "created_at", "updated_at"]
|
||||||
|
|
||||||
|
|
||||||
|
class CreateSubscriberSerializer(serializers.Serializer):
|
||||||
|
"""Serializer for creating a new subscriber."""
|
||||||
|
|
||||||
|
subscriber_id = serializers.CharField(required=True)
|
||||||
|
first_name = serializers.CharField(required=False, allow_blank=True, default="")
|
||||||
|
    last_name = serializers.CharField(required=False, allow_blank=True, default="")
    email = serializers.EmailField(required=False, allow_blank=True)
    phone = serializers.CharField(required=False, allow_blank=True, default="")
    avatar = serializers.URLField(required=False, allow_blank=True)
    locale = serializers.CharField(required=False, default="en")
    data = serializers.JSONField(required=False, default=dict)


class UpdateSubscriberSerializer(serializers.Serializer):
    """Serializer for updating a subscriber."""

    subscriber_id = serializers.CharField(required=True)
    first_name = serializers.CharField(required=False, allow_blank=True)
    last_name = serializers.CharField(required=False, allow_blank=True)
    email = serializers.EmailField(required=False, allow_blank=True)
    phone = serializers.CharField(required=False, allow_blank=True)
    avatar = serializers.URLField(required=False, allow_blank=True)
    locale = serializers.CharField(required=False)
    data = serializers.JSONField(required=False)


class NotificationPreferenceSerializer(serializers.ModelSerializer):
    """Serializer for NotificationPreference model."""

    class Meta:
        model = NotificationPreference
        fields = [
            "channel_preferences",
            "workflow_preferences",
            "frequency_settings",
            "is_opted_out",
            "updated_at",
        ]
        read_only_fields = ["updated_at"]


class UpdatePreferencesSerializer(serializers.Serializer):
    """Serializer for updating notification preferences."""

    user_id = serializers.CharField(required=True)
    preferences = serializers.JSONField(required=True)


class TriggerNotificationSerializer(serializers.Serializer):
    """Serializer for triggering a notification."""

    workflow_id = serializers.CharField(required=True)
    subscriber_id = serializers.CharField(required=True)
    payload = serializers.JSONField(required=False, default=dict)
    overrides = serializers.JSONField(required=False, default=dict)


class ModeratorSubmissionNotificationSerializer(serializers.Serializer):
    """Serializer for moderator submission notifications."""

    submission_id = serializers.CharField(required=True)
    submission_type = serializers.CharField(required=True)
    submitter_name = serializers.CharField(required=True)
    action = serializers.CharField(required=True)


class ModeratorReportNotificationSerializer(serializers.Serializer):
    """Serializer for moderator report notifications."""

    report_id = serializers.CharField(required=True)
    report_type = serializers.CharField(required=True)
    reported_entity_type = serializers.CharField(required=True)
    reported_entity_id = serializers.CharField(required=True)
    reporter_name = serializers.CharField(required=True)
    reason = serializers.CharField(required=True)
    entity_preview = serializers.CharField(required=False, allow_blank=True)
    reported_at = serializers.DateTimeField(required=False)


class SystemAnnouncementSerializer(serializers.ModelSerializer):
    """Serializer for system announcements."""

    class Meta:
        model = SystemAnnouncement
        fields = [
            "id",
            "title",
            "message",
            "severity",
            "action_url",
            "is_active",
            "created_at",
            "expires_at",
        ]
        read_only_fields = ["id", "created_at"]


class CreateAnnouncementSerializer(serializers.Serializer):
    """Serializer for creating system announcements."""

    title = serializers.CharField(required=True, max_length=255)
    message = serializers.CharField(required=True)
    severity = serializers.ChoiceField(
        choices=["info", "warning", "critical"],
        default="info",
    )
    action_url = serializers.URLField(required=False, allow_blank=True)


class NotificationLogSerializer(serializers.ModelSerializer):
    """Serializer for notification logs."""

    class Meta:
        model = NotificationLog
        fields = [
            "id",
            "workflow_id",
            "notification_type",
            "channel",
            "status",
            "payload",
            "error_message",
            "created_at",
        ]
        read_only_fields = ["id", "created_at"]

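For reference, these are plain DRF serializers, so a view validates a trigger request roughly as sketched below. The import path and the field values are illustrative assumptions, not fixtures from this diff:

from apps.notifications.serializers import TriggerNotificationSerializer  # assumed path

serializer = TriggerNotificationSerializer(data={
    "workflow_id": "submission_status",   # hypothetical workflow name
    "subscriber_id": "42",                # hypothetical user id
    "payload": {"message": "Your submission was approved"},
})
serializer.is_valid(raise_exception=True)   # DRF raises ValidationError on bad input
validated = serializer.validated_data       # "payload"/"overrides" fall back to {} via default=dict

Because these are Serializer rather than ModelSerializer classes, is_valid() only checks shape; nothing is persisted until a view acts on validated_data.
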
backend/apps/notifications/services.py (new file, 571 lines)
@@ -0,0 +1,571 @@
"""
Django-native notification service.

This service provides a fully Django-native notification system. It supports:
- In-app notifications
- Email notifications (via Django email backend)
- Real-time notifications (ready for Django Channels integration)
"""

import logging
from typing import Any

from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.mail import send_mail
from django.db.models import QuerySet
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.html import strip_tags

from .models import Notification, NotificationLog, NotificationPreference, SystemAnnouncement

logger = logging.getLogger(__name__)
User = get_user_model()


class NotificationService:
    """
    Django-native notification service using django-notifications-hq.

    This replaces the Novu-based service with a fully Django-native approach.
    """

    # Notification workflow types
    WORKFLOW_SUBMISSION_STATUS = "submission_status"
    WORKFLOW_MODERATION_ALERT = "moderation_alert"
    WORKFLOW_SYSTEM_ANNOUNCEMENT = "system_announcement"
    WORKFLOW_ADMIN_ALERT = "admin_alert"
    WORKFLOW_WELCOME = "welcome"
    WORKFLOW_COMMENT_REPLY = "comment_reply"
    WORKFLOW_MENTION = "mention"
    WORKFLOW_FOLLOW = "follow"

    def __init__(self):
        self.from_email = getattr(
            settings, "DEFAULT_FROM_EMAIL", "noreply@thrillwiki.com"
        )
        self.site_name = getattr(settings, "SITE_NAME", "ThrillWiki")
        self.site_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")

    def send_notification(
        self,
        recipient: User,
        actor: User | None,
        verb: str,
        action_object: Any = None,
        target: Any = None,
        description: str = "",
        level: str = "info",
        data: dict | None = None,
        send_email: bool = True,
        email_template: str | None = None,
    ) -> bool:
        """
        Send a notification to a user.

        Args:
            recipient: The user to notify
            actor: The user who performed the action (can be None for system notifications)
            verb: Description of the action (e.g., "approved your submission")
            action_object: The object that was acted upon
            target: The target of the action
            description: Additional description text
            level: Notification level (info, success, warning, error)
            data: Additional data to store with the notification
            send_email: Whether to also send an email notification
            email_template: Template path for email (optional)

        Returns:
            True if the notification was sent successfully
        """
        try:
            # Check user preferences
            if self._is_user_opted_out(recipient):
                logger.debug(f"User {recipient.id} opted out of notifications")
                return False

            # Create in-app notification using our native model
            notification_data = {
                "recipient": recipient,
                "actor": actor,
                "verb": verb,
                "description": description,
                "level": level,
                "data": data or {},
            }

            # Add generic foreign key for action_object if provided
            if action_object:
                notification_data["action_object_content_type"] = ContentType.objects.get_for_model(action_object)
                notification_data["action_object_id"] = action_object.pk

            # Add generic foreign key for target if provided
            if target:
                notification_data["target_content_type"] = ContentType.objects.get_for_model(target)
                notification_data["target_id"] = target.pk

            Notification.objects.create(**notification_data)

            # Log the notification
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type=level,
                channel="in_app",
                status=NotificationLog.Status.SENT,
                payload=data or {},
            )

            # Optionally send email
            if send_email and self._should_send_email(recipient, data):
                self._send_email_notification(
                    recipient=recipient,
                    verb=verb,
                    actor=actor,
                    action_object=action_object,
                    target=target,
                    description=description,
                    template=email_template,
                    data=data,
                )

            return True

        except Exception as e:
            logger.exception(f"Failed to send notification to {recipient.id}: {e}")
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type=level,
                channel="in_app",
                status=NotificationLog.Status.FAILED,
                payload=data or {},
                error_message=str(e),
            )
            return False

    def send_to_group(
        self,
        recipients: QuerySet | list,
        actor: User | None,
        verb: str,
        action_object: Any = None,
        target: Any = None,
        description: str = "",
        level: str = "info",
        data: dict | None = None,
        send_email: bool = False,
    ) -> dict:
        """
        Send a notification to multiple users.

        Returns:
            Dict with success/failure counts
        """
        results = {"success": 0, "failed": 0, "skipped": 0}

        for recipient in recipients:
            if self._is_user_opted_out(recipient):
                results["skipped"] += 1
                continue

            success = self.send_notification(
                recipient=recipient,
                actor=actor,
                verb=verb,
                action_object=action_object,
                target=target,
                description=description,
                level=level,
                data=data,
                send_email=send_email,
            )

            if success:
                results["success"] += 1
            else:
                results["failed"] += 1

        return results

    def notify_moderators(
        self,
        verb: str,
        action_object: Any = None,
        description: str = "",
        data: dict | None = None,
    ) -> dict:
        """
        Send a notification to all moderators.
        """
        from django.contrib.auth import get_user_model

        User = get_user_model()

        # Get users with moderator permissions
        moderators = User.objects.filter(
            is_active=True,
            is_staff=True,  # Or use a specific permission check
        ).exclude(
            novu_notification_prefs__is_opted_out=True
        )

        return self.send_to_group(
            recipients=moderators,
            actor=None,
            verb=verb,
            action_object=action_object,
            description=description,
            level="info",
            data={**(data or {}), "workflow_id": self.WORKFLOW_MODERATION_ALERT},
            send_email=True,
        )

    def notify_admins(
        self,
        verb: str,
        description: str = "",
        level: str = "warning",
        data: dict | None = None,
    ) -> dict:
        """
        Send a notification to all admins.
        """
        admins = User.objects.filter(is_superuser=True, is_active=True)

        return self.send_to_group(
            recipients=admins,
            actor=None,
            verb=verb,
            description=description,
            level=level,
            data={**(data or {}), "workflow_id": self.WORKFLOW_ADMIN_ALERT},
            send_email=True,
        )

    def send_system_announcement(
        self,
        title: str,
        message: str,
        severity: str = "info",
        action_url: str = "",
        target_users: QuerySet | None = None,
        created_by: User | None = None,
    ) -> SystemAnnouncement:
        """
        Create and broadcast a system announcement.
        """
        # Create the announcement
        announcement = SystemAnnouncement.objects.create(
            title=title,
            message=message,
            severity=severity,
            action_url=action_url,
            created_by=created_by,
            is_active=True,
        )

        # Notify users
        recipients = target_users or User.objects.filter(is_active=True)

        self.send_to_group(
            recipients=recipients,
            actor=created_by,
            verb=f"System announcement: {title}",
            action_object=announcement,
            description=message,
            level=severity,
            data={
                "workflow_id": self.WORKFLOW_SYSTEM_ANNOUNCEMENT,
                "announcement_id": str(announcement.id),
                "action_url": action_url,
            },
            send_email=severity in ["warning", "critical"],
        )

        return announcement

    def get_user_notifications(
        self,
        user: User,
        unread_only: bool = False,
        limit: int = 50,
    ):
        """
        Get notifications for a user.
        """
        qs = Notification.objects.filter(recipient=user)

        if unread_only:
            qs = qs.unread()

        return qs[:limit]

    def mark_as_read(self, user: User, notification_id: int | None = None):
        """
        Mark notification(s) as read.
        """
        if notification_id:
            try:
                notification = Notification.objects.get(recipient=user, id=notification_id)
                notification.mark_as_read()
            except Notification.DoesNotExist:
                pass
        else:
            # Mark all as read
            Notification.objects.filter(recipient=user).mark_all_as_read()

    def get_unread_count(self, user: User) -> int:
        """
        Get count of unread notifications.
        """
        return Notification.objects.filter(recipient=user, unread=True).count()

    def _is_user_opted_out(self, user: User) -> bool:
        """Check if user has opted out of notifications."""
        try:
            prefs = NotificationPreference.objects.get(user=user)
            return prefs.is_opted_out
        except NotificationPreference.DoesNotExist:
            return False

    def _should_send_email(self, user: User, data: dict | None) -> bool:
        """Check if email should be sent based on user preferences."""
        try:
            prefs = NotificationPreference.objects.get(user=user)

            # Check channel preferences
            channel_prefs = prefs.channel_preferences or {}
            email_enabled = channel_prefs.get("email", True)

            if not email_enabled:
                return False

            # Check workflow-specific preferences
            if data and "workflow_id" in data:
                workflow_prefs = prefs.workflow_preferences or {}
                workflow_email = workflow_prefs.get(data["workflow_id"], {}).get("email", True)
                return workflow_email

            return True

        except NotificationPreference.DoesNotExist:
            # Default to sending email if no preferences set
            return True

    def _send_email_notification(
        self,
        recipient: User,
        verb: str,
        actor: User | None,
        action_object: Any,
        target: Any,
        description: str,
        template: str | None,
        data: dict | None,
    ):
        """Send an email notification."""
        try:
            # Build context
            context = {
                "recipient": recipient,
                "actor": actor,
                "verb": verb,
                "action_object": action_object,
                "target": target,
                "description": description,
                "site_name": self.site_name,
                "site_url": self.site_url,
                "data": data or {},
            }

            # Render email
            if template:
                html_content = render_to_string(template, context)
                text_content = strip_tags(html_content)
            else:
                # Default simple email
                actor_name = actor.username if actor else self.site_name
                text_content = description or f"{actor_name} {verb}"
                html_content = f"<p>{text_content}</p>"

                if data and data.get("action_url"):
                    html_content += f'<p><a href="{data["action_url"]}">View details</a></p>'

            subject = f"[{self.site_name}] {verb[:50]}"

            send_mail(
                subject=subject,
                message=text_content,
                from_email=self.from_email,
                recipient_list=[recipient.email],
                html_message=html_content,
                fail_silently=True,
            )

            # Log email notification
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type="email",
                channel="email",
                status=NotificationLog.Status.SENT,
                payload=data or {},
            )

        except Exception as e:
            logger.exception(f"Failed to send email to {recipient.email}: {e}")
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type="email",
                channel="email",
                status=NotificationLog.Status.FAILED,
                payload=data or {},
                error_message=str(e),
            )

    def _log_notification(
        self,
        user: User,
        workflow_id: str,
        notification_type: str,
        channel: str,
        status: str,
        payload: dict,
        error_message: str = "",
    ):
        """Log a notification to the audit trail."""
        NotificationLog.objects.create(
            user=user,
            workflow_id=workflow_id,
            notification_type=notification_type,
            channel=channel,
            status=status,
            payload=payload,
            error_message=error_message,
        )


# Singleton instance
notification_service = NotificationService()

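A minimal usage sketch of the singleton above, as it might run in a Django shell; the two users are hypothetical and the import path is assumed from the file location:

from django.contrib.auth import get_user_model
from apps.notifications.services import notification_service  # assumed path

User = get_user_model()
moderator = User.objects.get(username="mod")        # hypothetical user
member = User.objects.get(username="coaster_fan")   # hypothetical user

# Creates a Notification row, writes a NotificationLog entry, and, if the
# recipient's preferences allow it, sends an email via Django's email backend.
ok = notification_service.send_notification(
    recipient=member,
    actor=moderator,
    verb="approved your submission",
    level="success",
    data={"workflow_id": notification_service.WORKFLOW_SUBMISSION_STATUS},
    send_email=True,
)

send_notification returns a bool instead of raising, so callers can treat delivery as best-effort.
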
# ============================================================================
# Backward compatibility - keep old NovuService interface but delegate to native
# ============================================================================


class NovuServiceSync:
    """
    Backward-compatible wrapper that delegates to the new notification service.

    This maintains the old API signature for existing code while using
    the new Django-native implementation.
    """

    def __init__(self):
        self._service = notification_service

    @property
    def is_configured(self) -> bool:
        """Always configured since we're using the Django-native system."""
        return True

    def create_subscriber(self, subscriber_id: str, **kwargs) -> dict[str, Any]:
        """Create subscriber - now a no-op as django-notifications-hq uses User directly."""
        logger.info(f"Subscriber creation not needed for django-notifications-hq: {subscriber_id}")
        return {"subscriberId": subscriber_id, "status": "native"}

    def update_subscriber(self, subscriber_id: str, **kwargs) -> dict[str, Any]:
        """Update subscriber - now a no-op."""
        logger.info(f"Subscriber update not needed for django-notifications-hq: {subscriber_id}")
        return {"subscriberId": subscriber_id, "status": "native"}

    def trigger_notification(
        self,
        workflow_id: str,
        subscriber_id: str,
        payload: dict | None = None,
        overrides: dict | None = None,
    ) -> dict[str, Any]:
        """Trigger a notification using the new native service."""
        try:
            user = User.objects.get(pk=subscriber_id)

            verb = payload.get("message", f"Notification: {workflow_id}") if payload else f"Notification: {workflow_id}"
            description = payload.get("description", "") if payload else ""

            success = self._service.send_notification(
                recipient=user,
                actor=None,
                verb=verb,
                description=description,
                data={**(payload or {}), "workflow_id": workflow_id},
            )

            return {
                "status": "sent" if success else "failed",
                "workflow_id": workflow_id,
            }
        except User.DoesNotExist:
            logger.error(f"User not found for notification: {subscriber_id}")
            return {"status": "failed", "error": "User not found"}

    def trigger_topic_notification(
        self,
        workflow_id: str,
        topic_key: str,
        payload: dict | None = None,
    ) -> dict[str, Any]:
        """Trigger topic notification - maps to group notification."""
        logger.info(f"Topic notification: {workflow_id} -> {topic_key}")

        # Map topic keys to user groups
        if topic_key == "moderators":
            result = self._service.notify_moderators(
                verb=payload.get("message", "New moderation task") if payload else "New moderation task",
                data={**(payload or {}), "workflow_id": workflow_id},
            )
        elif topic_key == "admins":
            result = self._service.notify_admins(
                verb=payload.get("message", "Admin notification") if payload else "Admin notification",
                data={**(payload or {}), "workflow_id": workflow_id},
            )
        else:
            logger.warning(f"Unknown topic key: {topic_key}")
            result = {"success": 0, "failed": 0, "skipped": 0}

        return {
            "status": "sent",
            "workflow_id": workflow_id,
            "result": result,
        }

    def update_preferences(
        self,
        subscriber_id: str,
        preferences: dict[str, Any],
    ) -> dict[str, Any]:
        """Update notification preferences."""
        try:
            user = User.objects.get(pk=subscriber_id)
            prefs, _ = NotificationPreference.objects.get_or_create(user=user)

            if "channel_preferences" in preferences:
                prefs.channel_preferences = preferences["channel_preferences"]
            if "workflow_preferences" in preferences:
                prefs.workflow_preferences = preferences["workflow_preferences"]
            if "is_opted_out" in preferences:
                prefs.is_opted_out = preferences["is_opted_out"]

            prefs.save()

            return {"status": "updated"}
        except User.DoesNotExist:
            return {"status": "failed", "error": "User not found"}


# Keep old name for backward compatibility
novu_service = NovuServiceSync()

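Call sites written against the old Novu client keep working through this wrapper; a sketch of the compatible surface (ids and payload are hypothetical):

from apps.notifications.services import novu_service  # assumed path

result = novu_service.trigger_notification(
    workflow_id="submission-status-update",
    subscriber_id="42",  # must be a User primary key on the native path
    payload={"message": "Status changed", "description": "Approved by a moderator"},
)
# -> {"status": "sent", "workflow_id": "submission-status-update"} on success

One behavioural seam worth noting: the native path resolves subscriber_id with User.objects.get(pk=...), so any legacy subscriber ids that are not user primary keys now come back as {"status": "failed", "error": "User not found"}.
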
backend/apps/notifications/urls.py (new file, 76 lines)
@@ -0,0 +1,76 @@
"""
Notification URL configuration.

Note: Now using django-notifications-hq for native Django notifications.
Legacy Novu endpoints are kept for backward compatibility.
"""

from django.urls import path

from .views import (
    AdminAlertView,
    AdminCriticalErrorView,
    CreateSubscriberView,
    NotificationListView,
    NotificationMarkReadView,
    NotificationUnreadCountView,
    NotifyModeratorsReportView,
    NotifyModeratorsSubmissionView,
    NotifyUserSubmissionStatusView,
    SystemAnnouncementView,
    TriggerNotificationView,
    UpdatePreferencesView,
    UpdateSubscriberView,
)

app_name = "notifications"

urlpatterns = [
    # ========== Native Notification Endpoints ==========
    # List notifications for current user
    path("", NotificationListView.as_view(), name="list"),
    # Mark notification(s) as read
    path("mark-read/", NotificationMarkReadView.as_view(), name="mark_read"),
    # Get unread count
    path("unread-count/", NotificationUnreadCountView.as_view(), name="unread_count"),

    # ========== Legacy/Compatibility Endpoints ==========
    # Subscriber management (legacy - kept for backward compatibility)
    path("subscribers/", CreateSubscriberView.as_view(), name="create_subscriber"),
    path("subscribers/update/", UpdateSubscriberView.as_view(), name="update_subscriber"),
    # Preferences
    path("preferences/", UpdatePreferencesView.as_view(), name="preferences"),
    # Trigger notifications
    path("trigger/", TriggerNotificationView.as_view(), name="trigger"),
    # Moderator notifications
    path(
        "moderators/submission/",
        NotifyModeratorsSubmissionView.as_view(),
        name="moderators_submission",
    ),
    path(
        "moderators/report/",
        NotifyModeratorsReportView.as_view(),
        name="moderators_report",
    ),
    # User notifications
    path(
        "user/submission-status/",
        NotifyUserSubmissionStatusView.as_view(),
        name="user_submission_status",
    ),
    # System notifications
    path(
        "system/announcement/",
        SystemAnnouncementView.as_view(),
        name="system_announcement",
    ),
    # Admin notifications
    path("admin/alert/", AdminAlertView.as_view(), name="admin_alert"),
    path(
        "admin/critical-error/",
        AdminCriticalErrorView.as_view(),
        name="admin_critical_error",
    ),
]

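Because the urlconf sets app_name = "notifications", these route names reverse through that namespace. A small sketch; the final prefixes depend on where the root urlconf includes this module, so the example paths are assumptions:

from django.urls import reverse

reverse("notifications:unread_count")       # e.g. "/notifications/unread-count/" if included at "notifications/"
reverse("notifications:moderators_report")  # e.g. "/notifications/moderators/report/"
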
backend/apps/notifications/views.py (new file, 617 lines)
@@ -0,0 +1,617 @@
"""
Notification views.

Provides REST API endpoints for:
- Subscriber management (legacy compatibility)
- Preference updates
- Notification triggering
- Moderator notifications
- System announcements
- User notification list and management

Note: Now using django-notifications-hq for native Django notifications.
The novu_service import provides backward compatibility.
"""

import logging

from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView

from apps.core.utils import capture_and_log

from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
from .serializers import (
    CreateAnnouncementSerializer,
    CreateSubscriberSerializer,
    ModeratorReportNotificationSerializer,
    ModeratorSubmissionNotificationSerializer,
    NotificationPreferenceSerializer,
    SystemAnnouncementSerializer,
    TriggerNotificationSerializer,
    UpdatePreferencesSerializer,
    UpdateSubscriberSerializer,
)
from .services import novu_service, notification_service

logger = logging.getLogger(__name__)
User = get_user_model()


class CreateSubscriberView(APIView):
    """
    POST /notifications/subscribers/
    Create or update a Novu subscriber.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = CreateSubscriberSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data
        subscriber_id = data["subscriber_id"]

        try:
            # Update or create local subscriber record
            subscriber, created = Subscriber.objects.update_or_create(
                user=request.user,
                defaults={
                    "novu_subscriber_id": subscriber_id,
                    "first_name": data.get("first_name", ""),
                    "last_name": data.get("last_name", ""),
                    "email": data.get("email") or request.user.email,
                    "phone": data.get("phone", ""),
                    "avatar": data.get("avatar", ""),
                    "locale": data.get("locale", "en"),
                    "data": data.get("data", {}),
                },
            )

            # Sync to Novu if configured
            if novu_service.is_configured:
                novu_service.create_subscriber(
                    subscriber_id=subscriber_id,
                    email=subscriber.email,
                    first_name=subscriber.first_name,
                    last_name=subscriber.last_name,
                    phone=subscriber.phone,
                    avatar=subscriber.avatar,
                    locale=subscriber.locale,
                    data=subscriber.data,
                )

            return Response(
                {"subscriberId": subscriber_id, "created": created},
                status=status.HTTP_201_CREATED if created else status.HTTP_200_OK,
            )

        except Exception as e:
            capture_and_log(e, "Create notification subscriber", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class UpdateSubscriberView(APIView):
    """
    POST /notifications/subscribers/update/
    Update a Novu subscriber.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = UpdateSubscriberSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data
        subscriber_id = data["subscriber_id"]

        try:
            # Update local record
            subscriber = Subscriber.objects.filter(user=request.user).first()
            if not subscriber:
                return Response(
                    {"detail": "Subscriber not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            # Update fields
            for field in ["first_name", "last_name", "email", "phone", "avatar", "locale", "data"]:
                if field in data:
                    setattr(subscriber, field, data[field])
            subscriber.save()

            # Sync to Novu
            if novu_service.is_configured:
                update_fields = {k: v for k, v in data.items() if k != "subscriber_id"}
                novu_service.update_subscriber(subscriber_id, **update_fields)

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Update notification subscriber", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class UpdatePreferencesView(APIView):
    """
    POST /notifications/preferences/
    Update notification preferences.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = UpdatePreferencesSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data
        preferences = data["preferences"]

        try:
            # Update local preferences
            pref, created = NotificationPreference.objects.update_or_create(
                user=request.user,
                defaults={
                    "channel_preferences": preferences.get("channelPreferences", {}),
                    "workflow_preferences": preferences.get("workflowPreferences", {}),
                    "frequency_settings": preferences.get("frequencySettings", {}),
                },
            )

            # Sync to Novu
            if novu_service.is_configured:
                subscriber = Subscriber.objects.filter(user=request.user).first()
                if subscriber:
                    novu_service.update_preferences(subscriber.novu_subscriber_id, preferences)

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Update notification preferences", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def get(self, request):
        """Get current user's notification preferences."""
        try:
            pref = NotificationPreference.objects.filter(user=request.user).first()
            if not pref:
                return Response(
                    {
                        "channelPreferences": {},
                        "workflowPreferences": {},
                        "frequencySettings": {},
                    }
                )
            return Response(NotificationPreferenceSerializer(pref).data)
        except Exception as e:
            capture_and_log(e, "Get notification preferences", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class TriggerNotificationView(APIView):
    """
    POST /notifications/trigger/
    Trigger a notification workflow.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = TriggerNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data

        try:
            # Log the notification
            log = NotificationLog.objects.create(
                user=request.user,
                workflow_id=data["workflow_id"],
                notification_type="trigger",
                channel="all",
                payload=data.get("payload", {}),
            )

            # Trigger via Novu
            if novu_service.is_configured:
                result = novu_service.trigger_notification(
                    workflow_id=data["workflow_id"],
                    subscriber_id=data["subscriber_id"],
                    payload=data.get("payload"),
                    overrides=data.get("overrides"),
                )
                log.novu_transaction_id = result.get("transactionId", "")
                log.status = NotificationLog.Status.SENT
            else:
                log.status = NotificationLog.Status.SENT  # Mock success
            log.save()

            return Response({"success": True, "transactionId": log.novu_transaction_id})

        except Exception as e:
            capture_and_log(e, "Trigger notification", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class NotifyModeratorsSubmissionView(APIView):
    """
    POST /notifications/moderators/submission/
    Notify moderators about a new submission.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = ModeratorSubmissionNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data

        try:
            # Log the notification
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="moderator-submission-notification",
                notification_type="moderator_submission",
                channel="in_app",
                payload=data,
                status=NotificationLog.Status.SENT,
            )

            # Trigger to moderator topic
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="moderator-submission-notification",
                    topic_key="moderators",
                    payload=data,
                )

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Notify moderators (submission)", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class NotifyModeratorsReportView(APIView):
    """
    POST /notifications/moderators/report/
    Notify moderators about a new report.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = ModeratorReportNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data

        try:
            # Log the notification
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="moderator-report-notification",
                notification_type="moderator_report",
                channel="in_app",
                payload=data,
                status=NotificationLog.Status.SENT,
            )

            # Trigger to moderator topic
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="moderator-report-notification",
                    topic_key="moderators",
                    payload=data,
                )

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Notify moderators (report)", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class NotifyUserSubmissionStatusView(APIView):
    """
    POST /notifications/user/submission-status/
    Notify a user about their submission status change.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        data = request.data

        try:
            subscriber_id = data.get("subscriber_id") or str(request.user.id)

            # Log the notification
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="submission-status-update",
                notification_type="submission_status",
                channel="email",
                payload=data,
                status=NotificationLog.Status.SENT,
            )

            # Trigger notification
            if novu_service.is_configured:
                novu_service.trigger_notification(
                    workflow_id="submission-status-update",
                    subscriber_id=subscriber_id,
                    payload=data,
                )

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Notify user submission status", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class SystemAnnouncementView(APIView):
    """
    POST /notifications/system/announcement/
    Send a system-wide announcement (admin only).
    """

    permission_classes = [IsAdminUser]

    def post(self, request):
        serializer = CreateAnnouncementSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        data = serializer.validated_data

        try:
            # Create announcement record
            announcement = SystemAnnouncement.objects.create(
                title=data["title"],
                message=data["message"],
                severity=data.get("severity", "info"),
                action_url=data.get("action_url", ""),
                created_by=request.user,
            )

            # Trigger to all users topic
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="system-announcement",
                    topic_key="users",
                    payload={
                        "title": announcement.title,
                        "message": announcement.message,
                        "severity": announcement.severity,
                        "actionUrl": announcement.action_url,
                    },
                )

            return Response(
                {
                    "success": True,
                    "announcementId": str(announcement.id),
                },
                status=status.HTTP_201_CREATED,
            )

        except Exception as e:
            capture_and_log(e, "System announcement", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class AdminAlertView(APIView):
    """
    POST /notifications/admin/alert/
    Send alert to admins.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        data = request.data

        try:
            # Log the alert
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="admin-alert",
                notification_type="admin_alert",
                channel="email",
                payload=data,
                status=NotificationLog.Status.SENT,
            )

            # Trigger to admin topic
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="admin-alert",
                    topic_key="admins",
                    payload=data,
                )

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Admin alert", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class AdminCriticalErrorView(APIView):
    """
    POST /notifications/admin/critical-error/
    Send critical error alert to admins.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        data = request.data

        try:
            # Log the alert
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="admin-critical-error",
                notification_type="critical_error",
                channel="email",
                payload=data,
                status=NotificationLog.Status.SENT,
            )

            # Trigger to admin topic with urgent priority
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="admin-critical-error",
                    topic_key="admins",
                    payload=data,
                )

            return Response({"success": True})

        except Exception as e:
            capture_and_log(e, "Admin critical error", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


# ============================================================================
# Native Notification Views (django-notifications-hq)
# ============================================================================


class NotificationListView(APIView):
    """
    GET /notifications/
    Get list of notifications for the current user.
    """

    permission_classes = [IsAuthenticated]

    def get(self, request):
        try:
            unread_only = request.query_params.get("unread_only", "false").lower() == "true"
            limit = min(int(request.query_params.get("limit", 50)), 100)

            notifications = notification_service.get_user_notifications(
                user=request.user,
                unread_only=unread_only,
                limit=limit,
            )

            # Serialize notifications
            notification_list = []
            for notif in notifications:
                notification_list.append({
                    "id": notif.id,
                    "actor": str(notif.actor) if notif.actor else None,
                    "verb": notif.verb,
                    "description": notif.description or "",
                    "target": str(notif.target) if notif.target else None,
                    "actionObject": str(notif.action_object) if notif.action_object else None,
                    "level": notif.level,
                    "unread": notif.unread,
                    "data": notif.data or {},
                    "timestamp": notif.timestamp.isoformat(),
                })

            return Response({
                "notifications": notification_list,
                "unreadCount": notification_service.get_unread_count(request.user),
            })

        except Exception as e:
            capture_and_log(e, "Get notifications", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class NotificationMarkReadView(APIView):
    """
    POST /notifications/mark-read/
    Mark notification(s) as read.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        try:
            notification_id = request.data.get("notification_id")

            notification_service.mark_as_read(
                user=request.user,
                notification_id=notification_id,
            )

            return Response({
                "success": True,
                "unreadCount": notification_service.get_unread_count(request.user),
            })

        except Exception as e:
            capture_and_log(e, "Mark notification read", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class NotificationUnreadCountView(APIView):
    """
    GET /notifications/unread-count/
    Get count of unread notifications.
    """

    permission_classes = [IsAuthenticated]

    def get(self, request):
        try:
            count = notification_service.get_unread_count(request.user)
            return Response({"unreadCount": count})
        except Exception as e:
            capture_and_log(e, "Get unread count", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

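A sketch of exercising the two native endpoints from a test; the /api/notifications/ mount point is an assumption, since the diff does not show the root urlconf:

from django.contrib.auth import get_user_model
from rest_framework.test import APIClient

client = APIClient()
user = get_user_model().objects.create_user(username="tester", password="x")  # hypothetical user
client.force_authenticate(user=user)

resp = client.get("/api/notifications/", {"unread_only": "true", "limit": 10})
assert resp.status_code == 200
assert "unreadCount" in resp.json()

# Omitting notification_id marks everything read for this user.
resp = client.post("/api/notifications/mark-read/", {}, format="json")
assert resp.json()["success"] is True
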
@@ -4,7 +4,7 @@ from rest_framework.routers import DefaultRouter
from .views import ReviewViewSet

router = DefaultRouter()
router.register(r"reviews", ReviewViewSet, basename="review")
router.register(r"", ReviewViewSet, basename="review")

urlpatterns = [
    path("", include(router.urls)),

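The register() change above drops a duplicated path segment: with the router mounted at an assumed "reviews/" prefix, the list route moves from .../reviews/reviews/ to .../reviews/. Reverse lookups are unaffected because the basename stays "review" (any URL namespace is omitted here):

from django.urls import reverse

reverse("review-list")              # ".../reviews/" under the assumed prefix
reverse("review-detail", args=[1])  # ".../reviews/1/"
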
@@ -9,7 +9,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ("pghistory", "0007_auto_20250421_0444"),
        ("pghistory", "0006_delete_aggregateevent"),
        ("rides", "0028_ridecredit_ridecreditevent_ridecredit_insert_insert_and_more"),
    ]

@@ -9,7 +9,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ("pghistory", "0007_auto_20250421_0444"),
        ("pghistory", "0006_delete_aggregateevent"),
        ("rides", "0029_darkridestats_darkridestatsevent_flatridestats_and_more"),
    ]

@@ -9,7 +9,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ("pghistory", "0007_auto_20250421_0444"),
        ("pghistory", "0006_delete_aggregateevent"),
        ("rides", "0030_add_kiddie_and_transportation_stats"),
    ]

@@ -0,0 +1,41 @@
# Generated by Django 5.2.9 on 2026-01-06 17:43

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('support', '0002_add_category_to_ticket'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Report',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('object_id', models.CharField(help_text='ID of the entity being reported', max_length=50)),
                ('report_type', models.CharField(choices=[('inaccurate', 'Inaccurate Information'), ('inappropriate', 'Inappropriate Content'), ('spam', 'Spam'), ('copyright', 'Copyright Violation'), ('duplicate', 'Duplicate Content'), ('other', 'Other')], db_index=True, help_text='Type of issue being reported', max_length=20)),
                ('reason', models.TextField(help_text='Detailed description of the issue')),
                ('status', models.CharField(choices=[('pending', 'Pending'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('dismissed', 'Dismissed')], db_index=True, default='pending', help_text='Current status of the report', max_length=20)),
                ('resolved_at', models.DateTimeField(blank=True, help_text='When the report was resolved', null=True)),
                ('resolution_notes', models.TextField(blank=True, help_text='Notes about how the report was resolved')),
                ('content_type', models.ForeignKey(help_text='Type of entity being reported', on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
                ('reporter', models.ForeignKey(blank=True, help_text='User who submitted the report', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='submitted_reports', to=settings.AUTH_USER_MODEL)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='Moderator who resolved the report', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_reports', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Report',
                'verbose_name_plural': 'Reports',
                'ordering': ['-created_at'],
                'abstract': False,
                'indexes': [models.Index(fields=['status', 'created_at'], name='support_rep_status_aea90b_idx'), models.Index(fields=['content_type', 'object_id'], name='support_rep_content_e9be3b_idx'), models.Index(fields=['report_type', 'created_at'], name='support_rep_report__a54360_idx')],
            },
        ),
    ]

@@ -66,3 +66,105 @@ class Ticket(TrackedModel):
        if self.user and not self.email:
            self.email = self.user.email
        super().save(*args, **kwargs)


class Report(TrackedModel):
    """
    User-submitted reports about content issues.

    Reports allow users to flag problems with specific entities
    (parks, rides, reviews, etc.) for moderator review.
    """

    class ReportType(models.TextChoices):
        INACCURATE = "inaccurate", "Inaccurate Information"
        INAPPROPRIATE = "inappropriate", "Inappropriate Content"
        SPAM = "spam", "Spam"
        COPYRIGHT = "copyright", "Copyright Violation"
        DUPLICATE = "duplicate", "Duplicate Content"
        OTHER = "other", "Other"

    class Status(models.TextChoices):
        PENDING = "pending", "Pending"
        INVESTIGATING = "investigating", "Investigating"
        RESOLVED = "resolved", "Resolved"
        DISMISSED = "dismissed", "Dismissed"

    # Reporter (optional for anonymous reports)
    reporter = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="submitted_reports",
        help_text="User who submitted the report",
    )

    # Target entity using GenericForeignKey
    content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        help_text="Type of entity being reported",
    )
    object_id = models.CharField(
        max_length=50,
        help_text="ID of the entity being reported",
    )
    # Note: GenericForeignKey doesn't create a database column
    # It's a convenience for accessing the related object
    # content_object = GenericForeignKey("content_type", "object_id")

    # Report details
    report_type = models.CharField(
        max_length=20,
        choices=ReportType.choices,
        db_index=True,
        help_text="Type of issue being reported",
    )
    reason = models.TextField(
        help_text="Detailed description of the issue",
    )
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.PENDING,
        db_index=True,
        help_text="Current status of the report",
    )

    # Resolution
    resolved_at = models.DateTimeField(
        null=True,
        blank=True,
        help_text="When the report was resolved",
    )
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="resolved_reports",
        help_text="Moderator who resolved the report",
    )
    resolution_notes = models.TextField(
        blank=True,
        help_text="Notes about how the report was resolved",
    )

    class Meta(TrackedModel.Meta):
        verbose_name = "Report"
        verbose_name_plural = "Reports"
        ordering = ["-created_at"]
        indexes = [
            models.Index(fields=["status", "created_at"]),
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["report_type", "created_at"]),
        ]

    def __str__(self):
        return f"[{self.get_report_type_display()}] {self.content_type} #{self.object_id}"

    @property
    def is_resolved(self) -> bool:
        return self.status in (self.Status.RESOLVED, self.Status.DISMISSED)

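Since the GenericForeignKey accessor is commented out, lookups on Report go through ContentType directly. A shell sketch assuming a parks.Park row with pk=1 exists (hypothetical data; import path assumed):

from django.contrib.contenttypes.models import ContentType
from django.utils import timezone

from apps.support.models import Report  # assumed path

report = Report.objects.create(
    reporter=None,  # anonymous reports are allowed (null=True, SET_NULL)
    content_type=ContentType.objects.get(app_label="parks", model="park"),
    object_id="1",
    report_type=Report.ReportType.INACCURATE,
    reason="Opening year is wrong.",
)

# A moderator resolving it later:
report.status = Report.Status.RESOLVED
report.resolved_at = timezone.now()
report.resolution_notes = "Corrected the opening year."
report.save()
assert report.is_resolved
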
@@ -33,3 +33,110 @@ class TicketSerializer(serializers.ModelSerializer):
         if request and not request.user.is_authenticated and not data.get("email"):
             raise serializers.ValidationError({"email": "Email is required for guests."})
         return data
+
+
+class ReportSerializer(serializers.ModelSerializer):
+    """Serializer for Report model."""
+
+    reporter_username = serializers.CharField(source="reporter.username", read_only=True, allow_null=True)
+    resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
+    report_type_display = serializers.CharField(source="get_report_type_display", read_only=True)
+    status_display = serializers.CharField(source="get_status_display", read_only=True)
+    content_type_name = serializers.CharField(source="content_type.model", read_only=True)
+    is_resolved = serializers.BooleanField(read_only=True)
+
+    class Meta:
+        from .models import Report
+
+        model = Report
+        fields = [
+            "id",
+            "reporter",
+            "reporter_username",
+            "content_type",
+            "content_type_name",
+            "object_id",
+            "report_type",
+            "report_type_display",
+            "reason",
+            "status",
+            "status_display",
+            "resolved_at",
+            "resolved_by",
+            "resolved_by_username",
+            "resolution_notes",
+            "is_resolved",
+            "created_at",
+            "updated_at",
+        ]
+        read_only_fields = [
+            "id",
+            "reporter",
+            "resolved_at",
+            "resolved_by",
+            "created_at",
+            "updated_at",
+        ]
+
+
+class ReportCreateSerializer(serializers.ModelSerializer):
+    """Serializer for creating reports with entity type as string."""
+
+    entity_type = serializers.CharField(write_only=True, help_text="Type of entity: park, ride, review, etc.")
+    entity_id = serializers.CharField(write_only=True, help_text="ID of the entity being reported")
+
+    class Meta:
+        from .models import Report
+
+        model = Report
+        fields = [
+            "entity_type",
+            "entity_id",
+            "report_type",
+            "reason",
+        ]
+
+    def validate(self, data):
+        from django.contrib.contenttypes.models import ContentType
+
+        entity_type = data.pop("entity_type")
+        entity_id = data.pop("entity_id")
+
+        # Map common entity types to app.model
+        type_mapping = {
+            "park": ("parks", "park"),
+            "ride": ("rides", "ride"),
+            "review": ("reviews", "review"),
+            "user": ("accounts", "user"),
+        }
+
+        if entity_type in type_mapping:
+            app_label, model_name = type_mapping[entity_type]
+        else:
+            # Try to parse as app.model
+            parts = entity_type.split(".")
+            if len(parts) != 2:
+                raise serializers.ValidationError(
+                    {"entity_type": f"Unknown entity type: {entity_type}. Use 'park', 'ride', 'review', or 'app.model'."}
+                )
+            app_label, model_name = parts
+
+        try:
+            content_type = ContentType.objects.get(app_label=app_label, model=model_name)
+        except ContentType.DoesNotExist:
+            raise serializers.ValidationError({"entity_type": f"Unknown entity type: {entity_type}"})
+
+        data["content_type"] = content_type
+        data["object_id"] = entity_id
+        return data
+
+
+class ReportResolveSerializer(serializers.Serializer):
+    """Serializer for resolving reports."""
+
+    status = serializers.ChoiceField(
+        choices=[("resolved", "Resolved"), ("dismissed", "Dismissed")],
+        default="resolved",
+    )
+    notes = serializers.CharField(required=False, allow_blank=True)
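To make the create flow concrete, here is what ReportCreateSerializer.validate does to an incoming payload. The body shape below is a sketch; the choice value is illustrative.

    # Hypothetical client payload for creating a report:
    payload = {
        "entity_type": "ride",            # short name, mapped to ("rides", "ride") via type_mapping
        "entity_id": "42",
        "report_type": "incorrect_info",  # illustrative choice value
        "reason": "Height requirement is out of date.",
    }
    # validate() pops entity_type/entity_id and rewrites them as:
    #   data["content_type"] = ContentType(app_label="rides", model="ride")
    #   data["object_id"] = "42"
    # Anything outside type_mapping must be a dotted "app.model" string, e.g. "blog.post".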
@@ -1,11 +1,13 @@
 from django.urls import include, path
 from rest_framework.routers import DefaultRouter
 
-from .views import TicketViewSet
+from .views import ReportViewSet, TicketViewSet
 
 router = DefaultRouter()
 router.register(r"tickets", TicketViewSet, basename="ticket")
+router.register(r"reports", ReportViewSet, basename="report")
 
 urlpatterns = [
     path("", include(router.urls)),
 ]
@@ -1,8 +1,16 @@
+from django.utils import timezone
 from django_filters.rest_framework import DjangoFilterBackend
-from rest_framework import filters, permissions, viewsets
+from rest_framework import filters, permissions, status, viewsets
+from rest_framework.decorators import action
+from rest_framework.response import Response
 
-from .models import Ticket
-from .serializers import TicketSerializer
+from .models import Report, Ticket
+from .serializers import (
+    ReportCreateSerializer,
+    ReportResolveSerializer,
+    ReportSerializer,
+    TicketSerializer,
+)
 
 
 class TicketViewSet(viewsets.ModelViewSet):
@@ -33,3 +41,61 @@ class TicketViewSet(viewsets.ModelViewSet):
             serializer.save(user=self.request.user, email=self.request.user.email)
         else:
             serializer.save()
+
+
+class ReportViewSet(viewsets.ModelViewSet):
+    """
+    ViewSet for handling user-submitted content reports.
+
+    - Authenticated users can CREATE reports
+    - Staff can LIST/RETRIEVE all reports
+    - Users can LIST/RETRIEVE their own reports
+    - Staff can RESOLVE reports
+    """
+
+    queryset = Report.objects.select_related("reporter", "resolved_by", "content_type").all()
+    permission_classes = [permissions.IsAuthenticated]
+    filter_backends = [DjangoFilterBackend, filters.OrderingFilter, filters.SearchFilter]
+    filterset_fields = ["status", "report_type"]
+    search_fields = ["reason", "resolution_notes"]
+    ordering_fields = ["created_at", "status", "report_type"]
+    ordering = ["-created_at"]
+
+    def get_serializer_class(self):
+        if self.action == "create":
+            return ReportCreateSerializer
+        if self.action == "resolve":
+            return ReportResolveSerializer
+        return ReportSerializer
+
+    def get_queryset(self):
+        user = self.request.user
+        if user.is_staff:
+            return Report.objects.select_related("reporter", "resolved_by", "content_type").all()
+        return Report.objects.select_related("reporter", "resolved_by", "content_type").filter(reporter=user)
+
+    def perform_create(self, serializer):
+        serializer.save(reporter=self.request.user)
+
+    @action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
+    def resolve(self, request, pk=None):
+        """Mark a report as resolved or dismissed."""
+        report = self.get_object()
+
+        if report.is_resolved:
+            return Response(
+                {"detail": "Report is already resolved"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        serializer = ReportResolveSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+
+        report.status = serializer.validated_data.get("status", "resolved")
+        report.resolved_at = timezone.now()
+        report.resolved_by = request.user
+        report.resolution_notes = serializer.validated_data.get("notes", "")
+        report.save()
+
+        return Response(ReportSerializer(report).data)
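A hedged usage sketch of the resolve action with DRF's test client. The URL prefix depends on where this router is mounted in the project urlconf, so the path is an assumption:

    from rest_framework.test import APIClient

    client = APIClient()
    client.force_authenticate(user=staff_user)  # an existing user with is_staff=True (assumed)

    resp = client.post(
        "/api/v1/support/reports/17/resolve/",  # assumed mount point for this app's router
        {"status": "dismissed", "notes": "Duplicate of report #12."},
        format="json",
    )
    assert resp.status_code == 200  # a second call would get 400: "Report is already resolved"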
@@ -66,6 +66,31 @@ app.conf.update(
             "task": "rides.check_overdue_closings",
             "schedule": 86400.0,  # Daily at midnight
         },
+        # ====== New scheduled tasks ======
+        "process-scheduled-deletions": {
+            "task": "core.process_scheduled_deletions",
+            "schedule": 86400.0,  # Daily
+        },
+        "process-closing-entities": {
+            "task": "core.process_closing_entities",
+            "schedule": 86400.0,  # Daily
+        },
+        "process-expired-bans": {
+            "task": "core.process_expired_bans",
+            "schedule": 3600.0,  # Hourly
+        },
+        "cleanup-orphaned-images": {
+            "task": "core.cleanup_orphaned_images",
+            "schedule": 604800.0,  # Weekly
+        },
+        "cleanup-old-versions": {
+            "task": "core.cleanup_old_versions",
+            "schedule": 2592000.0,  # Monthly (30 days)
+        },
+        "data-retention-cleanup": {
+            "task": "core.data_retention_cleanup",
+            "schedule": 86400.0,  # Daily
+        },
     },
     # Task result settings
     result_expires=3600,  # 1 hour
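For these entries to fire, each task name must be registered with Celery; a minimal sketch of one such task, assuming the tasks live under apps.core.tasks (this diff shows only the beat schedule, not the task bodies):

    from celery import shared_task
    from django.utils import timezone


    @shared_task(name="core.process_expired_bans")  # must match the "task" key in the schedule
    def process_expired_bans():
        """Illustrative body: lift bans whose expiry has passed (the Ban model is assumed)."""
        from apps.accounts.models import Ban

        return Ban.objects.filter(active=True, expires_at__lte=timezone.now()).update(active=False)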
@@ -73,8 +73,7 @@ THIRD_PARTY_APPS = [
     "rest_framework.authtoken",
     "rest_framework_simplejwt",  # JWT authentication
     "rest_framework_simplejwt.token_blacklist",  # JWT token blacklist
-    "dj_rest_auth",  # REST authentication with JWT support
-    "dj_rest_auth.registration",  # REST registration support
+    # Note: dj_rest_auth removed - using custom auth views in apps.api.v1.auth
     "drf_spectacular",  # OpenAPI 3.0 documentation
     "corsheaders",  # CORS headers for API
     "pghistory",  # django-pghistory
@@ -83,6 +82,7 @@ THIRD_PARTY_APPS = [
     "allauth",
     "allauth.account",
     "allauth.mfa",  # MFA/TOTP support
+    "allauth.mfa.webauthn",  # WebAuthn/Passkey support
     "allauth.socialaccount",
     "allauth.socialaccount.providers.google",
     "allauth.socialaccount.providers.discord",
@@ -102,6 +102,8 @@ THIRD_PARTY_APPS = [
     "django_celery_beat",  # Celery beat scheduler
     "django_celery_results",  # Celery result backend
     "django_extensions",  # Django Extensions for enhanced development tools
+    # Note: django-notifications-hq is installed but not in INSTALLED_APPS
+    # to avoid app label conflict. We use a custom implementation instead.
 ]
 
 LOCAL_APPS = [
@@ -117,6 +119,7 @@ LOCAL_APPS = [
     "apps.media",
     "apps.blog",
     "apps.support",
+    "apps.notifications",  # Notification service
 ]
 
 INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
@@ -48,8 +48,18 @@ DATABASES = {
 # CONN_MAX_AGE: How long to keep connections open (in seconds)
 # 0 = Close after each request (default Django behavior)
 # None = Unlimited reuse (not recommended)
-# 600 = 10 minutes (good balance for most applications)
-CONN_MAX_AGE = config("DATABASE_CONN_MAX_AGE", default=600, cast=int)
+# 60 = 1 minute (good for development to prevent connection accumulation)
+# 600 = 10 minutes (good for production)
+
+# Check if we're in debug mode (read directly from config, same source as base settings)
+DEBUG = config("DEBUG", default=False, cast=bool)
+
+# Use shorter connection lifetime in development to prevent accumulation
+CONN_MAX_AGE = config(
+    "DATABASE_CONN_MAX_AGE",
+    default=60 if DEBUG else 600,
+    cast=int
+)
 
 # Apply CONN_MAX_AGE to the default database
 DATABASES["default"]["CONN_MAX_AGE"] = CONN_MAX_AGE
@@ -59,12 +69,21 @@ DATABASES["default"]["CONN_MAX_AGE"] = CONN_MAX_AGE
 # =============================================================================
 # These settings are passed to psycopg2 when creating new connections
+
+# Shorter timeouts in development to fail fast
+connect_timeout = config("DATABASE_CONNECT_TIMEOUT", default=5 if DEBUG else 10, cast=int)
+statement_timeout = config("DATABASE_STATEMENT_TIMEOUT", default=30000, cast=int)
+# Idle in transaction timeout: close connections that sit idle in a transaction
+# This prevents connection leaks from unclosed transactions
+idle_in_transaction_timeout = config("DATABASE_IDLE_IN_TRANSACTION_TIMEOUT", default=60000, cast=int)
+
 DATABASE_OPTIONS = {
     # Connection timeout in seconds
-    "connect_timeout": config("DATABASE_CONNECT_TIMEOUT", default=10, cast=int),
-    # Query timeout in milliseconds (30 seconds default)
-    # This prevents runaway queries from blocking the database
-    "options": f"-c statement_timeout={config('DATABASE_STATEMENT_TIMEOUT', default=30000, cast=int)}",
+    "connect_timeout": connect_timeout,
+    # PostgreSQL server-side options
+    "options": (
+        f"-c statement_timeout={statement_timeout} "
+        f"-c idle_in_transaction_session_timeout={idle_in_transaction_timeout}"
+    ),
 }
 
 # Apply options to PostgreSQL databases
@@ -72,6 +91,7 @@ if "postgis" in DATABASE_URL or "postgresql" in DATABASE_URL:
     DATABASES["default"].setdefault("OPTIONS", {})
     DATABASES["default"]["OPTIONS"].update(DATABASE_OPTIONS)
+
 
 # =============================================================================
 # GeoDjango Settings
 # =============================================================================
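The net effect of the block above is one OPTIONS dict per connection; with DEBUG=False and no env overrides, psycopg receives the following (values are simply the defaults from the config calls above):

    options = {
        "connect_timeout": 10,  # seconds to wait for the connection handshake
        "options": "-c statement_timeout=30000 "
                   "-c idle_in_transaction_session_timeout=60000",
    }
    # Every new connection thus enforces a 30 s cap per statement and closes
    # sessions that sit idle inside an open transaction for more than 60 s.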
@@ -34,7 +34,7 @@ ACCOUNT_LOGIN_METHODS = {"email", "username"}
 
 # Email verification settings
 ACCOUNT_EMAIL_VERIFICATION = config("ACCOUNT_EMAIL_VERIFICATION", default="mandatory")
-ACCOUNT_EMAIL_REQUIRED = True
+# Note: ACCOUNT_EMAIL_REQUIRED is handled by ACCOUNT_SIGNUP_FIELDS above (email* = required)
 ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_CHANGE = True
 ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_RESEND = True
@@ -76,8 +76,8 @@ SOCIALACCOUNT_STORE_TOKENS = True
 # =============================================================================
 # https://docs.allauth.org/en/latest/mfa/index.html
 
-# Supported authenticator types
-MFA_SUPPORTED_TYPES = ["totp"]
+# Supported authenticator types - TOTP and WebAuthn (Passkeys)
+MFA_SUPPORTED_TYPES = ["totp", "webauthn"]
 
 # TOTP settings
 MFA_TOTP_ISSUER = config("MFA_TOTP_ISSUER", default="ThrillWiki")
@@ -88,6 +88,17 @@ MFA_TOTP_DIGITS = 6
 # Interval in seconds for TOTP code generation (default 30)
 MFA_TOTP_PERIOD = 30
 
+# WebAuthn/Passkey settings
+MFA_PASSKEY_LOGIN_ENABLED = config("MFA_PASSKEY_LOGIN_ENABLED", default=True, cast=bool)
+
+# Read DEBUG directly (same source as base.py) to avoid circular import
+_DEBUG_MFA = config("DEBUG", default=True, cast=bool)
+
+# Allow insecure origin (http://localhost) for WebAuthn in development
+MFA_WEBAUTHN_ALLOW_INSECURE_ORIGIN = config(
+    "MFA_WEBAUTHN_ALLOW_INSECURE_ORIGIN", default=_DEBUG_MFA, cast=bool
+)
+
 # =============================================================================
 # Login By Code (Magic Link) Configuration
 # =============================================================================
@@ -202,7 +213,10 @@ FRONTEND_DOMAIN = config("FRONTEND_DOMAIN", default="https://thrillwiki.com")
 TURNSTILE_SITEKEY = config("TURNSTILE_SITEKEY", default="")
 TURNSTILE_SECRET = config("TURNSTILE_SECRET", default="")
 
-# Skip Turnstile validation in development if keys not set
+# Read DEBUG directly (same source as base.py) to avoid circular import
+_DEBUG = config("DEBUG", default=True, cast=bool)
+
+# Skip Turnstile validation in debug mode or if no secret configured
 TURNSTILE_SKIP_VALIDATION = config(
-    "TURNSTILE_SKIP_VALIDATION", default=not TURNSTILE_SECRET, cast=bool  # Skip if no secret
+    "TURNSTILE_SKIP_VALIDATION", default=(_DEBUG or not TURNSTILE_SECRET), cast=bool
 )
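A minimal sketch of how a view might honor these settings when verifying a token. The helper name is hypothetical; the siteverify URL is Cloudflare's documented endpoint, and httpx is in the project dependencies (see pyproject below):

    import httpx
    from django.conf import settings


    def turnstile_ok(token: str, remote_ip: str | None = None) -> bool:
        """Hypothetical helper: verify a Turnstile token, honoring the skip flag."""
        if settings.TURNSTILE_SKIP_VALIDATION:
            return True  # debug mode, or no secret configured
        resp = httpx.post(
            "https://challenges.cloudflare.com/turnstile/v0/siteverify",
            data={"secret": settings.TURNSTILE_SECRET, "response": token, "remoteip": remote_ip},
            timeout=5.0,
        )
        return resp.json().get("success", False)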
@@ -23,8 +23,7 @@ dependencies = [
     # =============================================================================
     # Authentication & Security
     # =============================================================================
-    "django-allauth>=65.3.0",
-    "dj-rest-auth>=7.0.0",
+    "django-allauth>=65.9.0",
     "djangorestframework-simplejwt>=5.5.1",
     "pyjwt>=2.10.1",
     "cryptography>=44.0.0",
@@ -58,7 +57,6 @@ dependencies = [
     # Database & History Tracking
     # =============================================================================
     "django-pghistory>=3.5.2",
-    "django-fsm>=2.8.1",
     "django-fsm-log>=3.1.0",
     # =============================================================================
     # Monitoring & Observability
@@ -79,6 +77,10 @@ dependencies = [
     "django-turnstile>=0.1.2",
     "fido2>=2.0.0",
     "qrcode[pil]>=8.2",
+    "httpx>=0.28.1",
+    "django-fsm-2>=4.1.0",
+    "django-notifications-hq>=1.8.3",
+    "deepdiff>=8.0.0",
 ]
 
 [dependency-groups]
@@ -90,6 +92,7 @@ dev = [
     "rope>=1.14.0",
     "ruff>=0.9.2",
     "pyright>=1.1.405",
+    "factory-boy>=3.3.3",
 ]
 test = [
     "pytest>=8.3.5",
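One note on the django-fsm to django-fsm-2 swap above: the fork is published as a drop-in replacement and keeps the original import path, so application code like the following should continue to work unchanged:

    # django-fsm-2 still installs the "django_fsm" module, so imports stay as they were.
    from django_fsm import FSMField, transition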
@@ -128,7 +128,7 @@
 {% endif %}
 
 <!-- FSM Actions -->
-{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
+{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
 <div class="mt-6 review-notes" x-data="{ showNotes: false }">
     <div x-show="showNotes"
          x-transition:enter="transition ease-out duration-200"
@@ -57,7 +57,7 @@
 </div>
 {% endif %}
 
-{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
+{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
 <div class="mt-4 review-notes" x-data="{ showNotes: false }">
     <textarea x-show="showNotes"
               name="notes"
@@ -52,7 +52,7 @@
 {% endif %}
 
 <!-- FSM Actions -->
-{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
+{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
 <div class="mt-4 review-notes" x-data="{ showNotes: false }">
     <textarea x-show="showNotes"
               name="notes"
@@ -410,7 +410,7 @@
 </div>
 {% endif %}
 
-{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
+{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
 <div class="mt-6 review-notes" x-data="{ showNotes: false }">
     <div x-show="showNotes"
          x-transition:enter="transition ease-out duration-200"
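One caveat worth flagging on the rewritten conditions: the Django template language, like Python, binds `and` tighter than `or`, so the new expression groups as PENDING or (ESCALATED and ADMIN) or SUPERUSER rather than (PENDING or ESCALATED) and (ADMIN or SUPERUSER). A quick Python illustration of the same precedence:

    # "and" binds tighter than "or", in Python and in Django templates alike.
    status, role = "PENDING", "GUEST"
    loose = status == "PENDING" or status == "ESCALATED" and role == "ADMIN" or role == "SUPERUSER"
    strict = (status == "PENDING" or status == "ESCALATED") and (role == "ADMIN" or role == "SUPERUSER")
    print(loose, strict)  # True False: a PENDING item would show actions even to a guest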
docs/ENDPOINT_MAPPING.md (new file, 93 lines)
@@ -0,0 +1,93 @@
# Supabase → Django Endpoint Mapping

## Overview
This document maps all Supabase endpoints to their Django REST Framework equivalents.

---

## Edge Function Mappings

### Images
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `upload-image` | `/api/v1/images/generate-upload-url/` | POST | ✅ |
| `delete-image` | `/api/v1/images/delete/` | POST | ✅ |
| `generate-og-image` | `/api/v1/images/og-image/` | POST | ✅ |

### Location/Maps
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `detect-location` | `/api/v1/maps/detect-location/` | POST | ✅ |
| `enrich-location` | `/api/v1/maps/enrich-location/` | POST | ✅ |
| `search-location` | `/api/v1/maps/search-location/` | POST | ✅ |

### Notifications (Django Native)
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `get-notifications` | `/api/v1/notifications/` | GET | ✅ |
| `mark-notification-read` | `/api/v1/notifications/mark-read/` | POST | ✅ |
| `get-unread-count` | `/api/v1/notifications/unread-count/` | GET | ✅ |

### Moderation
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `process-bulk-approval` | `/api/v1/moderation/api/bulk-operations/` | POST | ✅ |
| `claim-submission` | `/api/v1/moderation/api/queue/` | POST | ✅ |
| `convert-submission-to-edit` | `/api/v1/moderation/api/edit-submissions/` | POST | ✅ |

### Auth/MFA
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `mfa-unenroll` | `/api/v1/auth/mfa/totp/deactivate/` | POST | ✅ |
| `process-oauth-profile` | `/api/v1/auth/social/process-profile/` | POST | ✅ |

### Account Management
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `cancel-account-deletion` | `/api/v1/accounts/delete-account/cancel/` | POST | ✅ |
| `confirm-account-deletion` | `/api/v1/accounts/delete-account/verify/` | POST | ✅ |
| `cancel-email-change` | `/api/v1/accounts/email-change/cancel/` | POST | ✅ |
| `export-user-data` | `/api/v1/accounts/data-export/` | POST | ✅ |

### Admin Dashboard
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `detect-anomalies` | `/api/v1/admin/anomalies/detect/` | POST | ✅ |
| `collect-metrics` | `/api/v1/admin/metrics/collect/` | POST | ✅ |
| `pipeline-integrity-scan` | `/api/v1/admin/pipeline/integrity-scan/` | POST | ✅ |
| `task-status` | `/api/v1/admin/tasks/status/` | GET | ✅ |

---

## Table Mappings

| Supabase Table | Django Endpoint |
|----------------|-----------------|
| `parks` | `/api/v1/parks/` |
| `rides` | `/api/v1/rides/` |
| `companies` | `/api/v1/companies/` |
| `ride_models` | `/api/v1/rides/models/` |
| `profiles` | `/api/v1/accounts/profiles/` |
| `reviews` | `/api/v1/reviews/` |
| `photos` | `/api/v1/media/photos/` |
| `content_submissions` | `/api/v1/moderation/submissions/` |
| `ride_credits` | `/api/v1/accounts/credits/` |

---

## Scheduled Tasks (Celery)

| Supabase Function | Celery Task |
|-------------------|-------------|
| `process-scheduled-deletions` | `apps.core.tasks.scheduled` |
| `process-closing-entities` | `apps.core.tasks.scheduled` |
| `cleanup-orphaned-images` | `apps.core.tasks.scheduled` |

---

## Deprecated

| Function | Status |
|----------|--------|
| `migrate-novu-users` | 🚫 Replaced by Django native |
| `novu-webhook` | 🚫 Replaced by Django native |
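As a smoke test for one of the mappings above, a hedged httpx sketch (httpx is in the dependencies; the host and the JWT auth scheme are assumptions):

    import httpx

    BASE = "http://localhost:8000"  # assumed dev host
    headers = {"Authorization": "Bearer <access-token>"}  # simplejwt-style bearer auth (assumed)

    r = httpx.get(f"{BASE}/api/v1/notifications/unread-count/", headers=headers)
    r.raise_for_status()
    print(r.json())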
uv.lock (generated; 361 lines changed. Machine-written lockfile refresh, condensed here to the version changes it records.)
  anyio: added at 4.12.1 (depends on idna; pulled in by the new httpx dependency)
  celery: 5.6.0 → 5.6.2 (drops the exceptiongroup dependency)
  certifi: 2025.11.12 → 2026.1.4
  coverage: 7.13.0 → 7.13.1 (sdist and all cp313/cp313t/cp314 platform wheels re-pinned)
|
{ url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/5e/25/127d8ed03d7711a387d96f132589057213e3aef7475afdaa303412463f22/coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657", size = 260713, upload-time = "2025-12-08T13:14:16.907Z" },
|
{ url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/fd/db/559fbb6def07d25b2243663b46ba9eb5a3c6586c0c6f4e62980a68f0ee1c/coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff", size = 262825, upload-time = "2025-12-08T13:14:18.68Z" },
|
{ url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/37/99/6ee5bf7eff884766edb43bd8736b5e1c5144d0fe47498c3779326fe75a35/coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3", size = 265233, upload-time = "2025-12-08T13:14:20.55Z" },
|
{ url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/d8/90/92f18fe0356ea69e1f98f688ed80cec39f44e9f09a1f26a1bbf017cc67f2/coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b", size = 259779, upload-time = "2025-12-08T13:14:22.367Z" },
|
{ url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/90/5d/b312a8b45b37a42ea7d27d7d3ff98ade3a6c892dd48d1d503e773503373f/coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d", size = 262700, upload-time = "2025-12-08T13:14:24.309Z" },
|
{ url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/63/f8/b1d0de5c39351eb71c366f872376d09386640840a2e09b0d03973d791e20/coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e", size = 260302, upload-time = "2025-12-08T13:14:26.068Z" },
|
{ url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/aa/7c/d42f4435bc40c55558b3109a39e2d456cddcec37434f62a1f1230991667a/coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940", size = 259136, upload-time = "2025-12-08T13:14:27.604Z" },
|
{ url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/b8/d3/23413241dc04d47cfe19b9a65b32a2edd67ecd0b817400c2843ebc58c847/coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2", size = 261467, upload-time = "2025-12-08T13:14:29.09Z" },
|
{ url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/13/e6/6e063174500eee216b96272c0d1847bf215926786f85c2bd024cf4d02d2f/coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7", size = 221875, upload-time = "2025-12-08T13:14:31.106Z" },
|
{ url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/3b/46/f4fb293e4cbe3620e3ac2a3e8fd566ed33affb5861a9b20e3dd6c1896cbc/coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc", size = 222982, upload-time = "2025-12-08T13:14:33.1Z" },
|
{ url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/68/62/5b3b9018215ed9733fbd1ae3b2ed75c5de62c3b55377a52cae732e1b7805/coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a", size = 221016, upload-time = "2025-12-08T13:14:34.601Z" },
|
{ url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" },
|
||||||
{ url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" },
|
{ url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -424,26 +435,28 @@ wheels = [
 ]
 
 [[package]]
-name = "dj-database-url"
+name = "deepdiff"
-version = "3.0.1"
+version = "8.6.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
-    { name = "django" },
+    { name = "orderly-set" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/75/05/2ec51009f4ce424877dbd8ad95868faec0c3494ed0ff1635f9ab53d9e0ee/dj_database_url-3.0.1.tar.gz", hash = "sha256:8994961efb888fc6bf8c41550870c91f6f7691ca751888ebaa71442b7f84eff8", size = 12556, upload-time = "2025-07-02T09:40:11.424Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054, upload-time = "2025-09-03T19:40:41.461Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/aa/5e/86a43c6fdaa41c12d58e4ff3ebbfd6b71a7cb0360a08614e3754ef2e9afb/dj_database_url-3.0.1-py3-none-any.whl", hash = "sha256:43950018e1eeea486bf11136384aec0fe55b29fe6fd8a44553231b85661d9383", size = 8808, upload-time = "2025-07-02T09:40:26.326Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" },
 ]
 
 [[package]]
-name = "dj-rest-auth"
+name = "dj-database-url"
-version = "7.0.1"
+version = "3.1.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "django" },
-    { name = "djangorestframework" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/b7/19/00150c8bedf7b6d4c44ecf7c2be9e58ae2203b42741ca734152d34f549f1/dj-rest-auth-7.0.1.tar.gz", hash = "sha256:3f8c744cbcf05355ff4bcbef0c8a63645da38e29a0fdef3c3332d4aced52fb90", size = 220541, upload-time = "2025-01-04T23:37:38.688Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/c6/88676a7333fb7c668e626b55f8bfc8527dd863973eb1c40412b95d27747d/dj_database_url-3.1.0.tar.gz", hash = "sha256:d80218426b83f9302c8d27d4fccf52de5cf0cab179f0645fb2839f37605d1353", size = 7924, upload-time = "2026-01-04T09:18:32.693Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/68/1b/e84f7472ab0bdacc3fd09556eb4dd40d88246941d465cc103b36a8dabcd8/dj_database_url-3.1.0-py3-none-any.whl", hash = "sha256:155a56fbbecbaaf1348ccd73bf29138b4c9988363ba08261a0f0145e392e638c", size = 8849, upload-time = "2026-01-04T09:18:43.77Z" },
+]
 
 [[package]]
 name = "django"
@@ -619,6 +632,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/fc/87/ad5a38d1a8241b485835c6e6158634b29e885be78424ca42fb63df15b965/django_fsm-3.0.1-py2.py3-none-any.whl", hash = "sha256:ea07be2da221efa5cb8743cc94e0bb64fd962adff594f82269040eb4708c30c6", size = 12454, upload-time = "2025-10-07T16:33:26.218Z" },
 ]
 
+[[package]]
+name = "django-fsm-2"
+version = "4.1.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "django" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7e/8f/d1ec9bafdfd7830a40ab1f72887cd931e07f43552b03869495598cb1170c/django_fsm_2-4.1.0.tar.gz", hash = "sha256:5fbe34839f315a06e29052ded8868292fc469f8f37c8d4d88427ad15a92680ae", size = 17695, upload-time = "2025-11-03T15:03:43.477Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/76/97/f4ce5f7b3f389e03c259b0501fc28a9d1db359b09776251130ae9c5e9590/django_fsm_2-4.1.0-py3-none-any.whl", hash = "sha256:58e20abe633c1375d80aca55fd66ca2431794d32f44751f333f386de869f0e6f", size = 14976, upload-time = "2025-11-03T15:03:41.938Z" },
+]
+
 [[package]]
 name = "django-fsm-log"
 version = "3.1.0"
@@ -670,6 +695,31 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/bf/be/c00a3c861e5356105176c9f120fad3ff95698c1d61e172dd0a0a64acdb9b/django_htmx_autocomplete-1.0.15-py3-none-any.whl", hash = "sha256:c895ea457c0b2a79d14a0b6ead4fba8270fd910ad0d7a0fcbd3ae0b2cb8b6a1e", size = 54059, upload-time = "2025-12-19T17:46:49.595Z" },
 ]
 
+[[package]]
+name = "django-model-utils"
+version = "5.0.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "django" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/81/60/5e232c32a2c977cc1af8c70a38ef436598bc649ad89c2c4568454edde2c9/django_model_utils-5.0.0.tar.gz", hash = "sha256:041cdd6230d2fbf6cd943e1969318bce762272077f4ecd333ab2263924b4e5eb", size = 80559, upload-time = "2024-09-04T11:35:22.858Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/fd/13/87a42048700c54bfce35900a34e2031245132775fb24363fc0e33664aa9c/django_model_utils-5.0.0-py3-none-any.whl", hash = "sha256:fec78e6c323d565a221f7c4edc703f4567d7bb1caeafe1acd16a80c5ff82056b", size = 42630, upload-time = "2024-09-04T11:36:23.166Z" },
+]
+
+[[package]]
+name = "django-notifications-hq"
+version = "1.8.3"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "django" },
+    { name = "django-model-utils" },
+    { name = "jsonfield" },
+    { name = "pytz" },
+    { name = "swapper" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/36/18/1b42038963d8b0aeeb380a24ff86a650067833cf6d2d87b678be2d27c609/django-notifications-hq-1.8.3.tar.gz", hash = "sha256:0f4b216bb382b7c7c4eef273eb211e59c1c6a0ea38cba6077415ac031d330725", size = 32238, upload-time = "2023-10-19T00:03:42.703Z" }
+
 [[package]]
 name = "django-pghistory"
 version = "3.9.1"
@@ -752,16 +802,16 @@ wheels = [
 
 [[package]]
 name = "django-tailwind-cli"
-version = "4.4.2"
+version = "4.5.1"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "django" },
     { name = "django-typer" },
     { name = "semver" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/86/09/8359181201a03871e34d8d47685b15244e778c8ece9f209a86d543cb7767/django_tailwind_cli-4.4.2.tar.gz", hash = "sha256:c3ad962710fc95acf1bb45b1b7747fe549d50ff99228cadc4cf2f28fd8d4e8ce", size = 97420, upload-time = "2025-09-23T15:07:23.876Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/f0/6d/ad632a539d7cc74a07e43f6292dae66ea2c8944c637da22945768cc9b846/django_tailwind_cli-4.5.1.tar.gz", hash = "sha256:e3cdacab1d7e81f08c3ec44a4e8217c7b3a1e986825c1cd4c2acca58fbc584ac", size = 99414, upload-time = "2025-12-29T17:11:55.275Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cf/08/8b8c7c4a4f9f4ad3c4815f53c4f98de19b5c37803a9af767d0cebd779af4/django_tailwind_cli-4.4.2-py3-none-any.whl", hash = "sha256:8d1d69ae19209b5d6fd66150d916edbced1d154eee55895d807441dbfe282cae", size = 31688, upload-time = "2025-09-23T15:07:22.16Z" },
+    { url = "https://files.pythonhosted.org/packages/28/3f/e1fa6aa71e15b2c7f1d4807657be2dfb3b17c7ed9b9595fea30e8a8f36bc/django_tailwind_cli-4.5.1-py3-none-any.whl", hash = "sha256:72991e93c070da864c63d2af96cf601b70af51fadfae786b9326b2ff0e124a72", size = 34275, upload-time = "2025-12-29T17:11:53.599Z" },
 ]
 
 [[package]]
@@ -802,11 +852,14 @@ wheels = [
 
 [[package]]
 name = "django-widget-tweaks"
-version = "1.5.0"
+version = "1.5.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a5/fe/26eb92fba83844e71bbec0ced7fc2e843e5990020e3cc676925204031654/django-widget-tweaks-1.5.0.tar.gz", hash = "sha256:1c2180681ebb994e922c754804c7ffebbe1245014777ac47897a81f57cc629c7", size = 14767, upload-time = "2023-08-25T15:29:12.778Z" }
+dependencies = [
+    { name = "django" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/01/6d/d1b5a3ae3bccfee96e10315373298cea51e5e0d6853d022181b7b0861a4d/django_widget_tweaks-1.5.1.tar.gz", hash = "sha256:084acc9eeb5a3208f2670522de6284287973e54d54488ce6d402f4b99bc5f452", size = 16233, upload-time = "2026-01-02T12:46:28.907Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/46/6a/6cb6deb5c38b785c77c3ba66f53051eada49205979c407323eb666930915/django_widget_tweaks-1.5.0-py3-none-any.whl", hash = "sha256:a41b7b2f05bd44d673d11ebd6c09a96f1d013ee98121cb98c384fe84e33b881e", size = 8960, upload-time = "2023-08-25T15:29:05.644Z" },
+    { url = "https://files.pythonhosted.org/packages/64/6a/ad176284371005426b9a1c424e6cd77a9018ab1b17dc23948bfbeb2f6a21/django_widget_tweaks-1.5.1-py3-none-any.whl", hash = "sha256:3f5080f8365740fc1c14607498c975cbfed896dd0c40e1b563095716ee31e3b5", size = 9634, upload-time = "2026-01-02T12:46:02.18Z" },
 ]
 
 [[package]]
@@ -852,15 +905,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/32/d9/502c56fc3ca960075d00956283f1c44e8cafe433dada03f9ed2821f3073b/drf_spectacular-0.29.0-py3-none-any.whl", hash = "sha256:d1ee7c9535d89848affb4427347f7c4a22c5d22530b8842ef133d7b72e19b41a", size = 105433, upload-time = "2025-11-02T03:40:24.823Z" },
 ]
 
-[[package]]
-name = "exceptiongroup"
-version = "1.3.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
-]
-
 [[package]]
 name = "factory-boy"
 version = "3.3.3"
@@ -875,14 +919,14 @@ wheels = [
 
 [[package]]
 name = "faker"
-version = "39.0.0"
+version = "40.1.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "tzdata" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/30/b9/0897fb5888ddda099dc0f314a8a9afb5faa7e52eaf6865c00686dfb394db/faker-39.0.0.tar.gz", hash = "sha256:ddae46d3b27e01cea7894651d687b33bcbe19a45ef044042c721ceac6d3da0ff", size = 1941757, upload-time = "2025-12-17T19:19:04.762Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d7/1d/aa43ef59589ddf3647df918143f1bac9eb004cce1c43124ee3347061797d/faker-40.1.0.tar.gz", hash = "sha256:c402212a981a8a28615fea9120d789e3f6062c0c259a82bfb8dff5d273e539d2", size = 1948784, upload-time = "2025-12-29T18:06:00.659Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/eb/5a/26cdb1b10a55ac6eb11a738cea14865fa753606c4897d7be0f5dc230df00/faker-39.0.0-py3-none-any.whl", hash = "sha256:c72f1fca8f1a24b8da10fcaa45739135a19772218ddd61b86b7ea1b8c790dce7", size = 1980775, upload-time = "2025-12-17T19:19:02.926Z" },
+    { url = "https://files.pythonhosted.org/packages/fc/23/e22da510e1ec1488966330bf76d8ff4bd535cbfc93660eeb7657761a1bb2/faker-40.1.0-py3-none-any.whl", hash = "sha256:a616d35818e2a2387c297de80e2288083bc915e24b7e39d2fb5bc66cce3a929f", size = 1985317, upload-time = "2025-12-29T18:05:58.831Z" },
 ]
 
 [[package]]
@@ -1007,6 +1051,34 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" },
 ]
 
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "certifi" },
+    { name = "h11" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
+]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio" },
+    { name = "certifi" },
+    { name = "httpcore" },
+    { name = "idna" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
+]
+
 [[package]]
 name = "idna"
 version = "3.11"
@@ -1034,6 +1106,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
 ]
 
+[[package]]
+name = "jsonfield"
+version = "3.2.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "django" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fa/e9/537e105246dba81d898853dbbe17eb3edd23d47a35074b99fd4add6f1662/jsonfield-3.2.0.tar.gz", hash = "sha256:ca53871bc3308ae4f4cddc3b4f99ed5c6fc6abb1832fbfb499bc6da566c70e4a", size = 17156, upload-time = "2025-07-04T23:06:24.883Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0a/22/2e08e7b957f50e5eceefde018ce9ee88aceb5126231128d9c1cb8167c1c8/jsonfield-3.2.0-py3-none-any.whl", hash = "sha256:ca4f6bf89c819f293e77074d613c0021e3c4e8521be95c73d03caecb4372e1ee", size = 8316, upload-time = "2025-07-04T23:06:23.588Z" },
+]
+
 [[package]]
 name = "jsonschema"
 version = "4.25.1"
@@ -1063,7 +1147,7 @@ wheels = [
 
 [[package]]
 name = "kombu"
-version = "5.6.1"
+version = "5.6.2"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "amqp" },
@@ -1071,9 +1155,9 @@ dependencies = [
     { name = "tzdata" },
     { name = "vine" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/ac/05/749ada8e51718445d915af13f1d18bc4333848e8faa0cb234028a3328ec8/kombu-5.6.1.tar.gz", hash = "sha256:90f1febb57ad4f53ca327a87598191b2520e0c793c75ea3b88d98e3b111282e4", size = 471548, upload-time = "2025-11-25T11:07:33.504Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/b6/a5/607e533ed6c83ae1a696969b8e1c137dfebd5759a2e9682e26ff1b97740b/kombu-5.6.2.tar.gz", hash = "sha256:8060497058066c6f5aed7c26d7cd0d3b574990b09de842a8c5aaed0b92cc5a55", size = 472594, upload-time = "2025-12-29T20:30:07.779Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/14/d6/943cf84117cd9ddecf6e1707a3f712a49fc64abdb8ac31b19132871af1dd/kombu-5.6.1-py3-none-any.whl", hash = "sha256:b69e3f5527ec32fc5196028a36376501682973e9620d6175d1c3d4eaf7e95409", size = 214141, upload-time = "2025-11-25T11:07:31.54Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/0f/834427d8c03ff1d7e867d3db3d176470c64871753252b21b4f4897d1fa45/kombu-5.6.2-py3-none-any.whl", hash = "sha256:efcfc559da324d41d61ca311b0c64965ea35b4c55cc04ee36e55386145dace93", size = 214219, upload-time = "2025-12-29T20:30:05.74Z" },
 ]
 
 [[package]]
@@ -1168,6 +1252,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/13/6b/9721ba7c68036316bd8aeb596b397253590c87d7045c9d6fc82b7364eff4/nplusone-1.0.0-py2.py3-none-any.whl", hash = "sha256:96b1e6e29e6af3e71b67d0cc012a5ec8c97c6a2f5399f4ba41a2bbe0e253a9ac", size = 15920, upload-time = "2018-05-21T03:40:23.69Z" },
 ]
 
+[[package]]
+name = "orderly-set"
+version = "5.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4a/88/39c83c35d5e97cc203e9e77a4f93bf87ec89cf6a22ac4818fdcc65d66584/orderly_set-5.5.0.tar.gz", hash = "sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce", size = 27414, upload-time = "2025-07-10T20:10:55.885Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068, upload-time = "2025-07-10T20:10:54.377Z" },
+]
+
 [[package]]
 name = "outcome"
 version = "1.3.0.post0"
@@ -1191,11 +1284,11 @@ wheels = [
 
 [[package]]
 name = "pathspec"
-version = "0.12.1"
+version = "1.0.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/28/2e/83722ece0f6ee24387d6cb830dd562ddbcd6ce0b9d76072c6849670c31b4/pathspec-1.0.1.tar.gz", hash = "sha256:e2769b508d0dd47b09af6ee2c75b2744a2cb1f474ae4b1494fd6a1b7a841613c", size = 129791, upload-time = "2026-01-06T13:02:55.15Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
+    { url = "https://files.pythonhosted.org/packages/d2/fe/2257c71721aeab6a6e8aa1f00d01f2a20f58547d249a6c8fef5791f559fc/pathspec-1.0.1-py3-none-any.whl", hash = "sha256:8870061f22c58e6d83463cfce9a7dd6eca0512c772c1001fb09ac64091816721", size = 54584, upload-time = "2026-01-06T13:02:53.601Z" },
 ]
 
 [[package]]
@@ -1285,30 +1378,30 @@ wheels = [
 
 [[package]]
 name = "psutil"
-version = "7.2.0"
+version = "7.2.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/be/7c/31d1c3ceb1260301f87565f50689dc6da3db427ece1e1e012af22abca54e/psutil-7.2.0.tar.gz", hash = "sha256:2e4f8e1552f77d14dc96fb0f6240c5b34a37081c0889f0853b3b29a496e5ef64", size = 489863, upload-time = "2025-12-23T20:26:24.616Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/73/cb/09e5184fb5fc0358d110fc3ca7f6b1d033800734d34cac10f4136cfac10e/psutil-7.2.1.tar.gz", hash = "sha256:f7583aec590485b43ca601dd9cea0dcd65bd7bb21d30ef4ddbf4ea6b5ed1bdd3", size = 490253, upload-time = "2025-12-29T08:26:00.169Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/a8/8e/b35aae6ed19bc4e2286cac4832e4d522fcf00571867b0a85a3f77ef96a80/psutil-7.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c31e927555539132a00380c971816ea43d089bf4bd5f3e918ed8c16776d68474", size = 129593, upload-time = "2025-12-23T20:26:28.019Z" },
+    { url = "https://files.pythonhosted.org/packages/77/8e/f0c242053a368c2aa89584ecd1b054a18683f13d6e5a318fc9ec36582c94/psutil-7.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9f33bb525b14c3ea563b2fd521a84d2fa214ec59e3e6a2858f78d0844dd60d", size = 129624, upload-time = "2025-12-29T08:26:04.255Z" },
-    { url = "https://files.pythonhosted.org/packages/61/a2/773d17d74e122bbffe08b97f73f2d4a01ef53fb03b98e61b8e4f64a9c6b9/psutil-7.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:db8e44e766cef86dea47d9a1fa535d38dc76449e5878a92f33683b7dba5bfcb2", size = 130104, upload-time = "2025-12-23T20:26:30.27Z" },
+    { url = "https://files.pythonhosted.org/packages/26/97/a58a4968f8990617decee234258a2b4fc7cd9e35668387646c1963e69f26/psutil-7.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81442dac7abfc2f4f4385ea9e12ddf5a796721c0f6133260687fec5c3780fa49", size = 130132, upload-time = "2025-12-29T08:26:06.228Z" },
-    { url = "https://files.pythonhosted.org/packages/0d/e3/d3a9b3f4bd231abbd70a988beb2e3edd15306051bccbfc4472bd34a56e01/psutil-7.2.0-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85ef849ac92169dedc59a7ac2fb565f47b3468fbe1524bf748746bc21afb94c7", size = 180579, upload-time = "2025-12-23T20:26:32.628Z" },
+    { url = "https://files.pythonhosted.org/packages/db/6d/ed44901e830739af5f72a85fa7ec5ff1edea7f81bfbf4875e409007149bd/psutil-7.2.1-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ea46c0d060491051d39f0d2cff4f98d5c72b288289f57a21556cc7d504db37fc", size = 180612, upload-time = "2025-12-29T08:26:08.276Z" },
-    { url = "https://files.pythonhosted.org/packages/66/f8/6c73044424aabe1b7824d4d4504029d406648286d8fe7ba8c4682e0d3042/psutil-7.2.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:26782bdbae2f5c14ce9ebe8ad2411dc2ca870495e0cd90f8910ede7fa5e27117", size = 183171, upload-time = "2025-12-23T20:26:34.972Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/65/b628f8459bca4efbfae50d4bf3feaab803de9a160b9d5f3bd9295a33f0c2/psutil-7.2.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35630d5af80d5d0d49cfc4d64c1c13838baf6717a13effb35869a5919b854cdf", size = 183201, upload-time = "2025-12-29T08:26:10.622Z" },
-    { url = "https://files.pythonhosted.org/packages/48/7d/76d7a863340885d41826562225a566683e653ee6c9ba03c9f3856afa7d80/psutil-7.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b7665f612d3b38a583391b95969667a53aaf6c5706dc27a602c9a4874fbf09e4", size = 139055, upload-time = "2025-12-23T20:26:36.848Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/23/851cadc9764edcc18f0effe7d0bf69f727d4cf2442deb4a9f78d4e4f30f2/psutil-7.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:923f8653416604e356073e6e0bccbe7c09990acef442def2f5640dd0faa9689f", size = 139081, upload-time = "2025-12-29T08:26:12.483Z" },
-    { url = "https://files.pythonhosted.org/packages/a0/48/200054ada0ae4872c8a71db54f3eb6a9af4101680ee6830d373b7fda526b/psutil-7.2.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4413373c174520ae28a24a8974ad8ce6b21f060d27dde94e25f8c73a7effe57a", size = 134737, upload-time = "2025-12-23T20:26:38.784Z" },
+    { url = "https://files.pythonhosted.org/packages/59/82/d63e8494ec5758029f31c6cb06d7d161175d8281e91d011a4a441c8a43b5/psutil-7.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cfbe6b40ca48019a51827f20d830887b3107a74a79b01ceb8cc8de4ccb17b672", size = 134767, upload-time = "2025-12-29T08:26:14.528Z" },
-    { url = "https://files.pythonhosted.org/packages/44/86/98da45dff471b93ef5ce5bcaefa00e3038295a7880a77cf74018243d37fb/psutil-7.2.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2f2f53fd114e7946dfba3afb98c9b7c7f376009447360ca15bfb73f2066f84c7", size = 129692, upload-time = "2025-12-23T20:26:40.623Z" },
+    { url = "https://files.pythonhosted.org/packages/05/c2/5fb764bd61e40e1fe756a44bd4c21827228394c17414ade348e28f83cd79/psutil-7.2.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:494c513ccc53225ae23eec7fe6e1482f1b8a44674241b54561f755a898650679", size = 129716, upload-time = "2025-12-29T08:26:16.017Z" },
-    { url = "https://files.pythonhosted.org/packages/50/ee/10eae91ba4ad071c92db3c178ba861f30406342de9f0ddbe6d51fd741236/psutil-7.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e65c41d7e60068f60ce43b31a3a7fc90deb0dfd34ffc824a2574c2e5279b377e", size = 130110, upload-time = "2025-12-23T20:26:42.569Z" },
+    { url = "https://files.pythonhosted.org/packages/c9/d2/935039c20e06f615d9ca6ca0ab756cf8408a19d298ffaa08666bc18dc805/psutil-7.2.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3fce5f92c22b00cdefd1645aa58ab4877a01679e901555067b1bd77039aa589f", size = 130133, upload-time = "2025-12-29T08:26:18.009Z" },
-    { url = "https://files.pythonhosted.org/packages/87/3a/2b2897443d56fedbbc34ac68a0dc7d55faa05d555372a2f989109052f86d/psutil-7.2.0-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc66d21366850a4261412ce994ae9976bba9852dafb4f2fa60db68ed17ff5281", size = 181487, upload-time = "2025-12-23T20:26:44.633Z" },
+    { url = "https://files.pythonhosted.org/packages/77/69/19f1eb0e01d24c2b3eacbc2f78d3b5add8a89bf0bb69465bc8d563cc33de/psutil-7.2.1-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93f3f7b0bb07711b49626e7940d6fe52aa9940ad86e8f7e74842e73189712129", size = 181518, upload-time = "2025-12-29T08:26:20.241Z" },
-    { url = "https://files.pythonhosted.org/packages/11/66/44308428f7333db42c5ea7390c52af1b38f59b80b80c437291f58b5dfdad/psutil-7.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e025d67b42b8f22b096d5d20f5171de0e0fefb2f0ce983a13c5a1b5ed9872706", size = 184320, upload-time = "2025-12-23T20:26:46.83Z" },
+    { url = "https://files.pythonhosted.org/packages/e1/6d/7e18b1b4fa13ad370787626c95887b027656ad4829c156bb6569d02f3262/psutil-7.2.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d34d2ca888208eea2b5c68186841336a7f5e0b990edec929be909353a202768a", size = 184348, upload-time = "2025-12-29T08:26:22.215Z" },
-    { url = "https://files.pythonhosted.org/packages/18/28/d2feadc7f18e501c5ce687c377db7dca924585418fd694272b8e488ea99f/psutil-7.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:45f6b91f7ad63414d6454fd609e5e3556d0e1038d5d9c75a1368513bdf763f57", size = 140372, upload-time = "2025-12-23T20:26:49.334Z" },
+    { url = "https://files.pythonhosted.org/packages/98/60/1672114392dd879586d60dd97896325df47d9a130ac7401318005aab28ec/psutil-7.2.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2ceae842a78d1603753561132d5ad1b2f8a7979cb0c283f5b52fb4e6e14b1a79", size = 140400, upload-time = "2025-12-29T08:26:23.993Z" },
-    { url = "https://files.pythonhosted.org/packages/b2/1d/48381f5fd0425aa054c4ee3de24f50de3d6c347019f3aec75f357377d447/psutil-7.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87b18a19574139d60a546e88b5f5b9cbad598e26cdc790d204ab95d7024f03ee", size = 135400, upload-time = "2025-12-23T20:26:51.585Z" },
+    { url = "https://files.pythonhosted.org/packages/fb/7b/d0e9d4513c46e46897b46bcfc410d51fc65735837ea57a25170f298326e6/psutil-7.2.1-cp314-cp314t-win_arm64.whl", hash = "sha256:08a2f175e48a898c8eb8eace45ce01777f4785bc744c90aa2cc7f2fa5462a266", size = 135430, upload-time = "2025-12-29T08:26:25.999Z" },
-    { url = "https://files.pythonhosted.org/packages/40/c5/a49160bf3e165b7b93a60579a353cf5d939d7f878fe5fd369110f1d18043/psutil-7.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:977a2fcd132d15cb05b32b2d85b98d087cad039b0ce435731670ba74da9e6133", size = 128116, upload-time = "2025-12-23T20:26:53.516Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/cf/5180eb8c8bdf6a503c6919f1da28328bd1e6b3b1b5b9d5b01ae64f019616/psutil-7.2.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2e953fcfaedcfbc952b44744f22d16575d3aa78eb4f51ae74165b4e96e55f42", size = 128137, upload-time = "2025-12-29T08:26:27.759Z" },
-    { url = "https://files.pythonhosted.org/packages/10/a1/c75feb480f60cd768fb6ed00ac362a16a33e5076ec8475a22d8162fb2659/psutil-7.2.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:24151011c21fadd94214d7139d7c6c54569290d7e553989bdf0eab73b13beb8c", size = 128925, upload-time = "2025-12-23T20:26:55.573Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/2c/78e4a789306a92ade5000da4f5de3255202c534acdadc3aac7b5458fadef/psutil-7.2.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:05cc68dbb8c174828624062e73078e7e35406f4ca2d0866c272c2410d8ef06d1", size = 128947, upload-time = "2025-12-29T08:26:29.548Z" },
-    { url = "https://files.pythonhosted.org/packages/12/ff/e93136587c00a543f4bc768b157fac2c47cd77b180d4f4e5c6efb6ea53a2/psutil-7.2.0-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91f211ba9279e7c61d9d8f84b713cfc38fa161cb0597d5cb3f1ca742f6848254", size = 154666, upload-time = "2025-12-23T20:26:57.312Z" },
+    { url = "https://files.pythonhosted.org/packages/29/f8/40e01c350ad9a2b3cb4e6adbcc8a83b17ee50dd5792102b6142385937db5/psutil-7.2.1-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e38404ca2bb30ed7267a46c02f06ff842e92da3bb8c5bfdadbd35a5722314d8", size = 154694, upload-time = "2025-12-29T08:26:32.147Z" },
-    { url = "https://files.pythonhosted.org/packages/b8/dd/4c2de9c3827c892599d277a69d2224136800870a8a88a80981de905de28d/psutil-7.2.0-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f37415188b7ea98faf90fed51131181646c59098b077550246e2e092e127418b", size = 156109, upload-time = "2025-12-23T20:26:58.851Z" },
+    { url = "https://files.pythonhosted.org/packages/06/e4/b751cdf839c011a9714a783f120e6a86b7494eb70044d7d81a25a5cd295f/psutil-7.2.1-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab2b98c9fc19f13f59628d94df5cc4cc4844bc572467d113a8b517d634e362c6", size = 156136, upload-time = "2025-12-29T08:26:34.079Z" },
-    { url = "https://files.pythonhosted.org/packages/81/3f/090943c682d3629968dd0b04826ddcbc760ee1379021dbe316e2ddfcd01b/psutil-7.2.0-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d12c7ce6ed1128cd81fd54606afa054ac7dbb9773469ebb58cf2f171c49f2ac", size = 148081, upload-time = "2025-12-23T20:27:01.318Z" },
+    { url = "https://files.pythonhosted.org/packages/44/ad/bbf6595a8134ee1e94a4487af3f132cef7fce43aef4a93b49912a48c3af7/psutil-7.2.1-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f78baafb38436d5a128f837fab2d92c276dfb48af01a240b861ae02b2413ada8", size = 148108, upload-time = "2025-12-29T08:26:36.225Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/88/c39648ebb8ec182d0364af53cdefe6eddb5f3872ba718b5855a8ff65d6d4/psutil-7.2.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ca0faef7976530940dcd39bc5382d0d0d5eb023b186a4901ca341bd8d8684151", size = 147376, upload-time = "2025-12-23T20:27:03.347Z" },
+    { url = "https://files.pythonhosted.org/packages/1c/15/dd6fd869753ce82ff64dcbc18356093471a5a5adf4f77ed1f805d473d859/psutil-7.2.1-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:99a4cd17a5fdd1f3d014396502daa70b5ec21bf4ffe38393e152f8e449757d67", size = 147402, upload-time = "2025-12-29T08:26:39.21Z" },
-    { url = "https://files.pythonhosted.org/packages/01/a2/5b39e08bd9b27476bc7cce7e21c71a481ad60b81ffac49baf02687a50d7f/psutil-7.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:abdb74137ca232d20250e9ad471f58d500e7743bc8253ba0bfbf26e570c0e437", size = 136910, upload-time = "2025-12-23T20:27:05.289Z" },
+    { url = "https://files.pythonhosted.org/packages/34/68/d9317542e3f2b180c4306e3f45d3c922d7e86d8ce39f941bb9e2e9d8599e/psutil-7.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:b1b0671619343aa71c20ff9767eced0483e4fc9e1f489d50923738caf6a03c17", size = 136938, upload-time = "2025-12-29T08:26:41.036Z" },
-    { url = "https://files.pythonhosted.org/packages/59/54/53839db1258c1eaeb4ded57ff202144ebc75b23facc05a74fd98d338b0c6/psutil-7.2.0-cp37-abi3-win_arm64.whl", hash = "sha256:284e71038b3139e7ab3834b63b3eb5aa5565fcd61a681ec746ef9a0a8c457fd2", size = 133807, upload-time = "2025-12-23T20:27:06.825Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836, upload-time = "2025-12-29T08:26:43.086Z" },
 ]
 
 [[package]]
@@ -1580,6 +1673,15 @@ global = [
     { name = "platformdirs" },
 ]
 
+[[package]]
+name = "pytz"
+version = "2025.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
+]
+
 [[package]]
 name = "pyyaml"
 version = "6.0.3"
@@ -1922,6 +2024,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/49/4b/359f28a903c13438ef59ebeee215fb25da53066db67b305c125f1c6d2a25/sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba", size = 46138, upload-time = "2025-12-19T07:17:46.573Z" },
 ]
 
+[[package]]
+name = "swapper"
+version = "1.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/9b/3b/98ea1cfc04dc9805d58c5a96dd006f5d88a5a32b7b05e1f5a1c00363bb9a/swapper-1.4.0.tar.gz", hash = "sha256:9e083af114ee0593241a7b877e3e0e7d3a580454f5d59016c667a5563306f8fe", size = 12668, upload-time = "2024-08-14T19:36:07.539Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e9/53/c59363308ef97507a680372471e25e1ebab2e706a45a7c416eea6474c928/swapper-1.4.0-py2.py3-none-any.whl", hash = "sha256:57b8378aad234242542fe32dc6e8cff0ed24b63493d20b3c88ee01f894b9345e", size = 7106, upload-time = "2024-08-14T19:36:06.247Z" },
+]
+
 [[package]]
 name = "text-unidecode"
 version = "1.3"
@@ -1938,8 +2049,8 @@ source = { virtual = "backend" }
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "celery" },
|
{ name = "celery" },
|
||||||
{ name = "cryptography" },
|
{ name = "cryptography" },
|
||||||
|
{ name = "deepdiff" },
|
||||||
{ name = "dj-database-url" },
|
{ name = "dj-database-url" },
|
||||||
{ name = "dj-rest-auth" },
|
|
||||||
{ name = "django" },
|
{ name = "django" },
|
||||||
{ name = "django-allauth" },
|
{ name = "django-allauth" },
|
||||||
{ name = "django-celery-beat" },
|
{ name = "django-celery-beat" },
|
||||||
@@ -1951,11 +2062,12 @@ dependencies = [
|
|||||||
{ name = "django-extensions" },
|
{ name = "django-extensions" },
|
||||||
{ name = "django-filter" },
|
{ name = "django-filter" },
|
||||||
{ name = "django-forwardemail" },
|
{ name = "django-forwardemail" },
|
||||||
{ name = "django-fsm" },
|
{ name = "django-fsm-2" },
|
||||||
{ name = "django-fsm-log" },
|
{ name = "django-fsm-log" },
|
||||||
{ name = "django-health-check" },
|
{ name = "django-health-check" },
|
||||||
{ name = "django-htmx" },
|
{ name = "django-htmx" },
|
||||||
{ name = "django-htmx-autocomplete" },
|
{ name = "django-htmx-autocomplete" },
|
||||||
|
{ name = "django-notifications-hq" },
|
||||||
{ name = "django-pghistory" },
|
{ name = "django-pghistory" },
|
||||||
{ name = "django-redis" },
|
{ name = "django-redis" },
|
||||||
{ name = "django-tailwind-cli" },
|
{ name = "django-tailwind-cli" },
|
||||||
@@ -1966,6 +2078,7 @@ dependencies = [
|
|||||||
{ name = "drf-spectacular" },
|
{ name = "drf-spectacular" },
|
||||||
{ name = "fido2" },
|
{ name = "fido2" },
|
||||||
{ name = "hiredis" },
|
{ name = "hiredis" },
|
||||||
|
{ name = "httpx" },
|
||||||
{ name = "nplusone" },
|
{ name = "nplusone" },
|
||||||
{ name = "piexif" },
|
{ name = "piexif" },
|
||||||
{ name = "pillow" },
|
{ name = "pillow" },
|
||||||
@@ -1992,6 +2105,7 @@ dev = [
|
|||||||
{ name = "autopep8" },
|
{ name = "autopep8" },
|
||||||
{ name = "black" },
|
{ name = "black" },
|
||||||
{ name = "django-stubs" },
|
{ name = "django-stubs" },
|
||||||
|
{ name = "factory-boy" },
|
||||||
{ name = "pyright" },
|
{ name = "pyright" },
|
||||||
{ name = "rope" },
|
{ name = "rope" },
|
||||||
{ name = "ruff" },
|
{ name = "ruff" },
|
||||||
@@ -2020,10 +2134,10 @@ test = [
|
|||||||
requires-dist = [
|
requires-dist = [
|
||||||
{ name = "celery", specifier = ">=5.5.3,<6" },
|
{ name = "celery", specifier = ">=5.5.3,<6" },
|
||||||
{ name = "cryptography", specifier = ">=44.0.0" },
|
{ name = "cryptography", specifier = ">=44.0.0" },
|
||||||
|
{ name = "deepdiff", specifier = ">=8.0.0" },
|
||||||
{ name = "dj-database-url", specifier = ">=2.3.0" },
|
{ name = "dj-database-url", specifier = ">=2.3.0" },
|
||||||
{ name = "dj-rest-auth", specifier = ">=7.0.0" },
|
|
||||||
{ name = "django", specifier = ">=5.2.8" },
|
{ name = "django", specifier = ">=5.2.8" },
|
||||||
{ name = "django-allauth", specifier = ">=65.3.0" },
|
{ name = "django-allauth", specifier = ">=65.9.0" },
|
||||||
{ name = "django-celery-beat", specifier = ">=2.8.1" },
|
{ name = "django-celery-beat", specifier = ">=2.8.1" },
|
||||||
{ name = "django-celery-results", specifier = ">=2.6.0" },
|
{ name = "django-celery-results", specifier = ">=2.6.0" },
|
||||||
{ name = "django-cleanup", specifier = ">=8.1.0" },
|
{ name = "django-cleanup", specifier = ">=8.1.0" },
|
||||||
@@ -2033,11 +2147,12 @@ requires-dist = [
|
|||||||
{ name = "django-extensions", specifier = ">=4.1" },
|
{ name = "django-extensions", specifier = ">=4.1" },
|
||||||
{ name = "django-filter", specifier = ">=24.3" },
|
{ name = "django-filter", specifier = ">=24.3" },
|
||||||
{ name = "django-forwardemail", specifier = ">=1.0.0" },
|
{ name = "django-forwardemail", specifier = ">=1.0.0" },
|
||||||
{ name = "django-fsm", specifier = ">=2.8.1" },
|
{ name = "django-fsm-2", specifier = ">=4.1.0" },
|
||||||
{ name = "django-fsm-log", specifier = ">=3.1.0" },
|
{ name = "django-fsm-log", specifier = ">=3.1.0" },
|
||||||
{ name = "django-health-check", specifier = ">=3.17.0" },
|
{ name = "django-health-check", specifier = ">=3.17.0" },
|
||||||
{ name = "django-htmx", specifier = ">=1.20.0" },
|
{ name = "django-htmx", specifier = ">=1.20.0" },
|
||||||
{ name = "django-htmx-autocomplete", specifier = ">=1.0.5" },
|
{ name = "django-htmx-autocomplete", specifier = ">=1.0.5" },
|
||||||
|
{ name = "django-notifications-hq", specifier = ">=1.8.3" },
|
||||||
{ name = "django-pghistory", specifier = ">=3.5.2" },
|
{ name = "django-pghistory", specifier = ">=3.5.2" },
|
||||||
{ name = "django-redis", specifier = ">=5.4.0" },
|
{ name = "django-redis", specifier = ">=5.4.0" },
|
||||||
{ name = "django-tailwind-cli", specifier = ">=2.21.1" },
|
{ name = "django-tailwind-cli", specifier = ">=2.21.1" },
|
||||||
@@ -2048,6 +2163,7 @@ requires-dist = [
|
|||||||
{ name = "drf-spectacular", specifier = ">=0.28.0" },
|
{ name = "drf-spectacular", specifier = ">=0.28.0" },
|
||||||
{ name = "fido2", specifier = ">=2.0.0" },
|
{ name = "fido2", specifier = ">=2.0.0" },
|
||||||
{ name = "hiredis", specifier = ">=3.1.0" },
|
{ name = "hiredis", specifier = ">=3.1.0" },
|
||||||
|
{ name = "httpx", specifier = ">=0.28.1" },
|
||||||
{ name = "nplusone", specifier = ">=1.0.0" },
|
{ name = "nplusone", specifier = ">=1.0.0" },
|
||||||
{ name = "piexif", specifier = ">=1.1.3" },
|
{ name = "piexif", specifier = ">=1.1.3" },
|
||||||
{ name = "pillow", specifier = ">=10.4.0,<11.2" },
|
{ name = "pillow", specifier = ">=10.4.0,<11.2" },
|
||||||
@@ -2074,6 +2190,7 @@ dev = [
|
|||||||
{ name = "autopep8", specifier = ">=2.3.2" },
|
{ name = "autopep8", specifier = ">=2.3.2" },
|
||||||
{ name = "black", specifier = ">=25.1.0" },
|
{ name = "black", specifier = ">=25.1.0" },
|
||||||
{ name = "django-stubs", specifier = ">=5.2.2" },
|
{ name = "django-stubs", specifier = ">=5.2.2" },
|
||||||
|
{ name = "factory-boy", specifier = ">=3.3.3" },
|
||||||
{ name = "pyright", specifier = ">=1.1.405" },
|
{ name = "pyright", specifier = ">=1.1.405" },
|
||||||
{ name = "rope", specifier = ">=1.14.0" },
|
{ name = "rope", specifier = ">=1.14.0" },
|
||||||
{ name = "ruff", specifier = ">=0.9.2" },
|
{ name = "ruff", specifier = ">=0.9.2" },
|
||||||
|
|||||||