mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2026-02-05 11:45:18 -05:00
Compare commits
2 Commits
4da7e52fb0
...
28c9ec56da
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
28c9ec56da | ||
|
|
3ec5a4857d |
@@ -11,7 +11,7 @@ class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("accounts", "0014_remove_toplist_user_remove_toplistitem_top_list_and_more"),
|
||||
("pghistory", "0007_auto_20250421_0444"),
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
||||
@@ -0,0 +1,41 @@
|
||||
# Generated by Django 5.2.9 on 2026-01-07 01:23
|
||||
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0015_loginhistory_loginhistoryevent_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name='emailverification',
|
||||
name='insert_insert',
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name='emailverification',
|
||||
name='update_update',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='emailverification',
|
||||
name='updated_at',
|
||||
field=models.DateTimeField(auto_now=True, help_text='When this verification was last updated'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='emailverificationevent',
|
||||
name='updated_at',
|
||||
field=models.DateTimeField(auto_now=True, help_text='When this verification was last updated'),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name='emailverification',
|
||||
trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "accounts_emailverificationevent" ("created_at", "id", "last_sent", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "token", "updated_at", "user_id") VALUES (NEW."created_at", NEW."id", NEW."last_sent", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."token", NEW."updated_at", NEW."user_id"); RETURN NULL;', hash='53c568e932b1b55a3c79e79220e6d6f269458003', operation='INSERT', pgid='pgtrigger_insert_insert_53748', table='accounts_emailverification', when='AFTER')),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name='emailverification',
|
||||
trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "accounts_emailverificationevent" ("created_at", "id", "last_sent", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "token", "updated_at", "user_id") VALUES (NEW."created_at", NEW."id", NEW."last_sent", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."token", NEW."updated_at", NEW."user_id"); RETURN NULL;', hash='8b45a9a0a1810564cb46c098552ab4ec7920daeb', operation='UPDATE', pgid='pgtrigger_update_update_7a2a8', table='accounts_emailverification', when='AFTER')),
|
||||
),
|
||||
]
|
||||
@@ -110,6 +110,8 @@ urlpatterns = [
|
||||
path("profile/avatar/upload/", views.upload_avatar, name="upload_avatar"),
|
||||
path("profile/avatar/save/", views.save_avatar_image, name="save_avatar_image"),
|
||||
path("profile/avatar/delete/", views.delete_avatar, name="delete_avatar"),
|
||||
# User permissions endpoint
|
||||
path("permissions/", views.get_user_permissions, name="get_user_permissions"),
|
||||
# Login history endpoint
|
||||
path("login-history/", views.get_login_history, name="get_login_history"),
|
||||
# Email change cancellation endpoint
|
||||
@@ -119,6 +121,9 @@ urlpatterns = [
|
||||
path("magic-link/verify/", views_magic_link.verify_magic_link, name="verify_magic_link"),
|
||||
# Public Profile
|
||||
path("profiles/<str:username>/", views.get_public_user_profile, name="get_public_user_profile"),
|
||||
# Bulk lookup endpoints
|
||||
path("profiles/bulk/", views.bulk_get_profiles, name="bulk_get_profiles"),
|
||||
path("users/bulk/", views.get_users_with_emails, name="get_users_with_emails"),
|
||||
# ViewSet routes
|
||||
path("", include(router.urls)),
|
||||
]
|
||||
|
||||
@@ -826,6 +826,63 @@ def check_user_deletion_eligibility(request, user_id):
|
||||
# === USER PROFILE ENDPOINTS ===
|
||||
|
||||
|
||||
@extend_schema(
|
||||
operation_id="get_user_permissions",
|
||||
summary="Get current user's management permissions",
|
||||
description="Get the authenticated user's management permissions including role information.",
|
||||
responses={
|
||||
200: {
|
||||
"description": "User permissions",
|
||||
"example": {
|
||||
"user_id": "uuid",
|
||||
"is_superuser": True,
|
||||
"is_staff": True,
|
||||
"is_moderator": False,
|
||||
"roles": ["admin"],
|
||||
"permissions": ["can_moderate", "can_manage_users"],
|
||||
},
|
||||
},
|
||||
401: {
|
||||
"description": "Authentication required",
|
||||
"example": {"detail": "Authentication credentials were not provided."},
|
||||
},
|
||||
},
|
||||
tags=["User Profile"],
|
||||
)
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def get_user_permissions(request):
|
||||
"""Get the authenticated user's management permissions."""
|
||||
user = request.user
|
||||
profile = getattr(user, "profile", None)
|
||||
|
||||
# Get roles from profile if exists
|
||||
roles = []
|
||||
if profile:
|
||||
if hasattr(profile, "role") and profile.role:
|
||||
roles.append(profile.role)
|
||||
if user.is_superuser:
|
||||
roles.append("admin")
|
||||
if user.is_staff:
|
||||
roles.append("staff")
|
||||
|
||||
# Build permissions list based on flags
|
||||
permissions = []
|
||||
if user.is_superuser or user.is_staff:
|
||||
permissions.extend(["can_moderate", "can_manage_users", "can_view_admin"])
|
||||
elif profile and getattr(profile, "is_moderator", False):
|
||||
permissions.append("can_moderate")
|
||||
|
||||
return Response({
|
||||
"user_id": str(user.id),
|
||||
"is_superuser": user.is_superuser,
|
||||
"is_staff": user.is_staff,
|
||||
"is_moderator": profile and getattr(profile, "is_moderator", False) if profile else False,
|
||||
"roles": list(set(roles)), # Deduplicate
|
||||
"permissions": list(set(permissions)), # Deduplicate
|
||||
}, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
operation_id="get_user_profile",
|
||||
summary="Get current user's complete profile",
|
||||
@@ -935,8 +992,8 @@ def get_user_preferences(request):
|
||||
"allow_messages": user.allow_messages,
|
||||
}
|
||||
|
||||
serializer = UserPreferencesSerializer(data=data)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
# Return the data directly - no validation needed for GET response
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
@@ -1056,8 +1113,8 @@ def get_notification_settings(request):
|
||||
},
|
||||
}
|
||||
|
||||
serializer = NotificationSettingsSerializer(data=data)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
# Return the data directly - no validation needed for GET response
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
@@ -1131,8 +1188,8 @@ def get_privacy_settings(request):
|
||||
"allow_messages": user.allow_messages,
|
||||
}
|
||||
|
||||
serializer = PrivacySettingsSerializer(data=data)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
# Return the data directly - no validation needed for GET response
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
@@ -1198,8 +1255,8 @@ def get_security_settings(request):
|
||||
"active_sessions": getattr(user, "active_sessions", 1),
|
||||
}
|
||||
|
||||
serializer = SecuritySettingsSerializer(data=data)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
# Return the data directly - no validation needed for GET response
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
@@ -1273,8 +1330,8 @@ def get_user_statistics(request):
|
||||
"last_activity": user.last_login,
|
||||
}
|
||||
|
||||
serializer = UserStatisticsSerializer(data=data)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
# Return the data directly - no validation needed for GET response
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
# === TOP LISTS ENDPOINTS ===
|
||||
@@ -1732,3 +1789,135 @@ def cancel_email_change(request):
|
||||
},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
operation_id="bulk_get_profiles",
|
||||
summary="Get multiple user profiles by user IDs",
|
||||
description="Fetch profile information for multiple users at once. Useful for displaying user info in lists.",
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="user_ids",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.QUERY,
|
||||
description="Comma-separated list of user IDs",
|
||||
required=True,
|
||||
),
|
||||
],
|
||||
responses={
|
||||
200: {
|
||||
"description": "List of user profiles",
|
||||
"example": [
|
||||
{
|
||||
"user_id": "123",
|
||||
"username": "john_doe",
|
||||
"display_name": "John Doe",
|
||||
"avatar_url": "https://example.com/avatar.jpg",
|
||||
}
|
||||
],
|
||||
},
|
||||
},
|
||||
tags=["User Profile"],
|
||||
)
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def bulk_get_profiles(request):
|
||||
"""Get multiple user profiles by IDs for efficient bulk lookups."""
|
||||
user_ids_param = request.query_params.get("user_ids", "")
|
||||
|
||||
if not user_ids_param:
|
||||
return Response([], status=status.HTTP_200_OK)
|
||||
|
||||
user_ids = [uid.strip() for uid in user_ids_param.split(",") if uid.strip()]
|
||||
|
||||
if not user_ids:
|
||||
return Response([], status=status.HTTP_200_OK)
|
||||
|
||||
# Limit to prevent abuse
|
||||
if len(user_ids) > 100:
|
||||
user_ids = user_ids[:100]
|
||||
|
||||
profiles = UserProfile.objects.filter(user__user_id__in=user_ids).select_related("user", "avatar")
|
||||
|
||||
result = []
|
||||
for profile in profiles:
|
||||
result.append({
|
||||
"user_id": str(profile.user.user_id),
|
||||
"username": profile.user.username,
|
||||
"display_name": profile.display_name,
|
||||
"avatar_url": profile.get_avatar_url() if hasattr(profile, "get_avatar_url") else None,
|
||||
})
|
||||
|
||||
return Response(result, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@extend_schema(
|
||||
operation_id="get_users_with_emails",
|
||||
summary="Get users with email addresses (admin/moderator only)",
|
||||
description="Fetch user information including emails. Restricted to admins and moderators.",
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="user_ids",
|
||||
type=OpenApiTypes.STR,
|
||||
location=OpenApiParameter.QUERY,
|
||||
description="Comma-separated list of user IDs",
|
||||
required=True,
|
||||
),
|
||||
],
|
||||
responses={
|
||||
200: {
|
||||
"description": "List of users with emails",
|
||||
"example": [
|
||||
{
|
||||
"user_id": "123",
|
||||
"username": "john_doe",
|
||||
"email": "john@example.com",
|
||||
"display_name": "John Doe",
|
||||
}
|
||||
],
|
||||
},
|
||||
403: {"description": "Not authorized - admin or moderator access required"},
|
||||
},
|
||||
tags=["User Management"],
|
||||
)
|
||||
@api_view(["GET"])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def get_users_with_emails(request):
|
||||
"""Get users with email addresses - restricted to admins and moderators."""
|
||||
user = request.user
|
||||
|
||||
# Check if user is admin or moderator
|
||||
if not (user.is_staff or user.is_superuser or getattr(user, "role", "") in ["ADMIN", "MODERATOR"]):
|
||||
return Response(
|
||||
{"detail": "Admin or moderator access required"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
user_ids_param = request.query_params.get("user_ids", "")
|
||||
|
||||
if not user_ids_param:
|
||||
return Response([], status=status.HTTP_200_OK)
|
||||
|
||||
user_ids = [uid.strip() for uid in user_ids_param.split(",") if uid.strip()]
|
||||
|
||||
if not user_ids:
|
||||
return Response([], status=status.HTTP_200_OK)
|
||||
|
||||
# Limit to prevent abuse
|
||||
if len(user_ids) > 100:
|
||||
user_ids = user_ids[:100]
|
||||
|
||||
users = User.objects.filter(user_id__in=user_ids).select_related("profile")
|
||||
|
||||
result = []
|
||||
for u in users:
|
||||
profile = getattr(u, "profile", None)
|
||||
result.append({
|
||||
"user_id": str(u.user_id),
|
||||
"username": u.username,
|
||||
"email": u.email,
|
||||
"display_name": profile.display_name if profile else None,
|
||||
})
|
||||
|
||||
return Response(result, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@@ -3,13 +3,31 @@ Admin API URL configuration.
|
||||
Provides endpoints for admin dashboard functionality.
|
||||
"""
|
||||
|
||||
from django.urls import path
|
||||
from django.urls import include, path
|
||||
from rest_framework.routers import DefaultRouter
|
||||
|
||||
from apps.core.api.alert_views import (
|
||||
RateLimitAlertConfigViewSet,
|
||||
RateLimitAlertViewSet,
|
||||
SystemAlertViewSet,
|
||||
)
|
||||
from apps.core.api.incident_views import IncidentViewSet
|
||||
|
||||
from . import views
|
||||
|
||||
app_name = "admin_api"
|
||||
|
||||
# Router for admin ViewSets
|
||||
router = DefaultRouter()
|
||||
router.register(r"system-alerts", SystemAlertViewSet, basename="system-alert")
|
||||
router.register(r"rate-limit-alerts", RateLimitAlertViewSet, basename="rate-limit-alert")
|
||||
router.register(r"rate-limit-config", RateLimitAlertConfigViewSet, basename="rate-limit-config")
|
||||
router.register(r"incidents", IncidentViewSet, basename="incident")
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
# Alert ViewSets (via router)
|
||||
path("", include(router.urls)),
|
||||
# OSM Cache Stats
|
||||
path(
|
||||
"osm-usage-stats/",
|
||||
@@ -52,4 +70,10 @@ urlpatterns = [
|
||||
views.PipelineIntegrityScanView.as_view(),
|
||||
name="pipeline_integrity_scan",
|
||||
),
|
||||
# Admin Settings (key-value store for preferences)
|
||||
path(
|
||||
"settings/",
|
||||
views.AdminSettingsView.as_view(),
|
||||
name="admin_settings",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1263,3 +1263,88 @@ class PipelineIntegrityScanView(APIView):
|
||||
{"detail": "Failed to run integrity scan"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
class AdminSettingsView(APIView):
|
||||
"""
|
||||
GET/POST /admin/settings/
|
||||
Simple key-value store for admin preferences.
|
||||
|
||||
Settings are stored in Django cache with admin-specific keys.
|
||||
For persistent storage, a database model can be added later.
|
||||
"""
|
||||
|
||||
permission_classes = [IsAdminWithSecondFactor]
|
||||
|
||||
def get(self, request):
|
||||
"""Get all admin settings or a specific setting."""
|
||||
try:
|
||||
key = request.query_params.get("key")
|
||||
|
||||
if key:
|
||||
# Get specific setting
|
||||
value = cache.get(f"admin_setting_{key}")
|
||||
if value is None:
|
||||
return Response(
|
||||
{"results": []},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
return Response(
|
||||
{"results": [{"key": key, "value": value}]},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
# Get all settings (return empty list if none exist)
|
||||
# In a real implementation, you'd query a database model
|
||||
settings_keys = cache.get("admin_settings_keys", [])
|
||||
results = []
|
||||
for k in settings_keys:
|
||||
val = cache.get(f"admin_setting_{k}")
|
||||
if val is not None:
|
||||
results.append({"key": k, "value": val})
|
||||
|
||||
return Response(
|
||||
{"results": results, "count": len(results)},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
capture_and_log(e, "Admin settings GET - error", source="api")
|
||||
return Response(
|
||||
{"detail": "Failed to fetch admin settings"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
def post(self, request):
|
||||
"""Create or update an admin setting."""
|
||||
try:
|
||||
key = request.data.get("key")
|
||||
value = request.data.get("value")
|
||||
|
||||
if not key:
|
||||
return Response(
|
||||
{"detail": "key is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Store in cache (30 days TTL)
|
||||
cache.set(f"admin_setting_{key}", value, 60 * 60 * 24 * 30)
|
||||
|
||||
# Track keys
|
||||
settings_keys = cache.get("admin_settings_keys", [])
|
||||
if key not in settings_keys:
|
||||
settings_keys.append(key)
|
||||
cache.set("admin_settings_keys", settings_keys, 60 * 60 * 24 * 30)
|
||||
|
||||
return Response(
|
||||
{"success": True, "key": key, "value": value},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
capture_and_log(e, "Admin settings POST - error", source="api")
|
||||
return Response(
|
||||
{"detail": "Failed to save admin setting"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
@@ -166,7 +166,7 @@ def setup_totp(request):
|
||||
def activate_totp(request):
|
||||
"""Verify TOTP code and activate MFA."""
|
||||
from allauth.mfa.models import Authenticator
|
||||
from allauth.mfa.recovery_codes.internal import auth as recovery_auth
|
||||
from allauth.mfa.recovery_codes.internal.auth import RecoveryCodes
|
||||
from allauth.mfa.totp.internal import auth as totp_auth
|
||||
|
||||
user = request.user
|
||||
@@ -178,8 +178,9 @@ def activate_totp(request):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Get pending secret from session
|
||||
secret = request.session.get("pending_totp_secret")
|
||||
# Get pending secret from session OR from request body
|
||||
# (request body is used as fallback for JWT auth where sessions may not persist)
|
||||
secret = request.session.get("pending_totp_secret") or request.data.get("secret", "").strip()
|
||||
if not secret:
|
||||
return Response(
|
||||
{"detail": "No pending TOTP setup. Please start setup again."},
|
||||
@@ -207,15 +208,12 @@ def activate_totp(request):
|
||||
data={"secret": secret},
|
||||
)
|
||||
|
||||
# Generate recovery codes
|
||||
codes = recovery_auth.generate_recovery_codes()
|
||||
Authenticator.objects.create(
|
||||
user=user,
|
||||
type=Authenticator.Type.RECOVERY_CODES,
|
||||
data={"codes": codes},
|
||||
)
|
||||
# Generate recovery codes using allauth's RecoveryCodes API
|
||||
recovery_instance = RecoveryCodes.activate(user)
|
||||
codes = recovery_instance.get_unused_codes()
|
||||
|
||||
# Clear session
|
||||
# Clear session (only if it exists - won't exist with JWT auth + secret from body)
|
||||
if "pending_totp_secret" in request.session:
|
||||
del request.session["pending_totp_secret"]
|
||||
|
||||
return Response(
|
||||
@@ -361,7 +359,7 @@ def verify_totp(request):
|
||||
def regenerate_recovery_codes(request):
|
||||
"""Regenerate recovery codes."""
|
||||
from allauth.mfa.models import Authenticator
|
||||
from allauth.mfa.recovery_codes.internal import auth as recovery_auth
|
||||
from allauth.mfa.recovery_codes.internal.auth import RecoveryCodes
|
||||
|
||||
user = request.user
|
||||
password = request.data.get("password", "")
|
||||
@@ -380,15 +378,14 @@ def regenerate_recovery_codes(request):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Generate new codes
|
||||
codes = recovery_auth.generate_recovery_codes()
|
||||
# Delete existing recovery codes first (so activate creates new ones)
|
||||
Authenticator.objects.filter(
|
||||
user=user, type=Authenticator.Type.RECOVERY_CODES
|
||||
).delete()
|
||||
|
||||
# Update or create recovery codes authenticator
|
||||
authenticator, created = Authenticator.objects.update_or_create(
|
||||
user=user,
|
||||
type=Authenticator.Type.RECOVERY_CODES,
|
||||
defaults={"data": {"codes": codes}},
|
||||
)
|
||||
# Generate new recovery codes using allauth's RecoveryCodes API
|
||||
recovery_instance = RecoveryCodes.activate(user)
|
||||
codes = recovery_instance.get_unused_codes()
|
||||
|
||||
return Response(
|
||||
{
|
||||
|
||||
@@ -377,7 +377,7 @@ class MFALoginVerifyAPIView(APIView):
|
||||
"""Verify TOTP code against user's authenticator."""
|
||||
try:
|
||||
from allauth.mfa.models import Authenticator
|
||||
from allauth.mfa.totp import TOTP
|
||||
from allauth.mfa.totp.internal import auth as totp_auth
|
||||
|
||||
try:
|
||||
authenticator = Authenticator.objects.get(
|
||||
@@ -387,9 +387,12 @@ class MFALoginVerifyAPIView(APIView):
|
||||
except Authenticator.DoesNotExist:
|
||||
return False
|
||||
|
||||
# Get the TOTP instance and verify
|
||||
totp = TOTP(authenticator)
|
||||
return totp.validate_code(code)
|
||||
# Get the secret from authenticator data and verify
|
||||
secret = authenticator.data.get("secret")
|
||||
if not secret:
|
||||
return False
|
||||
|
||||
return totp_auth.validate_totp_code(secret, code)
|
||||
|
||||
except ImportError:
|
||||
logger.error("allauth.mfa not available for TOTP verification")
|
||||
|
||||
@@ -24,4 +24,10 @@ urlpatterns = [
|
||||
views.QuickEntitySuggestionView.as_view(),
|
||||
name="entity_suggestions",
|
||||
),
|
||||
# Telemetry endpoint for frontend logging
|
||||
path(
|
||||
"telemetry/",
|
||||
views.TelemetryView.as_view(),
|
||||
name="telemetry",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -22,6 +22,108 @@ from apps.core.services.entity_fuzzy_matching import (
|
||||
entity_fuzzy_matcher,
|
||||
)
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TelemetryView(APIView):
|
||||
"""
|
||||
Handle frontend telemetry and request metadata logging.
|
||||
|
||||
This endpoint accepts telemetry data from the frontend for logging and
|
||||
analytics purposes. When error data is present, it persists the error
|
||||
to the database for monitoring.
|
||||
|
||||
Note: This endpoint bypasses authentication entirely to ensure errors
|
||||
can be logged even when user tokens are expired or invalid.
|
||||
"""
|
||||
|
||||
authentication_classes = [] # Bypass JWT auth to allow error logging with expired tokens
|
||||
permission_classes = [AllowAny]
|
||||
|
||||
|
||||
@extend_schema(
|
||||
tags=["Core"],
|
||||
summary="Log request metadata",
|
||||
description="Log frontend telemetry and request metadata",
|
||||
)
|
||||
def post(self, request):
|
||||
"""Accept telemetry data from frontend."""
|
||||
data = request.data
|
||||
|
||||
# If this is an error report, persist it to the database
|
||||
if data.get('p_error_type') or data.get('p_error_message') or data.get('error_type') or data.get('error_message'):
|
||||
from apps.core.services import ErrorService
|
||||
|
||||
# Handle both p_ prefixed params (from log_request_metadata RPC) and direct params
|
||||
error_message = data.get('p_error_message') or data.get('error_message') or 'Unknown error'
|
||||
error_type = data.get('p_error_type') or data.get('error_type') or 'Error'
|
||||
severity = data.get('p_severity') or data.get('severity') or 'medium'
|
||||
error_stack = data.get('p_error_stack') or data.get('error_stack') or ''
|
||||
error_code = data.get('p_error_code') or data.get('error_code') or ''
|
||||
|
||||
# Build metadata from available fields
|
||||
metadata = {
|
||||
'action': data.get('p_action') or data.get('action'),
|
||||
'breadcrumbs': data.get('p_breadcrumbs'),
|
||||
'duration_ms': data.get('p_duration_ms'),
|
||||
'retry_attempts': data.get('p_retry_attempts'),
|
||||
'affected_route': data.get('p_affected_route'),
|
||||
'request_id': data.get('p_request_id') or data.get('request_id'),
|
||||
}
|
||||
# Remove None values
|
||||
metadata = {k: v for k, v in metadata.items() if v is not None}
|
||||
|
||||
# Build environment from available fields
|
||||
environment = data.get('p_environment_context') or data.get('environment') or {}
|
||||
if isinstance(environment, str):
|
||||
import json
|
||||
try:
|
||||
environment = json.loads(environment)
|
||||
except json.JSONDecodeError:
|
||||
environment = {}
|
||||
|
||||
try:
|
||||
error = ErrorService.capture_error(
|
||||
error=error_message,
|
||||
source='frontend',
|
||||
request=request,
|
||||
severity=severity,
|
||||
metadata=metadata,
|
||||
environment=environment,
|
||||
)
|
||||
# Update additional fields
|
||||
error.error_type = error_type
|
||||
error.error_stack = error_stack[:10000] if error_stack else ''
|
||||
error.error_code = error_code
|
||||
error.endpoint = data.get('p_affected_route') or ''
|
||||
error.http_status = data.get('p_http_status')
|
||||
error.save(update_fields=['error_type', 'error_stack', 'error_code', 'endpoint', 'http_status'])
|
||||
|
||||
logger.info(f"Frontend error captured: {error.short_error_id}")
|
||||
return Response(
|
||||
{"success": True, "error_id": str(error.error_id)},
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to capture frontend error: {e}")
|
||||
# Fall through to regular telemetry logging
|
||||
|
||||
# Non-error telemetry - just log and acknowledge
|
||||
logger.debug(
|
||||
"Telemetry received",
|
||||
extra={
|
||||
"data": data,
|
||||
"user_id": getattr(request.user, "id", None),
|
||||
},
|
||||
)
|
||||
return Response(
|
||||
{"success": True, "message": "Telemetry logged"},
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
|
||||
|
||||
class EntityFuzzySearchView(APIView):
|
||||
"""
|
||||
|
||||
@@ -27,12 +27,23 @@ from .views.reviews import LatestReviewsAPIView
|
||||
from .views.stats import StatsAPIView, StatsRecalculateAPIView
|
||||
from .viewsets_rankings import RideRankingViewSet, TriggerRankingCalculationView
|
||||
|
||||
# Import analytics views
|
||||
from apps.core.api.analytics_views import (
|
||||
ApprovalTransactionMetricViewSet,
|
||||
ErrorSummaryView,
|
||||
RequestMetadataViewSet,
|
||||
)
|
||||
|
||||
# Create the main API router
|
||||
router = DefaultRouter()
|
||||
|
||||
# Register ranking endpoints
|
||||
router.register(r"rankings", RideRankingViewSet, basename="ranking")
|
||||
|
||||
# Register analytics endpoints
|
||||
router.register(r"request_metadata", RequestMetadataViewSet, basename="request_metadata")
|
||||
router.register(r"approval_transaction_metrics", ApprovalTransactionMetricViewSet, basename="approval_transaction_metrics")
|
||||
|
||||
app_name = "api_v1"
|
||||
|
||||
urlpatterns = [
|
||||
@@ -40,6 +51,8 @@ urlpatterns = [
|
||||
# See backend/thrillwiki/urls.py for documentation endpoints
|
||||
# Authentication endpoints
|
||||
path("auth/", include("apps.api.v1.auth.urls")),
|
||||
# Analytics endpoints (error_summary is a view, not a viewset)
|
||||
path("error_summary/", ErrorSummaryView.as_view(), name="error-summary"),
|
||||
# Health check endpoints
|
||||
path("health/", HealthCheckAPIView.as_view(), name="health-check"),
|
||||
path("health/simple/", SimpleHealthAPIView.as_view(), name="simple-health"),
|
||||
|
||||
89
backend/apps/core/api/alert_serializers.py
Normal file
89
backend/apps/core/api/alert_serializers.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""
|
||||
Serializers for admin alert API endpoints.
|
||||
|
||||
Provides serializers for SystemAlert, RateLimitAlert, and RateLimitAlertConfig models.
|
||||
"""
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.core.models import RateLimitAlert, RateLimitAlertConfig, SystemAlert
|
||||
|
||||
|
||||
class SystemAlertSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for system alerts."""
|
||||
|
||||
is_resolved = serializers.BooleanField(read_only=True)
|
||||
resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
model = SystemAlert
|
||||
fields = [
|
||||
"id",
|
||||
"alert_type",
|
||||
"severity",
|
||||
"message",
|
||||
"metadata",
|
||||
"resolved_at",
|
||||
"resolved_by",
|
||||
"resolved_by_username",
|
||||
"created_at",
|
||||
"is_resolved",
|
||||
]
|
||||
read_only_fields = ["id", "created_at", "is_resolved", "resolved_by_username"]
|
||||
|
||||
|
||||
class SystemAlertResolveSerializer(serializers.Serializer):
|
||||
"""Serializer for resolving system alerts."""
|
||||
|
||||
notes = serializers.CharField(required=False, allow_blank=True)
|
||||
|
||||
|
||||
class RateLimitAlertConfigSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for rate limit alert configurations."""
|
||||
|
||||
class Meta:
|
||||
model = RateLimitAlertConfig
|
||||
fields = [
|
||||
"id",
|
||||
"metric_type",
|
||||
"threshold_value",
|
||||
"time_window_ms",
|
||||
"function_name",
|
||||
"enabled",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = ["id", "created_at", "updated_at"]
|
||||
|
||||
|
||||
class RateLimitAlertSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for rate limit alerts."""
|
||||
|
||||
is_resolved = serializers.BooleanField(read_only=True)
|
||||
config_id = serializers.UUIDField(source="config.id", read_only=True)
|
||||
resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
model = RateLimitAlert
|
||||
fields = [
|
||||
"id",
|
||||
"config_id",
|
||||
"metric_type",
|
||||
"metric_value",
|
||||
"threshold_value",
|
||||
"time_window_ms",
|
||||
"function_name",
|
||||
"alert_message",
|
||||
"resolved_at",
|
||||
"resolved_by",
|
||||
"resolved_by_username",
|
||||
"created_at",
|
||||
"is_resolved",
|
||||
]
|
||||
read_only_fields = ["id", "created_at", "is_resolved", "config_id", "resolved_by_username"]
|
||||
|
||||
|
||||
class RateLimitAlertResolveSerializer(serializers.Serializer):
|
||||
"""Serializer for resolving rate limit alerts."""
|
||||
|
||||
notes = serializers.CharField(required=False, allow_blank=True)
|
||||
226
backend/apps/core/api/alert_views.py
Normal file
226
backend/apps/core/api/alert_views.py
Normal file
@@ -0,0 +1,226 @@
|
||||
"""
|
||||
ViewSets for admin alert API endpoints.
|
||||
|
||||
Provides CRUD operations for SystemAlert, RateLimitAlert, and RateLimitAlertConfig.
|
||||
"""
|
||||
|
||||
from django.utils import timezone
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||
from rest_framework import status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.response import Response
|
||||
|
||||
from apps.core.models import RateLimitAlert, RateLimitAlertConfig, SystemAlert
|
||||
|
||||
from .alert_serializers import (
|
||||
RateLimitAlertConfigSerializer,
|
||||
RateLimitAlertResolveSerializer,
|
||||
RateLimitAlertSerializer,
|
||||
SystemAlertResolveSerializer,
|
||||
SystemAlertSerializer,
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_view(
    list=extend_schema(
        summary="List system alerts",
        description="Get all system alerts, optionally filtered by severity or resolved status.",
        tags=["Admin - Alerts"],
    ),
    retrieve=extend_schema(
        summary="Get system alert",
        description="Get details of a specific system alert.",
        tags=["Admin - Alerts"],
    ),
    create=extend_schema(
        summary="Create system alert",
        description="Create a new system alert.",
        tags=["Admin - Alerts"],
    ),
    update=extend_schema(
        summary="Update system alert",
        description="Update an existing system alert.",
        tags=["Admin - Alerts"],
    ),
    partial_update=extend_schema(
        summary="Partial update system alert",
        description="Partially update an existing system alert.",
        tags=["Admin - Alerts"],
    ),
    destroy=extend_schema(
        summary="Delete system alert",
        description="Delete a system alert.",
        tags=["Admin - Alerts"],
    ),
)
class SystemAlertViewSet(viewsets.ModelViewSet):
    """
    ViewSet for managing system alerts.

    Provides CRUD operations plus a ``resolve`` action for marking
    alerts as resolved.  All endpoints are staff-only.
    """

    queryset = SystemAlert.objects.all()
    serializer_class = SystemAlertSerializer
    permission_classes = [IsAdminUser]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_fields = ["severity", "alert_type"]
    search_fields = ["message"]
    ordering_fields = ["created_at", "severity"]
    ordering = ["-created_at"]

    def get_queryset(self):
        """Apply the optional ``resolved=true|false`` query-parameter filter."""
        qs = super().get_queryset()

        resolved_param = self.request.query_params.get("resolved")
        if resolved_param is None:
            return qs

        flag = resolved_param.lower()
        if flag == "true":
            # A resolved alert carries a non-null resolved_at timestamp.
            qs = qs.exclude(resolved_at__isnull=True)
        elif flag == "false":
            qs = qs.filter(resolved_at__isnull=True)
        return qs

    @extend_schema(
        summary="Resolve system alert",
        description="Mark a system alert as resolved.",
        request=SystemAlertResolveSerializer,
        responses={200: SystemAlertSerializer},
        tags=["Admin - Alerts"],
    )
    @action(detail=True, methods=["post"])
    def resolve(self, request, pk=None):
        """Mark an alert as resolved, rejecting already-resolved alerts."""
        alert = self.get_object()

        if alert.resolved_at:
            return Response(
                {"detail": "Alert is already resolved"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        alert.resolved_at = timezone.now()
        alert.resolved_by = request.user
        alert.save()

        return Response(self.get_serializer(alert).data)
|
||||
|
||||
|
||||
@extend_schema_view(
    list=extend_schema(
        summary="List rate limit alert configs",
        description="Get all rate limit alert configurations.",
        tags=["Admin - Alerts"],
    ),
    retrieve=extend_schema(
        summary="Get rate limit alert config",
        description="Get details of a specific rate limit alert configuration.",
        tags=["Admin - Alerts"],
    ),
    create=extend_schema(
        summary="Create rate limit alert config",
        description="Create a new rate limit alert configuration.",
        tags=["Admin - Alerts"],
    ),
    update=extend_schema(
        summary="Update rate limit alert config",
        description="Update an existing rate limit alert configuration.",
        tags=["Admin - Alerts"],
    ),
    partial_update=extend_schema(
        summary="Partial update rate limit alert config",
        description="Partially update an existing rate limit alert configuration.",
        tags=["Admin - Alerts"],
    ),
    destroy=extend_schema(
        summary="Delete rate limit alert config",
        description="Delete a rate limit alert configuration.",
        tags=["Admin - Alerts"],
    ),
)
class RateLimitAlertConfigViewSet(viewsets.ModelViewSet):
    """
    ViewSet for managing rate limit alert configurations.

    Provides CRUD operations for alert thresholds.  Purely declarative:
    all behavior comes from ModelViewSet plus the filter/ordering
    attributes below.  Staff-only (``IsAdminUser``).
    """

    queryset = RateLimitAlertConfig.objects.all()
    serializer_class = RateLimitAlertConfigSerializer
    # Every endpoint requires staff credentials.
    permission_classes = [IsAdminUser]
    filter_backends = [DjangoFilterBackend, OrderingFilter]
    # Exact-match query-parameter filters: ?metric_type=...&enabled=...
    filterset_fields = ["metric_type", "enabled"]
    ordering_fields = ["created_at", "metric_type", "threshold_value"]
    # Default ordering: grouped by metric type, newest first within each group.
    ordering = ["metric_type", "-created_at"]
|
||||
|
||||
|
||||
@extend_schema_view(
    list=extend_schema(
        summary="List rate limit alerts",
        description="Get all rate limit alerts, optionally filtered by resolved status.",
        tags=["Admin - Alerts"],
    ),
    retrieve=extend_schema(
        summary="Get rate limit alert",
        description="Get details of a specific rate limit alert.",
        tags=["Admin - Alerts"],
    ),
)
class RateLimitAlertViewSet(viewsets.ReadOnlyModelViewSet):
    """
    ViewSet for viewing rate limit alerts.

    Read-only list/retrieve access plus a ``resolve`` action; alerts are
    created by the rate-limiting subsystem, not through this API.
    """

    # select_related avoids an extra query per alert for the config FK.
    queryset = RateLimitAlert.objects.select_related("config").all()
    serializer_class = RateLimitAlertSerializer
    permission_classes = [IsAdminUser]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_fields = ["metric_type"]
    search_fields = ["alert_message", "function_name"]
    ordering_fields = ["created_at", "metric_value"]
    ordering = ["-created_at"]

    def get_queryset(self):
        """Apply the optional ``resolved=true|false`` query-parameter filter."""
        qs = super().get_queryset()

        resolved_param = self.request.query_params.get("resolved")
        if resolved_param is None:
            return qs

        flag = resolved_param.lower()
        if flag == "true":
            # A resolved alert carries a non-null resolved_at timestamp.
            qs = qs.exclude(resolved_at__isnull=True)
        elif flag == "false":
            qs = qs.filter(resolved_at__isnull=True)
        return qs

    @extend_schema(
        summary="Resolve rate limit alert",
        description="Mark a rate limit alert as resolved.",
        request=RateLimitAlertResolveSerializer,
        responses={200: RateLimitAlertSerializer},
        tags=["Admin - Alerts"],
    )
    @action(detail=True, methods=["post"])
    def resolve(self, request, pk=None):
        """Mark an alert as resolved, rejecting already-resolved alerts."""
        alert = self.get_object()

        if alert.resolved_at:
            return Response(
                {"detail": "Alert is already resolved"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        alert.resolved_at = timezone.now()
        alert.resolved_by = request.user
        alert.save()

        return Response(self.get_serializer(alert).data)
|
||||
204
backend/apps/core/api/analytics_serializers.py
Normal file
204
backend/apps/core/api/analytics_serializers.py
Normal file
@@ -0,0 +1,204 @@
|
||||
"""
|
||||
Serializers for admin analytics endpoints.
|
||||
|
||||
Provides serialization for RequestMetadata, RequestBreadcrumb,
|
||||
ApprovalTransactionMetric, and ErrorSummary aggregation.
|
||||
"""
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.core.models import (
|
||||
ApprovalTransactionMetric,
|
||||
RequestBreadcrumb,
|
||||
RequestMetadata,
|
||||
)
|
||||
|
||||
|
||||
class RequestBreadcrumbSerializer(serializers.ModelSerializer):
    """Serializer for request breadcrumb data.

    Exposes the breadcrumb-trail entries nested under a request-metadata
    record; used both standalone and as the nested child of
    RequestMetadataSerializer.
    """

    class Meta:
        model = RequestBreadcrumb
        fields = [
            "timestamp",
            "category",
            "message",
            "level",
            # Position of this breadcrumb within its request's trail.
            "sequence_order",
        ]
|
||||
|
||||
|
||||
class RequestMetadataSerializer(serializers.ModelSerializer):
    """
    Serializer for request metadata with nested breadcrumbs.

    Supports the ``expand=request_breadcrumbs`` query parameter to include
    breadcrumb data in the response; when the parameter is absent, the
    ``request_breadcrumbs`` key is stripped from the output in
    :meth:`to_representation`.
    """

    request_breadcrumbs = RequestBreadcrumbSerializer(many=True, read_only=True)
    # The FK's implicit model attribute is already named ``user_id``, so no
    # explicit ``source`` is given: DRF raises an AssertionError when
    # ``source`` is identical to the field name ("It is redundant to
    # specify `source='user_id'` ...").  Declared as CharField so UUID/int
    # PKs serialize uniformly as strings.
    user_id = serializers.CharField(read_only=True, allow_null=True)

    class Meta:
        model = RequestMetadata
        fields = [
            "id",
            "request_id",
            "trace_id",
            "session_id",
            "parent_request_id",
            "action",
            "method",
            "endpoint",
            "request_method",
            "request_path",
            "affected_route",
            "http_status",
            "status_code",
            "response_status",
            "success",
            "started_at",
            "completed_at",
            "duration_ms",
            "response_time_ms",
            "error_type",
            "error_message",
            "error_stack",
            "error_code",
            "error_origin",
            "component_stack",
            "severity",
            "is_resolved",
            "resolved_at",
            "resolved_by",
            "resolution_notes",
            "retry_count",
            "retry_attempts",
            "user_id",
            "user_agent",
            "ip_address_hash",
            "client_version",
            "timezone",
            "referrer",
            "entity_type",
            "entity_id",
            "created_at",
            "request_breadcrumbs",
        ]
        read_only_fields = ["id", "created_at"]

    def to_representation(self, instance):
        """Conditionally include breadcrumbs based on the expand parameter."""
        data = super().to_representation(instance)
        request = self.context.get("request")

        # Only include breadcrumbs if explicitly expanded; without a request
        # in context (e.g. programmatic serialization) the key is kept.
        if request:
            expand = request.query_params.get("expand", "")
            if "request_breadcrumbs" not in expand:
                data.pop("request_breadcrumbs", None)

        return data
|
||||
|
||||
|
||||
class RequestMetadataCreateSerializer(serializers.ModelSerializer):
    """Serializer for creating request metadata (log_request_metadata RPC).

    Accepts an optional nested ``breadcrumbs`` list which is written as
    RequestBreadcrumb rows linked to the new metadata record.
    """

    breadcrumbs = RequestBreadcrumbSerializer(many=True, required=False)

    class Meta:
        model = RequestMetadata
        fields = [
            "request_id",
            "trace_id",
            "session_id",
            "parent_request_id",
            "action",
            "method",
            "endpoint",
            "request_method",
            "request_path",
            "affected_route",
            "http_status",
            "status_code",
            "response_status",
            "success",
            "completed_at",
            "duration_ms",
            "response_time_ms",
            "error_type",
            "error_message",
            "error_stack",
            "error_code",
            "error_origin",
            "component_stack",
            "severity",
            "retry_count",
            "retry_attempts",
            "user_agent",
            "ip_address_hash",
            "client_version",
            "timezone",
            "referrer",
            "entity_type",
            "entity_id",
            "breadcrumbs",
        ]

    def create(self, validated_data):
        """Create the metadata row, then any nested breadcrumb rows."""
        crumbs = validated_data.pop("breadcrumbs", [])
        metadata = RequestMetadata.objects.create(**validated_data)

        for index, crumb in enumerate(crumbs):
            crumb_fields = dict(crumb)
            # Fall back to the list position when no explicit order was sent.
            order = crumb_fields.pop("sequence_order", index)
            RequestBreadcrumb.objects.create(
                request_metadata=metadata,
                sequence_order=order,
                **crumb_fields,
            )

        return metadata
|
||||
|
||||
|
||||
class RequestMetadataResolveSerializer(serializers.Serializer):
    """Serializer for resolving request metadata errors.

    Input-only payload for the ``resolve`` action; the only accepted field
    is an optional free-text note.
    """

    resolution_notes = serializers.CharField(required=False, allow_blank=True)
|
||||
|
||||
|
||||
class ApprovalTransactionMetricSerializer(serializers.ModelSerializer):
    """Serializer for approval transaction metrics.

    Read-oriented representation of a single moderation-approval
    transaction, including timing, outcome, and error details.
    """

    class Meta:
        model = ApprovalTransactionMetric
        fields = [
            "id",
            "submission_id",
            "moderator_id",
            "submitter_id",
            "request_id",
            "success",
            "duration_ms",
            "items_count",
            "rollback_triggered",
            "error_code",
            "error_message",
            "error_details",
            "created_at",
        ]
        read_only_fields = ["id", "created_at"]
|
||||
|
||||
|
||||
class ErrorSummarySerializer(serializers.Serializer):
    """
    Read-only serializer for error summary aggregation.

    Aggregates error data from RequestMetadata for dashboard display.
    Each item is one (date, error_type, severity) bucket produced by the
    aggregation view; this serializer only shapes the output and performs
    no validation or writes.
    """

    date = serializers.DateField(read_only=True)
    error_type = serializers.CharField(read_only=True)
    severity = serializers.CharField(read_only=True)
    error_count = serializers.IntegerField(read_only=True)
    resolved_count = serializers.IntegerField(read_only=True)
    affected_users = serializers.IntegerField(read_only=True)
    # Null when no resolved errors exist in the bucket.
    avg_resolution_minutes = serializers.FloatField(read_only=True, allow_null=True)
|
||||
184
backend/apps/core/api/analytics_views.py
Normal file
184
backend/apps/core/api/analytics_views.py
Normal file
@@ -0,0 +1,184 @@
|
||||
"""
|
||||
ViewSets for admin analytics endpoints.
|
||||
|
||||
Provides read/write access to RequestMetadata, ApprovalTransactionMetric,
|
||||
and a read-only aggregation endpoint for ErrorSummary.
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models import Avg, Count, F, Q
|
||||
from django.db.models.functions import TruncDate
|
||||
from django.utils import timezone
|
||||
from django_filters import rest_framework as filters
|
||||
from rest_framework import status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.permissions import IsAdminUser, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.core.models import ApprovalTransactionMetric, RequestMetadata
|
||||
|
||||
from .analytics_serializers import (
|
||||
ApprovalTransactionMetricSerializer,
|
||||
ErrorSummarySerializer,
|
||||
RequestMetadataCreateSerializer,
|
||||
RequestMetadataResolveSerializer,
|
||||
RequestMetadataSerializer,
|
||||
)
|
||||
|
||||
|
||||
class RequestMetadataFilter(filters.FilterSet):
    """Filter set for RequestMetadata queries.

    Adds ISO-datetime range filters on ``created_at`` and a custom
    ``error_type__ne`` filter interpreted as "error_type is not null".
    """

    error_type__ne = filters.CharFilter(field_name="error_type", method="filter_not_equal")
    created_at__gte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="gte")
    created_at__lte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="lte")

    class Meta:
        model = RequestMetadata
        fields = {
            "error_type": ["exact", "isnull"],
            "severity": ["exact"],
            "is_resolved": ["exact"],
            "success": ["exact"],
            "http_status": ["exact", "gte", "lte"],
            "user": ["exact"],
            "endpoint": ["exact", "icontains"],
        }

    def filter_not_equal(self, queryset, name, value):
        """Handle the error_type__ne filter for non-null error types.

        The frontend sends a JSON object for its 'not null' filter; any
        truthy value is interpreted as 'error_type is not null'.
        """
        if not value:
            return queryset
        return queryset.exclude(error_type__isnull=True)
|
||||
|
||||
|
||||
class RequestMetadataViewSet(viewsets.ModelViewSet):
    """
    ViewSet for request metadata CRUD operations.

    Supports filtering by error_type, severity, date range, etc.
    Use the ``expand=request_breadcrumbs`` query parameter to include
    breadcrumbs in responses.
    """

    queryset = RequestMetadata.objects.all()
    permission_classes = [IsAuthenticated]
    filterset_class = RequestMetadataFilter
    ordering_fields = ["created_at", "severity", "error_type"]
    ordering = ["-created_at"]

    def get_serializer_class(self):
        """Use the write serializer for create, the read serializer otherwise."""
        if self.action == "create":
            return RequestMetadataCreateSerializer
        return RequestMetadataSerializer

    def get_queryset(self):
        """Optimize the queryset with a breadcrumb prefetch when expanded."""
        queryset = super().get_queryset()
        expand = self.request.query_params.get("expand", "")

        if "request_breadcrumbs" in expand:
            # Avoid one extra query per row when nested breadcrumbs render.
            queryset = queryset.prefetch_related("request_breadcrumbs")

        return queryset

    def perform_create(self, serializer):
        """Associate request metadata with the current user if authenticated."""
        user = self.request.user if self.request.user.is_authenticated else None
        serializer.save(user=user)

    @action(detail=True, methods=["post"], permission_classes=[IsAdminUser])
    def resolve(self, request, pk=None):
        """Mark a request metadata entry as resolved (admin only)."""
        instance = self.get_object()
        serializer = RequestMetadataResolveSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        instance.is_resolved = True
        instance.resolved_at = timezone.now()
        instance.resolved_by = request.user
        instance.resolution_notes = serializer.validated_data.get("resolution_notes", "")
        instance.save(update_fields=["is_resolved", "resolved_at", "resolved_by", "resolution_notes"])

        # Fix: pass the serializer context so RequestMetadataSerializer can
        # honor the ``expand`` query parameter.  Without it, the response
        # serializer has no request in context and always emits the
        # ``request_breadcrumbs`` key, diverging from list/retrieve output.
        return Response(
            RequestMetadataSerializer(instance, context=self.get_serializer_context()).data
        )
|
||||
|
||||
|
||||
class ApprovalTransactionMetricFilter(filters.FilterSet):
    """Filter for ApprovalTransactionMetric queries.

    Adds ISO-datetime range filters on ``created_at`` in addition to the
    exact-match fields declared in Meta.
    """

    created_at__gte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="gte")
    created_at__lte = filters.IsoDateTimeFilter(field_name="created_at", lookup_expr="lte")

    class Meta:
        model = ApprovalTransactionMetric
        fields = {
            "success": ["exact"],
            "moderator_id": ["exact"],
            "submitter_id": ["exact"],
            "submission_id": ["exact"],
        }
|
||||
|
||||
|
||||
class ApprovalTransactionMetricViewSet(viewsets.ReadOnlyModelViewSet):
    """
    Read-only ViewSet for approval transaction metrics.

    Provides analytics data about moderation approval operations.
    List/retrieve only; metrics are written by the approval pipeline,
    not through this API.
    """

    queryset = ApprovalTransactionMetric.objects.all()
    serializer_class = ApprovalTransactionMetricSerializer
    # Any authenticated user may read these metrics.
    permission_classes = [IsAuthenticated]
    filterset_class = ApprovalTransactionMetricFilter
    ordering_fields = ["created_at", "duration_ms", "success"]
    # Newest first by default.
    ordering = ["-created_at"]
|
||||
|
||||
|
||||
class ErrorSummaryView(APIView):
    """
    Aggregation endpoint for error summary statistics.

    Returns daily error counts grouped by error_type and severity,
    similar to the Supabase error_summary view.
    """

    permission_classes = [IsAuthenticated]

    def get(self, request):
        """Get aggregated error summary data for the last ``days`` days."""
        # Local import to keep the fix self-contained in this view.
        from django.db.models import DurationField, ExpressionWrapper

        # Validate the window instead of letting int() raise a 500.
        try:
            days = int(request.query_params.get("days", 30))
        except (TypeError, ValueError):
            return Response(
                {"detail": "'days' must be an integer"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        if days <= 0:
            return Response(
                {"detail": "'days' must be a positive integer"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        since = timezone.now() - timedelta(days=days)

        # datetime - datetime has no implicit output type in Django ORM
        # expressions; an explicit DurationField is required, otherwise
        # the query raises FieldError ("Expression contains mixed types").
        resolution_time = ExpressionWrapper(
            F("resolved_at") - F("created_at"),
            output_field=DurationField(),
        )

        # Aggregate error data by date, error_type, and severity.
        summary = (
            RequestMetadata.objects.filter(
                created_at__gte=since,
                error_type__isnull=False,
            )
            .annotate(date=TruncDate("created_at"))
            .values("date", "error_type", "severity")
            .annotate(
                error_count=Count("id"),
                resolved_count=Count("id", filter=Q(is_resolved=True)),
                affected_users=Count("user", distinct=True),
                avg_resolution_minutes=Avg(
                    resolution_time,
                    filter=Q(is_resolved=True, resolved_at__isnull=False),
                ),
            )
            .order_by("-date", "-error_count")
        )

        # Convert the averaged timedelta to minutes.  Use an explicit
        # None check so a legitimate zero duration is not dropped.
        results = []
        for item in summary:
            if item["avg_resolution_minutes"] is not None:
                item["avg_resolution_minutes"] = item["avg_resolution_minutes"].total_seconds() / 60
            results.append(item)

        serializer = ErrorSummarySerializer(results, many=True)
        return Response(serializer.data)
|
||||
162
backend/apps/core/api/incident_serializers.py
Normal file
162
backend/apps/core/api/incident_serializers.py
Normal file
@@ -0,0 +1,162 @@
|
||||
"""
|
||||
Serializers for Incident management API endpoints.
|
||||
"""
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.core.models import Incident, IncidentAlert
|
||||
|
||||
|
||||
class IncidentAlertSerializer(serializers.ModelSerializer):
    """Serializer for linked alerts within an incident.

    Each row records one alert (identified by source type + id) that has
    been associated with an incident.
    """

    class Meta:
        model = IncidentAlert
        fields = [
            "id",
            # Which alert table the id refers to (e.g. system / rate_limit).
            "alert_source",
            "alert_id",
            "created_at",
        ]
        read_only_fields = ["id", "created_at"]
|
||||
|
||||
|
||||
class IncidentSerializer(serializers.ModelSerializer):
    """Serializer for Incident model.

    Full read representation: adds human-readable display values for the
    status/severity choices, usernames for the acknowledge/resolve actors,
    and the nested list of linked alerts.
    """

    # Flattened username of the acknowledging user (null until acknowledged).
    acknowledged_by_username = serializers.CharField(
        source="acknowledged_by.username", read_only=True, allow_null=True
    )
    # Flattened username of the resolving user (null until resolved).
    resolved_by_username = serializers.CharField(
        source="resolved_by.username", read_only=True, allow_null=True
    )
    status_display = serializers.CharField(source="get_status_display", read_only=True)
    severity_display = serializers.CharField(source="get_severity_display", read_only=True)
    linked_alerts = IncidentAlertSerializer(many=True, read_only=True)

    class Meta:
        model = Incident
        fields = [
            "id",
            "incident_number",
            "title",
            "description",
            "severity",
            "severity_display",
            "status",
            "status_display",
            "detected_at",
            "acknowledged_at",
            "acknowledged_by",
            "acknowledged_by_username",
            "resolved_at",
            "resolved_by",
            "resolved_by_username",
            "resolution_notes",
            "alert_count",
            "linked_alerts",
            "created_at",
            "updated_at",
        ]
        # Lifecycle fields are driven by the acknowledge/resolve actions,
        # not by direct writes through this serializer.
        read_only_fields = [
            "id",
            "incident_number",
            "detected_at",
            "acknowledged_at",
            "acknowledged_by",
            "resolved_at",
            "resolved_by",
            "alert_count",
            "created_at",
            "updated_at",
        ]
|
||||
|
||||
|
||||
class IncidentCreateSerializer(serializers.ModelSerializer):
    """Serializer for creating incidents with linked alerts.

    ``alert_ids`` and ``alert_sources`` are parallel write-only lists:
    each id is paired positionally with a source type, and each pair
    becomes an IncidentAlert row linked to the new incident.
    """

    alert_ids = serializers.ListField(
        child=serializers.UUIDField(),
        write_only=True,
        required=False,
        help_text="List of alert IDs to link to this incident",
    )
    alert_sources = serializers.ListField(
        child=serializers.ChoiceField(choices=["system", "rate_limit"]),
        write_only=True,
        required=False,
        help_text="Source types for each alert (must match alert_ids length)",
    )

    class Meta:
        model = Incident
        fields = [
            "title",
            "description",
            "severity",
            "alert_ids",
            "alert_sources",
        ]

    def validate(self, data):
        """Require a 1:1 pairing whenever either parallel list is supplied."""
        alert_ids = data.get("alert_ids", [])
        alert_sources = data.get("alert_sources", [])

        # Fix: the original only checked when alert_ids was non-empty, so
        # alert_sources sent without alert_ids passed validation and was
        # then silently dropped by zip() in create().
        if (alert_ids or alert_sources) and len(alert_ids) != len(alert_sources):
            raise serializers.ValidationError(
                {"alert_sources": "Must provide one source per alert_id"}
            )

        return data

    def create(self, validated_data):
        """Create the incident, then one IncidentAlert row per (id, source) pair."""
        alert_ids = validated_data.pop("alert_ids", [])
        alert_sources = validated_data.pop("alert_sources", [])

        incident = Incident.objects.create(**validated_data)

        # Create linked alerts (lists are equal-length per validate()).
        for alert_id, source in zip(alert_ids, alert_sources):
            IncidentAlert.objects.create(
                incident=incident,
                alert_id=alert_id,
                alert_source=source,
            )

        return incident
|
||||
|
||||
|
||||
class IncidentAcknowledgeSerializer(serializers.Serializer):
    """Serializer for acknowledging an incident.

    Intentionally empty: the acknowledge action takes no payload, and this
    class exists only so the OpenAPI schema documents an empty request body.
    """

    pass  # No additional data needed
|
||||
|
||||
|
||||
class IncidentResolveSerializer(serializers.Serializer):
    """Serializer for resolving an incident.

    Input-only payload for the resolve action: optional free-text notes
    plus a flag controlling whether linked alerts are resolved too.
    """

    resolution_notes = serializers.CharField(required=False, allow_blank=True)
    resolve_alerts = serializers.BooleanField(
        default=True,
        help_text="Whether to also resolve all linked alerts",
    )
|
||||
|
||||
|
||||
class LinkAlertsSerializer(serializers.Serializer):
    """Serializer for linking alerts to an incident.

    ``alert_ids`` and ``alert_sources`` are parallel lists paired
    positionally; both are required and must be the same length.
    """

    alert_ids = serializers.ListField(
        child=serializers.UUIDField(),
        help_text="List of alert IDs to link",
    )
    alert_sources = serializers.ListField(
        child=serializers.ChoiceField(choices=["system", "rate_limit"]),
        help_text="Source types for each alert",
    )

    def validate(self, data):
        """Reject payloads whose parallel lists have different lengths."""
        ids, sources = data["alert_ids"], data["alert_sources"]
        if len(ids) == len(sources):
            return data
        raise serializers.ValidationError(
            {"alert_sources": "Must provide one source per alert_id"}
        )
|
||||
201
backend/apps/core/api/incident_views.py
Normal file
201
backend/apps/core/api/incident_views.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""
|
||||
ViewSets for Incident management API endpoints.
|
||||
"""
|
||||
|
||||
from django.utils import timezone
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||
from rest_framework import status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.response import Response
|
||||
|
||||
from apps.core.models import Incident, IncidentAlert, RateLimitAlert, SystemAlert
|
||||
|
||||
from .incident_serializers import (
|
||||
IncidentAcknowledgeSerializer,
|
||||
IncidentAlertSerializer,
|
||||
IncidentCreateSerializer,
|
||||
IncidentResolveSerializer,
|
||||
IncidentSerializer,
|
||||
LinkAlertsSerializer,
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_view(
    list=extend_schema(
        summary="List incidents",
        description="Get all incidents, optionally filtered by status or severity.",
        tags=["Admin - Incidents"],
    ),
    retrieve=extend_schema(
        summary="Get incident",
        description="Get details of a specific incident including linked alerts.",
        tags=["Admin - Incidents"],
    ),
    create=extend_schema(
        summary="Create incident",
        description="Create a new incident and optionally link alerts.",
        tags=["Admin - Incidents"],
    ),
    update=extend_schema(
        summary="Update incident",
        description="Update an existing incident.",
        tags=["Admin - Incidents"],
    ),
    partial_update=extend_schema(
        summary="Partial update incident",
        description="Partially update an existing incident.",
        tags=["Admin - Incidents"],
    ),
    destroy=extend_schema(
        summary="Delete incident",
        description="Delete an incident.",
        tags=["Admin - Incidents"],
    ),
)
class IncidentViewSet(viewsets.ModelViewSet):
    """
    ViewSet for managing incidents.

    Provides CRUD operations plus acknowledge, resolve, and alert-linking
    actions.  All endpoints are staff-only.
    """

    # Prefetch the link rows so nested serialization does not issue one
    # query per incident.
    queryset = Incident.objects.prefetch_related("linked_alerts").all()
    permission_classes = [IsAdminUser]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_fields = ["status", "severity"]
    search_fields = ["title", "description", "incident_number"]
    ordering_fields = ["detected_at", "severity", "status", "alert_count"]
    ordering = ["-detected_at"]

    def get_serializer_class(self):
        """Pick the serializer that matches the current action."""
        per_action = {
            "create": IncidentCreateSerializer,
            "acknowledge": IncidentAcknowledgeSerializer,
            "resolve": IncidentResolveSerializer,
            "link_alerts": LinkAlertsSerializer,
            "alerts": IncidentAlertSerializer,
        }
        return per_action.get(self.action, IncidentSerializer)

    @extend_schema(
        summary="Acknowledge incident",
        description="Mark an incident as being investigated.",
        request=IncidentAcknowledgeSerializer,
        responses={200: IncidentSerializer},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["post"])
    def acknowledge(self, request, pk=None):
        """Move an OPEN incident into INVESTIGATING, recording who and when."""
        incident = self.get_object()

        # Only OPEN incidents may be acknowledged.
        if incident.status != Incident.Status.OPEN:
            return Response(
                {"detail": f"Cannot acknowledge incident in '{incident.status}' status"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        incident.status = Incident.Status.INVESTIGATING
        incident.acknowledged_at = timezone.now()
        incident.acknowledged_by = request.user
        incident.save()

        return Response(IncidentSerializer(incident).data)

    @extend_schema(
        summary="Resolve incident",
        description="Mark an incident as resolved, optionally resolving all linked alerts.",
        request=IncidentResolveSerializer,
        responses={200: IncidentSerializer},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["post"])
    def resolve(self, request, pk=None):
        """Mark an incident as resolved; optionally cascade to linked alerts."""
        incident = self.get_object()

        if incident.status in (Incident.Status.RESOLVED, Incident.Status.CLOSED):
            return Response(
                {"detail": "Incident is already resolved or closed"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        serializer = IncidentResolveSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        incident.status = Incident.Status.RESOLVED
        incident.resolved_at = timezone.now()
        incident.resolved_by = request.user
        incident.resolution_notes = serializer.validated_data.get("resolution_notes", "")
        incident.save()

        # Optionally resolve all linked alerts, dispatching on source type.
        if serializer.validated_data.get("resolve_alerts", True):
            now = timezone.now()
            model_for_source = {
                "system": SystemAlert,
                "rate_limit": RateLimitAlert,
            }
            for link in incident.linked_alerts.all():
                alert_model = model_for_source.get(link.alert_source)
                if alert_model is not None:
                    # Only touch alerts not already resolved.
                    alert_model.objects.filter(
                        id=link.alert_id, resolved_at__isnull=True
                    ).update(resolved_at=now, resolved_by=request.user)

        return Response(IncidentSerializer(incident).data)

    @extend_schema(
        summary="Get linked alerts",
        description="Get all alerts linked to this incident.",
        responses={200: IncidentAlertSerializer(many=True)},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["get"])
    def alerts(self, request, pk=None):
        """Return every alert link row attached to this incident."""
        incident = self.get_object()
        return Response(
            IncidentAlertSerializer(incident.linked_alerts.all(), many=True).data
        )

    @extend_schema(
        summary="Link alerts to incident",
        description="Link additional alerts to an existing incident.",
        request=LinkAlertsSerializer,
        responses={200: IncidentSerializer},
        tags=["Admin - Incidents"],
    )
    @action(detail=True, methods=["post"], url_path="link-alerts")
    def link_alerts(self, request, pk=None):
        """Attach additional (id, source) alert pairs, skipping duplicates."""
        incident = self.get_object()

        serializer = LinkAlertsSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        pairs = zip(
            serializer.validated_data["alert_ids"],
            serializer.validated_data["alert_sources"],
        )

        created = 0
        for alert_id, source in pairs:
            # get_or_create makes re-linking an already-linked alert a no-op.
            _, was_created = IncidentAlert.objects.get_or_create(
                incident=incident,
                alert_id=alert_id,
                alert_source=source,
            )
            if was_created:
                created += 1

        # Refresh to get the updated alert_count.
        incident.refresh_from_db()

        return Response({
            "detail": f"Linked {created} new alerts to incident",
            "incident": IncidentSerializer(incident).data,
        })
|
||||
76
backend/apps/core/migrations/0006_add_alert_models.py
Normal file
76
backend/apps/core/migrations/0006_add_alert_models.py
Normal file
@@ -0,0 +1,76 @@
|
||||
# Generated by Django 5.2.9 on 2026-01-06 17:00
|
||||
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Create the alerting tables: RateLimitAlertConfig, RateLimitAlert and SystemAlert.

    Auto-generated by Django (makemigrations); do not edit operations by hand.
    """

    dependencies = [
        ('core', '0005_add_application_error'),
        # User FK targets (resolved_by) require the swappable user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Threshold configuration rows that drive rate-limit alerting.
        migrations.CreateModel(
            name='RateLimitAlertConfig',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('metric_type', models.CharField(choices=[('block_rate', 'Block Rate'), ('total_requests', 'Total Requests'), ('unique_ips', 'Unique IPs'), ('function_specific', 'Function Specific')], db_index=True, help_text='Type of metric to monitor', max_length=50)),
                ('threshold_value', models.FloatField(help_text='Threshold value that triggers alert')),
                ('time_window_ms', models.IntegerField(help_text='Time window in milliseconds for measurement')),
                ('function_name', models.CharField(blank=True, help_text='Specific function to monitor (for function_specific metric type)', max_length=100, null=True)),
                ('enabled', models.BooleanField(db_index=True, default=True, help_text='Whether this config is active')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'Rate Limit Alert Config',
                'verbose_name_plural': 'Rate Limit Alert Configs',
                'ordering': ['metric_type', '-created_at'],
            },
        ),
        # Alert instances fired when a config's threshold is exceeded.
        migrations.CreateModel(
            name='RateLimitAlert',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('metric_type', models.CharField(help_text='Type of metric', max_length=50)),
                ('metric_value', models.FloatField(help_text='Actual value that triggered the alert')),
                ('threshold_value', models.FloatField(help_text='Threshold that was exceeded')),
                ('time_window_ms', models.IntegerField(help_text='Time window of measurement')),
                ('function_name', models.CharField(blank=True, help_text='Function name if applicable', max_length=100, null=True)),
                ('alert_message', models.TextField(help_text='Descriptive alert message')),
                ('resolved_at', models.DateTimeField(blank=True, db_index=True, help_text='When this alert was resolved', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='Admin who resolved this alert', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_rate_limit_alerts', to=settings.AUTH_USER_MODEL)),
                ('config', models.ForeignKey(help_text='Configuration that triggered this alert', on_delete=django.db.models.deletion.CASCADE, related_name='alerts', to='core.ratelimitalertconfig')),
            ],
            options={
                'verbose_name': 'Rate Limit Alert',
                'verbose_name_plural': 'Rate Limit Alerts',
                'ordering': ['-created_at'],
                'indexes': [models.Index(fields=['metric_type', 'created_at'], name='core_rateli_metric__6fd63e_idx'), models.Index(fields=['resolved_at', 'created_at'], name='core_rateli_resolve_98c143_idx')],
            },
        ),
        # General application-health alerts (orphaned images, timeouts, etc.).
        migrations.CreateModel(
            name='SystemAlert',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('alert_type', models.CharField(choices=[('orphaned_images', 'Orphaned Images'), ('stale_submissions', 'Stale Submissions'), ('circular_dependency', 'Circular Dependency'), ('validation_error', 'Validation Error'), ('ban_attempt', 'Ban Attempt'), ('upload_timeout', 'Upload Timeout'), ('high_error_rate', 'High Error Rate'), ('database_connection', 'Database Connection'), ('memory_usage', 'Memory Usage'), ('queue_backup', 'Queue Backup')], db_index=True, help_text='Type of system alert', max_length=50)),
                ('severity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, help_text='Alert severity level', max_length=20)),
                ('message', models.TextField(help_text='Human-readable alert message')),
                ('metadata', models.JSONField(blank=True, help_text='Additional context data for this alert', null=True)),
                ('resolved_at', models.DateTimeField(blank=True, db_index=True, help_text='When this alert was resolved', null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='Admin who resolved this alert', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_system_alerts', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'System Alert',
                'verbose_name_plural': 'System Alerts',
                'ordering': ['-created_at'],
                'indexes': [models.Index(fields=['severity', 'created_at'], name='core_system_severit_bd3efd_idx'), models.Index(fields=['alert_type', 'created_at'], name='core_system_alert_t_10942e_idx'), models.Index(fields=['resolved_at', 'created_at'], name='core_system_resolve_9da33f_idx')],
            },
        ),
    ]
|
||||
@@ -0,0 +1,72 @@
|
||||
# Generated by Django 5.2.9 on 2026-01-06 17:43
|
||||
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Create the Incident table and the IncidentAlert link table.

    Auto-generated by Django (makemigrations); do not edit operations by hand.
    """

    dependencies = [
        ('core', '0006_add_alert_models'),
        # acknowledged_by / resolved_by FKs target the swappable user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Incident',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('incident_number', models.CharField(db_index=True, help_text='Auto-generated incident number (INC-YYYYMMDD-XXXX)', max_length=20, unique=True)),
                ('title', models.CharField(help_text='Brief description of the incident', max_length=255)),
                ('description', models.TextField(blank=True, help_text='Detailed description', null=True)),
                ('severity', models.CharField(choices=[('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, help_text='Incident severity level', max_length=20)),
                ('status', models.CharField(choices=[('open', 'Open'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('closed', 'Closed')], db_index=True, default='open', help_text='Current incident status', max_length=20)),
                ('detected_at', models.DateTimeField(auto_now_add=True, help_text='When the incident was detected')),
                ('acknowledged_at', models.DateTimeField(blank=True, help_text='When someone started investigating', null=True)),
                ('resolved_at', models.DateTimeField(blank=True, help_text='When the incident was resolved', null=True)),
                ('resolution_notes', models.TextField(blank=True, help_text='Notes about the resolution', null=True)),
                ('alert_count', models.PositiveIntegerField(default=0, help_text='Number of linked alerts')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('acknowledged_by', models.ForeignKey(blank=True, help_text='User who acknowledged the incident', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='acknowledged_incidents', to=settings.AUTH_USER_MODEL)),
                ('resolved_by', models.ForeignKey(blank=True, help_text='User who resolved the incident', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_incidents', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Incident',
                'verbose_name_plural': 'Incidents',
                'ordering': ['-detected_at'],
            },
        ),
        # Join table linking an incident to alerts from either alert source.
        migrations.CreateModel(
            name='IncidentAlert',
            fields=[
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('alert_source', models.CharField(choices=[('system', 'System Alert'), ('rate_limit', 'Rate Limit Alert')], help_text='Source type of the alert', max_length=20)),
                ('alert_id', models.UUIDField(help_text='ID of the linked alert')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('incident', models.ForeignKey(help_text='The incident this alert is linked to', on_delete=django.db.models.deletion.CASCADE, related_name='linked_alerts', to='core.incident')),
            ],
            options={
                'verbose_name': 'Incident Alert',
                'verbose_name_plural': 'Incident Alerts',
            },
        ),
        migrations.AddIndex(
            model_name='incident',
            index=models.Index(fields=['status', 'detected_at'], name='core_incide_status_c17ea4_idx'),
        ),
        migrations.AddIndex(
            model_name='incident',
            index=models.Index(fields=['severity', 'detected_at'], name='core_incide_severit_24b148_idx'),
        ),
        migrations.AddIndex(
            model_name='incidentalert',
            index=models.Index(fields=['alert_source', 'alert_id'], name='core_incide_alert_s_9e655c_idx'),
        ),
        # Each alert can only be linked to a given incident once.
        migrations.AlterUniqueTogether(
            name='incidentalert',
            unique_together={('incident', 'alert_source', 'alert_id')},
        ),
    ]
|
||||
335
backend/apps/core/migrations/0008_add_analytics_models.py
Normal file
335
backend/apps/core/migrations/0008_add_analytics_models.py
Normal file
@@ -0,0 +1,335 @@
|
||||
# Generated by Django 5.1.6 on 2026-01-06 18:23
|
||||
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Replace PageView analytics with request-correlation models.

    Drops PageView/PageViewEvent and adds ApprovalTransactionMetric,
    RequestMetadata and RequestBreadcrumb.
    Auto-generated by Django (makemigrations); do not edit operations by hand.
    """

    dependencies = [
        ("core", "0007_add_incident_and_report_models"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Detach the pghistory event table's FKs before the models are deleted below.
        migrations.RemoveField(
            model_name="pageviewevent",
            name="pgh_obj",
        ),
        migrations.RemoveField(
            model_name="pageviewevent",
            name="content_type",
        ),
        migrations.RemoveField(
            model_name="pageviewevent",
            name="pgh_context",
        ),
        # Per-transaction outcome metrics for the content approval pipeline.
        migrations.CreateModel(
            name="ApprovalTransactionMetric",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("submission_id", models.CharField(db_index=True, help_text="ID of the content submission", max_length=255)),
                ("moderator_id", models.CharField(db_index=True, help_text="ID of the moderator who processed the submission", max_length=255)),
                ("submitter_id", models.CharField(db_index=True, help_text="ID of the user who submitted the content", max_length=255)),
                ("request_id", models.CharField(blank=True, db_index=True, help_text="Correlation request ID", max_length=255, null=True)),
                ("success", models.BooleanField(db_index=True, help_text="Whether the approval was successful")),
                ("duration_ms", models.PositiveIntegerField(blank=True, help_text="Processing duration in milliseconds", null=True)),
                ("items_count", models.PositiveIntegerField(default=1, help_text="Number of items processed")),
                ("rollback_triggered", models.BooleanField(default=False, help_text="Whether a rollback was triggered")),
                ("error_code", models.CharField(blank=True, help_text="Error code if failed", max_length=50, null=True)),
                ("error_message", models.TextField(blank=True, help_text="Error message if failed", null=True)),
                ("error_details", models.TextField(blank=True, help_text="Detailed error information", null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this metric was recorded")),
            ],
            options={
                "verbose_name": "Approval Transaction Metric",
                "verbose_name_plural": "Approval Transaction Metrics",
                "ordering": ["-created_at"],
                "indexes": [
                    models.Index(fields=["success", "created_at"], name="core_approv_success_9c326b_idx"),
                    models.Index(fields=["moderator_id", "created_at"], name="core_approv_moderat_ec41ba_idx"),
                ],
            },
        ),
        # One row per tracked request: correlation IDs, outcome, error detail, client info.
        migrations.CreateModel(
            name="RequestMetadata",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("request_id", models.CharField(db_index=True, help_text="Unique request identifier for correlation", max_length=255, unique=True)),
                ("trace_id", models.CharField(blank=True, db_index=True, help_text="Distributed tracing ID", max_length=255, null=True)),
                ("session_id", models.CharField(blank=True, db_index=True, help_text="User session identifier", max_length=255, null=True)),
                ("parent_request_id", models.CharField(blank=True, help_text="Parent request ID for nested requests", max_length=255, null=True)),
                ("action", models.CharField(blank=True, help_text="Action/operation being performed", max_length=255, null=True)),
                ("method", models.CharField(blank=True, help_text="HTTP method (GET, POST, etc.)", max_length=10, null=True)),
                ("endpoint", models.CharField(blank=True, db_index=True, help_text="API endpoint or URL path", max_length=500, null=True)),
                ("request_method", models.CharField(blank=True, help_text="HTTP request method", max_length=10, null=True)),
                ("request_path", models.CharField(blank=True, help_text="Request URL path", max_length=500, null=True)),
                ("affected_route", models.CharField(blank=True, help_text="Frontend route affected", max_length=255, null=True)),
                ("http_status", models.PositiveIntegerField(blank=True, db_index=True, help_text="HTTP status code", null=True)),
                ("status_code", models.PositiveIntegerField(blank=True, help_text="Status code (alias for http_status)", null=True)),
                ("response_status", models.PositiveIntegerField(blank=True, help_text="Response status code", null=True)),
                ("success", models.BooleanField(blank=True, db_index=True, help_text="Whether the request was successful", null=True)),
                ("started_at", models.DateTimeField(auto_now_add=True, help_text="When the request started")),
                ("completed_at", models.DateTimeField(blank=True, help_text="When the request completed", null=True)),
                ("duration_ms", models.PositiveIntegerField(blank=True, help_text="Request duration in milliseconds", null=True)),
                ("response_time_ms", models.PositiveIntegerField(blank=True, help_text="Response time in milliseconds", null=True)),
                ("error_type", models.CharField(blank=True, db_index=True, help_text="Type/class of error", max_length=100, null=True)),
                ("error_message", models.TextField(blank=True, help_text="Error message", null=True)),
                ("error_stack", models.TextField(blank=True, help_text="Error stack trace", null=True)),
                ("error_code", models.CharField(blank=True, db_index=True, help_text="Application error code", max_length=50, null=True)),
                ("error_origin", models.CharField(blank=True, help_text="Where the error originated", max_length=100, null=True)),
                ("component_stack", models.TextField(blank=True, help_text="React component stack trace", null=True)),
                ("severity", models.CharField(choices=[("debug", "Debug"), ("info", "Info"), ("warning", "Warning"), ("error", "Error"), ("critical", "Critical")], db_index=True, default="info", help_text="Error severity level", max_length=20)),
                ("is_resolved", models.BooleanField(db_index=True, default=False, help_text="Whether this error has been resolved")),
                ("resolved_at", models.DateTimeField(blank=True, help_text="When the error was resolved", null=True)),
                ("resolution_notes", models.TextField(blank=True, help_text="Notes about resolution", null=True)),
                ("retry_count", models.PositiveIntegerField(default=0, help_text="Number of retry attempts")),
                ("retry_attempts", models.PositiveIntegerField(blank=True, help_text="Total retry attempts made", null=True)),
                ("user_agent", models.TextField(blank=True, help_text="User agent string", null=True)),
                ("ip_address_hash", models.CharField(blank=True, db_index=True, help_text="Hashed IP address", max_length=64, null=True)),
                ("client_version", models.CharField(blank=True, help_text="Client application version", max_length=50, null=True)),
                ("timezone", models.CharField(blank=True, help_text="User timezone", max_length=50, null=True)),
                ("referrer", models.TextField(blank=True, help_text="HTTP referrer", null=True)),
                ("entity_type", models.CharField(blank=True, db_index=True, help_text="Type of entity affected", max_length=50, null=True)),
                ("entity_id", models.CharField(blank=True, db_index=True, help_text="ID of entity affected", max_length=255, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this record was created")),
                ("resolved_by", models.ForeignKey(blank=True, help_text="User who resolved this error", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="resolved_request_metadata", to=settings.AUTH_USER_MODEL)),
                ("user", models.ForeignKey(blank=True, help_text="User who made the request", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="request_metadata", to=settings.AUTH_USER_MODEL)),
            ],
            options={
                "verbose_name": "Request Metadata",
                "verbose_name_plural": "Request Metadata",
                "ordering": ["-created_at"],
            },
        ),
        # Ordered trail of events captured within a single request.
        migrations.CreateModel(
            name="RequestBreadcrumb",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("timestamp", models.DateTimeField(help_text="When this breadcrumb occurred")),
                ("category", models.CharField(help_text="Breadcrumb category (e.g., 'http', 'navigation', 'console')", max_length=100)),
                ("message", models.TextField(help_text="Breadcrumb message")),
                ("level", models.CharField(blank=True, help_text="Log level (debug, info, warning, error)", max_length=20, null=True)),
                ("sequence_order", models.PositiveIntegerField(default=0, help_text="Order within the request")),
                ("request_metadata", models.ForeignKey(help_text="Parent request", on_delete=django.db.models.deletion.CASCADE, related_name="request_breadcrumbs", to="core.requestmetadata")),
            ],
            options={
                "verbose_name": "Request Breadcrumb",
                "verbose_name_plural": "Request Breadcrumbs",
                "ordering": ["sequence_order", "timestamp"],
            },
        ),
        migrations.DeleteModel(
            name="PageView",
        ),
        migrations.DeleteModel(
            name="PageViewEvent",
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["error_type", "created_at"], name="core_reques_error_t_d384f1_idx"),
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["severity", "created_at"], name="core_reques_severit_04b88d_idx"),
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["is_resolved", "created_at"], name="core_reques_is_reso_614d34_idx"),
        ),
        migrations.AddIndex(
            model_name="requestmetadata",
            index=models.Index(fields=["user", "created_at"], name="core_reques_user_id_db6ee3_idx"),
        ),
        migrations.AddIndex(
            model_name="requestbreadcrumb",
            index=models.Index(fields=["request_metadata", "sequence_order"], name="core_reques_request_0e8be4_idx"),
        ),
    ]
|
||||
@@ -0,0 +1,64 @@
|
||||
# Generated by Django 5.2.9 on 2026-01-07 01:23
|
||||
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Re-create PageView and its pghistory event table with insert/update triggers.

    Auto-generated by Django (makemigrations); the trigger SQL, hashes and pgids
    are generated by django-pgtrigger — do not edit by hand.
    """

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('core', '0008_add_analytics_models'),
        ('pghistory', '0006_delete_aggregateevent'),
    ]

    operations = [
        # Generic-FK page view counter (content_type + object_id point at the viewed object).
        migrations.CreateModel(
            name='PageView',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('object_id', models.PositiveIntegerField()),
                ('timestamp', models.DateTimeField(auto_now_add=True, db_index=True)),
                ('ip_address', models.GenericIPAddressField()),
                ('user_agent', models.CharField(blank=True, max_length=512)),
                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='page_views', to='contenttypes.contenttype')),
            ],
        ),
        # pghistory event table mirroring PageView rows; populated by the triggers below.
        migrations.CreateModel(
            name='PageViewEvent',
            fields=[
                ('pgh_id', models.AutoField(primary_key=True, serialize=False)),
                ('pgh_created_at', models.DateTimeField(auto_now_add=True)),
                ('pgh_label', models.TextField(help_text='The event label.')),
                ('id', models.BigIntegerField()),
                ('object_id', models.PositiveIntegerField()),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('ip_address', models.GenericIPAddressField()),
                ('user_agent', models.CharField(blank=True, max_length=512)),
                ('content_type', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='contenttypes.contenttype')),
                ('pgh_context', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context')),
                ('pgh_obj', models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='core.pageview')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddIndex(
            model_name='pageview',
            index=models.Index(fields=['timestamp'], name='core_pagevi_timesta_757ebb_idx'),
        ),
        migrations.AddIndex(
            model_name='pageview',
            index=models.Index(fields=['content_type', 'object_id'], name='core_pagevi_content_eda7ad_idx'),
        ),
        # AFTER INSERT trigger: copy every new PageView row into the event table.
        pgtrigger.migrations.AddTrigger(
            model_name='pageview',
            trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;', hash='1682d124ea3ba215e630c7cfcde929f7444cf247', operation='INSERT', pgid='pgtrigger_insert_insert_ee1e1', table='core_pageview', when='AFTER')),
        ),
        # AFTER UPDATE trigger: record a snapshot only when the row actually changed.
        pgtrigger.migrations.AddTrigger(
            model_name='pageview',
            trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;', hash='4221b2dd6636cae454f8d69c0c1841c40c47e6a6', operation='UPDATE', pgid='pgtrigger_update_update_3c505', table='core_pageview', when='AFTER')),
        ),
    ]
|
||||
@@ -298,3 +298,754 @@ class ApplicationError(models.Model):
|
||||
def short_error_id(self) -> str:
    """Return the first 8 characters of ``error_id`` for display."""
    full_id = str(self.error_id)
    return full_id[:8]
|
||||
|
||||
|
||||
class SystemAlert(models.Model):
    """
    System-level alerts for monitoring application health.

    Alert types include orphaned images, stale submissions, circular dependencies,
    validation errors, ban attempts, upload timeouts, and high error rates.
    An alert counts as resolved once ``resolved_at`` is set (see ``is_resolved``).
    """

    class AlertType(models.TextChoices):
        ORPHANED_IMAGES = "orphaned_images", "Orphaned Images"
        STALE_SUBMISSIONS = "stale_submissions", "Stale Submissions"
        CIRCULAR_DEPENDENCY = "circular_dependency", "Circular Dependency"
        VALIDATION_ERROR = "validation_error", "Validation Error"
        BAN_ATTEMPT = "ban_attempt", "Ban Attempt"
        UPLOAD_TIMEOUT = "upload_timeout", "Upload Timeout"
        HIGH_ERROR_RATE = "high_error_rate", "High Error Rate"
        DATABASE_CONNECTION = "database_connection", "Database Connection"
        MEMORY_USAGE = "memory_usage", "Memory Usage"
        QUEUE_BACKUP = "queue_backup", "Queue Backup"

    class Severity(models.TextChoices):
        LOW = "low", "Low"
        MEDIUM = "medium", "Medium"
        HIGH = "high", "High"
        CRITICAL = "critical", "Critical"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    alert_type = models.CharField(
        max_length=50,
        choices=AlertType.choices,
        db_index=True,
        help_text="Type of system alert",
    )
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        db_index=True,
        help_text="Alert severity level",
    )
    message = models.TextField(help_text="Human-readable alert message")
    metadata = models.JSONField(
        null=True,
        blank=True,
        help_text="Additional context data for this alert",
    )
    # Null until the alert is resolved; drives the is_resolved property.
    resolved_at = models.DateTimeField(
        null=True,
        blank=True,
        db_index=True,
        help_text="When this alert was resolved",
    )
    # SET_NULL keeps the alert record even if the resolving admin is deleted.
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="resolved_system_alerts",
        help_text="Admin who resolved this alert",
    )
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "System Alert"
        verbose_name_plural = "System Alerts"
        indexes = [
            models.Index(fields=["severity", "created_at"]),
            models.Index(fields=["alert_type", "created_at"]),
            models.Index(fields=["resolved_at", "created_at"]),
        ]

    def __str__(self) -> str:
        """Human-readable summary: severity, type, and first 50 chars of message."""
        return f"[{self.get_severity_display()}] {self.get_alert_type_display()}: {self.message[:50]}"

    @property
    def is_resolved(self) -> bool:
        """Whether this alert has been resolved (``resolved_at`` is set)."""
        return self.resolved_at is not None
|
||||
|
||||
|
||||
class RateLimitAlertConfig(models.Model):
    """
    Configuration for rate limit alert thresholds.

    Defines thresholds that trigger alerts when exceeded. Each row pairs a
    metric type with a threshold and a measurement time window; triggered
    alerts reference their config via RateLimitAlert.config.
    """

    class MetricType(models.TextChoices):
        BLOCK_RATE = "block_rate", "Block Rate"
        TOTAL_REQUESTS = "total_requests", "Total Requests"
        UNIQUE_IPS = "unique_ips", "Unique IPs"
        FUNCTION_SPECIFIC = "function_specific", "Function Specific"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    metric_type = models.CharField(
        max_length=50,
        choices=MetricType.choices,
        db_index=True,
        help_text="Type of metric to monitor",
    )
    threshold_value = models.FloatField(help_text="Threshold value that triggers alert")
    time_window_ms = models.IntegerField(help_text="Time window in milliseconds for measurement")
    # Only meaningful when metric_type is FUNCTION_SPECIFIC.
    function_name = models.CharField(
        max_length=100,
        null=True,
        blank=True,
        help_text="Specific function to monitor (for function_specific metric type)",
    )
    enabled = models.BooleanField(default=True, db_index=True, help_text="Whether this config is active")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["metric_type", "-created_at"]
        verbose_name = "Rate Limit Alert Config"
        verbose_name_plural = "Rate Limit Alert Configs"

    def __str__(self) -> str:
        """Human-readable summary: metric type and its configured threshold."""
        return f"{self.get_metric_type_display()}: threshold={self.threshold_value}"
|
||||
|
||||
|
||||
class RateLimitAlert(models.Model):
    """
    Alerts triggered when rate limit thresholds are exceeded.

    Each alert records the observed metric value alongside the threshold it
    crossed, and points back to the RateLimitAlertConfig that defined it.
    An alert counts as resolved once ``resolved_at`` is set (see ``is_resolved``).
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # CASCADE: deleting a config removes its historical alerts as well.
    config = models.ForeignKey(
        RateLimitAlertConfig,
        on_delete=models.CASCADE,
        related_name="alerts",
        help_text="Configuration that triggered this alert",
    )
    # Denormalized copies of the config values at trigger time.
    metric_type = models.CharField(max_length=50, help_text="Type of metric")
    metric_value = models.FloatField(help_text="Actual value that triggered the alert")
    threshold_value = models.FloatField(help_text="Threshold that was exceeded")
    time_window_ms = models.IntegerField(help_text="Time window of measurement")
    function_name = models.CharField(
        max_length=100,
        null=True,
        blank=True,
        help_text="Function name if applicable",
    )
    alert_message = models.TextField(help_text="Descriptive alert message")
    # Null until the alert is resolved; drives the is_resolved property.
    resolved_at = models.DateTimeField(
        null=True,
        blank=True,
        db_index=True,
        help_text="When this alert was resolved",
    )
    # SET_NULL keeps the alert record even if the resolving admin is deleted.
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="resolved_rate_limit_alerts",
        help_text="Admin who resolved this alert",
    )
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Rate Limit Alert"
        verbose_name_plural = "Rate Limit Alerts"
        indexes = [
            models.Index(fields=["metric_type", "created_at"]),
            models.Index(fields=["resolved_at", "created_at"]),
        ]

    def __str__(self) -> str:
        """Human-readable summary: metric name, observed value, and threshold."""
        return f"{self.metric_type}: {self.metric_value} > {self.threshold_value}"

    @property
    def is_resolved(self) -> bool:
        """Whether this alert has been resolved (``resolved_at`` is set)."""
        return self.resolved_at is not None
|
||||
|
||||
|
||||
class Incident(models.Model):
    """
    Groups related alerts for coordinated investigation.

    Incidents provide a higher-level view of system issues,
    allowing teams to track and resolve related alerts together.

    Incident numbers are auto-generated on first save in the form
    ``INC-YYYYMMDD-XXXX`` where ``XXXX`` is a zero-padded per-day sequence.
    """

    class Status(models.TextChoices):
        OPEN = "open", "Open"
        INVESTIGATING = "investigating", "Investigating"
        RESOLVED = "resolved", "Resolved"
        CLOSED = "closed", "Closed"

    class Severity(models.TextChoices):
        LOW = "low", "Low"
        MEDIUM = "medium", "Medium"
        HIGH = "high", "High"
        CRITICAL = "critical", "Critical"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    incident_number = models.CharField(
        max_length=20,
        unique=True,
        db_index=True,
        help_text="Auto-generated incident number (INC-YYYYMMDD-XXXX)",
    )
    title = models.CharField(max_length=255, help_text="Brief description of the incident")
    description = models.TextField(null=True, blank=True, help_text="Detailed description")
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        db_index=True,
        help_text="Incident severity level",
    )
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.OPEN,
        db_index=True,
        help_text="Current incident status",
    )

    # Timestamps
    detected_at = models.DateTimeField(auto_now_add=True, help_text="When the incident was detected")
    acknowledged_at = models.DateTimeField(null=True, blank=True, help_text="When someone started investigating")
    acknowledged_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,  # keep the incident if the user is deleted
        null=True,
        blank=True,
        related_name="acknowledged_incidents",
        help_text="User who acknowledged the incident",
    )
    resolved_at = models.DateTimeField(null=True, blank=True, help_text="When the incident was resolved")
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="resolved_incidents",
        help_text="User who resolved the incident",
    )
    resolution_notes = models.TextField(null=True, blank=True, help_text="Notes about the resolution")

    # Computed field (denormalized for performance; maintained by
    # IncidentAlert.save()/delete() via update_alert_count()).
    alert_count = models.PositiveIntegerField(default=0, help_text="Number of linked alerts")

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        ordering = ["-detected_at"]
        verbose_name = "Incident"
        verbose_name_plural = "Incidents"
        indexes = [
            models.Index(fields=["status", "detected_at"]),
            models.Index(fields=["severity", "detected_at"]),
        ]

    def __str__(self) -> str:
        return f"{self.incident_number}: {self.title}"

    def save(self, *args, **kwargs):
        """Persist the incident, generating incident_number on first save.

        The sequence suffix is derived from the highest existing suffix for
        today rather than from ``count()``: count-based numbering produces
        duplicate numbers (and an IntegrityError on the unique constraint)
        as soon as any incident for the day has been deleted.
        """
        if not self.incident_number:
            from django.utils import timezone

            today = timezone.now().strftime("%Y%m%d")
            prefix = f"INC-{today}-"
            # Zero-padded 4-digit suffixes sort lexicographically, so the
            # string-descending order yields the numeric maximum.
            # NOTE(review): beyond 9999 incidents/day the padding (and hence
            # the ordering) overflows — presumed out of scope here.
            last_number = (
                Incident.objects.filter(incident_number__startswith=prefix)
                .order_by("-incident_number")
                .values_list("incident_number", flat=True)
                .first()
            )
            next_seq = 1
            if last_number:
                try:
                    next_seq = int(last_number.rsplit("-", 1)[-1]) + 1
                except ValueError:
                    # Malformed legacy number; fall back to the first slot.
                    next_seq = 1
            self.incident_number = f"{prefix}{next_seq:04d}"
        super().save(*args, **kwargs)

    def update_alert_count(self):
        """Update the denormalized alert_count field."""
        self.alert_count = self.linked_alerts.count()
        self.save(update_fields=["alert_count"])
|
||||
|
||||
|
||||
class IncidentAlert(models.Model):
    """
    Links alerts to incidents (many-to-many through table).

    Supports linking both system alerts and rate limit alerts. The linked
    alert is referenced generically by (alert_source, alert_id) rather than
    by a foreign key, so there is no database-level integrity check on the
    alert side.
    """

    class AlertSource(models.TextChoices):
        SYSTEM = "system", "System Alert"
        RATE_LIMIT = "rate_limit", "Rate Limit Alert"

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    incident = models.ForeignKey(
        Incident,
        on_delete=models.CASCADE,
        related_name="linked_alerts",
        help_text="The incident this alert is linked to",
    )
    alert_source = models.CharField(
        max_length=20,
        choices=AlertSource.choices,
        help_text="Source type of the alert",
    )
    alert_id = models.UUIDField(help_text="ID of the linked alert")
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name = "Incident Alert"
        verbose_name_plural = "Incident Alerts"
        # Prevent linking the same alert to the same incident twice.
        unique_together = ["incident", "alert_source", "alert_id"]
        indexes = [
            models.Index(fields=["alert_source", "alert_id"]),
        ]

    def __str__(self) -> str:
        return f"{self.incident.incident_number} <- {self.alert_source}:{self.alert_id}"

    def save(self, *args, **kwargs):
        # NOTE: bulk_create/bulk_update bypass save(), so the denormalized
        # count is only maintained for single-object writes.
        super().save(*args, **kwargs)
        # Update the incident's alert count
        self.incident.update_alert_count()

    def delete(self, *args, **kwargs):
        # Capture the incident before deletion so we can refresh its count
        # after this link row is gone.
        incident = self.incident
        super().delete(*args, **kwargs)
        # Update the incident's alert count
        incident.update_alert_count()
|
||||
|
||||
|
||||
class RequestMetadata(models.Model):
    """
    Comprehensive request tracking for monitoring and debugging.

    Stores detailed information about API requests, including timing,
    errors, user context, and resolution status. Used by the admin
    dashboard for error monitoring and analytics.

    Several fields intentionally overlap (http_status/status_code/
    response_status, duration_ms/response_time_ms, method/request_method)
    — presumably to match differing client payload shapes; verify against
    the ingestion code before consolidating.
    """

    class Severity(models.TextChoices):
        DEBUG = "debug", "Debug"
        INFO = "info", "Info"
        WARNING = "warning", "Warning"
        ERROR = "error", "Error"
        CRITICAL = "critical", "Critical"

    # Identity & Correlation
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    request_id = models.CharField(
        max_length=255,
        unique=True,
        db_index=True,
        help_text="Unique request identifier for correlation",
    )
    trace_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="Distributed tracing ID",
    )
    session_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="User session identifier",
    )
    parent_request_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Parent request ID for nested requests",
    )

    # Request Information
    action = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Action/operation being performed",
    )
    method = models.CharField(
        max_length=10,
        blank=True,
        null=True,
        help_text="HTTP method (GET, POST, etc.)",
    )
    endpoint = models.CharField(
        max_length=500,
        blank=True,
        null=True,
        db_index=True,
        help_text="API endpoint or URL path",
    )
    request_method = models.CharField(
        max_length=10,
        blank=True,
        null=True,
        help_text="HTTP request method",
    )
    request_path = models.CharField(
        max_length=500,
        blank=True,
        null=True,
        help_text="Request URL path",
    )
    affected_route = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Frontend route affected",
    )

    # Response Information
    http_status = models.PositiveIntegerField(
        blank=True,
        null=True,
        db_index=True,
        help_text="HTTP status code",
    )
    status_code = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Status code (alias for http_status)",
    )
    response_status = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Response status code",
    )
    success = models.BooleanField(
        blank=True,
        null=True,
        db_index=True,
        help_text="Whether the request was successful",
    )

    # Timing
    started_at = models.DateTimeField(
        auto_now_add=True,
        help_text="When the request started",
    )
    completed_at = models.DateTimeField(
        blank=True,
        null=True,
        help_text="When the request completed",
    )
    duration_ms = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Request duration in milliseconds",
    )
    response_time_ms = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Response time in milliseconds",
    )

    # Error Information
    error_type = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        db_index=True,
        help_text="Type/class of error",
    )
    error_message = models.TextField(
        blank=True,
        null=True,
        help_text="Error message",
    )
    error_stack = models.TextField(
        blank=True,
        null=True,
        help_text="Error stack trace",
    )
    error_code = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        db_index=True,
        help_text="Application error code",
    )
    error_origin = models.CharField(
        max_length=100,
        blank=True,
        null=True,
        help_text="Where the error originated",
    )
    component_stack = models.TextField(
        blank=True,
        null=True,
        help_text="React component stack trace",
    )
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        default=Severity.INFO,
        db_index=True,
        help_text="Error severity level",
    )

    # Resolution
    is_resolved = models.BooleanField(
        default=False,
        db_index=True,
        help_text="Whether this error has been resolved",
    )
    resolved_at = models.DateTimeField(
        blank=True,
        null=True,
        help_text="When the error was resolved",
    )
    resolved_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,  # keep the record if the resolving user is deleted
        related_name="resolved_request_metadata",
        help_text="User who resolved this error",
    )
    resolution_notes = models.TextField(
        blank=True,
        null=True,
        help_text="Notes about resolution",
    )

    # Retry Information
    retry_count = models.PositiveIntegerField(
        default=0,
        help_text="Number of retry attempts",
    )
    retry_attempts = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Total retry attempts made",
    )

    # User Context
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="request_metadata",
        help_text="User who made the request",
    )
    user_agent = models.TextField(
        blank=True,
        null=True,
        help_text="User agent string",
    )
    ip_address_hash = models.CharField(
        max_length=64,
        blank=True,
        null=True,
        db_index=True,
        help_text="Hashed IP address",
    )
    client_version = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="Client application version",
    )
    timezone = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="User timezone",
    )
    referrer = models.TextField(
        blank=True,
        null=True,
        help_text="HTTP referrer",
    )

    # Entity Context — generic (type, id) pair; not a ContentType FK, so no
    # DB-level integrity on the referenced entity.
    entity_type = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        db_index=True,
        help_text="Type of entity affected",
    )
    entity_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="ID of entity affected",
    )

    # Timestamps
    created_at = models.DateTimeField(
        auto_now_add=True,
        db_index=True,
        help_text="When this record was created",
    )

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Request Metadata"
        verbose_name_plural = "Request Metadata"
        indexes = [
            # Composite indexes back the dashboard's filtered, time-ordered queries.
            models.Index(fields=["error_type", "created_at"]),
            models.Index(fields=["severity", "created_at"]),
            models.Index(fields=["is_resolved", "created_at"]),
            models.Index(fields=["user", "created_at"]),
        ]

    def __str__(self) -> str:
        return f"{self.request_id} - {self.endpoint or 'unknown'}"
|
||||
|
||||
|
||||
class RequestBreadcrumb(models.Model):
    """
    Breadcrumb trail for request tracing.

    Stores individual breadcrumb events that occurred during a request,
    useful for debugging and understanding request flow. Breadcrumbs are
    ordered primarily by their explicit sequence_order, falling back to
    timestamp.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    request_metadata = models.ForeignKey(
        RequestMetadata,
        on_delete=models.CASCADE,  # breadcrumbs are meaningless without their request
        related_name="request_breadcrumbs",
        help_text="Parent request",
    )
    timestamp = models.DateTimeField(
        help_text="When this breadcrumb occurred",
    )
    category = models.CharField(
        max_length=100,
        help_text="Breadcrumb category (e.g., 'http', 'navigation', 'console')",
    )
    message = models.TextField(
        help_text="Breadcrumb message",
    )
    level = models.CharField(
        max_length=20,
        blank=True,
        null=True,
        help_text="Log level (debug, info, warning, error)",
    )
    sequence_order = models.PositiveIntegerField(
        default=0,
        help_text="Order within the request",
    )

    class Meta:
        ordering = ["sequence_order", "timestamp"]
        verbose_name = "Request Breadcrumb"
        verbose_name_plural = "Request Breadcrumbs"
        indexes = [
            # Supports fetching a request's breadcrumbs in order.
            models.Index(fields=["request_metadata", "sequence_order"]),
        ]

    def __str__(self) -> str:
        # Truncate the message so admin listings stay readable.
        return f"[{self.category}] {self.message[:50]}"
|
||||
|
||||
|
||||
class ApprovalTransactionMetric(models.Model):
    """
    Metrics for content approval transactions.

    Tracks performance and success/failure of moderation approval
    operations for analytics and debugging. References to the submission,
    moderator, and submitter are stored as plain string IDs (not FKs), so
    rows survive deletion of the referenced records.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    # References
    submission_id = models.CharField(
        max_length=255,
        db_index=True,
        help_text="ID of the content submission",
    )
    moderator_id = models.CharField(
        max_length=255,
        db_index=True,
        help_text="ID of the moderator who processed the submission",
    )
    submitter_id = models.CharField(
        max_length=255,
        db_index=True,
        help_text="ID of the user who submitted the content",
    )
    request_id = models.CharField(
        max_length=255,
        blank=True,
        null=True,
        db_index=True,
        help_text="Correlation request ID",
    )

    # Metrics
    success = models.BooleanField(
        db_index=True,
        help_text="Whether the approval was successful",
    )
    duration_ms = models.PositiveIntegerField(
        blank=True,
        null=True,
        help_text="Processing duration in milliseconds",
    )
    items_count = models.PositiveIntegerField(
        default=1,
        help_text="Number of items processed",
    )
    rollback_triggered = models.BooleanField(
        default=False,
        help_text="Whether a rollback was triggered",
    )

    # Error Information (populated only when success is False)
    error_code = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="Error code if failed",
    )
    error_message = models.TextField(
        blank=True,
        null=True,
        help_text="Error message if failed",
    )
    error_details = models.TextField(
        blank=True,
        null=True,
        help_text="Detailed error information",
    )

    # Timestamps
    created_at = models.DateTimeField(
        auto_now_add=True,
        db_index=True,
        help_text="When this metric was recorded",
    )

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Approval Transaction Metric"
        verbose_name_plural = "Approval Transaction Metrics"
        indexes = [
            models.Index(fields=["success", "created_at"]),
            models.Index(fields=["moderator_id", "created_at"]),
        ]

    def __str__(self) -> str:
        # Short check/cross marker plus truncated IDs for compact admin display.
        status = "✓" if self.success else "✗"
        return f"{status} Submission {self.submission_id[:8]} by {self.moderator_id[:8]}"
|
||||
|
||||
|
||||
@@ -58,8 +58,22 @@ def with_callbacks(
|
||||
source_state = getattr(instance, field_name, None)
|
||||
|
||||
# Get target state from the transition decorator
|
||||
# The @transition decorator sets _django_fsm_target
|
||||
target_state = getattr(func, "_django_fsm", {}).get("target", None)
|
||||
# The @transition decorator sets _django_fsm attribute (may be dict or FSMMeta object)
|
||||
fsm_meta = getattr(func, "_django_fsm", None)
|
||||
target_state = None
|
||||
if fsm_meta is not None:
|
||||
if isinstance(fsm_meta, dict):
|
||||
target_state = fsm_meta.get("target", None)
|
||||
elif hasattr(fsm_meta, "target"):
|
||||
target_state = fsm_meta.target
|
||||
elif hasattr(fsm_meta, "transitions"):
|
||||
# FSMMeta object - try to get target from first transition
|
||||
try:
|
||||
transitions = list(fsm_meta.transitions.values())
|
||||
if transitions:
|
||||
target_state = transitions[0].target if hasattr(transitions[0], 'target') else None
|
||||
except (AttributeError, TypeError, StopIteration):
|
||||
pass
|
||||
|
||||
# If we can't determine the target from decorator metadata,
|
||||
# we'll capture it after the transition
|
||||
@@ -284,7 +298,7 @@ class TransitionMethodFactory:
|
||||
def create_approve_method(
|
||||
source: str,
|
||||
target: str,
|
||||
field_name: str = "status",
|
||||
field=None,
|
||||
permission_guard: Callable | None = None,
|
||||
enable_callbacks: bool = True,
|
||||
emit_signals: bool = True,
|
||||
@@ -295,7 +309,7 @@ class TransitionMethodFactory:
|
||||
Args:
|
||||
source: Source state value(s)
|
||||
target: Target state value
|
||||
field_name: Name of the FSM field
|
||||
field: FSM field object (required for django-fsm 3.x)
|
||||
permission_guard: Optional permission guard
|
||||
enable_callbacks: Whether to wrap with callback execution
|
||||
emit_signals: Whether to emit Django signals
|
||||
@@ -303,13 +317,15 @@ class TransitionMethodFactory:
|
||||
Returns:
|
||||
Approval transition method
|
||||
"""
|
||||
# Get field name for callback wrapper
|
||||
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||
|
||||
@fsm_log_by
|
||||
@transition(
|
||||
field=field_name,
|
||||
field=field,
|
||||
source=source,
|
||||
target=target,
|
||||
conditions=[permission_guard] if permission_guard else [],
|
||||
permission=permission_guard,
|
||||
)
|
||||
def approve(instance, user=None, comment: str = "", **kwargs):
|
||||
"""Approve and transition to approved state."""
|
||||
@@ -335,7 +351,7 @@ class TransitionMethodFactory:
|
||||
def create_reject_method(
|
||||
source: str,
|
||||
target: str,
|
||||
field_name: str = "status",
|
||||
field=None,
|
||||
permission_guard: Callable | None = None,
|
||||
enable_callbacks: bool = True,
|
||||
emit_signals: bool = True,
|
||||
@@ -346,7 +362,7 @@ class TransitionMethodFactory:
|
||||
Args:
|
||||
source: Source state value(s)
|
||||
target: Target state value
|
||||
field_name: Name of the FSM field
|
||||
field: FSM field object (required for django-fsm 3.x)
|
||||
permission_guard: Optional permission guard
|
||||
enable_callbacks: Whether to wrap with callback execution
|
||||
emit_signals: Whether to emit Django signals
|
||||
@@ -354,13 +370,15 @@ class TransitionMethodFactory:
|
||||
Returns:
|
||||
Rejection transition method
|
||||
"""
|
||||
# Get field name for callback wrapper
|
||||
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||
|
||||
@fsm_log_by
|
||||
@transition(
|
||||
field=field_name,
|
||||
field=field,
|
||||
source=source,
|
||||
target=target,
|
||||
conditions=[permission_guard] if permission_guard else [],
|
||||
permission=permission_guard,
|
||||
)
|
||||
def reject(instance, user=None, reason: str = "", **kwargs):
|
||||
"""Reject and transition to rejected state."""
|
||||
@@ -386,7 +404,7 @@ class TransitionMethodFactory:
|
||||
def create_escalate_method(
|
||||
source: str,
|
||||
target: str,
|
||||
field_name: str = "status",
|
||||
field=None,
|
||||
permission_guard: Callable | None = None,
|
||||
enable_callbacks: bool = True,
|
||||
emit_signals: bool = True,
|
||||
@@ -397,7 +415,7 @@ class TransitionMethodFactory:
|
||||
Args:
|
||||
source: Source state value(s)
|
||||
target: Target state value
|
||||
field_name: Name of the FSM field
|
||||
field: FSM field object (required for django-fsm 3.x)
|
||||
permission_guard: Optional permission guard
|
||||
enable_callbacks: Whether to wrap with callback execution
|
||||
emit_signals: Whether to emit Django signals
|
||||
@@ -405,13 +423,15 @@ class TransitionMethodFactory:
|
||||
Returns:
|
||||
Escalation transition method
|
||||
"""
|
||||
# Get field name for callback wrapper
|
||||
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||
|
||||
@fsm_log_by
|
||||
@transition(
|
||||
field=field_name,
|
||||
field=field,
|
||||
source=source,
|
||||
target=target,
|
||||
conditions=[permission_guard] if permission_guard else [],
|
||||
permission=permission_guard,
|
||||
)
|
||||
def escalate(instance, user=None, reason: str = "", **kwargs):
|
||||
"""Escalate to higher authority."""
|
||||
@@ -438,7 +458,7 @@ class TransitionMethodFactory:
|
||||
method_name: str,
|
||||
source: str,
|
||||
target: str,
|
||||
field_name: str = "status",
|
||||
field=None,
|
||||
permission_guard: Callable | None = None,
|
||||
docstring: str | None = None,
|
||||
enable_callbacks: bool = True,
|
||||
@@ -451,7 +471,7 @@ class TransitionMethodFactory:
|
||||
method_name: Name for the method
|
||||
source: Source state value(s)
|
||||
target: Target state value
|
||||
field_name: Name of the FSM field
|
||||
field: FSM field object (required for django-fsm 3.x)
|
||||
permission_guard: Optional permission guard
|
||||
docstring: Optional docstring for the method
|
||||
enable_callbacks: Whether to wrap with callback execution
|
||||
@@ -460,13 +480,15 @@ class TransitionMethodFactory:
|
||||
Returns:
|
||||
Generic transition method
|
||||
"""
|
||||
# Get field name for callback wrapper
|
||||
field_name = field.name if hasattr(field, 'name') else 'status'
|
||||
|
||||
@fsm_log_by
|
||||
@transition(
|
||||
field=field_name,
|
||||
field=field,
|
||||
source=source,
|
||||
target=target,
|
||||
conditions=[permission_guard] if permission_guard else [],
|
||||
permission=permission_guard,
|
||||
)
|
||||
def generic_transition(instance, user=None, **kwargs):
|
||||
"""Execute state transition."""
|
||||
|
||||
@@ -71,64 +71,74 @@ def generate_transition_methods_for_model(
|
||||
choice_group: Choice group name
|
||||
domain: Domain namespace
|
||||
"""
|
||||
# Get the actual field from the model class - django-fsm 3.x requires
|
||||
# the field object, not just the string name, when creating methods dynamically
|
||||
field = model_class._meta.get_field(field_name)
|
||||
|
||||
builder = StateTransitionBuilder(choice_group, domain)
|
||||
transition_graph = builder.build_transition_graph()
|
||||
factory = TransitionMethodFactory()
|
||||
|
||||
# Group transitions by target to avoid overwriting methods
|
||||
# {target: [source1, source2, ...]}
|
||||
target_to_sources: dict[str, list[str]] = {}
|
||||
for source, targets in transition_graph.items():
|
||||
source_metadata = builder.get_choice_metadata(source)
|
||||
|
||||
for target in targets:
|
||||
# Use shared method name determination
|
||||
method_name = determine_method_name_for_transition(source, target)
|
||||
if target not in target_to_sources:
|
||||
target_to_sources[target] = []
|
||||
target_to_sources[target].append(source)
|
||||
|
||||
# Get target metadata for combined guards
|
||||
# Create one transition method per target, handling all valid sources
|
||||
for target, sources in target_to_sources.items():
|
||||
# Use shared method name determination (all sources go to same target = same method)
|
||||
method_name = determine_method_name_for_transition(sources[0], target)
|
||||
|
||||
# Get target metadata for guards
|
||||
target_metadata = builder.get_choice_metadata(target)
|
||||
|
||||
# Extract guards from both source and target metadata
|
||||
# This ensures metadata flags like requires_assignment, zero_tolerance,
|
||||
# required_permissions, and escalation_level are enforced
|
||||
guards = extract_guards_from_metadata(source_metadata)
|
||||
# For permission guard, use target metadata only (all sources share the same permission)
|
||||
# Source-specific guards would need to be checked via conditions, but for FSM 3.x
|
||||
# we use permission which gets called with (instance, user)
|
||||
target_guards = extract_guards_from_metadata(target_metadata)
|
||||
|
||||
# Combine all guards
|
||||
all_guards = guards + target_guards
|
||||
|
||||
# Create combined guard if we have multiple guards
|
||||
combined_guard: Callable | None = None
|
||||
if len(all_guards) == 1:
|
||||
combined_guard = all_guards[0]
|
||||
elif len(all_guards) > 1:
|
||||
combined_guard = CompositeGuard(guards=all_guards, operator="AND")
|
||||
if len(target_guards) == 1:
|
||||
combined_guard = target_guards[0]
|
||||
elif len(target_guards) > 1:
|
||||
combined_guard = CompositeGuard(guards=target_guards, operator="AND")
|
||||
|
||||
# Create appropriate transition method
|
||||
# Use list of sources for transitions with multiple valid source states
|
||||
source_value = sources if len(sources) > 1 else sources[0]
|
||||
|
||||
# Create appropriate transition method - pass actual field object
|
||||
if "approve" in method_name or "accept" in method_name:
|
||||
method = factory.create_approve_method(
|
||||
source=source,
|
||||
source=source_value,
|
||||
target=target,
|
||||
field_name=field_name,
|
||||
field=field,
|
||||
permission_guard=combined_guard,
|
||||
)
|
||||
elif "reject" in method_name or "deny" in method_name:
|
||||
method = factory.create_reject_method(
|
||||
source=source,
|
||||
source=source_value,
|
||||
target=target,
|
||||
field_name=field_name,
|
||||
field=field,
|
||||
permission_guard=combined_guard,
|
||||
)
|
||||
elif "escalate" in method_name:
|
||||
method = factory.create_escalate_method(
|
||||
source=source,
|
||||
source=source_value,
|
||||
target=target,
|
||||
field_name=field_name,
|
||||
field=field,
|
||||
permission_guard=combined_guard,
|
||||
)
|
||||
else:
|
||||
method = factory.create_generic_transition_method(
|
||||
method_name=method_name,
|
||||
source=source,
|
||||
source=source_value,
|
||||
target=target,
|
||||
field_name=field_name,
|
||||
field=field,
|
||||
permission_guard=combined_guard,
|
||||
)
|
||||
|
||||
|
||||
@@ -1,50 +1,4 @@
|
||||
from django.apps import AppConfig
|
||||
from django.db.models.signals import post_migrate
|
||||
|
||||
|
||||
def create_photo_permissions(sender, **kwargs):
|
||||
"""Create custom permissions for domain-specific photo models"""
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from apps.parks.models import ParkPhoto
|
||||
from apps.rides.models import RidePhoto
|
||||
|
||||
# Create permissions for ParkPhoto
|
||||
park_photo_content_type = ContentType.objects.get_for_model(ParkPhoto)
|
||||
Permission.objects.get_or_create(
|
||||
codename="add_parkphoto",
|
||||
name="Can add park photo",
|
||||
content_type=park_photo_content_type,
|
||||
)
|
||||
Permission.objects.get_or_create(
|
||||
codename="change_parkphoto",
|
||||
name="Can change park photo",
|
||||
content_type=park_photo_content_type,
|
||||
)
|
||||
Permission.objects.get_or_create(
|
||||
codename="delete_parkphoto",
|
||||
name="Can delete park photo",
|
||||
content_type=park_photo_content_type,
|
||||
)
|
||||
|
||||
# Create permissions for RidePhoto
|
||||
ride_photo_content_type = ContentType.objects.get_for_model(RidePhoto)
|
||||
Permission.objects.get_or_create(
|
||||
codename="add_ridephoto",
|
||||
name="Can add ride photo",
|
||||
content_type=ride_photo_content_type,
|
||||
)
|
||||
Permission.objects.get_or_create(
|
||||
codename="change_ridephoto",
|
||||
name="Can change ride photo",
|
||||
content_type=ride_photo_content_type,
|
||||
)
|
||||
Permission.objects.get_or_create(
|
||||
codename="delete_ridephoto",
|
||||
name="Can delete ride photo",
|
||||
content_type=ride_photo_content_type,
|
||||
)
|
||||
|
||||
|
||||
class MediaConfig(AppConfig):
|
||||
@@ -52,4 +6,7 @@ class MediaConfig(AppConfig):
|
||||
name = "apps.media"
|
||||
|
||||
def ready(self):
|
||||
post_migrate.connect(create_photo_permissions, sender=self)
|
||||
# Note: Django automatically creates add/change/delete/view permissions
|
||||
# for all models, so no custom post_migrate handler is needed.
|
||||
pass
|
||||
|
||||
|
||||
@@ -206,7 +206,9 @@ class EditSubmission(StateMachineMixin, TrackedModel):
|
||||
if self.status != "PENDING":
|
||||
raise ValidationError(f"Cannot claim submission: current status is {self.status}, expected PENDING")
|
||||
|
||||
self.transition_to_claimed(user=user)
|
||||
# Set status directly (similar to unclaim method)
|
||||
# The transition_to_claimed FSM method was never defined
|
||||
self.status = "CLAIMED"
|
||||
self.claimed_by = user
|
||||
self.claimed_at = timezone.now()
|
||||
self.save()
|
||||
@@ -754,7 +756,9 @@ class PhotoSubmission(StateMachineMixin, TrackedModel):
|
||||
if self.status != "PENDING":
|
||||
raise ValidationError(f"Cannot claim submission: current status is {self.status}, expected PENDING")
|
||||
|
||||
self.transition_to_claimed(user=user)
|
||||
# Set status directly (similar to unclaim method)
|
||||
# The transition_to_claimed FSM method was never defined
|
||||
self.status = "CLAIMED"
|
||||
self.claimed_by = user
|
||||
self.claimed_at = timezone.now()
|
||||
self.save()
|
||||
|
||||
@@ -67,6 +67,7 @@ class EditSubmissionSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for EditSubmission with UI metadata for Nuxt frontend."""
|
||||
|
||||
submitted_by = UserBasicSerializer(source="user", read_only=True)
|
||||
handled_by = UserBasicSerializer(read_only=True)
|
||||
claimed_by = UserBasicSerializer(read_only=True)
|
||||
content_type_name = serializers.CharField(source="content_type.model", read_only=True)
|
||||
|
||||
@@ -87,22 +88,24 @@ class EditSubmissionSerializer(serializers.ModelSerializer):
|
||||
"content_type",
|
||||
"content_type_name",
|
||||
"object_id",
|
||||
"submission_type",
|
||||
"changes",
|
||||
"moderator_changes",
|
||||
"rejection_reason",
|
||||
"reason",
|
||||
"source",
|
||||
"notes",
|
||||
"submitted_by",
|
||||
"reviewed_by",
|
||||
"handled_by",
|
||||
"claimed_by",
|
||||
"claimed_at",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"time_since_created",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"submitted_by",
|
||||
"handled_by",
|
||||
"claimed_by",
|
||||
"claimed_at",
|
||||
"status_color",
|
||||
@@ -163,6 +166,7 @@ class EditSubmissionListSerializer(serializers.ModelSerializer):
|
||||
fields = [
|
||||
"id",
|
||||
"status",
|
||||
"submission_type", # Added for frontend compatibility
|
||||
"content_type_name",
|
||||
"object_id",
|
||||
"submitted_by_username",
|
||||
@@ -195,6 +199,101 @@ class EditSubmissionListSerializer(serializers.ModelSerializer):
|
||||
return icons.get(obj.status, "heroicons:question-mark-circle")
|
||||
|
||||
|
||||
class CreateEditSubmissionSerializer(serializers.ModelSerializer):
|
||||
"""
|
||||
Serializer for creating edit submissions.
|
||||
|
||||
This replaces the Supabase RPC 'create_submission_with_items' function.
|
||||
Accepts entity type as a string and resolves it to ContentType.
|
||||
"""
|
||||
|
||||
entity_type = serializers.CharField(write_only=True, help_text="Entity type: park, ride, company, ride_model")
|
||||
|
||||
class Meta:
|
||||
model = EditSubmission
|
||||
fields = [
|
||||
"entity_type",
|
||||
"object_id",
|
||||
"submission_type",
|
||||
"changes",
|
||||
"reason",
|
||||
"source",
|
||||
]
|
||||
|
||||
def validate_entity_type(self, value):
|
||||
"""Convert entity_type string to ContentType."""
|
||||
entity_type_map = {
|
||||
"park": ("parks", "park"),
|
||||
"ride": ("rides", "ride"),
|
||||
"company": ("parks", "company"),
|
||||
"ride_model": ("rides", "ridemodel"),
|
||||
"manufacturer": ("parks", "company"),
|
||||
"designer": ("parks", "company"),
|
||||
"operator": ("parks", "company"),
|
||||
"property_owner": ("parks", "company"),
|
||||
}
|
||||
|
||||
if value.lower() not in entity_type_map:
|
||||
raise serializers.ValidationError(
|
||||
f"Invalid entity_type. Must be one of: {', '.join(entity_type_map.keys())}"
|
||||
)
|
||||
|
||||
return value.lower()
|
||||
|
||||
def validate_changes(self, value):
|
||||
"""Validate changes is a proper JSON object."""
|
||||
if not isinstance(value, dict):
|
||||
raise serializers.ValidationError("Changes must be a JSON object")
|
||||
if not value:
|
||||
raise serializers.ValidationError("Changes cannot be empty")
|
||||
return value
|
||||
|
||||
def validate(self, attrs):
|
||||
"""Cross-field validation."""
|
||||
submission_type = attrs.get("submission_type", "EDIT")
|
||||
object_id = attrs.get("object_id")
|
||||
|
||||
# For EDIT submissions, object_id is required
|
||||
if submission_type == "EDIT" and not object_id:
|
||||
raise serializers.ValidationError(
|
||||
{"object_id": "object_id is required for EDIT submissions"}
|
||||
)
|
||||
|
||||
# For CREATE submissions, object_id should be null
|
||||
if submission_type == "CREATE" and object_id:
|
||||
raise serializers.ValidationError(
|
||||
{"object_id": "object_id must be null for CREATE submissions"}
|
||||
)
|
||||
|
||||
return attrs
|
||||
|
||||
def create(self, validated_data):
|
||||
"""Create a new submission."""
|
||||
entity_type = validated_data.pop("entity_type")
|
||||
|
||||
# Map entity_type to ContentType
|
||||
entity_type_map = {
|
||||
"park": ("parks", "park"),
|
||||
"ride": ("rides", "ride"),
|
||||
"company": ("parks", "company"),
|
||||
"ride_model": ("rides", "ridemodel"),
|
||||
"manufacturer": ("parks", "company"),
|
||||
"designer": ("parks", "company"),
|
||||
"operator": ("parks", "company"),
|
||||
"property_owner": ("parks", "company"),
|
||||
}
|
||||
|
||||
app_label, model_name = entity_type_map[entity_type]
|
||||
content_type = ContentType.objects.get(app_label=app_label, model=model_name)
|
||||
|
||||
# Set automatic fields
|
||||
validated_data["user"] = self.context["request"].user
|
||||
validated_data["content_type"] = content_type
|
||||
validated_data["status"] = "PENDING"
|
||||
|
||||
return super().create(validated_data)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Moderation Report Serializers
|
||||
# ============================================================================
|
||||
|
||||
@@ -25,7 +25,7 @@ from django_fsm import TransitionNotAllowed
|
||||
|
||||
from apps.parks.models import Company as Operator
|
||||
|
||||
from .mixins import (
|
||||
from ..mixins import (
|
||||
AdminRequiredMixin,
|
||||
EditSubmissionMixin,
|
||||
HistoryMixin,
|
||||
@@ -33,7 +33,7 @@ from .mixins import (
|
||||
ModeratorRequiredMixin,
|
||||
PhotoSubmissionMixin,
|
||||
)
|
||||
from .models import (
|
||||
from ..models import (
|
||||
BulkOperation,
|
||||
EditSubmission,
|
||||
ModerationAction,
|
||||
@@ -9,6 +9,8 @@ This module tests end-to-end moderation workflows including:
|
||||
- Bulk operation workflow
|
||||
"""
|
||||
|
||||
from datetime import timedelta
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.test import TestCase
|
||||
@@ -37,7 +39,7 @@ class SubmissionApprovalWorkflowTests(TestCase):
|
||||
"""
|
||||
Test complete edit submission approval workflow.
|
||||
|
||||
Flow: User submits → Moderator reviews → Moderator approves → Changes applied
|
||||
Flow: User submits → Moderator claims → Moderator approves → Changes applied
|
||||
"""
|
||||
from apps.moderation.models import EditSubmission
|
||||
from apps.parks.models import Company
|
||||
@@ -61,6 +63,13 @@ class SubmissionApprovalWorkflowTests(TestCase):
|
||||
self.assertIsNone(submission.handled_by)
|
||||
self.assertIsNone(submission.handled_at)
|
||||
|
||||
# Moderator claims the submission first
|
||||
submission.transition_to_claimed(user=self.moderator)
|
||||
submission.save()
|
||||
|
||||
submission.refresh_from_db()
|
||||
self.assertEqual(submission.status, "CLAIMED")
|
||||
|
||||
# Moderator approves
|
||||
submission.transition_to_approved(user=self.moderator)
|
||||
submission.handled_by = self.moderator
|
||||
@@ -78,6 +87,8 @@ class SubmissionApprovalWorkflowTests(TestCase):
|
||||
|
||||
Flow: User submits photo → Moderator reviews → Moderator approves → Photo created
|
||||
"""
|
||||
from django_cloudflareimages_toolkit.models import CloudflareImage
|
||||
|
||||
from apps.moderation.models import PhotoSubmission
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
@@ -87,6 +98,13 @@ class SubmissionApprovalWorkflowTests(TestCase):
|
||||
name="Test Park", slug="test-park", operator=operator, status="OPERATING", timezone="America/New_York"
|
||||
)
|
||||
|
||||
# Create mock CloudflareImage for the photo submission
|
||||
mock_image = CloudflareImage.objects.create(
|
||||
cloudflare_id="test-cf-image-id-12345",
|
||||
user=self.regular_user,
|
||||
expires_at=timezone.now() + timedelta(days=365),
|
||||
)
|
||||
|
||||
# User submits a photo
|
||||
content_type = ContentType.objects.get_for_model(park)
|
||||
submission = PhotoSubmission.objects.create(
|
||||
@@ -94,12 +112,18 @@ class SubmissionApprovalWorkflowTests(TestCase):
|
||||
content_type=content_type,
|
||||
object_id=park.id,
|
||||
status="PENDING",
|
||||
photo_type="GENERAL",
|
||||
description="Beautiful park entrance",
|
||||
photo=mock_image,
|
||||
caption="Beautiful park entrance",
|
||||
)
|
||||
|
||||
self.assertEqual(submission.status, "PENDING")
|
||||
|
||||
# Moderator claims the submission first (required FSM step)
|
||||
submission.claim(user=self.moderator)
|
||||
|
||||
submission.refresh_from_db()
|
||||
self.assertEqual(submission.status, "CLAIMED")
|
||||
|
||||
# Moderator approves
|
||||
submission.transition_to_approved(user=self.moderator)
|
||||
submission.handled_by = self.moderator
|
||||
@@ -144,7 +168,13 @@ class SubmissionRejectionWorkflowTests(TestCase):
|
||||
reason="Name change request",
|
||||
)
|
||||
|
||||
# Moderator rejects
|
||||
# Moderator claims and then rejects
|
||||
submission.transition_to_claimed(user=self.moderator)
|
||||
submission.save()
|
||||
|
||||
submission.refresh_from_db()
|
||||
self.assertEqual(submission.status, "CLAIMED")
|
||||
|
||||
submission.transition_to_rejected(user=self.moderator)
|
||||
submission.handled_by = self.moderator
|
||||
submission.handled_at = timezone.now()
|
||||
@@ -193,7 +223,13 @@ class SubmissionEscalationWorkflowTests(TestCase):
|
||||
reason="Major name change",
|
||||
)
|
||||
|
||||
# Moderator escalates
|
||||
# Moderator claims and then escalates
|
||||
submission.transition_to_claimed(user=self.moderator)
|
||||
submission.save()
|
||||
|
||||
submission.refresh_from_db()
|
||||
self.assertEqual(submission.status, "CLAIMED")
|
||||
|
||||
submission.transition_to_escalated(user=self.moderator)
|
||||
submission.notes = "Escalated: Major change needs admin review"
|
||||
submission.save()
|
||||
@@ -447,11 +483,13 @@ class ModerationQueueWorkflowTests(TestCase):
|
||||
from apps.moderation.models import ModerationQueue
|
||||
|
||||
queue_item = ModerationQueue.objects.create(
|
||||
queue_type="SUBMISSION_REVIEW",
|
||||
item_type="SUBMISSION_REVIEW",
|
||||
status="PENDING",
|
||||
priority="MEDIUM",
|
||||
item_type="edit_submission",
|
||||
item_id=123,
|
||||
title="Review edit submission #123",
|
||||
description="Review and process edit submission",
|
||||
entity_type="edit_submission",
|
||||
entity_id=123,
|
||||
)
|
||||
|
||||
self.assertEqual(queue_item.status, "PENDING")
|
||||
|
||||
@@ -20,6 +20,7 @@ from .views import (
|
||||
ModerationActionViewSet,
|
||||
ModerationQueueViewSet,
|
||||
ModerationReportViewSet,
|
||||
ModerationStatsView,
|
||||
PhotoSubmissionViewSet,
|
||||
UserModerationViewSet,
|
||||
)
|
||||
@@ -175,6 +176,9 @@ html_patterns = [
|
||||
path("", ModerationDashboardView.as_view(), name="dashboard"),
|
||||
path("submissions/", SubmissionListView.as_view(), name="submission_list"),
|
||||
path("history/", HistoryPageView.as_view(), name="history"),
|
||||
# Edit submission detail for HTMX form posts
|
||||
path("submissions/<int:pk>/edit/", EditSubmissionViewSet.as_view({'post': 'partial_update'}), name="edit_submission"),
|
||||
path("edit-submissions/", TemplateView.as_view(template_name="moderation/edit_submissions.html"), name="edit_submissions"),
|
||||
]
|
||||
|
||||
# SSE endpoints for real-time updates
|
||||
@@ -188,6 +192,8 @@ urlpatterns = [
|
||||
*html_patterns,
|
||||
# SSE endpoints
|
||||
*sse_patterns,
|
||||
# Top-level stats endpoint (must be before router.urls to take precedence)
|
||||
path("stats/", ModerationStatsView.as_view(), name="moderation-stats"),
|
||||
# Include all router URLs (API endpoints)
|
||||
path("api/", include(router.urls)),
|
||||
# Standalone convert-to-edit endpoint (frontend calls /moderation/api/edit-submissions/ POST)
|
||||
|
||||
@@ -56,6 +56,7 @@ from .serializers import (
|
||||
BulkOperationSerializer,
|
||||
CompleteQueueItemSerializer,
|
||||
CreateBulkOperationSerializer,
|
||||
CreateEditSubmissionSerializer,
|
||||
CreateModerationActionSerializer,
|
||||
CreateModerationReportSerializer,
|
||||
EditSubmissionListSerializer,
|
||||
@@ -1363,6 +1364,8 @@ class EditSubmissionViewSet(viewsets.ModelViewSet):
|
||||
def get_serializer_class(self):
|
||||
if self.action == "list":
|
||||
return EditSubmissionListSerializer
|
||||
if self.action == "create":
|
||||
return CreateEditSubmissionSerializer
|
||||
return EditSubmissionSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
@@ -1378,6 +1381,191 @@ class EditSubmissionViewSet(viewsets.ModelViewSet):
|
||||
|
||||
return queryset
|
||||
|
||||
@action(detail=False, methods=["post"], permission_classes=[CanViewModerationData], url_path="with-diffs")
|
||||
def with_diffs(self, request):
|
||||
"""
|
||||
Fetch submission items with pre-calculated diffs.
|
||||
|
||||
POST /api/v1/moderation/api/submissions/with-diffs/
|
||||
|
||||
Request body:
|
||||
submission_id: str - The EditSubmission ID to fetch
|
||||
|
||||
Returns:
|
||||
items: list - List of submission items with diffs calculated
|
||||
"""
|
||||
from deepdiff import DeepDiff
|
||||
|
||||
submission_id = request.data.get("submission_id")
|
||||
|
||||
if not submission_id:
|
||||
return Response(
|
||||
{"error": "submission_id is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
try:
|
||||
submission = EditSubmission.objects.get(pk=submission_id)
|
||||
except EditSubmission.DoesNotExist:
|
||||
return Response(
|
||||
{"error": "Submission not found"},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
except Exception:
|
||||
return Response(
|
||||
{"error": "Invalid submission_id format"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Get submission changes
|
||||
entity_data = submission.changes or {}
|
||||
original_data = None
|
||||
|
||||
# Get entity type from content_type
|
||||
entity_type = submission.content_type.model if submission.content_type else None
|
||||
|
||||
# If this is an EDIT submission, try to get the original entity data
|
||||
if submission.object_id and entity_type:
|
||||
try:
|
||||
model_class = submission.content_type.model_class()
|
||||
if model_class:
|
||||
original_entity = model_class.objects.get(pk=submission.object_id)
|
||||
|
||||
from django.forms.models import model_to_dict
|
||||
original_data = model_to_dict(original_entity)
|
||||
except Exception as e:
|
||||
logger.debug(f"Could not fetch original entity for diff: {e}")
|
||||
|
||||
# Calculate field-level diffs
|
||||
field_changes = []
|
||||
|
||||
if original_data and entity_data:
|
||||
# Check if entity_data already contains pre-computed diff objects {new, old}
|
||||
# This happens when the changes dict stores diffs directly
|
||||
has_precomputed_diffs = any(
|
||||
isinstance(value, dict) and "new" in value and "old" in value and len(value) == 2
|
||||
for value in entity_data.values()
|
||||
if isinstance(value, dict)
|
||||
)
|
||||
|
||||
if has_precomputed_diffs:
|
||||
# Extract field changes directly from pre-computed diffs
|
||||
for field, value in entity_data.items():
|
||||
if field.startswith("_"):
|
||||
continue
|
||||
|
||||
if (
|
||||
isinstance(value, dict)
|
||||
and "new" in value
|
||||
and "old" in value
|
||||
and len(value) == 2
|
||||
):
|
||||
field_changes.append({
|
||||
"field": field,
|
||||
"oldValue": value.get("old"),
|
||||
"newValue": value.get("new"),
|
||||
"changeType": "modified",
|
||||
"category": "other",
|
||||
"priority": "optional",
|
||||
})
|
||||
else:
|
||||
# Use DeepDiff for regular data comparison
|
||||
try:
|
||||
diff = DeepDiff(original_data, entity_data, ignore_order=True)
|
||||
|
||||
for change_type, changes in diff.items():
|
||||
if isinstance(changes, dict):
|
||||
for field_path, change_value in changes.items():
|
||||
field_name = field_path.replace("root['", "").replace("']", "").split("']['")[0]
|
||||
|
||||
if change_type == "values_changed":
|
||||
field_changes.append({
|
||||
"field": field_name,
|
||||
"oldValue": change_value.get("old_value"),
|
||||
"newValue": change_value.get("new_value"),
|
||||
"changeType": "modified",
|
||||
"category": "other",
|
||||
"priority": "optional",
|
||||
})
|
||||
elif change_type == "dictionary_item_added":
|
||||
field_changes.append({
|
||||
"field": field_name,
|
||||
"oldValue": None,
|
||||
"newValue": change_value,
|
||||
"changeType": "added",
|
||||
"category": "other",
|
||||
"priority": "optional",
|
||||
})
|
||||
elif change_type == "dictionary_item_removed":
|
||||
field_changes.append({
|
||||
"field": field_name,
|
||||
"oldValue": change_value,
|
||||
"newValue": None,
|
||||
"changeType": "removed",
|
||||
"category": "other",
|
||||
"priority": "optional",
|
||||
})
|
||||
except Exception as e:
|
||||
logger.debug(f"Error calculating diffs: {e}")
|
||||
elif entity_data:
|
||||
# Handle entity_data that may contain pre-computed diff objects {new, old}
|
||||
for field, value in entity_data.items():
|
||||
if field.startswith("_"):
|
||||
continue
|
||||
|
||||
# Check if value is a diff object with {new, old} structure
|
||||
if (
|
||||
isinstance(value, dict)
|
||||
and "new" in value
|
||||
and "old" in value
|
||||
and len(value) == 2
|
||||
):
|
||||
# This is a pre-computed diff, extract the values
|
||||
field_changes.append({
|
||||
"field": field,
|
||||
"oldValue": value.get("old"),
|
||||
"newValue": value.get("new"),
|
||||
"changeType": "modified",
|
||||
"category": "other",
|
||||
"priority": "optional",
|
||||
})
|
||||
else:
|
||||
# Regular value (for create submissions)
|
||||
field_changes.append({
|
||||
"field": field,
|
||||
"oldValue": None,
|
||||
"newValue": value,
|
||||
"changeType": "added",
|
||||
"category": "other",
|
||||
"priority": "optional",
|
||||
})
|
||||
|
||||
action_type = "edit" if submission.object_id else "create"
|
||||
|
||||
item = {
|
||||
"id": str(submission.id),
|
||||
"submission_id": str(submission.id),
|
||||
"item_type": entity_type or "unknown",
|
||||
"action_type": action_type,
|
||||
"status": submission.status,
|
||||
"order_index": 0,
|
||||
"depends_on": None,
|
||||
"entity_data": entity_data,
|
||||
"original_entity_data": original_data,
|
||||
"item_data": entity_data,
|
||||
"original_data": original_data,
|
||||
"diff": {
|
||||
"action": action_type,
|
||||
"fieldChanges": field_changes,
|
||||
"unchangedFields": [],
|
||||
"totalChanges": len(field_changes),
|
||||
},
|
||||
"created_at": submission.created_at.isoformat() if submission.created_at else None,
|
||||
"updated_at": submission.updated_at.isoformat() if hasattr(submission, "updated_at") and submission.updated_at else None,
|
||||
}
|
||||
|
||||
return Response({"items": [item]})
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin])
|
||||
def claim(self, request, pk=None):
|
||||
"""
|
||||
@@ -1440,9 +1628,23 @@ class EditSubmissionViewSet(viewsets.ModelViewSet):
|
||||
},
|
||||
request=request,
|
||||
)
|
||||
return Response(self.get_serializer(submission).data)
|
||||
# Return response in format expected by frontend useModerationQueue.ts
|
||||
# Frontend expects: { locked_until: "...", submission_id: "..." } at top level
|
||||
lock_duration_minutes = 15
|
||||
locked_until = submission.claimed_at + timedelta(minutes=lock_duration_minutes)
|
||||
return Response({
|
||||
"success": True,
|
||||
"locked_until": locked_until.isoformat(),
|
||||
"lockedUntil": locked_until.isoformat(), # Both camelCase and snake_case for compatibility
|
||||
"submission_id": str(submission.id),
|
||||
"submissionId": str(submission.id),
|
||||
"claimed_by": request.user.username,
|
||||
"claimed_at": submission.claimed_at.isoformat() if submission.claimed_at else None,
|
||||
"status": submission.status,
|
||||
"lock_duration_minutes": lock_duration_minutes,
|
||||
})
|
||||
except ValidationError as e:
|
||||
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
return Response({"success": False, "error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin])
|
||||
def unclaim(self, request, pk=None):
|
||||
@@ -1516,6 +1718,162 @@ class EditSubmissionViewSet(viewsets.ModelViewSet):
|
||||
except Exception as e:
|
||||
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@action(detail=False, methods=["get"], permission_classes=[IsModeratorOrAdmin], url_path="my-active-claim")
|
||||
def my_active_claim(self, request):
|
||||
"""
|
||||
Get the current user's active claim on any submission.
|
||||
|
||||
Used by lock restoration to restore a moderator's active claim after
|
||||
page refresh. Returns the most recent CLAIMED submission for this user.
|
||||
|
||||
Returns:
|
||||
200: Active claim found with submission data
|
||||
200: No active claim (empty data)
|
||||
"""
|
||||
user = request.user
|
||||
|
||||
# Find any submission claimed by this user
|
||||
claimed_submission = (
|
||||
EditSubmission.objects.filter(
|
||||
claimed_by=user,
|
||||
status="CLAIMED"
|
||||
)
|
||||
.order_by("-claimed_at")
|
||||
.first()
|
||||
)
|
||||
|
||||
if not claimed_submission:
|
||||
return Response({
|
||||
"active_claim": None,
|
||||
"message": "No active claims found"
|
||||
})
|
||||
|
||||
return Response({
|
||||
"active_claim": {
|
||||
"id": claimed_submission.id,
|
||||
"status": claimed_submission.status,
|
||||
"claimed_at": claimed_submission.claimed_at.isoformat() if claimed_submission.claimed_at else None,
|
||||
# Include basic submission info for context
|
||||
"content_type": claimed_submission.content_type.model if claimed_submission.content_type else None,
|
||||
"object_id": claimed_submission.object_id,
|
||||
},
|
||||
"message": "Active claim found"
|
||||
})
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin])
|
||||
def extend(self, request, pk=None):
|
||||
"""
|
||||
Extend the lock on a claimed submission.
|
||||
|
||||
Only the claiming moderator can extend the lock.
|
||||
Extends the lock by the default duration (15 minutes).
|
||||
|
||||
Returns:
|
||||
200: Lock extended with new expiration time
|
||||
400: Submission not in claimed state
|
||||
403: User is not the claiming moderator
|
||||
404: Submission not found
|
||||
"""
|
||||
submission = self.get_object()
|
||||
user = request.user
|
||||
|
||||
# Only the claiming user can extend
|
||||
if submission.claimed_by != user:
|
||||
return Response(
|
||||
{"error": "Only the claiming moderator can extend the lock"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
if submission.status != "CLAIMED":
|
||||
return Response(
|
||||
{"error": "Submission is not claimed"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Extend the claim time by 15 minutes
|
||||
extension_minutes = request.data.get("extension_minutes", 15)
|
||||
new_claimed_at = timezone.now()
|
||||
submission.claimed_at = new_claimed_at
|
||||
submission.save(update_fields=["claimed_at"])
|
||||
|
||||
new_expires_at = new_claimed_at + timedelta(minutes=extension_minutes)
|
||||
|
||||
log_business_event(
|
||||
logger,
|
||||
event_type="submission_lock_extended",
|
||||
message=f"EditSubmission {submission.id} lock extended by {user.username}",
|
||||
context={
|
||||
"model": "EditSubmission",
|
||||
"object_id": submission.id,
|
||||
"extended_by": user.username,
|
||||
"new_expires_at": new_expires_at.isoformat(),
|
||||
},
|
||||
request=request,
|
||||
)
|
||||
|
||||
return Response({
|
||||
"success": True,
|
||||
"new_expiry": new_expires_at.isoformat(),
|
||||
"newExpiresAt": new_expires_at.isoformat(), # CamelCase for compatibility
|
||||
"submission_id": str(submission.id),
|
||||
"extension_minutes": extension_minutes,
|
||||
})
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin])
|
||||
def release(self, request, pk=None):
|
||||
"""
|
||||
Release the lock on a claimed submission (alias for unclaim).
|
||||
|
||||
This is a convenience endpoint that mirrors the unclaim behavior
|
||||
but is named to match the frontend's lock terminology.
|
||||
|
||||
Returns:
|
||||
200: Lock released successfully
|
||||
400: Submission not in claimed state
|
||||
403: User is not the claiming moderator or admin
|
||||
404: Submission not found
|
||||
"""
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
submission = self.get_object()
|
||||
user = request.user
|
||||
silent = request.data.get("silent", False)
|
||||
|
||||
# Only the claiming user or an admin can release
|
||||
if submission.claimed_by != user and not user.is_staff:
|
||||
return Response(
|
||||
{"error": "Only the claiming moderator or an admin can release the lock"},
|
||||
status=status.HTTP_403_FORBIDDEN,
|
||||
)
|
||||
|
||||
if submission.status != "CLAIMED":
|
||||
return Response(
|
||||
{"error": "Submission is not claimed"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
try:
|
||||
submission.unclaim(user=user)
|
||||
log_business_event(
|
||||
logger,
|
||||
event_type="submission_lock_released",
|
||||
message=f"EditSubmission {submission.id} lock released by {user.username}",
|
||||
context={
|
||||
"model": "EditSubmission",
|
||||
"object_id": submission.id,
|
||||
"released_by": user.username,
|
||||
"silent": silent,
|
||||
},
|
||||
request=request,
|
||||
)
|
||||
return Response({
|
||||
"success": True,
|
||||
"message": "Lock released successfully",
|
||||
"submission_id": str(submission.id),
|
||||
})
|
||||
except ValidationError as e:
|
||||
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin], url_path="convert-to-edit")
|
||||
def convert_to_edit(self, request, pk=None):
|
||||
"""
|
||||
@@ -1706,9 +2064,23 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):
|
||||
},
|
||||
request=request,
|
||||
)
|
||||
return Response(self.get_serializer(submission).data)
|
||||
# Return response in format expected by frontend useModerationQueue.ts
|
||||
# Frontend expects: { locked_until: "...", submission_id: "..." } at top level
|
||||
lock_duration_minutes = 15
|
||||
locked_until = submission.claimed_at + timedelta(minutes=lock_duration_minutes)
|
||||
return Response({
|
||||
"success": True,
|
||||
"locked_until": locked_until.isoformat(),
|
||||
"lockedUntil": locked_until.isoformat(), # Both camelCase and snake_case for compatibility
|
||||
"submission_id": str(submission.id),
|
||||
"submissionId": str(submission.id),
|
||||
"claimed_by": request.user.username,
|
||||
"claimed_at": submission.claimed_at.isoformat() if submission.claimed_at else None,
|
||||
"status": submission.status,
|
||||
"lock_duration_minutes": lock_duration_minutes,
|
||||
})
|
||||
except ValidationError as e:
|
||||
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
return Response({"success": False, "error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin])
|
||||
def unclaim(self, request, pk=None):
|
||||
@@ -2139,3 +2511,117 @@ class ConvertSubmissionToEditView(APIView):
|
||||
{"success": False, "message": "Internal server error"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Aggregated Moderation Stats View
|
||||
# ============================================================================
|
||||
|
||||
|
||||
from rest_framework.views import APIView
|
||||
|
||||
|
||||
class ModerationStatsView(APIView):
|
||||
"""
|
||||
View for aggregated moderation statistics.
|
||||
|
||||
Returns comprehensive stats from all moderation models including
|
||||
reports, queue, actions, and bulk operations.
|
||||
"""
|
||||
|
||||
permission_classes = [CanViewModerationData]
|
||||
|
||||
def get(self, request):
|
||||
"""Get aggregated moderation statistics."""
|
||||
now = timezone.now()
|
||||
|
||||
# Report stats
|
||||
reports = ModerationReport.objects.all()
|
||||
total_reports = reports.count()
|
||||
pending_reports = reports.filter(status="PENDING").count()
|
||||
resolved_reports = reports.filter(status="RESOLVED").count()
|
||||
|
||||
# Calculate overdue reports
|
||||
overdue_reports = 0
|
||||
for report in reports.filter(status__in=["PENDING", "UNDER_REVIEW"]):
|
||||
sla_hours = {"URGENT": 2, "HIGH": 8, "MEDIUM": 24, "LOW": 72}
|
||||
hours_since_created = (now - report.created_at).total_seconds() / 3600
|
||||
threshold = sla_hours.get(report.priority, 72)
|
||||
if hours_since_created > threshold:
|
||||
overdue_reports += 1
|
||||
|
||||
# Queue stats
|
||||
queue = ModerationQueue.objects.all()
|
||||
queue_size = queue.count()
|
||||
assigned_items = queue.filter(assigned_to__isnull=False).count()
|
||||
unassigned_items = queue.filter(assigned_to__isnull=True).count()
|
||||
|
||||
# Action stats
|
||||
actions = ModerationAction.objects.all()
|
||||
total_actions = actions.count()
|
||||
active_actions = actions.filter(is_active=True).count()
|
||||
expired_actions = actions.filter(
|
||||
is_active=True,
|
||||
expires_at__isnull=False,
|
||||
expires_at__lt=now
|
||||
).count()
|
||||
|
||||
# Bulk operation stats
|
||||
bulk_ops = BulkOperation.objects.all()
|
||||
running_operations = bulk_ops.filter(status="RUNNING").count()
|
||||
completed_operations = bulk_ops.filter(status="COMPLETED").count()
|
||||
failed_operations = bulk_ops.filter(status="FAILED").count()
|
||||
|
||||
# Average resolution time
|
||||
resolved_queryset = reports.filter(
|
||||
status="RESOLVED",
|
||||
resolved_at__isnull=False
|
||||
)
|
||||
avg_resolution_time = 0
|
||||
if resolved_queryset.exists():
|
||||
total_time = sum([
|
||||
(r.resolved_at - r.created_at).total_seconds() / 3600
|
||||
for r in resolved_queryset if r.resolved_at
|
||||
])
|
||||
avg_resolution_time = total_time / resolved_queryset.count()
|
||||
|
||||
# Reports by priority and type
|
||||
reports_by_priority = dict(
|
||||
reports.values_list("priority").annotate(count=Count("id"))
|
||||
)
|
||||
reports_by_type = dict(
|
||||
reports.values_list("report_type").annotate(count=Count("id"))
|
||||
)
|
||||
|
||||
stats_data = {
|
||||
# Report stats
|
||||
"total_reports": total_reports,
|
||||
"pending_reports": pending_reports,
|
||||
"resolved_reports": resolved_reports,
|
||||
"overdue_reports": overdue_reports,
|
||||
|
||||
# Queue stats
|
||||
"queue_size": queue_size,
|
||||
"assigned_items": assigned_items,
|
||||
"unassigned_items": unassigned_items,
|
||||
|
||||
# Action stats
|
||||
"total_actions": total_actions,
|
||||
"active_actions": active_actions,
|
||||
"expired_actions": expired_actions,
|
||||
|
||||
# Bulk operation stats
|
||||
"running_operations": running_operations,
|
||||
"completed_operations": completed_operations,
|
||||
"failed_operations": failed_operations,
|
||||
|
||||
# Performance metrics
|
||||
"average_resolution_time_hours": round(avg_resolution_time, 2),
|
||||
"reports_by_priority": reports_by_priority,
|
||||
"reports_by_type": reports_by_type,
|
||||
|
||||
# Empty metrics array for frontend compatibility
|
||||
"metrics": [],
|
||||
}
|
||||
|
||||
return Response(stats_data)
|
||||
|
||||
@@ -4,7 +4,7 @@ from rest_framework.routers import DefaultRouter
|
||||
from .views import ReviewViewSet
|
||||
|
||||
router = DefaultRouter()
|
||||
router.register(r"reviews", ReviewViewSet, basename="review")
|
||||
router.register(r"", ReviewViewSet, basename="review")
|
||||
|
||||
urlpatterns = [
|
||||
path("", include(router.urls)),
|
||||
|
||||
@@ -9,7 +9,7 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0007_auto_20250421_0444"),
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("rides", "0028_ridecredit_ridecreditevent_ridecredit_insert_insert_and_more"),
|
||||
]
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0007_auto_20250421_0444"),
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("rides", "0029_darkridestats_darkridestatsevent_flatridestats_and_more"),
|
||||
]
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0007_auto_20250421_0444"),
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("rides", "0030_add_kiddie_and_transportation_stats"),
|
||||
]
|
||||
|
||||
|
||||
@@ -0,0 +1,41 @@
|
||||
# Generated by Django 5.2.9 on 2026-01-06 17:43
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('contenttypes', '0002_remove_content_type_name'),
|
||||
('support', '0002_add_category_to_ticket'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Report',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now=True)),
|
||||
('object_id', models.CharField(help_text='ID of the entity being reported', max_length=50)),
|
||||
('report_type', models.CharField(choices=[('inaccurate', 'Inaccurate Information'), ('inappropriate', 'Inappropriate Content'), ('spam', 'Spam'), ('copyright', 'Copyright Violation'), ('duplicate', 'Duplicate Content'), ('other', 'Other')], db_index=True, help_text='Type of issue being reported', max_length=20)),
|
||||
('reason', models.TextField(help_text='Detailed description of the issue')),
|
||||
('status', models.CharField(choices=[('pending', 'Pending'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('dismissed', 'Dismissed')], db_index=True, default='pending', help_text='Current status of the report', max_length=20)),
|
||||
('resolved_at', models.DateTimeField(blank=True, help_text='When the report was resolved', null=True)),
|
||||
('resolution_notes', models.TextField(blank=True, help_text='Notes about how the report was resolved')),
|
||||
('content_type', models.ForeignKey(help_text='Type of entity being reported', on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')),
|
||||
('reporter', models.ForeignKey(blank=True, help_text='User who submitted the report', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='submitted_reports', to=settings.AUTH_USER_MODEL)),
|
||||
('resolved_by', models.ForeignKey(blank=True, help_text='Moderator who resolved the report', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='resolved_reports', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Report',
|
||||
'verbose_name_plural': 'Reports',
|
||||
'ordering': ['-created_at'],
|
||||
'abstract': False,
|
||||
'indexes': [models.Index(fields=['status', 'created_at'], name='support_rep_status_aea90b_idx'), models.Index(fields=['content_type', 'object_id'], name='support_rep_content_e9be3b_idx'), models.Index(fields=['report_type', 'created_at'], name='support_rep_report__a54360_idx')],
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -66,3 +66,105 @@ class Ticket(TrackedModel):
|
||||
if self.user and not self.email:
|
||||
self.email = self.user.email
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
|
||||
class Report(TrackedModel):
|
||||
"""
|
||||
User-submitted reports about content issues.
|
||||
|
||||
Reports allow users to flag problems with specific entities
|
||||
(parks, rides, reviews, etc.) for moderator review.
|
||||
"""
|
||||
|
||||
class ReportType(models.TextChoices):
|
||||
INACCURATE = "inaccurate", "Inaccurate Information"
|
||||
INAPPROPRIATE = "inappropriate", "Inappropriate Content"
|
||||
SPAM = "spam", "Spam"
|
||||
COPYRIGHT = "copyright", "Copyright Violation"
|
||||
DUPLICATE = "duplicate", "Duplicate Content"
|
||||
OTHER = "other", "Other"
|
||||
|
||||
class Status(models.TextChoices):
|
||||
PENDING = "pending", "Pending"
|
||||
INVESTIGATING = "investigating", "Investigating"
|
||||
RESOLVED = "resolved", "Resolved"
|
||||
DISMISSED = "dismissed", "Dismissed"
|
||||
|
||||
# Reporter (optional for anonymous reports)
|
||||
reporter = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="submitted_reports",
|
||||
help_text="User who submitted the report",
|
||||
)
|
||||
|
||||
# Target entity using GenericForeignKey
|
||||
content_type = models.ForeignKey(
|
||||
"contenttypes.ContentType",
|
||||
on_delete=models.CASCADE,
|
||||
help_text="Type of entity being reported",
|
||||
)
|
||||
object_id = models.CharField(
|
||||
max_length=50,
|
||||
help_text="ID of the entity being reported",
|
||||
)
|
||||
# Note: GenericForeignKey doesn't create a database column
|
||||
# It's a convenience for accessing the related object
|
||||
# content_object = GenericForeignKey("content_type", "object_id")
|
||||
|
||||
# Report details
|
||||
report_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=ReportType.choices,
|
||||
db_index=True,
|
||||
help_text="Type of issue being reported",
|
||||
)
|
||||
reason = models.TextField(
|
||||
help_text="Detailed description of the issue",
|
||||
)
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=Status.choices,
|
||||
default=Status.PENDING,
|
||||
db_index=True,
|
||||
help_text="Current status of the report",
|
||||
)
|
||||
|
||||
# Resolution
|
||||
resolved_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the report was resolved",
|
||||
)
|
||||
resolved_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="resolved_reports",
|
||||
help_text="Moderator who resolved the report",
|
||||
)
|
||||
resolution_notes = models.TextField(
|
||||
blank=True,
|
||||
help_text="Notes about how the report was resolved",
|
||||
)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
verbose_name = "Report"
|
||||
verbose_name_plural = "Reports"
|
||||
ordering = ["-created_at"]
|
||||
indexes = [
|
||||
models.Index(fields=["status", "created_at"]),
|
||||
models.Index(fields=["content_type", "object_id"]),
|
||||
models.Index(fields=["report_type", "created_at"]),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"[{self.get_report_type_display()}] {self.content_type} #{self.object_id}"
|
||||
|
||||
@property
|
||||
def is_resolved(self) -> bool:
|
||||
return self.status in (self.Status.RESOLVED, self.Status.DISMISSED)
|
||||
|
||||
|
||||
@@ -33,3 +33,110 @@ class TicketSerializer(serializers.ModelSerializer):
|
||||
if request and not request.user.is_authenticated and not data.get("email"):
|
||||
raise serializers.ValidationError({"email": "Email is required for guests."})
|
||||
return data
|
||||
|
||||
|
||||
class ReportSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for Report model."""
|
||||
|
||||
reporter_username = serializers.CharField(source="reporter.username", read_only=True, allow_null=True)
|
||||
resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
|
||||
report_type_display = serializers.CharField(source="get_report_type_display", read_only=True)
|
||||
status_display = serializers.CharField(source="get_status_display", read_only=True)
|
||||
content_type_name = serializers.CharField(source="content_type.model", read_only=True)
|
||||
is_resolved = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
from .models import Report
|
||||
|
||||
model = Report
|
||||
fields = [
|
||||
"id",
|
||||
"reporter",
|
||||
"reporter_username",
|
||||
"content_type",
|
||||
"content_type_name",
|
||||
"object_id",
|
||||
"report_type",
|
||||
"report_type_display",
|
||||
"reason",
|
||||
"status",
|
||||
"status_display",
|
||||
"resolved_at",
|
||||
"resolved_by",
|
||||
"resolved_by_username",
|
||||
"resolution_notes",
|
||||
"is_resolved",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"reporter",
|
||||
"resolved_at",
|
||||
"resolved_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
|
||||
|
||||
class ReportCreateSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for creating reports with entity type as string."""
|
||||
|
||||
entity_type = serializers.CharField(write_only=True, help_text="Type of entity: park, ride, review, etc.")
|
||||
entity_id = serializers.CharField(write_only=True, help_text="ID of the entity being reported")
|
||||
|
||||
class Meta:
|
||||
from .models import Report
|
||||
|
||||
model = Report
|
||||
fields = [
|
||||
"entity_type",
|
||||
"entity_id",
|
||||
"report_type",
|
||||
"reason",
|
||||
]
|
||||
|
||||
def validate(self, data):
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
entity_type = data.pop("entity_type")
|
||||
entity_id = data.pop("entity_id")
|
||||
|
||||
# Map common entity types to app.model
|
||||
type_mapping = {
|
||||
"park": ("parks", "park"),
|
||||
"ride": ("rides", "ride"),
|
||||
"review": ("reviews", "review"),
|
||||
"user": ("accounts", "user"),
|
||||
}
|
||||
|
||||
if entity_type in type_mapping:
|
||||
app_label, model_name = type_mapping[entity_type]
|
||||
else:
|
||||
# Try to parse as app.model
|
||||
parts = entity_type.split(".")
|
||||
if len(parts) != 2:
|
||||
raise serializers.ValidationError(
|
||||
{"entity_type": f"Unknown entity type: {entity_type}. Use 'park', 'ride', 'review', or 'app.model'."}
|
||||
)
|
||||
app_label, model_name = parts
|
||||
|
||||
try:
|
||||
content_type = ContentType.objects.get(app_label=app_label, model=model_name)
|
||||
except ContentType.DoesNotExist:
|
||||
raise serializers.ValidationError({"entity_type": f"Unknown entity type: {entity_type}"})
|
||||
|
||||
data["content_type"] = content_type
|
||||
data["object_id"] = entity_id
|
||||
return data
|
||||
|
||||
|
||||
class ReportResolveSerializer(serializers.Serializer):
|
||||
"""Serializer for resolving reports."""
|
||||
|
||||
status = serializers.ChoiceField(
|
||||
choices=[("resolved", "Resolved"), ("dismissed", "Dismissed")],
|
||||
default="resolved",
|
||||
)
|
||||
notes = serializers.CharField(required=False, allow_blank=True)
|
||||
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
from django.urls import include, path
|
||||
from rest_framework.routers import DefaultRouter
|
||||
|
||||
from .views import TicketViewSet
|
||||
from .views import ReportViewSet, TicketViewSet
|
||||
|
||||
router = DefaultRouter()
|
||||
router.register(r"tickets", TicketViewSet, basename="ticket")
|
||||
router.register(r"reports", ReportViewSet, basename="report")
|
||||
|
||||
urlpatterns = [
|
||||
path("", include(router.urls)),
|
||||
]
|
||||
|
||||
|
||||
@@ -1,8 +1,16 @@
|
||||
from django.utils import timezone
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from rest_framework import filters, permissions, viewsets
|
||||
from rest_framework import filters, permissions, status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from .models import Ticket
|
||||
from .serializers import TicketSerializer
|
||||
from .models import Report, Ticket
|
||||
from .serializers import (
|
||||
ReportCreateSerializer,
|
||||
ReportResolveSerializer,
|
||||
ReportSerializer,
|
||||
TicketSerializer,
|
||||
)
|
||||
|
||||
|
||||
class TicketViewSet(viewsets.ModelViewSet):
|
||||
@@ -33,3 +41,61 @@ class TicketViewSet(viewsets.ModelViewSet):
|
||||
serializer.save(user=self.request.user, email=self.request.user.email)
|
||||
else:
|
||||
serializer.save()
|
||||
|
||||
|
||||
class ReportViewSet(viewsets.ModelViewSet):
|
||||
"""
|
||||
ViewSet for handling user-submitted content reports.
|
||||
|
||||
- Authenticated users can CREATE reports
|
||||
- Staff can LIST/RETRIEVE all reports
|
||||
- Users can LIST/RETRIEVE their own reports
|
||||
- Staff can RESOLVE reports
|
||||
"""
|
||||
|
||||
queryset = Report.objects.select_related("reporter", "resolved_by", "content_type").all()
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
filter_backends = [DjangoFilterBackend, filters.OrderingFilter, filters.SearchFilter]
|
||||
filterset_fields = ["status", "report_type"]
|
||||
search_fields = ["reason", "resolution_notes"]
|
||||
ordering_fields = ["created_at", "status", "report_type"]
|
||||
ordering = ["-created_at"]
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.action == "create":
|
||||
return ReportCreateSerializer
|
||||
if self.action == "resolve":
|
||||
return ReportResolveSerializer
|
||||
return ReportSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
user = self.request.user
|
||||
if user.is_staff:
|
||||
return Report.objects.select_related("reporter", "resolved_by", "content_type").all()
|
||||
return Report.objects.select_related("reporter", "resolved_by", "content_type").filter(reporter=user)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
serializer.save(reporter=self.request.user)
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
|
||||
def resolve(self, request, pk=None):
|
||||
"""Mark a report as resolved or dismissed."""
|
||||
report = self.get_object()
|
||||
|
||||
if report.is_resolved:
|
||||
return Response(
|
||||
{"detail": "Report is already resolved"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = ReportResolveSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
report.status = serializer.validated_data.get("status", "resolved")
|
||||
report.resolved_at = timezone.now()
|
||||
report.resolved_by = request.user
|
||||
report.resolution_notes = serializer.validated_data.get("notes", "")
|
||||
report.save()
|
||||
|
||||
return Response(ReportSerializer(report).data)
|
||||
|
||||
|
||||
@@ -48,8 +48,18 @@ DATABASES = {
|
||||
# CONN_MAX_AGE: How long to keep connections open (in seconds)
|
||||
# 0 = Close after each request (default Django behavior)
|
||||
# None = Unlimited reuse (not recommended)
|
||||
# 600 = 10 minutes (good balance for most applications)
|
||||
CONN_MAX_AGE = config("DATABASE_CONN_MAX_AGE", default=600, cast=int)
|
||||
# 60 = 1 minute (good for development to prevent connection accumulation)
|
||||
# 600 = 10 minutes (good for production)
|
||||
|
||||
# Check if we're in debug mode (imported from base settings)
|
||||
DEBUG = config("DEBUG", default=False, cast=bool)
|
||||
|
||||
# Use shorter connection lifetime in development to prevent accumulation
|
||||
CONN_MAX_AGE = config(
|
||||
"DATABASE_CONN_MAX_AGE",
|
||||
default=60 if DEBUG else 600,
|
||||
cast=int
|
||||
)
|
||||
|
||||
# Apply CONN_MAX_AGE to the default database
|
||||
DATABASES["default"]["CONN_MAX_AGE"] = CONN_MAX_AGE
|
||||
@@ -59,12 +69,21 @@ DATABASES["default"]["CONN_MAX_AGE"] = CONN_MAX_AGE
|
||||
# =============================================================================
|
||||
# These settings are passed to psycopg2 when creating new connections
|
||||
|
||||
# Shorter timeouts in development to fail fast
|
||||
connect_timeout = config("DATABASE_CONNECT_TIMEOUT", default=5 if DEBUG else 10, cast=int)
|
||||
statement_timeout = config("DATABASE_STATEMENT_TIMEOUT", default=30000, cast=int)
|
||||
# Idle in transaction timeout: close connections that sit idle in a transaction
|
||||
# This prevents connection leaks from unclosed transactions
|
||||
idle_in_transaction_timeout = config("DATABASE_IDLE_IN_TRANSACTION_TIMEOUT", default=60000, cast=int)
|
||||
|
||||
DATABASE_OPTIONS = {
|
||||
# Connection timeout in seconds
|
||||
"connect_timeout": config("DATABASE_CONNECT_TIMEOUT", default=10, cast=int),
|
||||
# Query timeout in milliseconds (30 seconds default)
|
||||
# This prevents runaway queries from blocking the database
|
||||
"options": f"-c statement_timeout={config('DATABASE_STATEMENT_TIMEOUT', default=30000, cast=int)}",
|
||||
"connect_timeout": connect_timeout,
|
||||
# PostgreSQL server-side options
|
||||
"options": (
|
||||
f"-c statement_timeout={statement_timeout} "
|
||||
f"-c idle_in_transaction_session_timeout={idle_in_transaction_timeout}"
|
||||
),
|
||||
}
|
||||
|
||||
# Apply options to PostgreSQL databases
|
||||
@@ -72,6 +91,7 @@ if "postgis" in DATABASE_URL or "postgresql" in DATABASE_URL:
|
||||
DATABASES["default"].setdefault("OPTIONS", {})
|
||||
DATABASES["default"]["OPTIONS"].update(DATABASE_OPTIONS)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# GeoDjango Settings
|
||||
# =============================================================================
|
||||
|
||||
@@ -23,7 +23,7 @@ dependencies = [
|
||||
# =============================================================================
|
||||
# Authentication & Security
|
||||
# =============================================================================
|
||||
"django-allauth>=65.3.0",
|
||||
"django-allauth>=65.9.0",
|
||||
"djangorestframework-simplejwt>=5.5.1",
|
||||
"pyjwt>=2.10.1",
|
||||
"cryptography>=44.0.0",
|
||||
@@ -80,6 +80,7 @@ dependencies = [
|
||||
"httpx>=0.28.1",
|
||||
"django-fsm-2>=4.1.0",
|
||||
"django-notifications-hq>=1.8.3",
|
||||
"deepdiff>=8.0.0",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
||||
@@ -128,7 +128,7 @@
|
||||
{% endif %}
|
||||
|
||||
<!-- FSM Actions -->
|
||||
{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
|
||||
{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
|
||||
<div class="mt-6 review-notes" x-data="{ showNotes: false }">
|
||||
<div x-show="showNotes"
|
||||
x-transition:enter="transition ease-out duration-200"
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
|
||||
{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
|
||||
<div class="mt-4 review-notes" x-data="{ showNotes: false }">
|
||||
<textarea x-show="showNotes"
|
||||
name="notes"
|
||||
|
||||
@@ -52,7 +52,7 @@
|
||||
{% endif %}
|
||||
|
||||
<!-- FSM Actions -->
|
||||
{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
|
||||
{% if object.status == 'PENDING' or object.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
|
||||
<div class="mt-4 review-notes" x-data="{ showNotes: false }">
|
||||
<textarea x-show="showNotes"
|
||||
name="notes"
|
||||
|
||||
@@ -410,7 +410,7 @@
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role in 'ADMIN','SUPERUSER' %}
|
||||
{% if submission.status == 'PENDING' or submission.status == 'ESCALATED' and user.role == 'ADMIN' or user.role == 'SUPERUSER' %}
|
||||
<div class="mt-6 review-notes" x-data="{ showNotes: false }">
|
||||
<div x-show="showNotes"
|
||||
x-transition:enter="transition ease-out duration-200"
|
||||
|
||||
37
uv.lock
generated
37
uv.lock
generated
@@ -22,14 +22,14 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "4.12.0"
|
||||
version = "4.12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "idna" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -434,6 +434,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "deepdiff"
|
||||
version = "8.6.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "orderly-set" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/19/76/36c9aab3d5c19a94091f7c6c6e784efca50d87b124bf026c36e94719f33c/deepdiff-8.6.1.tar.gz", hash = "sha256:ec56d7a769ca80891b5200ec7bd41eec300ced91ebcc7797b41eb2b3f3ff643a", size = 634054, upload-time = "2025-09-03T19:40:41.461Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dj-database-url"
|
||||
version = "3.1.0"
|
||||
@@ -1240,6 +1252,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/13/6b/9721ba7c68036316bd8aeb596b397253590c87d7045c9d6fc82b7364eff4/nplusone-1.0.0-py2.py3-none-any.whl", hash = "sha256:96b1e6e29e6af3e71b67d0cc012a5ec8c97c6a2f5399f4ba41a2bbe0e253a9ac", size = 15920, upload-time = "2018-05-21T03:40:23.69Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "orderly-set"
|
||||
version = "5.5.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/4a/88/39c83c35d5e97cc203e9e77a4f93bf87ec89cf6a22ac4818fdcc65d66584/orderly_set-5.5.0.tar.gz", hash = "sha256:e87185c8e4d8afa64e7f8160ee2c542a475b738bc891dc3f58102e654125e6ce", size = 27414, upload-time = "2025-07-10T20:10:55.885Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/12/27/fb8d7338b4d551900fa3e580acbe7a0cf655d940e164cb5c00ec31961094/orderly_set-5.5.0-py3-none-any.whl", hash = "sha256:46f0b801948e98f427b412fcabb831677194c05c3b699b80de260374baa0b1e7", size = 13068, upload-time = "2025-07-10T20:10:54.377Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "outcome"
|
||||
version = "1.3.0.post0"
|
||||
@@ -1263,11 +1284,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pathspec"
|
||||
version = "0.12.1"
|
||||
version = "1.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/28/2e/83722ece0f6ee24387d6cb830dd562ddbcd6ce0b9d76072c6849670c31b4/pathspec-1.0.1.tar.gz", hash = "sha256:e2769b508d0dd47b09af6ee2c75b2744a2cb1f474ae4b1494fd6a1b7a841613c", size = 129791, upload-time = "2026-01-06T13:02:55.15Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/fe/2257c71721aeab6a6e8aa1f00d01f2a20f58547d249a6c8fef5791f559fc/pathspec-1.0.1-py3-none-any.whl", hash = "sha256:8870061f22c58e6d83463cfce9a7dd6eca0512c772c1001fb09ac64091816721", size = 54584, upload-time = "2026-01-06T13:02:53.601Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2028,6 +2049,7 @@ source = { virtual = "backend" }
|
||||
dependencies = [
|
||||
{ name = "celery" },
|
||||
{ name = "cryptography" },
|
||||
{ name = "deepdiff" },
|
||||
{ name = "dj-database-url" },
|
||||
{ name = "django" },
|
||||
{ name = "django-allauth" },
|
||||
@@ -2112,9 +2134,10 @@ test = [
|
||||
requires-dist = [
|
||||
{ name = "celery", specifier = ">=5.5.3,<6" },
|
||||
{ name = "cryptography", specifier = ">=44.0.0" },
|
||||
{ name = "deepdiff", specifier = ">=8.0.0" },
|
||||
{ name = "dj-database-url", specifier = ">=2.3.0" },
|
||||
{ name = "django", specifier = ">=5.2.8" },
|
||||
{ name = "django-allauth", specifier = ">=65.3.0" },
|
||||
{ name = "django-allauth", specifier = ">=65.9.0" },
|
||||
{ name = "django-celery-beat", specifier = ">=2.8.1" },
|
||||
{ name = "django-celery-results", specifier = ">=2.6.0" },
|
||||
{ name = "django-cleanup", specifier = ">=8.1.0" },
|
||||
|
||||
Reference in New Issue
Block a user