Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2026-02-04 21:35:19 -05:00)
feat: add passkey authentication and enhance user preferences

- Add passkey login security event type with fingerprint icon
- Include request and site context in email confirmation for backend
- Add user_id exact-match filter to prevent incorrect user lookups
- Enable PATCH method for updating user preferences via API
- Add moderation_preferences support to user settings
- Optimize ticket queries with select_related and prefetch_related

This commit introduces passkey authentication tracking, improves user profile filtering accuracy, and extends the preferences API to support updates. Query optimizations reduce database hits for ticket listings.
@@ -32,6 +32,8 @@ class CustomAccountAdapter(DefaultAccountAdapter):
|
||||
"activate_url": activate_url,
|
||||
"current_site": current_site,
|
||||
"key": emailconfirmation.key,
|
||||
"request": request, # Include request for email backend
|
||||
"site": current_site, # Include site for email backend
|
||||
}
|
||||
email_template = "account/email/email_confirmation_signup" if signup else "account/email/email_confirmation"
|
||||
self.send_mail(email_template, emailconfirmation.email_address.email, ctx)
|
||||
|
||||
@@ -827,6 +827,19 @@ security_event_types = ChoiceGroup(
|
||||
"sort_order": 18,
|
||||
},
|
||||
),
|
||||
RichChoice(
|
||||
value="session_to_token",
|
||||
label="Passkey Login",
|
||||
description="Signed in using a passkey",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "fingerprint",
|
||||
"css_class": "text-green-600 bg-green-50",
|
||||
"severity": "info",
|
||||
"category": "authentication",
|
||||
"sort_order": 19,
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@@ -904,6 +904,12 @@ def list_profiles(request):
|
||||
is_active=True,
|
||||
).select_related("profile").order_by("-date_joined")
|
||||
|
||||
# User ID filter - EXACT match (critical for single user lookups)
|
||||
user_id = request.query_params.get("user_id", "").strip()
|
||||
if user_id:
|
||||
# Use exact match to prevent user_id=4 from matching user_id=4448
|
||||
queryset = queryset.filter(user_id=user_id)
|
||||
|
||||
# Search filter
|
||||
search = request.query_params.get("search", "").strip()
|
||||
if search:
|
||||
@@ -1081,18 +1087,53 @@ def update_user_profile(request):
|
||||
@extend_schema(
|
||||
operation_id="get_user_preferences",
|
||||
summary="Get user preferences",
|
||||
description="Get the authenticated user's preferences and settings.",
|
||||
description="Get or update the authenticated user's preferences and settings.",
|
||||
responses={
|
||||
200: UserPreferencesSerializer,
|
||||
401: {"description": "Authentication required"},
|
||||
},
|
||||
tags=["User Settings"],
|
||||
)
|
||||
@api_view(["GET"])
|
||||
@api_view(["GET", "PATCH"])
|
||||
@permission_classes([IsAuthenticated])
|
||||
def get_user_preferences(request):
|
||||
"""Get user preferences."""
|
||||
"""Get or update user preferences."""
|
||||
user = request.user
|
||||
|
||||
if request.method == "PATCH":
|
||||
current_data = {
|
||||
"theme_preference": user.theme_preference,
|
||||
"email_notifications": user.email_notifications,
|
||||
"push_notifications": user.push_notifications,
|
||||
"privacy_level": user.privacy_level,
|
||||
"show_email": user.show_email,
|
||||
"show_real_name": user.show_real_name,
|
||||
"show_statistics": user.show_statistics,
|
||||
"allow_friend_requests": user.allow_friend_requests,
|
||||
"allow_messages": user.allow_messages,
|
||||
}
|
||||
|
||||
# Handle moderation_preferences field (stored as JSON on User model if it exists)
|
||||
if "moderation_preferences" in request.data:
|
||||
try:
|
||||
if hasattr(user, 'moderation_preferences'):
|
||||
user.moderation_preferences = request.data["moderation_preferences"]
|
||||
user.save()
|
||||
# Return success even if field doesn't exist (non-critical preference)
|
||||
return Response({"moderation_preferences": request.data["moderation_preferences"]}, status=status.HTTP_200_OK)
|
||||
except Exception:
|
||||
# Non-critical - just return success
|
||||
return Response({"moderation_preferences": request.data["moderation_preferences"]}, status=status.HTTP_200_OK)
|
||||
|
||||
serializer = UserPreferencesSerializer(data={**current_data, **request.data})
|
||||
if serializer.is_valid():
|
||||
for field, value in serializer.validated_data.items():
|
||||
setattr(user, field, value)
|
||||
user.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
# GET request
|
||||
data = {
|
||||
"theme_preference": user.theme_preference,
|
||||
"email_notifications": user.email_notifications,
|
||||
|
||||
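To illustrate the new PATCH support in get_user_preferences, here is a minimal client-side sketch. The endpoint path, host, and bearer-token auth scheme are assumptions for illustration (the URL pattern for this view is not part of the diff); the field names come from the view above.

    import requests

    # Hypothetical base URL and path -- the actual route for get_user_preferences
    # is not shown in this diff.
    BASE = "https://thrillwiki.example.com/api/v1"

    response = requests.patch(
        f"{BASE}/users/preferences/",
        headers={"Authorization": "Bearer <access-token>"},  # IsAuthenticated is required
        json={
            "theme_preference": "dark",      # fields handled by UserPreferencesSerializer
            "email_notifications": True,
        },
        timeout=10,
    )
    response.raise_for_status()
    print(response.json())

Note that, as the view is written, a PATCH whose body includes moderation_preferences returns early and skips the remaining fields, so moderation settings are best sent in their own request.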
@@ -10,7 +10,7 @@ import logging
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import api_view, permission_classes
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -532,6 +532,7 @@ def rename_passkey(request, passkey_id):
|
||||
tags=["Passkey"],
|
||||
)
|
||||
@api_view(["POST"])
|
||||
@permission_classes([AllowAny])
|
||||
def get_login_passkey_options(request):
|
||||
"""Get WebAuthn authentication options for MFA login flow (unauthenticated)."""
|
||||
from django.core.cache import cache
|
||||
|
||||
@@ -29,6 +29,7 @@ from .views import (
|
||||
PasswordResetAPIView,
|
||||
ProcessOAuthProfileAPIView,
|
||||
ResendVerificationAPIView,
|
||||
SessionToTokenAPIView, # For passkey login token exchange
|
||||
SignupAPIView,
|
||||
SocialAuthStatusAPIView,
|
||||
SocialProvidersAPIView,
|
||||
@@ -43,6 +44,7 @@ urlpatterns = [
|
||||
path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
|
||||
# JWT token management
|
||||
path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
|
||||
path("token/session/", SessionToTokenAPIView.as_view(), name="auth-token-session"), # Exchange session for JWT
|
||||
# Note: dj_rest_auth removed - using custom social auth views below
|
||||
path(
|
||||
"password/reset/",
|
||||
|
||||
@@ -511,6 +511,99 @@ class MFALoginVerifyAPIView(APIView):
|
||||
return {"success": False, "error": "Passkey verification failed"}
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
post=extend_schema(
|
||||
summary="Exchange session for JWT tokens",
|
||||
description="Exchange allauth session_token (from passkey login) for JWT tokens.",
|
||||
responses={
|
||||
200: LoginOutputSerializer,
|
||||
401: "Not authenticated",
|
||||
},
|
||||
tags=["Authentication"],
|
||||
),
|
||||
)
|
||||
class SessionToTokenAPIView(APIView):
|
||||
"""
|
||||
API endpoint to exchange allauth session_token for JWT tokens.
|
||||
|
||||
Used after allauth headless passkey login to get JWT tokens for the frontend.
|
||||
The allauth passkey login returns a session_token, and this endpoint
|
||||
validates it and exchanges it for JWT tokens.
|
||||
"""
|
||||
|
||||
# Allow unauthenticated - we validate the allauth session_token ourselves
|
||||
permission_classes = [AllowAny]
|
||||
authentication_classes = []
|
||||
|
||||
def post(self, request: Request) -> Response:
|
||||
# Get the allauth session_token from header or body
|
||||
session_token = request.headers.get('X-Session-Token') or request.data.get('session_token')
|
||||
|
||||
if not session_token:
|
||||
return Response(
|
||||
{"detail": "Session token required. Provide X-Session-Token header or session_token in body."},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Validate the session_token with allauth's session store
|
||||
try:
|
||||
from allauth.headless.tokens.strategies.sessions import SessionTokenStrategy
|
||||
|
||||
strategy = SessionTokenStrategy()
|
||||
session_data = strategy.lookup_session(session_token)
|
||||
|
||||
if not session_data:
|
||||
return Response(
|
||||
{"detail": "Invalid or expired session token."},
|
||||
status=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
|
||||
# Get user from the session
|
||||
user_id = session_data.get('_auth_user_id')
|
||||
if not user_id:
|
||||
return Response(
|
||||
{"detail": "No user found in session."},
|
||||
status=status.HTTP_401_UNAUTHORIZED,
|
||||
)
|
||||
|
||||
user = UserModel.objects.get(pk=user_id)
|
||||
|
||||
except Exception as e:  # Exception already covers ImportError from the local allauth import
|
||||
logger.error(f"Failed to validate allauth session token: {e}")
|
||||
return Response(
|
||||
{"detail": "Failed to validate session token."},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
# Generate JWT tokens with passkey auth method
|
||||
from .jwt import create_tokens_for_user
|
||||
|
||||
tokens = create_tokens_for_user(
|
||||
user,
|
||||
auth_method="passkey",
|
||||
mfa_verified=True, # Passkey is considered MFA
|
||||
provider_mfa=False,
|
||||
)
|
||||
|
||||
# Log successful session-to-token exchange
|
||||
from apps.accounts.services.security_service import log_security_event
|
||||
log_security_event(
|
||||
"session_to_token",
|
||||
request,
|
||||
user=user,
|
||||
metadata={"auth_method": "passkey"},
|
||||
)
|
||||
|
||||
response_serializer = LoginOutputSerializer(
|
||||
{
|
||||
"access": tokens["access"],
|
||||
"refresh": tokens["refresh"],
|
||||
"user": user,
|
||||
"message": "Token exchange successful",
|
||||
}
|
||||
)
|
||||
return Response(response_serializer.data)
|
||||
|
||||
@extend_schema_view(
|
||||
post=extend_schema(
|
||||
summary="User registration",
|
||||
|
||||
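As a usage sketch for SessionToTokenAPIView above: after an allauth headless passkey login returns a session_token, the frontend exchanges it for JWTs. The host and the /api/v1/auth/ prefix are assumptions; the token/session/ path and the X-Session-Token header come from this diff, and the access/refresh keys assume LoginOutputSerializer exposes the fields it is given.

    import requests

    BASE = "https://thrillwiki.example.com/api/v1"  # illustrative host and prefix

    # session_token is returned by the allauth headless passkey login flow
    session_token = "<allauth-session-token>"

    response = requests.post(
        f"{BASE}/auth/token/session/",
        headers={"X-Session-Token": session_token},  # or {"session_token": ...} in the body
        timeout=10,
    )
    response.raise_for_status()
    tokens = response.json()
    access, refresh = tokens["access"], tokens["refresh"]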
@@ -1044,3 +1044,29 @@ class RideSerializer(serializers.ModelSerializer):
|
||||
"opening_date",
|
||||
"closing_date",
|
||||
]
|
||||
|
||||
|
||||
class RideSubTypeSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for ride sub-types lookup table.
|
||||
|
||||
This serves the /rides/sub-types/ endpoint which the frontend
|
||||
uses to populate sub-type dropdowns filtered by category.
|
||||
"""
|
||||
|
||||
created_by = serializers.CharField(source="created_by.username", read_only=True, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
# Import here to avoid circular imports
|
||||
from apps.rides.models import RideSubType
|
||||
model = RideSubType
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"category",
|
||||
"description",
|
||||
"created_by",
|
||||
"created_at",
|
||||
]
|
||||
read_only_fields = ["id", "created_at", "created_by"]
|
||||
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ from .views import (
|
||||
RideListCreateAPIView,
|
||||
RideModelSearchAPIView,
|
||||
RideSearchSuggestionsAPIView,
|
||||
RideSubTypeListAPIView,
|
||||
)
|
||||
|
||||
# Create router for nested photo endpoints
|
||||
@@ -63,6 +64,8 @@ urlpatterns = [
|
||||
# Manufacturer and Designer endpoints
|
||||
path("manufacturers/", ManufacturerListAPIView.as_view(), name="manufacturer-list"),
|
||||
path("designers/", DesignerListAPIView.as_view(), name="designer-list"),
|
||||
# Ride sub-types endpoint - for autocomplete dropdowns
|
||||
path("sub-types/", RideSubTypeListAPIView.as_view(), name="ride-sub-type-list"),
|
||||
# Ride model management endpoints - nested under rides/manufacturers
|
||||
path(
|
||||
"manufacturers/<slug:manufacturer_slug>/",
|
||||
|
||||
@@ -2422,3 +2422,53 @@ class ManufacturerListAPIView(BaseCompanyListAPIView):
|
||||
)
|
||||
class DesignerListAPIView(BaseCompanyListAPIView):
|
||||
role = "DESIGNER"
|
||||
|
||||
|
||||
# === RIDE SUB-TYPES ===
|
||||
|
||||
|
||||
@extend_schema(
|
||||
summary="List ride sub-types",
|
||||
description="List ride sub-types, optionally filtered by category. Used for autocomplete dropdowns.",
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
"category",
|
||||
OpenApiTypes.STR,
|
||||
description="Filter by ride category (e.g., 'RC' for roller coaster)",
|
||||
),
|
||||
],
|
||||
responses={200: OpenApiTypes.OBJECT},
|
||||
tags=["Rides"],
|
||||
)
|
||||
class RideSubTypeListAPIView(APIView):
|
||||
"""
|
||||
API View for listing ride sub-types.
|
||||
|
||||
Used by the frontend's useRideSubTypes hook to populate
|
||||
sub-type dropdown menus filtered by ride category.
|
||||
|
||||
Caching: 30-minute timeout (1800s) - sub-types are stable lookup data.
|
||||
"""
|
||||
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
@cache_api_response(timeout=1800, key_prefix="ride_sub_types")
|
||||
def get(self, request: Request) -> Response:
|
||||
from apps.rides.models import RideSubType
|
||||
from apps.api.v1.rides.serializers import RideSubTypeSerializer
|
||||
|
||||
# Start with all sub-types
|
||||
queryset = RideSubType.objects.all().order_by("name")
|
||||
|
||||
# Apply category filter if provided
|
||||
category = request.query_params.get("category")
|
||||
if category:
|
||||
queryset = queryset.filter(category=category)
|
||||
|
||||
# Serialize and return
|
||||
serializer = RideSubTypeSerializer(queryset, many=True)
|
||||
return Response({
|
||||
"results": serializer.data,
|
||||
"count": queryset.count(),
|
||||
})
|
||||
|
||||
|
||||
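For reference, the new lookup endpoint can be exercised as below. The host and the rides/ mount point are assumptions; the sub-types/ path, the category parameter, and the results/count response shape come from the URL pattern and view above.

    import requests

    BASE = "https://thrillwiki.example.com/api/v1"  # illustrative host and prefix

    response = requests.get(
        f"{BASE}/rides/sub-types/",
        params={"category": "RC"},  # e.g. roller coasters, per the schema parameter
        timeout=10,
    )
    response.raise_for_status()
    payload = response.json()
    print(payload["count"])
    for sub_type in payload["results"]:
        print(sub_type["name"], sub_type["category"])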
@@ -12,7 +12,7 @@ from drf_spectacular.utils import (
|
||||
)
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.core.choices.serializers import RichChoiceFieldSerializer
|
||||
from apps.core.choices.serializers import RichChoiceFieldSerializer, RichChoiceSerializerField
|
||||
|
||||
from .shared import ModelChoices
|
||||
|
||||
@@ -87,22 +87,25 @@ class CompanyCreateInputSerializer(serializers.Serializer):
|
||||
description = serializers.CharField(allow_blank=True, default="")
|
||||
website = serializers.URLField(required=False, allow_blank=True)
|
||||
|
||||
# Entity type and status
|
||||
person_type = serializers.ChoiceField(
|
||||
choices=["INDIVIDUAL", "FIRM", "ORGANIZATION", "CORPORATION", "PARTNERSHIP", "GOVERNMENT"],
|
||||
# Entity type and status - using RichChoiceSerializerField
|
||||
person_type = RichChoiceSerializerField(
|
||||
choice_group="person_types",
|
||||
domain="parks",
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
)
|
||||
status = serializers.ChoiceField(
|
||||
choices=["ACTIVE", "DEFUNCT", "MERGED", "ACQUIRED", "RENAMED", "DORMANT"],
|
||||
status = RichChoiceSerializerField(
|
||||
choice_group="company_statuses",
|
||||
domain="parks",
|
||||
default="ACTIVE",
|
||||
)
|
||||
|
||||
# Founding information
|
||||
founded_year = serializers.IntegerField(required=False, allow_null=True)
|
||||
founded_date = serializers.DateField(required=False, allow_null=True)
|
||||
founded_date_precision = serializers.ChoiceField(
|
||||
choices=["YEAR", "MONTH", "DAY"],
|
||||
founded_date_precision = RichChoiceSerializerField(
|
||||
choice_group="date_precision",
|
||||
domain="parks",
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
)
|
||||
@@ -129,22 +132,25 @@ class CompanyUpdateInputSerializer(serializers.Serializer):
|
||||
description = serializers.CharField(allow_blank=True, required=False)
|
||||
website = serializers.URLField(required=False, allow_blank=True)
|
||||
|
||||
# Entity type and status
|
||||
person_type = serializers.ChoiceField(
|
||||
choices=["INDIVIDUAL", "FIRM", "ORGANIZATION", "CORPORATION", "PARTNERSHIP", "GOVERNMENT"],
|
||||
# Entity type and status - using RichChoiceSerializerField
|
||||
person_type = RichChoiceSerializerField(
|
||||
choice_group="person_types",
|
||||
domain="parks",
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
)
|
||||
status = serializers.ChoiceField(
|
||||
choices=["ACTIVE", "DEFUNCT", "MERGED", "ACQUIRED", "RENAMED", "DORMANT"],
|
||||
status = RichChoiceSerializerField(
|
||||
choice_group="company_statuses",
|
||||
domain="parks",
|
||||
required=False,
|
||||
)
|
||||
|
||||
# Founding information
|
||||
founded_year = serializers.IntegerField(required=False, allow_null=True)
|
||||
founded_date = serializers.DateField(required=False, allow_null=True)
|
||||
founded_date_precision = serializers.ChoiceField(
|
||||
choices=["YEAR", "MONTH", "DAY"],
|
||||
founded_date_precision = RichChoiceSerializerField(
|
||||
choice_group="date_precision",
|
||||
domain="parks",
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
)
|
||||
|
||||
@@ -34,6 +34,17 @@ from apps.core.api.analytics_views import (
|
||||
RequestMetadataViewSet,
|
||||
)
|
||||
|
||||
# Import observability views
|
||||
from apps.core.api.observability_views import (
|
||||
AlertCorrelationViewSet,
|
||||
AnomalyViewSet,
|
||||
CleanupJobLogViewSet,
|
||||
DataRetentionStatsView,
|
||||
PipelineErrorViewSet,
|
||||
)
|
||||
from apps.notifications.api.log_views import NotificationLogViewSet
|
||||
from apps.moderation.views import ModerationAuditLogViewSet
|
||||
|
||||
# Create the main API router
|
||||
router = DefaultRouter()
|
||||
|
||||
@@ -44,6 +55,14 @@ router.register(r"rankings", RideRankingViewSet, basename="ranking")
|
||||
router.register(r"request_metadata", RequestMetadataViewSet, basename="request_metadata")
|
||||
router.register(r"approval_transaction_metrics", ApprovalTransactionMetricViewSet, basename="approval_transaction_metrics")
|
||||
|
||||
# Register observability endpoints (Supabase table parity)
|
||||
router.register(r"pipeline_errors", PipelineErrorViewSet, basename="pipeline_errors")
|
||||
router.register(r"notification_logs", NotificationLogViewSet, basename="notification_logs")
|
||||
router.register(r"cleanup_job_log", CleanupJobLogViewSet, basename="cleanup_job_log")
|
||||
router.register(r"moderation_audit_log", ModerationAuditLogViewSet, basename="moderation_audit_log")
|
||||
router.register(r"alert_correlations_view", AlertCorrelationViewSet, basename="alert_correlations_view")
|
||||
router.register(r"recent_anomalies_view", AnomalyViewSet, basename="recent_anomalies_view")
|
||||
|
||||
app_name = "api_v1"
|
||||
|
||||
urlpatterns = [
|
||||
@@ -53,6 +72,8 @@ urlpatterns = [
|
||||
path("auth/", include("apps.api.v1.auth.urls")),
|
||||
# Analytics endpoints (error_summary is a view, not a viewset)
|
||||
path("error_summary/", ErrorSummaryView.as_view(), name="error-summary"),
|
||||
# Data retention stats view (aggregation endpoint)
|
||||
path("data_retention_stats/", DataRetentionStatsView.as_view(), name="data-retention-stats"),
|
||||
# Health check endpoints
|
||||
path("health/", HealthCheckAPIView.as_view(), name="health-check"),
|
||||
path("health/simple/", SimpleHealthAPIView.as_view(), name="simple-health"),
|
||||
|
||||
backend/apps/core/api/observability_serializers.py (new file, 176 lines)
@@ -0,0 +1,176 @@
|
||||
"""
|
||||
Serializers for observability API endpoints.
|
||||
|
||||
Provides serializers for PipelineError, Anomaly, AlertCorrelationRule,
|
||||
CleanupJobLog, and DataRetentionStats.
|
||||
"""
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.core.choices.serializers import RichChoiceSerializerField
|
||||
from apps.core.models import (
|
||||
AlertCorrelationRule,
|
||||
Anomaly,
|
||||
CleanupJobLog,
|
||||
PipelineError,
|
||||
)
|
||||
|
||||
|
||||
class PipelineErrorSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for pipeline errors."""
|
||||
|
||||
severity = RichChoiceSerializerField(
|
||||
choice_group="pipeline_error_severities",
|
||||
domain="core",
|
||||
)
|
||||
resolved_by_username = serializers.CharField(
|
||||
source="resolved_by.username",
|
||||
read_only=True,
|
||||
allow_null=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = PipelineError
|
||||
fields = [
|
||||
"id",
|
||||
"function_name",
|
||||
"error_message",
|
||||
"error_code",
|
||||
"error_context",
|
||||
"stack_trace",
|
||||
"severity",
|
||||
"submission_id",
|
||||
"item_id",
|
||||
"request_id",
|
||||
"trace_id",
|
||||
"resolved",
|
||||
"resolved_by",
|
||||
"resolved_by_username",
|
||||
"resolved_at",
|
||||
"resolution_notes",
|
||||
"occurred_at",
|
||||
]
|
||||
read_only_fields = ["id", "occurred_at", "resolved_by_username"]
|
||||
|
||||
|
||||
class PipelineErrorResolveSerializer(serializers.Serializer):
|
||||
"""Serializer for resolving pipeline errors."""
|
||||
|
||||
resolution_notes = serializers.CharField(required=False, allow_blank=True)
|
||||
|
||||
|
||||
class AnomalySerializer(serializers.ModelSerializer):
|
||||
"""Serializer for detected anomalies."""
|
||||
|
||||
anomaly_type = RichChoiceSerializerField(
|
||||
choice_group="anomaly_types",
|
||||
domain="core",
|
||||
)
|
||||
severity = RichChoiceSerializerField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
)
|
||||
alert_message = serializers.CharField(
|
||||
source="alert.message",
|
||||
read_only=True,
|
||||
allow_null=True,
|
||||
)
|
||||
alert_resolved_at = serializers.DateTimeField(
|
||||
source="alert.resolved_at",
|
||||
read_only=True,
|
||||
allow_null=True,
|
||||
)
|
||||
alert_id = serializers.UUIDField(
|
||||
source="alert.id",
|
||||
read_only=True,
|
||||
allow_null=True,
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Anomaly
|
||||
fields = [
|
||||
"id",
|
||||
"metric_name",
|
||||
"metric_category",
|
||||
"anomaly_type",
|
||||
"severity",
|
||||
"anomaly_value",
|
||||
"baseline_value",
|
||||
"deviation_score",
|
||||
"confidence_score",
|
||||
"detection_algorithm",
|
||||
"time_window_start",
|
||||
"time_window_end",
|
||||
"alert_created",
|
||||
"alert_id",
|
||||
"alert_message",
|
||||
"alert_resolved_at",
|
||||
"detected_at",
|
||||
]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"detected_at",
|
||||
"alert_id",
|
||||
"alert_message",
|
||||
"alert_resolved_at",
|
||||
]
|
||||
|
||||
|
||||
class AlertCorrelationRuleSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for alert correlation rules."""
|
||||
|
||||
incident_severity = RichChoiceSerializerField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = AlertCorrelationRule
|
||||
fields = [
|
||||
"id",
|
||||
"rule_name",
|
||||
"rule_description",
|
||||
"min_alerts_required",
|
||||
"time_window_minutes",
|
||||
"incident_severity",
|
||||
"incident_title_template",
|
||||
"is_active",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = ["id", "created_at", "updated_at"]
|
||||
|
||||
|
||||
class CleanupJobLogSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for cleanup job logs."""
|
||||
|
||||
status = RichChoiceSerializerField(
|
||||
choice_group="cleanup_job_statuses",
|
||||
domain="core",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = CleanupJobLog
|
||||
fields = [
|
||||
"id",
|
||||
"job_name",
|
||||
"status",
|
||||
"records_processed",
|
||||
"records_deleted",
|
||||
"error_message",
|
||||
"duration_ms",
|
||||
"executed_at",
|
||||
]
|
||||
read_only_fields = ["id", "executed_at"]
|
||||
|
||||
|
||||
class DataRetentionStatsSerializer(serializers.Serializer):
|
||||
"""Serializer for data retention statistics view."""
|
||||
|
||||
table_name = serializers.CharField()
|
||||
total_records = serializers.IntegerField()
|
||||
last_7_days = serializers.IntegerField()
|
||||
last_30_days = serializers.IntegerField()
|
||||
oldest_record = serializers.DateTimeField(allow_null=True)
|
||||
newest_record = serializers.DateTimeField(allow_null=True)
|
||||
table_size = serializers.CharField()
|
||||
backend/apps/core/api/observability_views.py (new file, 351 lines)
@@ -0,0 +1,351 @@
|
||||
"""
|
||||
ViewSets and Views for observability API endpoints.
|
||||
|
||||
Provides CRUD operations for PipelineError, read-only access for
|
||||
Anomaly, AlertCorrelationRule, CleanupJobLog, and aggregated views
|
||||
for DataRetentionStats.
|
||||
"""
|
||||
|
||||
from django.db import connection
|
||||
from django.db.models import Count, Max, Min, Q
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.utils import timezone
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||
from rest_framework import status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.filters import OrderingFilter, SearchFilter
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.core.models import (
|
||||
AlertCorrelationRule,
|
||||
Anomaly,
|
||||
CleanupJobLog,
|
||||
PipelineError,
|
||||
)
|
||||
|
||||
from .observability_serializers import (
|
||||
AlertCorrelationRuleSerializer,
|
||||
AnomalySerializer,
|
||||
CleanupJobLogSerializer,
|
||||
DataRetentionStatsSerializer,
|
||||
PipelineErrorResolveSerializer,
|
||||
PipelineErrorSerializer,
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
summary="List pipeline errors",
|
||||
description="Get all pipeline errors, optionally filtered by severity or resolved status.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
summary="Get pipeline error",
|
||||
description="Get details of a specific pipeline error.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
create=extend_schema(
|
||||
summary="Create pipeline error",
|
||||
description="Create a new pipeline error.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
update=extend_schema(
|
||||
summary="Update pipeline error",
|
||||
description="Update an existing pipeline error.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
partial_update=extend_schema(
|
||||
summary="Partial update pipeline error",
|
||||
description="Partially update an existing pipeline error.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
destroy=extend_schema(
|
||||
summary="Delete pipeline error",
|
||||
description="Delete a pipeline error.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
)
|
||||
class PipelineErrorViewSet(viewsets.ModelViewSet):
|
||||
"""
|
||||
ViewSet for managing pipeline errors.
|
||||
|
||||
Provides CRUD operations plus a resolve action for marking errors as resolved.
|
||||
"""
|
||||
|
||||
queryset = PipelineError.objects.select_related("resolved_by").all()
|
||||
serializer_class = PipelineErrorSerializer
|
||||
permission_classes = [IsAdminUser]
|
||||
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||
filterset_fields = ["severity", "function_name", "resolved", "error_code"]
|
||||
search_fields = ["error_message", "function_name", "error_code"]
|
||||
ordering_fields = ["occurred_at", "severity"]
|
||||
ordering = ["-occurred_at"]
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
|
||||
# Date range filtering
|
||||
start_date = self.request.query_params.get("start_date")
|
||||
end_date = self.request.query_params.get("end_date")
|
||||
|
||||
if start_date:
|
||||
queryset = queryset.filter(occurred_at__gte=start_date)
|
||||
if end_date:
|
||||
queryset = queryset.filter(occurred_at__lte=end_date)
|
||||
|
||||
return queryset
|
||||
|
||||
@extend_schema(
|
||||
summary="Resolve pipeline error",
|
||||
description="Mark a pipeline error as resolved.",
|
||||
request=PipelineErrorResolveSerializer,
|
||||
responses={200: PipelineErrorSerializer},
|
||||
tags=["Admin - Observability"],
|
||||
)
|
||||
@action(detail=True, methods=["post"])
|
||||
def resolve(self, request, pk=None):
|
||||
"""Mark a pipeline error as resolved."""
|
||||
error = self.get_object()
|
||||
|
||||
if error.resolved:
|
||||
return Response(
|
||||
{"detail": "Error is already resolved"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
serializer = PipelineErrorResolveSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
error.resolved = True
|
||||
error.resolved_at = timezone.now()
|
||||
error.resolved_by = request.user
|
||||
error.resolution_notes = serializer.validated_data.get("resolution_notes", "")
|
||||
error.save()
|
||||
|
||||
return Response(PipelineErrorSerializer(error).data)
|
||||
|
||||
|
||||
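With pipeline_errors registered on the router (see the api_v1 urls hunk earlier), DRF exposes this action at pipeline_errors/<pk>/resolve/. A test-client sketch, assuming the router is mounted under /api/v1/ and admin_user is a staff account:

    from rest_framework.test import APIClient


    def resolve_pipeline_error(admin_user, error_id):
        """Sketch: mark a pipeline error as resolved via the new action."""
        client = APIClient()
        client.force_authenticate(user=admin_user)  # must pass IsAdminUser (is_staff=True)
        response = client.post(
            f"/api/v1/pipeline_errors/{error_id}/resolve/",  # assumed mount point
            {"resolution_notes": "Fixed upstream"},
            format="json",
        )
        assert response.status_code == 200
        return response.json()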
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
summary="List recent anomalies",
|
||||
description="Get recent anomalies with optional filtering by severity or type.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
summary="Get anomaly details",
|
||||
description="Get details of a specific anomaly.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
)
|
||||
class AnomalyViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
"""
|
||||
ViewSet for viewing detected anomalies.
|
||||
|
||||
Provides read-only access to anomaly data with filtering options.
|
||||
This serves as the recent_anomalies_view endpoint.
|
||||
"""
|
||||
|
||||
queryset = Anomaly.objects.select_related("alert").all()
|
||||
serializer_class = AnomalySerializer
|
||||
permission_classes = [IsAdminUser]
|
||||
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||
filterset_fields = ["severity", "anomaly_type", "metric_category", "alert_created"]
|
||||
search_fields = ["metric_name", "metric_category"]
|
||||
ordering_fields = ["detected_at", "severity", "deviation_score"]
|
||||
ordering = ["-detected_at"]
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
|
||||
# Date range filtering
|
||||
start_date = self.request.query_params.get("start_date")
|
||||
end_date = self.request.query_params.get("end_date")
|
||||
|
||||
if start_date:
|
||||
queryset = queryset.filter(detected_at__gte=start_date)
|
||||
if end_date:
|
||||
queryset = queryset.filter(detected_at__lte=end_date)
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
summary="List alert correlations",
|
||||
description="Get all alert correlation rules with optional filtering.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
summary="Get alert correlation rule",
|
||||
description="Get details of a specific alert correlation rule.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
create=extend_schema(
|
||||
summary="Create alert correlation rule",
|
||||
description="Create a new alert correlation rule.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
update=extend_schema(
|
||||
summary="Update alert correlation rule",
|
||||
description="Update an existing alert correlation rule.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
partial_update=extend_schema(
|
||||
summary="Partial update alert correlation rule",
|
||||
description="Partially update an existing alert correlation rule.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
destroy=extend_schema(
|
||||
summary="Delete alert correlation rule",
|
||||
description="Delete an alert correlation rule.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
)
|
||||
class AlertCorrelationViewSet(viewsets.ModelViewSet):
|
||||
"""
|
||||
ViewSet for managing alert correlation rules.
|
||||
|
||||
Provides CRUD operations for configuring how alerts are correlated.
|
||||
This serves as the alert_correlations_view endpoint.
|
||||
"""
|
||||
|
||||
queryset = AlertCorrelationRule.objects.all()
|
||||
serializer_class = AlertCorrelationRuleSerializer
|
||||
permission_classes = [IsAdminUser]
|
||||
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||
filterset_fields = ["is_active", "incident_severity"]
|
||||
search_fields = ["rule_name", "rule_description"]
|
||||
ordering_fields = ["rule_name", "created_at"]
|
||||
ordering = ["rule_name"]
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
summary="List cleanup job logs",
|
||||
description="Get all cleanup job logs with optional filtering by status.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
summary="Get cleanup job log",
|
||||
description="Get details of a specific cleanup job log entry.",
|
||||
tags=["Admin - Observability"],
|
||||
),
|
||||
)
|
||||
class CleanupJobLogViewSet(viewsets.ReadOnlyModelViewSet):
|
||||
"""
|
||||
ViewSet for viewing cleanup job logs.
|
||||
|
||||
Provides read-only access to cleanup job execution history.
|
||||
"""
|
||||
|
||||
queryset = CleanupJobLog.objects.all()
|
||||
serializer_class = CleanupJobLogSerializer
|
||||
permission_classes = [IsAdminUser]
|
||||
filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
|
||||
filterset_fields = ["status", "job_name"]
|
||||
search_fields = ["job_name", "error_message"]
|
||||
ordering_fields = ["executed_at", "duration_ms"]
|
||||
ordering = ["-executed_at"]
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
|
||||
# Date range filtering
|
||||
start_date = self.request.query_params.get("start_date")
|
||||
end_date = self.request.query_params.get("end_date")
|
||||
|
||||
if start_date:
|
||||
queryset = queryset.filter(executed_at__gte=start_date)
|
||||
if end_date:
|
||||
queryset = queryset.filter(executed_at__lte=end_date)
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
@extend_schema(
|
||||
summary="Get data retention stats",
|
||||
description="Get aggregated data retention statistics for monitoring database growth.",
|
||||
tags=["Admin - Observability"],
|
||||
responses={200: DataRetentionStatsSerializer(many=True)},
|
||||
)
|
||||
class DataRetentionStatsView(APIView):
|
||||
"""
|
||||
API view for data retention statistics.
|
||||
|
||||
Returns aggregated statistics about table sizes, record counts,
|
||||
and data age for monitoring data retention and growth.
|
||||
"""
|
||||
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
def get(self, request):
|
||||
"""Get data retention statistics for key tables."""
|
||||
from datetime import timedelta
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
now = timezone.now()
|
||||
seven_days_ago = now - timedelta(days=7)
|
||||
thirty_days_ago = now - timedelta(days=30)
|
||||
|
||||
# Tables to report on
|
||||
tables_to_check = [
|
||||
("core", "pipelineerror", "occurred_at"),
|
||||
("core", "applicationerror", "created_at"),
|
||||
("core", "systemalert", "created_at"),
|
||||
("core", "requestmetadata", "created_at"),
|
||||
("core", "anomaly", "detected_at"),
|
||||
("core", "cleanupjoblog", "executed_at"),
|
||||
("moderation", "editsubmission", "created_at"),
|
||||
("moderation", "moderationauditlog", "created_at"),
|
||||
("notifications", "notificationlog", "created_at"),
|
||||
]
|
||||
|
||||
stats = []
|
||||
for app_label, model_name, date_field in tables_to_check:
|
||||
try:
|
||||
model = apps.get_model(app_label, model_name)
|
||||
filter_kwargs_7d = {f"{date_field}__gte": seven_days_ago}
|
||||
filter_kwargs_30d = {f"{date_field}__gte": thirty_days_ago}
|
||||
|
||||
# Get record counts and date ranges
|
||||
qs = model.objects.aggregate(
    total=Coalesce(Count("id"), 0),
    last_7_days=Coalesce(Count("id", filter=Q(**filter_kwargs_7d)), 0),
    last_30_days=Coalesce(Count("id", filter=Q(**filter_kwargs_30d)), 0),
    oldest_record=Min(date_field),
    newest_record=Max(date_field),
)
|
||||
|
||||
# Get table size from database
|
||||
table_name = model._meta.db_table
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT pg_size_pretty(pg_total_relation_size(%s))
|
||||
""",
|
||||
[table_name],
|
||||
)
|
||||
result = cursor.fetchone()
|
||||
table_size = result[0] if result else "Unknown"
|
||||
|
||||
stats.append(
|
||||
{
|
||||
"table_name": table_name,
|
||||
"total_records": model.objects.count(),
|
||||
"last_7_days": model.objects.filter(**filter_kwargs_7d).count(),
|
||||
"last_30_days": model.objects.filter(**filter_kwargs_30d).count(),
|
||||
"oldest_record": qs.get("oldest_record"),
|
||||
"newest_record": qs.get("newest_record"),
|
||||
"table_size": table_size,
|
||||
}
|
||||
)
|
||||
except Exception:
|
||||
# Skip tables that don't exist or have errors
|
||||
continue
|
||||
|
||||
serializer = DataRetentionStatsSerializer(stats, many=True)
|
||||
return Response(serializer.data)
|
||||
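Since the aggregate in DataRetentionStatsView already computes the totals and windowed counts, the stats.append block could reuse those values rather than issuing three additional .count() queries per table. A sketch of that variant, using the same local variables and the same keys expected by DataRetentionStatsSerializer:

    # Sketch: reuse the aggregate results instead of extra per-table .count() queries.
    stats.append(
        {
            "table_name": table_name,
            "total_records": qs["total"],
            "last_7_days": qs["last_7_days"],
            "last_30_days": qs["last_30_days"],
            "oldest_record": qs["oldest_record"],
            "newest_record": qs["newest_record"],
            "table_size": table_size,
        }
    )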
@@ -15,7 +15,7 @@ Key Components:
|
||||
from .base import ChoiceCategory, ChoiceGroup, RichChoice
|
||||
from .fields import RichChoiceField
|
||||
from .registry import ChoiceRegistry, register_choices
|
||||
from .serializers import RichChoiceOptionSerializer, RichChoiceSerializer
|
||||
from .serializers import RichChoiceOptionSerializer, RichChoiceSerializer, RichChoiceSerializerField
|
||||
from .utils import get_choice_display, validate_choice_value
|
||||
|
||||
__all__ = [
|
||||
@@ -26,6 +26,7 @@ __all__ = [
|
||||
"register_choices",
|
||||
"RichChoiceField",
|
||||
"RichChoiceSerializer",
|
||||
"RichChoiceSerializerField",
|
||||
"RichChoiceOptionSerializer",
|
||||
"validate_choice_value",
|
||||
"get_choice_display",
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
Core System Rich Choice Objects
|
||||
|
||||
This module defines all choice objects for core system functionality,
|
||||
including health checks, API statuses, and other system-level choices.
|
||||
including health checks, API statuses, severity levels, alert types,
|
||||
and other system-level choices.
|
||||
"""
|
||||
|
||||
from .base import ChoiceCategory, RichChoice
|
||||
@@ -124,6 +125,584 @@ ENTITY_TYPES = [
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Severity Levels (used by ApplicationError, SystemAlert, Incident, RequestMetadata)
|
||||
# ============================================================================
|
||||
SEVERITY_LEVELS = [
|
||||
RichChoice(
|
||||
value="critical",
|
||||
label="Critical",
|
||||
description="Critical issue requiring immediate attention",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "alert-octagon",
|
||||
"css_class": "bg-red-100 text-red-800 border-red-300",
|
||||
"sort_order": 1,
|
||||
"priority": 1,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="high",
|
||||
label="High",
|
||||
description="High priority issue",
|
||||
metadata={
|
||||
"color": "orange",
|
||||
"icon": "alert-triangle",
|
||||
"css_class": "bg-orange-100 text-orange-800 border-orange-300",
|
||||
"sort_order": 2,
|
||||
"priority": 2,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="medium",
|
||||
label="Medium",
|
||||
description="Medium priority issue",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "info",
|
||||
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-300",
|
||||
"sort_order": 3,
|
||||
"priority": 3,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="low",
|
||||
label="Low",
|
||||
description="Low priority issue",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "info",
|
||||
"css_class": "bg-blue-100 text-blue-800 border-blue-300",
|
||||
"sort_order": 4,
|
||||
"priority": 4,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
]
|
||||
|
||||
# Extended severity levels including debug/info/warning/error for RequestMetadata
|
||||
REQUEST_SEVERITY_LEVELS = [
|
||||
RichChoice(
|
||||
value="debug",
|
||||
label="Debug",
|
||||
description="Debug-level information",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "bug",
|
||||
"css_class": "bg-gray-100 text-gray-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="info",
|
||||
label="Info",
|
||||
description="Informational message",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "info",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="warning",
|
||||
label="Warning",
|
||||
description="Warning condition",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "alert-triangle",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="error",
|
||||
label="Error",
|
||||
description="Error condition",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 4,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="critical",
|
||||
label="Critical",
|
||||
description="Critical error requiring immediate attention",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "alert-octagon",
|
||||
"css_class": "bg-red-200 text-red-900 font-bold",
|
||||
"sort_order": 5,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Error/Request Sources
|
||||
# ============================================================================
|
||||
ERROR_SOURCES = [
|
||||
RichChoice(
|
||||
value="frontend",
|
||||
label="Frontend",
|
||||
description="Error originated from frontend application",
|
||||
metadata={
|
||||
"color": "purple",
|
||||
"icon": "monitor",
|
||||
"css_class": "bg-purple-100 text-purple-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="backend",
|
||||
label="Backend",
|
||||
description="Error originated from backend server",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "server",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="api",
|
||||
label="API",
|
||||
description="Error originated from API layer",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "code",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# System Alert Types
|
||||
# ============================================================================
|
||||
SYSTEM_ALERT_TYPES = [
|
||||
RichChoice(
|
||||
value="orphaned_images",
|
||||
label="Orphaned Images",
|
||||
description="Images not associated with any entity",
|
||||
metadata={"color": "orange", "icon": "image", "sort_order": 1},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="stale_submissions",
|
||||
label="Stale Submissions",
|
||||
description="Submissions pending for too long",
|
||||
metadata={"color": "yellow", "icon": "clock", "sort_order": 2},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="circular_dependency",
|
||||
label="Circular Dependency",
|
||||
description="Detected circular reference in data",
|
||||
metadata={"color": "red", "icon": "refresh-cw", "sort_order": 3},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="validation_error",
|
||||
label="Validation Error",
|
||||
description="Data validation failure",
|
||||
metadata={"color": "red", "icon": "alert-circle", "sort_order": 4},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="ban_attempt",
|
||||
label="Ban Attempt",
|
||||
description="User ban action was triggered",
|
||||
metadata={"color": "red", "icon": "shield-off", "sort_order": 5},
|
||||
category=ChoiceCategory.SECURITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="upload_timeout",
|
||||
label="Upload Timeout",
|
||||
description="File upload exceeded time limit",
|
||||
metadata={"color": "orange", "icon": "upload-cloud", "sort_order": 6},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="high_error_rate",
|
||||
label="High Error Rate",
|
||||
description="Elevated error rate detected",
|
||||
metadata={"color": "red", "icon": "trending-up", "sort_order": 7},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="database_connection",
|
||||
label="Database Connection",
|
||||
description="Database connectivity issue",
|
||||
metadata={"color": "red", "icon": "database", "sort_order": 8},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="memory_usage",
|
||||
label="Memory Usage",
|
||||
description="High memory consumption detected",
|
||||
metadata={"color": "orange", "icon": "cpu", "sort_order": 9},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="queue_backup",
|
||||
label="Queue Backup",
|
||||
description="Task queue is backing up",
|
||||
metadata={"color": "yellow", "icon": "layers", "sort_order": 10},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Metric Types for Rate Limiting
|
||||
# ============================================================================
|
||||
METRIC_TYPES = [
|
||||
RichChoice(
|
||||
value="block_rate",
|
||||
label="Block Rate",
|
||||
description="Percentage of requests being blocked",
|
||||
metadata={"color": "red", "icon": "shield", "sort_order": 1},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="total_requests",
|
||||
label="Total Requests",
|
||||
description="Total number of requests",
|
||||
metadata={"color": "blue", "icon": "activity", "sort_order": 2},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="unique_ips",
|
||||
label="Unique IPs",
|
||||
description="Number of unique IP addresses",
|
||||
metadata={"color": "purple", "icon": "globe", "sort_order": 3},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="function_specific",
|
||||
label="Function Specific",
|
||||
description="Metrics for a specific function",
|
||||
metadata={"color": "green", "icon": "code", "sort_order": 4},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Incident Statuses
|
||||
# ============================================================================
|
||||
INCIDENT_STATUSES = [
|
||||
RichChoice(
|
||||
value="open",
|
||||
label="Open",
|
||||
description="Incident is open and awaiting investigation",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "alert-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 1,
|
||||
"is_active": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="investigating",
|
||||
label="Investigating",
|
||||
description="Incident is being actively investigated",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "search",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 2,
|
||||
"is_active": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="resolved",
|
||||
label="Resolved",
|
||||
description="Incident has been resolved",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 3,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="closed",
|
||||
label="Closed",
|
||||
description="Incident is closed",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-gray-100 text-gray-800",
|
||||
"sort_order": 4,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Alert Sources
|
||||
# ============================================================================
|
||||
ALERT_SOURCES = [
|
||||
RichChoice(
|
||||
value="system",
|
||||
label="System Alert",
|
||||
description="Alert from system monitoring",
|
||||
metadata={"color": "blue", "icon": "server", "sort_order": 1},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="rate_limit",
|
||||
label="Rate Limit Alert",
|
||||
description="Alert from rate limiting system",
|
||||
metadata={"color": "orange", "icon": "shield", "sort_order": 2},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Pipeline Error Severities
|
||||
# ============================================================================
|
||||
PIPELINE_ERROR_SEVERITIES = [
|
||||
RichChoice(
|
||||
value="critical",
|
||||
label="Critical",
|
||||
description="Critical pipeline failure requiring immediate attention",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "alert-octagon",
|
||||
"css_class": "bg-red-100 text-red-800 border-red-300",
|
||||
"sort_order": 1,
|
||||
"priority": 1,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="error",
|
||||
label="Error",
|
||||
description="Pipeline error that needs investigation",
|
||||
metadata={
|
||||
"color": "orange",
|
||||
"icon": "alert-triangle",
|
||||
"css_class": "bg-orange-100 text-orange-800 border-orange-300",
|
||||
"sort_order": 2,
|
||||
"priority": 2,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="warning",
|
||||
label="Warning",
|
||||
description="Pipeline warning that may need attention",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "alert-circle",
|
||||
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-300",
|
||||
"sort_order": 3,
|
||||
"priority": 3,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="info",
|
||||
label="Info",
|
||||
description="Informational pipeline event",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "info",
|
||||
"css_class": "bg-blue-100 text-blue-800 border-blue-300",
|
||||
"sort_order": 4,
|
||||
"priority": 4,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Anomaly Types
|
||||
# ============================================================================
|
||||
ANOMALY_TYPES = [
|
||||
RichChoice(
|
||||
value="spike",
|
||||
label="Spike",
|
||||
description="Sudden increase in metric value",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "trending-up",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="drop",
|
||||
label="Drop",
|
||||
description="Sudden decrease in metric value",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "trending-down",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="trend_change",
|
||||
label="Trend Change",
|
||||
description="Change in the overall trend direction",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "activity",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="outlier",
|
||||
label="Outlier",
|
||||
description="Value outside normal distribution",
|
||||
metadata={
|
||||
"color": "purple",
|
||||
"icon": "git-branch",
|
||||
"css_class": "bg-purple-100 text-purple-800",
|
||||
"sort_order": 4,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
RichChoice(
|
||||
value="threshold_breach",
|
||||
label="Threshold Breach",
|
||||
description="Value exceeded configured threshold",
|
||||
metadata={
|
||||
"color": "orange",
|
||||
"icon": "alert-triangle",
|
||||
"css_class": "bg-orange-100 text-orange-800",
|
||||
"sort_order": 5,
|
||||
},
|
||||
category=ChoiceCategory.TECHNICAL,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Cleanup Job Statuses
|
||||
# ============================================================================
|
||||
CLEANUP_JOB_STATUSES = [
|
||||
RichChoice(
|
||||
value="success",
|
||||
label="Success",
|
||||
description="Cleanup job completed successfully",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="failed",
|
||||
label="Failed",
|
||||
description="Cleanup job failed with errors",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="partial",
|
||||
label="Partial",
|
||||
description="Cleanup job completed with some failures",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "alert-circle",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="skipped",
|
||||
label="Skipped",
|
||||
description="Cleanup job was skipped",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "skip-forward",
|
||||
"css_class": "bg-gray-100 text-gray-800",
|
||||
"sort_order": 4,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Date Precision (shared across multiple domains)
|
||||
# ============================================================================
|
||||
DATE_PRECISION = [
|
||||
RichChoice(
|
||||
value="exact",
|
||||
label="Exact Date",
|
||||
description="Date is known exactly",
|
||||
metadata={"color": "green", "icon": "calendar", "sort_order": 1, "format": "YYYY-MM-DD"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="month",
|
||||
label="Month and Year",
|
||||
description="Only month and year are known",
|
||||
metadata={"color": "blue", "icon": "calendar", "sort_order": 2, "format": "YYYY-MM"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="year",
|
||||
label="Year Only",
|
||||
description="Only the year is known",
|
||||
metadata={"color": "yellow", "icon": "calendar", "sort_order": 3, "format": "YYYY"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="decade",
|
||||
label="Decade",
|
||||
description="Only the decade is known",
|
||||
metadata={"color": "orange", "icon": "calendar", "sort_order": 4, "format": "YYYYs"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="century",
|
||||
label="Century",
|
||||
description="Only the century is known",
|
||||
metadata={"color": "gray", "icon": "calendar", "sort_order": 5, "format": "YYc"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="approximate",
|
||||
label="Approximate",
|
||||
description="Date is approximate/estimated",
|
||||
metadata={"color": "gray", "icon": "help-circle", "sort_order": 6, "format": "~YYYY"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def register_core_choices():
|
||||
"""Register all core system choices with the global registry"""
|
||||
@@ -152,6 +731,95 @@ def register_core_choices():
|
||||
metadata={"domain": "core", "type": "entity_type"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="severity_levels",
|
||||
choices=SEVERITY_LEVELS,
|
||||
domain="core",
|
||||
description="Severity levels for errors and alerts",
|
||||
metadata={"domain": "core", "type": "severity"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="request_severity_levels",
|
||||
choices=REQUEST_SEVERITY_LEVELS,
|
||||
domain="core",
|
||||
description="Extended severity levels for request metadata",
|
||||
metadata={"domain": "core", "type": "request_severity"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="error_sources",
|
||||
choices=ERROR_SOURCES,
|
||||
domain="core",
|
||||
description="Sources of application errors",
|
||||
metadata={"domain": "core", "type": "error_source"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="system_alert_types",
|
||||
choices=SYSTEM_ALERT_TYPES,
|
||||
domain="core",
|
||||
description="Types of system alerts",
|
||||
metadata={"domain": "core", "type": "alert_type"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="metric_types",
|
||||
choices=METRIC_TYPES,
|
||||
domain="core",
|
||||
description="Types of rate limit metrics",
|
||||
metadata={"domain": "core", "type": "metric_type"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="incident_statuses",
|
||||
choices=INCIDENT_STATUSES,
|
||||
domain="core",
|
||||
description="Incident status options",
|
||||
metadata={"domain": "core", "type": "incident_status"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="alert_sources",
|
||||
choices=ALERT_SOURCES,
|
||||
domain="core",
|
||||
description="Sources of alerts",
|
||||
metadata={"domain": "core", "type": "alert_source"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="pipeline_error_severities",
|
||||
choices=PIPELINE_ERROR_SEVERITIES,
|
||||
domain="core",
|
||||
description="Severity levels for pipeline errors",
|
||||
metadata={"domain": "core", "type": "pipeline_error_severity"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="anomaly_types",
|
||||
choices=ANOMALY_TYPES,
|
||||
domain="core",
|
||||
description="Types of detected anomalies",
|
||||
metadata={"domain": "core", "type": "anomaly_type"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="cleanup_job_statuses",
|
||||
choices=CLEANUP_JOB_STATUSES,
|
||||
domain="core",
|
||||
description="Status options for cleanup jobs",
|
||||
metadata={"domain": "core", "type": "cleanup_job_status"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="date_precision",
|
||||
choices=DATE_PRECISION,
|
||||
domain="core",
|
||||
description="Date precision options",
|
||||
metadata={"domain": "core", "type": "date_precision"},
|
||||
)
|
||||
|
||||
|
||||
# Auto-register choices when module is imported
|
||||
register_core_choices()
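As a sketch of how these import-time registrations are read back elsewhere in this diff (registry.get_active_choices and the .value/.label attributes are taken from the filters.py code below; the absolute module path is inferred from that file's relative import):

# Minimal sketch: reading a registered group back out of the registry.
from apps.core.choices.registry import registry  # path inferred from filters.py below

def cleanup_status_choices() -> list[tuple[str, str]]:
    # e.g. [("success", "Success"), ("failed", "Failed"), ("partial", "Partial"), ("skipped", "Skipped")]
    return [(c.value, c.label) for c in registry.get_active_choices("cleanup_job_statuses", "core")]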
|
||||
|
||||
|
||||
backend/apps/core/choices/filters.py (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
"""
|
||||
Django-filter Integration for Rich Choices
|
||||
|
||||
This module provides django-filter compatible filter classes that integrate
|
||||
with the RichChoice registry system.
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from django_filters import ChoiceFilter, MultipleChoiceFilter
|
||||
|
||||
from .registry import registry
|
||||
|
||||
|
||||
class RichChoiceFilter(ChoiceFilter):
|
||||
"""
|
||||
Django-filter ChoiceFilter that uses the RichChoice registry.
|
||||
|
||||
This is the REQUIRED replacement for ChoiceFilter with inline choices.
|
||||
|
||||
Usage:
|
||||
class MyFilterSet(django_filters.FilterSet):
|
||||
status = RichChoiceFilter(
|
||||
choice_group="ticket_statuses",
|
||||
domain="support",
|
||||
)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
choice_group: str,
|
||||
domain: str = "core",
|
||||
allow_deprecated: bool = False,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Initialize the filter.
|
||||
|
||||
Args:
|
||||
choice_group: Name of the choice group in the registry
|
||||
domain: Domain namespace for the choice group
|
||||
allow_deprecated: Whether to include deprecated choices
|
||||
**kwargs: Additional arguments passed to ChoiceFilter
|
||||
"""
|
||||
self.choice_group = choice_group
|
||||
self.domain = domain
|
||||
self.allow_deprecated = allow_deprecated
|
||||
|
||||
# Get choices from registry
|
||||
if allow_deprecated:
|
||||
choices_list = registry.get_choices(choice_group, domain)
|
||||
else:
|
||||
choices_list = registry.get_active_choices(choice_group, domain)
|
||||
|
||||
choices = [(c.value, c.label) for c in choices_list]
|
||||
|
||||
super().__init__(choices=choices, **kwargs)
|
||||
|
||||
|
||||
class RichMultipleChoiceFilter(MultipleChoiceFilter):
|
||||
"""
|
||||
Django-filter MultipleChoiceFilter that uses the RichChoice registry.
|
||||
|
||||
This is the REQUIRED replacement for MultipleChoiceFilter with inline choices.
|
||||
|
||||
Usage:
|
||||
class MyFilterSet(django_filters.FilterSet):
|
||||
statuses = RichMultipleChoiceFilter(
|
||||
choice_group="ticket_statuses",
|
||||
domain="support",
|
||||
field_name="status",
|
||||
)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
choice_group: str,
|
||||
domain: str = "core",
|
||||
allow_deprecated: bool = False,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Initialize the filter.
|
||||
|
||||
Args:
|
||||
choice_group: Name of the choice group in the registry
|
||||
domain: Domain namespace for the choice group
|
||||
allow_deprecated: Whether to include deprecated choices
|
||||
**kwargs: Additional arguments passed to MultipleChoiceFilter
|
||||
"""
|
||||
self.choice_group = choice_group
|
||||
self.domain = domain
|
||||
self.allow_deprecated = allow_deprecated
|
||||
|
||||
# Get choices from registry
|
||||
if allow_deprecated:
|
||||
choices_list = registry.get_choices(choice_group, domain)
|
||||
else:
|
||||
choices_list = registry.get_active_choices(choice_group, domain)
|
||||
|
||||
choices = [(c.value, c.label) for c in choices_list]
|
||||
|
||||
super().__init__(choices=choices, **kwargs)
|
||||
|
||||
|
||||
def get_choice_filter_class(
|
||||
choice_group: str,
|
||||
domain: str = "core",
|
||||
allow_deprecated: bool = False,
|
||||
**extra_kwargs: Any
|
||||
) -> type[RichChoiceFilter]:
|
||||
"""
|
||||
Factory function to create a RichChoiceFilter class with preset choices.
|
||||
|
||||
Useful when you need to define the filter class dynamically or
|
||||
when the choice_group/domain aren't available at class definition time.
|
||||
|
||||
Usage:
|
||||
StatusFilter = get_choice_filter_class("ticket_statuses", "support")
|
||||
|
||||
class MyFilterSet(django_filters.FilterSet):
|
||||
status = StatusFilter()
|
||||
"""
|
||||
class DynamicRichChoiceFilter(RichChoiceFilter):
|
||||
def __init__(self, **kwargs):
|
||||
kwargs.setdefault("choice_group", choice_group)
|
||||
kwargs.setdefault("domain", domain)
|
||||
kwargs.setdefault("allow_deprecated", allow_deprecated)
|
||||
for key, value in extra_kwargs.items():
|
||||
kwargs.setdefault(key, value)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
return DynamicRichChoiceFilter
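A short sketch of how the three helpers above can be combined in a FilterSet; the Ticket context and the "ticket_statuses"/"support" group mirror the docstring examples and are illustrative only:

import django_filters

from apps.core.choices.filters import (
    RichChoiceFilter,
    RichMultipleChoiceFilter,
    get_choice_filter_class,
)

# Preset filter class built once and reused across FilterSets.
StatusFilter = get_choice_filter_class("ticket_statuses", "support")

class TicketFilterSet(django_filters.FilterSet):
    status = RichChoiceFilter(choice_group="ticket_statuses", domain="support")
    statuses = RichMultipleChoiceFilter(
        choice_group="ticket_statuses", domain="support", field_name="status"
    )
    status_via_factory = StatusFilter(field_name="status")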
|
||||
@@ -265,3 +265,98 @@ def serialize_choice_value(value: str, choice_group: str, domain: str = "core",
|
||||
}
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
class RichChoiceSerializerField(serializers.ChoiceField):
|
||||
"""
|
||||
DRF serializer field for RichChoice values.
|
||||
|
||||
This field validates input against the RichChoice registry and provides
|
||||
type-safe choice handling with proper error messages. It is the REQUIRED
|
||||
replacement for serializers.ChoiceField with inline choices.
|
||||
|
||||
Usage:
|
||||
class MySerializer(serializers.Serializer):
|
||||
status = RichChoiceSerializerField(
|
||||
choice_group="ticket_statuses",
|
||||
domain="support",
|
||||
)
|
||||
|
||||
# With rich metadata in output
|
||||
severity = RichChoiceSerializerField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
include_metadata=True,
|
||||
)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
choice_group: str,
|
||||
domain: str = "core",
|
||||
include_metadata: bool = False,
|
||||
allow_deprecated: bool = False,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Initialize the serializer field.
|
||||
|
||||
Args:
|
||||
choice_group: Name of the choice group in the registry
|
||||
domain: Domain namespace for the choice group
|
||||
include_metadata: Whether to include rich choice metadata in output
|
||||
allow_deprecated: Whether to allow deprecated choices
|
||||
**kwargs: Additional arguments passed to ChoiceField
|
||||
"""
|
||||
self.choice_group = choice_group
|
||||
self.domain = domain
|
||||
self.include_metadata = include_metadata
|
||||
self.allow_deprecated = allow_deprecated
|
||||
|
||||
# Get choices from registry for validation
|
||||
if allow_deprecated:
|
||||
choices_list = registry.get_choices(choice_group, domain)
|
||||
else:
|
||||
choices_list = registry.get_active_choices(choice_group, domain)
|
||||
|
||||
# Build choices tuple for DRF ChoiceField
|
||||
choices = [(c.value, c.label) for c in choices_list]
|
||||
|
||||
# Store valid values for error messages
|
||||
self._valid_values = [c.value for c in choices_list]
|
||||
|
||||
super().__init__(choices=choices, **kwargs)
|
||||
|
||||
def to_representation(self, value: str) -> Any:
|
||||
"""Convert choice value to representation."""
|
||||
if not value:
|
||||
return value
|
||||
|
||||
if self.include_metadata:
|
||||
return serialize_choice_value(
|
||||
value,
|
||||
self.choice_group,
|
||||
self.domain,
|
||||
include_metadata=True
|
||||
)
|
||||
return value
|
||||
|
||||
def to_internal_value(self, data: Any) -> str:
|
||||
"""Convert input data to choice value."""
|
||||
# Handle rich choice object input (value dict)
|
||||
if isinstance(data, dict) and "value" in data:
|
||||
data = data["value"]
|
||||
|
||||
# Validate and return
|
||||
return super().to_internal_value(data)
|
||||
|
||||
def fail(self, key: str, **kwargs: Any) -> None:
|
||||
"""Provide better error messages with valid choices listed."""
|
||||
if key == "invalid_choice":
|
||||
valid_choices = ", ".join(self._valid_values)
|
||||
raise serializers.ValidationError(
|
||||
f"'{kwargs.get('input', '')}' is not a valid choice for {self.choice_group}. "
|
||||
f"Valid choices are: {valid_choices}"
|
||||
)
|
||||
super().fail(key, **kwargs)
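A minimal usage sketch for the field above; the serializer name and module path are assumptions, while the choice groups come from the registrations earlier in this diff:

from rest_framework import serializers

from apps.core.choices.serializers import RichChoiceSerializerField  # module path assumed

class IncidentSummarySerializer(serializers.Serializer):
    status = RichChoiceSerializerField(choice_group="incident_statuses", domain="core")
    # include_metadata=True emits the rich representation built by serialize_choice_value
    # above instead of the bare string value.
    severity = RichChoiceSerializerField(
        choice_group="severity_levels", domain="core", include_metadata=True
    )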
|
||||
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-11 00:48
|
||||
|
||||
import apps.core.choices.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('core', '0010_add_milestone_model'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='applicationerror',
|
||||
name='severity',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='severity_levels', choices=[('critical', 'Critical'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')], db_index=True, default='medium', domain='core', help_text='Error severity level', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='applicationerror',
|
||||
name='source',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='error_sources', choices=[('frontend', 'Frontend'), ('backend', 'Backend'), ('api', 'API')], db_index=True, domain='core', help_text='Where the error originated', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='incident',
|
||||
name='severity',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='severity_levels', choices=[('critical', 'Critical'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')], db_index=True, domain='core', help_text='Incident severity level', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='incident',
|
||||
name='status',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='incident_statuses', choices=[('open', 'Open'), ('investigating', 'Investigating'), ('resolved', 'Resolved'), ('closed', 'Closed')], db_index=True, default='open', domain='core', help_text='Current incident status', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='incidentalert',
|
||||
name='alert_source',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='alert_sources', choices=[('system', 'System Alert'), ('rate_limit', 'Rate Limit Alert')], domain='core', help_text='Source type of the alert', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='milestone',
|
||||
name='event_date_precision',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='date_precision', choices=[('exact', 'Exact Date'), ('month', 'Month and Year'), ('year', 'Year Only'), ('decade', 'Decade'), ('century', 'Century'), ('approximate', 'Approximate')], default='exact', domain='core', help_text='Precision of the event date', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='milestoneevent',
|
||||
name='event_date_precision',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='date_precision', choices=[('exact', 'Exact Date'), ('month', 'Month and Year'), ('year', 'Year Only'), ('decade', 'Decade'), ('century', 'Century'), ('approximate', 'Approximate')], default='exact', domain='core', help_text='Precision of the event date', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='ratelimitalertconfig',
|
||||
name='metric_type',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='metric_types', choices=[('block_rate', 'Block Rate'), ('total_requests', 'Total Requests'), ('unique_ips', 'Unique IPs'), ('function_specific', 'Function Specific')], db_index=True, domain='core', help_text='Type of metric to monitor', max_length=50),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='requestmetadata',
|
||||
name='severity',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='request_severity_levels', choices=[('debug', 'Debug'), ('info', 'Info'), ('warning', 'Warning'), ('error', 'Error'), ('critical', 'Critical')], db_index=True, default='info', domain='core', help_text='Error severity level', max_length=20),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemalert',
|
||||
name='alert_type',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='system_alert_types', choices=[('orphaned_images', 'Orphaned Images'), ('stale_submissions', 'Stale Submissions'), ('circular_dependency', 'Circular Dependency'), ('validation_error', 'Validation Error'), ('ban_attempt', 'Ban Attempt'), ('upload_timeout', 'Upload Timeout'), ('high_error_rate', 'High Error Rate'), ('database_connection', 'Database Connection'), ('memory_usage', 'Memory Usage'), ('queue_backup', 'Queue Backup')], db_index=True, domain='core', help_text='Type of system alert', max_length=50),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemalert',
|
||||
name='severity',
|
||||
field=apps.core.choices.fields.RichChoiceField(allow_deprecated=False, choice_group='severity_levels', choices=[('critical', 'Critical'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low')], db_index=True, domain='core', help_text='Alert severity level', max_length=20),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,320 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-11 18:06
|
||||
|
||||
import apps.core.choices.fields
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0011_alter_applicationerror_severity_and_more"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="AlertCorrelationRule",
|
||||
fields=[
|
||||
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
(
|
||||
"rule_name",
|
||||
models.CharField(
|
||||
db_index=True, help_text="Unique name for this correlation rule", max_length=255, unique=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"rule_description",
|
||||
models.TextField(blank=True, help_text="Description of what this rule correlates"),
|
||||
),
|
||||
(
|
||||
"min_alerts_required",
|
||||
models.PositiveIntegerField(
|
||||
default=3, help_text="Minimum number of alerts needed to trigger correlation"
|
||||
),
|
||||
),
|
||||
(
|
||||
"time_window_minutes",
|
||||
models.PositiveIntegerField(default=30, help_text="Time window in minutes for alert correlation"),
|
||||
),
|
||||
(
|
||||
"incident_severity",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="severity_levels",
|
||||
choices=[("critical", "Critical"), ("high", "High"), ("medium", "Medium"), ("low", "Low")],
|
||||
default="medium",
|
||||
domain="core",
|
||||
help_text="Severity to assign to correlated incidents",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"incident_title_template",
|
||||
models.CharField(
|
||||
help_text="Template for incident title (supports {count}, {rule_name})", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_active",
|
||||
models.BooleanField(db_index=True, default=True, help_text="Whether this rule is currently active"),
|
||||
),
|
||||
("created_at", models.DateTimeField(auto_now_add=True, help_text="When this rule was created")),
|
||||
("updated_at", models.DateTimeField(auto_now=True, help_text="When this rule was last updated")),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Alert Correlation Rule",
|
||||
"verbose_name_plural": "Alert Correlation Rules",
|
||||
"ordering": ["rule_name"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CleanupJobLog",
|
||||
fields=[
|
||||
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
("job_name", models.CharField(db_index=True, help_text="Name of the cleanup job", max_length=255)),
|
||||
(
|
||||
"status",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="cleanup_job_statuses",
|
||||
choices=[
|
||||
("success", "Success"),
|
||||
("failed", "Failed"),
|
||||
("partial", "Partial"),
|
||||
("skipped", "Skipped"),
|
||||
],
|
||||
db_index=True,
|
||||
default="success",
|
||||
domain="core",
|
||||
help_text="Execution status",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("records_processed", models.PositiveIntegerField(default=0, help_text="Number of records processed")),
|
||||
("records_deleted", models.PositiveIntegerField(default=0, help_text="Number of records deleted")),
|
||||
("error_message", models.TextField(blank=True, help_text="Error message if job failed", null=True)),
|
||||
(
|
||||
"duration_ms",
|
||||
models.PositiveIntegerField(blank=True, help_text="Execution duration in milliseconds", null=True),
|
||||
),
|
||||
(
|
||||
"executed_at",
|
||||
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this job was executed"),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Cleanup Job Log",
|
||||
"verbose_name_plural": "Cleanup Job Logs",
|
||||
"ordering": ["-executed_at"],
|
||||
"indexes": [
|
||||
models.Index(fields=["job_name", "executed_at"], name="core_cleanu_job_nam_4530fd_idx"),
|
||||
models.Index(fields=["status", "executed_at"], name="core_cleanu_status_fa6360_idx"),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Anomaly",
|
||||
fields=[
|
||||
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
(
|
||||
"metric_name",
|
||||
models.CharField(
|
||||
db_index=True, help_text="Name of the metric that exhibited anomalous behavior", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"metric_category",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
help_text="Category of the metric (e.g., 'performance', 'error_rate', 'traffic')",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
(
|
||||
"anomaly_type",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="anomaly_types",
|
||||
choices=[
|
||||
("spike", "Spike"),
|
||||
("drop", "Drop"),
|
||||
("trend_change", "Trend Change"),
|
||||
("outlier", "Outlier"),
|
||||
("threshold_breach", "Threshold Breach"),
|
||||
],
|
||||
db_index=True,
|
||||
domain="core",
|
||||
help_text="Type of anomaly detected",
|
||||
max_length=30,
|
||||
),
|
||||
),
|
||||
(
|
||||
"severity",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="severity_levels",
|
||||
choices=[("critical", "Critical"), ("high", "High"), ("medium", "Medium"), ("low", "Low")],
|
||||
db_index=True,
|
||||
domain="core",
|
||||
help_text="Severity of the anomaly",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"anomaly_value",
|
||||
models.DecimalField(decimal_places=6, help_text="The anomalous value detected", max_digits=20),
|
||||
),
|
||||
(
|
||||
"baseline_value",
|
||||
models.DecimalField(decimal_places=6, help_text="The expected baseline value", max_digits=20),
|
||||
),
|
||||
(
|
||||
"deviation_score",
|
||||
models.DecimalField(decimal_places=4, help_text="Standard deviations from normal", max_digits=10),
|
||||
),
|
||||
(
|
||||
"confidence_score",
|
||||
models.DecimalField(
|
||||
decimal_places=4, help_text="Confidence score of the detection (0-1)", max_digits=5
|
||||
),
|
||||
),
|
||||
("detection_algorithm", models.CharField(help_text="Algorithm used for detection", max_length=100)),
|
||||
("time_window_start", models.DateTimeField(help_text="Start of the detection time window")),
|
||||
("time_window_end", models.DateTimeField(help_text="End of the detection time window")),
|
||||
(
|
||||
"alert_created",
|
||||
models.BooleanField(
|
||||
db_index=True, default=False, help_text="Whether an alert was created for this anomaly"
|
||||
),
|
||||
),
|
||||
(
|
||||
"detected_at",
|
||||
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this anomaly was detected"),
|
||||
),
|
||||
(
|
||||
"alert",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Linked system alert if created",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="anomalies",
|
||||
to="core.systemalert",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Anomaly",
|
||||
"verbose_name_plural": "Anomalies",
|
||||
"ordering": ["-detected_at"],
|
||||
"indexes": [
|
||||
models.Index(fields=["metric_name", "detected_at"], name="core_anomal_metric__06c3c9_idx"),
|
||||
models.Index(fields=["severity", "detected_at"], name="core_anomal_severit_ea7a17_idx"),
|
||||
models.Index(fields=["anomaly_type", "detected_at"], name="core_anomal_anomaly_eb45f7_idx"),
|
||||
models.Index(fields=["alert_created", "detected_at"], name="core_anomal_alert_c_5a0c1a_idx"),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="PipelineError",
|
||||
fields=[
|
||||
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
(
|
||||
"function_name",
|
||||
models.CharField(
|
||||
db_index=True, help_text="Name of the function/pipeline that failed", max_length=255
|
||||
),
|
||||
),
|
||||
("error_message", models.TextField(help_text="Error message describing the failure")),
|
||||
(
|
||||
"error_code",
|
||||
models.CharField(
|
||||
blank=True, db_index=True, help_text="Error code for categorization", max_length=100, null=True
|
||||
),
|
||||
),
|
||||
("error_context", models.JSONField(blank=True, help_text="Additional context data as JSON", null=True)),
|
||||
("stack_trace", models.TextField(blank=True, help_text="Full stack trace for debugging", null=True)),
|
||||
(
|
||||
"severity",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="pipeline_error_severities",
|
||||
choices=[
|
||||
("critical", "Critical"),
|
||||
("error", "Error"),
|
||||
("warning", "Warning"),
|
||||
("info", "Info"),
|
||||
],
|
||||
db_index=True,
|
||||
default="error",
|
||||
domain="core",
|
||||
help_text="Severity level of the error",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission_id",
|
||||
models.UUIDField(
|
||||
blank=True, db_index=True, help_text="ID of related content submission if applicable", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"item_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Generic reference to related item",
|
||||
max_length=255,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"request_id",
|
||||
models.UUIDField(blank=True, db_index=True, help_text="Request ID for correlation", null=True),
|
||||
),
|
||||
("trace_id", models.UUIDField(blank=True, db_index=True, help_text="Distributed trace ID", null=True)),
|
||||
(
|
||||
"resolved",
|
||||
models.BooleanField(db_index=True, default=False, help_text="Whether this error has been resolved"),
|
||||
),
|
||||
(
|
||||
"resolved_at",
|
||||
models.DateTimeField(
|
||||
blank=True, db_index=True, help_text="When this error was resolved", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"resolution_notes",
|
||||
models.TextField(blank=True, help_text="Notes about how the error was resolved", null=True),
|
||||
),
|
||||
(
|
||||
"occurred_at",
|
||||
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this error occurred"),
|
||||
),
|
||||
(
|
||||
"resolved_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="User who resolved this error",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="resolved_pipeline_errors",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Pipeline Error",
|
||||
"verbose_name_plural": "Pipeline Errors",
|
||||
"ordering": ["-occurred_at"],
|
||||
"indexes": [
|
||||
models.Index(fields=["severity", "occurred_at"], name="core_pipeli_severit_9c8037_idx"),
|
||||
models.Index(fields=["function_name", "occurred_at"], name="core_pipeli_functio_efb015_idx"),
|
||||
models.Index(fields=["resolved", "occurred_at"], name="core_pipeli_resolve_cd60c5_idx"),
|
||||
],
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -8,8 +8,12 @@ from django.contrib.contenttypes.models import ContentType
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
|
||||
from apps.core.choices.fields import RichChoiceField
|
||||
from apps.core.history import TrackedModel
|
||||
|
||||
# Import choices module to ensure registration on app load
|
||||
from apps.core.choices import core_choices # noqa: F401
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class SlugHistory(models.Model):
|
||||
@@ -136,17 +140,6 @@ class ApplicationError(models.Model):
|
||||
reported via API (frontend) and displayed in the admin dashboard.
|
||||
"""
|
||||
|
||||
class Severity(models.TextChoices):
|
||||
CRITICAL = "critical", "Critical"
|
||||
HIGH = "high", "High"
|
||||
MEDIUM = "medium", "Medium"
|
||||
LOW = "low", "Low"
|
||||
|
||||
class Source(models.TextChoices):
|
||||
FRONTEND = "frontend", "Frontend"
|
||||
BACKEND = "backend", "Backend"
|
||||
API = "api", "API"
|
||||
|
||||
# Identity
|
||||
error_id = models.UUIDField(
|
||||
unique=True,
|
||||
@@ -180,16 +173,18 @@ class ApplicationError(models.Model):
|
||||
db_index=True,
|
||||
help_text="Application-specific error code",
|
||||
)
|
||||
severity = models.CharField(
|
||||
severity = RichChoiceField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=Severity.choices,
|
||||
default=Severity.MEDIUM,
|
||||
default="medium",
|
||||
db_index=True,
|
||||
help_text="Error severity level",
|
||||
)
|
||||
source = models.CharField(
|
||||
source = RichChoiceField(
|
||||
choice_group="error_sources",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=Source.choices,
|
||||
db_index=True,
|
||||
help_text="Where the error originated",
|
||||
)
|
||||
@@ -308,34 +303,18 @@ class SystemAlert(models.Model):
|
||||
validation errors, ban attempts, upload timeouts, and high error rates.
|
||||
"""
|
||||
|
||||
class AlertType(models.TextChoices):
|
||||
ORPHANED_IMAGES = "orphaned_images", "Orphaned Images"
|
||||
STALE_SUBMISSIONS = "stale_submissions", "Stale Submissions"
|
||||
CIRCULAR_DEPENDENCY = "circular_dependency", "Circular Dependency"
|
||||
VALIDATION_ERROR = "validation_error", "Validation Error"
|
||||
BAN_ATTEMPT = "ban_attempt", "Ban Attempt"
|
||||
UPLOAD_TIMEOUT = "upload_timeout", "Upload Timeout"
|
||||
HIGH_ERROR_RATE = "high_error_rate", "High Error Rate"
|
||||
DATABASE_CONNECTION = "database_connection", "Database Connection"
|
||||
MEMORY_USAGE = "memory_usage", "Memory Usage"
|
||||
QUEUE_BACKUP = "queue_backup", "Queue Backup"
|
||||
|
||||
class Severity(models.TextChoices):
|
||||
LOW = "low", "Low"
|
||||
MEDIUM = "medium", "Medium"
|
||||
HIGH = "high", "High"
|
||||
CRITICAL = "critical", "Critical"
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
alert_type = models.CharField(
|
||||
alert_type = RichChoiceField(
|
||||
choice_group="system_alert_types",
|
||||
domain="core",
|
||||
max_length=50,
|
||||
choices=AlertType.choices,
|
||||
db_index=True,
|
||||
help_text="Type of system alert",
|
||||
)
|
||||
severity = models.CharField(
|
||||
severity = RichChoiceField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=Severity.choices,
|
||||
db_index=True,
|
||||
help_text="Alert severity level",
|
||||
)
|
||||
@@ -386,16 +365,11 @@ class RateLimitAlertConfig(models.Model):
|
||||
Defines thresholds that trigger alerts when exceeded.
|
||||
"""
|
||||
|
||||
class MetricType(models.TextChoices):
|
||||
BLOCK_RATE = "block_rate", "Block Rate"
|
||||
TOTAL_REQUESTS = "total_requests", "Total Requests"
|
||||
UNIQUE_IPS = "unique_ips", "Unique IPs"
|
||||
FUNCTION_SPECIFIC = "function_specific", "Function Specific"
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
metric_type = models.CharField(
|
||||
metric_type = RichChoiceField(
|
||||
choice_group="metric_types",
|
||||
domain="core",
|
||||
max_length=50,
|
||||
choices=MetricType.choices,
|
||||
db_index=True,
|
||||
help_text="Type of metric to monitor",
|
||||
)
|
||||
@@ -484,18 +458,6 @@ class Incident(models.Model):
|
||||
allowing teams to track and resolve related alerts together.
|
||||
"""
|
||||
|
||||
class Status(models.TextChoices):
|
||||
OPEN = "open", "Open"
|
||||
INVESTIGATING = "investigating", "Investigating"
|
||||
RESOLVED = "resolved", "Resolved"
|
||||
CLOSED = "closed", "Closed"
|
||||
|
||||
class Severity(models.TextChoices):
|
||||
LOW = "low", "Low"
|
||||
MEDIUM = "medium", "Medium"
|
||||
HIGH = "high", "High"
|
||||
CRITICAL = "critical", "Critical"
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
incident_number = models.CharField(
|
||||
max_length=20,
|
||||
@@ -505,16 +467,18 @@ class Incident(models.Model):
|
||||
)
|
||||
title = models.CharField(max_length=255, help_text="Brief description of the incident")
|
||||
description = models.TextField(null=True, blank=True, help_text="Detailed description")
|
||||
severity = models.CharField(
|
||||
severity = RichChoiceField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=Severity.choices,
|
||||
db_index=True,
|
||||
help_text="Incident severity level",
|
||||
)
|
||||
status = models.CharField(
|
||||
status = RichChoiceField(
|
||||
choice_group="incident_statuses",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=Status.choices,
|
||||
default=Status.OPEN,
|
||||
default="open",
|
||||
db_index=True,
|
||||
help_text="Current incident status",
|
||||
)
|
||||
@@ -582,10 +546,6 @@ class IncidentAlert(models.Model):
|
||||
Supports linking both system alerts and rate limit alerts.
|
||||
"""
|
||||
|
||||
class AlertSource(models.TextChoices):
|
||||
SYSTEM = "system", "System Alert"
|
||||
RATE_LIMIT = "rate_limit", "Rate Limit Alert"
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
incident = models.ForeignKey(
|
||||
Incident,
|
||||
@@ -593,9 +553,10 @@ class IncidentAlert(models.Model):
|
||||
related_name="linked_alerts",
|
||||
help_text="The incident this alert is linked to",
|
||||
)
|
||||
alert_source = models.CharField(
|
||||
alert_source = RichChoiceField(
|
||||
choice_group="alert_sources",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=AlertSource.choices,
|
||||
help_text="Source type of the alert",
|
||||
)
|
||||
alert_id = models.UUIDField(help_text="ID of the linked alert")
|
||||
@@ -633,13 +594,6 @@ class RequestMetadata(models.Model):
|
||||
dashboard for error monitoring and analytics.
|
||||
"""
|
||||
|
||||
class Severity(models.TextChoices):
|
||||
DEBUG = "debug", "Debug"
|
||||
INFO = "info", "Info"
|
||||
WARNING = "warning", "Warning"
|
||||
ERROR = "error", "Error"
|
||||
CRITICAL = "critical", "Critical"
|
||||
|
||||
# Identity & Correlation
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
request_id = models.CharField(
|
||||
@@ -789,10 +743,11 @@ class RequestMetadata(models.Model):
|
||||
null=True,
|
||||
help_text="React component stack trace",
|
||||
)
|
||||
severity = models.CharField(
|
||||
severity = RichChoiceField(
|
||||
choice_group="request_severity_levels",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=Severity.choices,
|
||||
default=Severity.INFO,
|
||||
default="info",
|
||||
db_index=True,
|
||||
help_text="Error severity level",
|
||||
)
|
||||
@@ -1062,14 +1017,6 @@ class Milestone(TrackedModel):
|
||||
Maps to frontend milestoneValidationSchema in entityValidationSchemas.ts
|
||||
"""
|
||||
|
||||
class DatePrecision(models.TextChoices):
|
||||
EXACT = "exact", "Exact Date"
|
||||
MONTH = "month", "Month and Year"
|
||||
YEAR = "year", "Year Only"
|
||||
DECADE = "decade", "Decade"
|
||||
CENTURY = "century", "Century"
|
||||
APPROXIMATE = "approximate", "Approximate"
|
||||
|
||||
# Core event information
|
||||
title = models.CharField(
|
||||
max_length=200,
|
||||
@@ -1088,10 +1035,11 @@ class Milestone(TrackedModel):
|
||||
db_index=True,
|
||||
help_text="Date when the event occurred or will occur",
|
||||
)
|
||||
event_date_precision = models.CharField(
|
||||
event_date_precision = RichChoiceField(
|
||||
choice_group="date_precision",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
choices=DatePrecision.choices,
|
||||
default=DatePrecision.EXACT,
|
||||
default="exact",
|
||||
help_text="Precision of the event date",
|
||||
)
|
||||
|
||||
@@ -1161,3 +1109,354 @@ class Milestone(TrackedModel):
|
||||
def __str__(self) -> str:
|
||||
return f"{self.title} ({self.event_date})"
|
||||
|
||||
|
||||
class PipelineError(models.Model):
|
||||
"""
|
||||
Tracks pipeline/processing errors for debugging and monitoring.
|
||||
|
||||
Records errors that occur during data processing pipelines,
|
||||
approval workflows, and other automated operations. Supports
|
||||
resolution tracking and filtering by severity, function, and date.
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
function_name = models.CharField(
|
||||
max_length=255,
|
||||
db_index=True,
|
||||
help_text="Name of the function/pipeline that failed",
|
||||
)
|
||||
error_message = models.TextField(
|
||||
help_text="Error message describing the failure",
|
||||
)
|
||||
error_code = models.CharField(
|
||||
max_length=100,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
help_text="Error code for categorization",
|
||||
)
|
||||
error_context = models.JSONField(
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Additional context data as JSON",
|
||||
)
|
||||
stack_trace = models.TextField(
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Full stack trace for debugging",
|
||||
)
|
||||
severity = RichChoiceField(
|
||||
choice_group="pipeline_error_severities",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
default="error",
|
||||
db_index=True,
|
||||
help_text="Severity level of the error",
|
||||
)
|
||||
|
||||
# References
|
||||
submission_id = models.UUIDField(
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
help_text="ID of related content submission if applicable",
|
||||
)
|
||||
item_id = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
help_text="Generic reference to related item",
|
||||
)
|
||||
request_id = models.UUIDField(
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
help_text="Request ID for correlation",
|
||||
)
|
||||
trace_id = models.UUIDField(
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
help_text="Distributed trace ID",
|
||||
)
|
||||
|
||||
# Resolution
|
||||
resolved = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Whether this error has been resolved",
|
||||
)
|
||||
resolved_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="resolved_pipeline_errors",
|
||||
help_text="User who resolved this error",
|
||||
)
|
||||
resolved_at = models.DateTimeField(
|
||||
blank=True,
|
||||
null=True,
|
||||
db_index=True,
|
||||
help_text="When this error was resolved",
|
||||
)
|
||||
resolution_notes = models.TextField(
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Notes about how the error was resolved",
|
||||
)
|
||||
|
||||
# Timestamps
|
||||
occurred_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
db_index=True,
|
||||
help_text="When this error occurred",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ["-occurred_at"]
|
||||
verbose_name = "Pipeline Error"
|
||||
verbose_name_plural = "Pipeline Errors"
|
||||
indexes = [
|
||||
models.Index(fields=["severity", "occurred_at"]),
|
||||
models.Index(fields=["function_name", "occurred_at"]),
|
||||
models.Index(fields=["resolved", "occurred_at"]),
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"[{self.get_severity_display()}] {self.function_name}: {self.error_message[:50]}"
|
||||
|
||||
|
||||
class Anomaly(models.Model):
|
||||
"""
|
||||
Records detected anomalies in system metrics.
|
||||
|
||||
Anomalies are identified by detection algorithms and stored
|
||||
for analysis and alerting. Each anomaly includes the metric
|
||||
details, deviation scores, and optional links to created alerts.
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
metric_name = models.CharField(
|
||||
max_length=255,
|
||||
db_index=True,
|
||||
help_text="Name of the metric that exhibited anomalous behavior",
|
||||
)
|
||||
metric_category = models.CharField(
|
||||
max_length=100,
|
||||
db_index=True,
|
||||
help_text="Category of the metric (e.g., 'performance', 'error_rate', 'traffic')",
|
||||
)
|
||||
anomaly_type = RichChoiceField(
|
||||
choice_group="anomaly_types",
|
||||
domain="core",
|
||||
max_length=30,
|
||||
db_index=True,
|
||||
help_text="Type of anomaly detected",
|
||||
)
|
||||
severity = RichChoiceField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
db_index=True,
|
||||
help_text="Severity of the anomaly",
|
||||
)
|
||||
|
||||
# Metric values
|
||||
anomaly_value = models.DecimalField(
|
||||
max_digits=20,
|
||||
decimal_places=6,
|
||||
help_text="The anomalous value detected",
|
||||
)
|
||||
baseline_value = models.DecimalField(
|
||||
max_digits=20,
|
||||
decimal_places=6,
|
||||
help_text="The expected baseline value",
|
||||
)
|
||||
deviation_score = models.DecimalField(
|
||||
max_digits=10,
|
||||
decimal_places=4,
|
||||
help_text="Standard deviations from normal",
|
||||
)
|
||||
confidence_score = models.DecimalField(
|
||||
max_digits=5,
|
||||
decimal_places=4,
|
||||
help_text="Confidence score of the detection (0-1)",
|
||||
)
|
||||
|
||||
# Detection context
|
||||
detection_algorithm = models.CharField(
|
||||
max_length=100,
|
||||
help_text="Algorithm used for detection",
|
||||
)
|
||||
time_window_start = models.DateTimeField(
|
||||
help_text="Start of the detection time window",
|
||||
)
|
||||
time_window_end = models.DateTimeField(
|
||||
help_text="End of the detection time window",
|
||||
)
|
||||
|
||||
# Alert linkage
|
||||
alert_created = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Whether an alert was created for this anomaly",
|
||||
)
|
||||
alert = models.ForeignKey(
|
||||
SystemAlert,
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="anomalies",
|
||||
help_text="Linked system alert if created",
|
||||
)
|
||||
|
||||
# Timestamps
|
||||
detected_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
db_index=True,
|
||||
help_text="When this anomaly was detected",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ["-detected_at"]
|
||||
verbose_name = "Anomaly"
|
||||
verbose_name_plural = "Anomalies"
|
||||
indexes = [
|
||||
models.Index(fields=["metric_name", "detected_at"]),
|
||||
models.Index(fields=["severity", "detected_at"]),
|
||||
models.Index(fields=["anomaly_type", "detected_at"]),
|
||||
models.Index(fields=["alert_created", "detected_at"]),
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"[{self.get_severity_display()}] {self.metric_name}: {self.get_anomaly_type_display()}"
|
||||
|
||||
|
||||
class AlertCorrelationRule(models.Model):
|
||||
"""
|
||||
Defines rules for correlating multiple alerts.
|
||||
|
||||
When multiple alerts match a correlation rule's pattern within
|
||||
the time window, they can be grouped into an incident. This
|
||||
helps reduce alert fatigue and identify related issues.
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
rule_name = models.CharField(
|
||||
max_length=255,
|
||||
unique=True,
|
||||
db_index=True,
|
||||
help_text="Unique name for this correlation rule",
|
||||
)
|
||||
rule_description = models.TextField(
|
||||
blank=True,
|
||||
help_text="Description of what this rule correlates",
|
||||
)
|
||||
min_alerts_required = models.PositiveIntegerField(
|
||||
default=3,
|
||||
help_text="Minimum number of alerts needed to trigger correlation",
|
||||
)
|
||||
time_window_minutes = models.PositiveIntegerField(
|
||||
default=30,
|
||||
help_text="Time window in minutes for alert correlation",
|
||||
)
|
||||
incident_severity = RichChoiceField(
|
||||
choice_group="severity_levels",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
default="medium",
|
||||
help_text="Severity to assign to correlated incidents",
|
||||
)
|
||||
incident_title_template = models.CharField(
|
||||
max_length=255,
|
||||
help_text="Template for incident title (supports {count}, {rule_name})",
|
||||
)
|
||||
is_active = models.BooleanField(
|
||||
default=True,
|
||||
db_index=True,
|
||||
help_text="Whether this rule is currently active",
|
||||
)
|
||||
|
||||
# Timestamps
|
||||
created_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="When this rule was created",
|
||||
)
|
||||
updated_at = models.DateTimeField(
|
||||
auto_now=True,
|
||||
help_text="When this rule was last updated",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ["rule_name"]
|
||||
verbose_name = "Alert Correlation Rule"
|
||||
verbose_name_plural = "Alert Correlation Rules"
|
||||
|
||||
def __str__(self) -> str:
|
||||
status = "active" if self.is_active else "inactive"
|
||||
return f"{self.rule_name} ({status})"
|
||||
|
||||
|
||||
class CleanupJobLog(models.Model):
|
||||
"""
|
||||
Audit log for cleanup and maintenance jobs.
|
||||
|
||||
Records the execution of background cleanup tasks,
|
||||
including success/failure status, records processed,
|
||||
and execution time for monitoring and debugging.
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
job_name = models.CharField(
|
||||
max_length=255,
|
||||
db_index=True,
|
||||
help_text="Name of the cleanup job",
|
||||
)
|
||||
status = RichChoiceField(
|
||||
choice_group="cleanup_job_statuses",
|
||||
domain="core",
|
||||
max_length=20,
|
||||
default="success",
|
||||
db_index=True,
|
||||
help_text="Execution status",
|
||||
)
|
||||
records_processed = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Number of records processed",
|
||||
)
|
||||
records_deleted = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Number of records deleted",
|
||||
)
|
||||
error_message = models.TextField(
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Error message if job failed",
|
||||
)
|
||||
duration_ms = models.PositiveIntegerField(
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Execution duration in milliseconds",
|
||||
)
|
||||
|
||||
# Timestamps
|
||||
executed_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
db_index=True,
|
||||
help_text="When this job was executed",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ["-executed_at"]
|
||||
verbose_name = "Cleanup Job Log"
|
||||
verbose_name_plural = "Cleanup Job Logs"
|
||||
indexes = [
|
||||
models.Index(fields=["job_name", "executed_at"]),
|
||||
models.Index(fields=["status", "executed_at"]),
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.job_name}: {self.get_status_display()} ({self.records_processed} processed)"
|
||||
|
||||
|
||||
@@ -124,6 +124,20 @@ SUBMISSION_TYPES = [
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="PHOTO",
|
||||
label="Photo Submission",
|
||||
description="Photo upload for existing content",
|
||||
metadata={
|
||||
"color": "purple",
|
||||
"icon": "photograph",
|
||||
"css_class": "bg-purple-100 text-purple-800 border-purple-200",
|
||||
"sort_order": 3,
|
||||
"requires_existing_object": True,
|
||||
"complexity_level": "low",
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
@@ -934,6 +948,122 @@ BULK_OPERATION_TYPES = [
|
||||
# PhotoSubmission uses the same STATUS_CHOICES as EditSubmission
|
||||
PHOTO_SUBMISSION_STATUSES = EDIT_SUBMISSION_STATUSES
|
||||
|
||||
# ============================================================================
|
||||
# ModerationAuditLog Action Choices
|
||||
# ============================================================================
|
||||
|
||||
MODERATION_AUDIT_ACTIONS = [
|
||||
RichChoice(
|
||||
value="approved",
|
||||
label="Approved",
|
||||
description="Submission was approved by moderator",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="rejected",
|
||||
label="Rejected",
|
||||
description="Submission was rejected by moderator",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="claimed",
|
||||
label="Claimed",
|
||||
description="Submission was claimed by moderator",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "user-check",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="unclaimed",
|
||||
label="Unclaimed",
|
||||
description="Submission was released by moderator",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "user-minus",
|
||||
"css_class": "bg-gray-100 text-gray-800",
|
||||
"sort_order": 4,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="escalated",
|
||||
label="Escalated",
|
||||
description="Submission was escalated for higher-level review",
|
||||
metadata={
|
||||
"color": "purple",
|
||||
"icon": "arrow-up",
|
||||
"css_class": "bg-purple-100 text-purple-800",
|
||||
"sort_order": 5,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="converted_to_edit",
|
||||
label="Converted to Edit",
|
||||
description="Photo submission was converted to an edit submission",
|
||||
metadata={
|
||||
"color": "indigo",
|
||||
"icon": "refresh",
|
||||
"css_class": "bg-indigo-100 text-indigo-800",
|
||||
"sort_order": 6,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="status_changed",
|
||||
label="Status Changed",
|
||||
description="Submission status was changed",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "refresh-cw",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 7,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="notes_added",
|
||||
label="Notes Added",
|
||||
description="Moderator notes were added to submission",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "edit",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 8,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="auto_approved",
|
||||
label="Auto Approved",
|
||||
description="Submission was auto-approved by the system",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "zap",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 9,
|
||||
"is_system_action": True,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Choice Registration
|
||||
# ============================================================================
|
||||
@@ -958,3 +1088,6 @@ register_choices("bulk_operation_types", BULK_OPERATION_TYPES, "moderation", "Bu
|
||||
register_choices(
|
||||
"photo_submission_statuses", PHOTO_SUBMISSION_STATUSES, "moderation", "Photo submission status options"
|
||||
)
|
||||
register_choices(
|
||||
"moderation_audit_actions", MODERATION_AUDIT_ACTIONS, "moderation", "Moderation audit log action types"
|
||||
)
|
||||
|
||||
@@ -27,12 +27,10 @@ User = get_user_model()
|
||||
class ModerationReportFilter(django_filters.FilterSet):
|
||||
"""Filter for ModerationReport model."""
|
||||
|
||||
# Status filters
|
||||
status = django_filters.ChoiceFilter(
|
||||
choices=lambda: [
|
||||
(choice.value, choice.label) for choice in get_choices("moderation_report_statuses", "moderation")
|
||||
],
|
||||
help_text="Filter by report status",
|
||||
# Status filters - use method filter for case-insensitive matching
|
||||
status = django_filters.CharFilter(
|
||||
method="filter_status",
|
||||
help_text="Filter by report status (case-insensitive)",
|
||||
)
|
||||
|
||||
# Priority filters
|
||||
@@ -144,6 +142,19 @@ class ModerationReportFilter(django_filters.FilterSet):
|
||||
return queryset.exclude(resolution_action__isnull=True, resolution_action="")
|
||||
return queryset.filter(Q(resolution_action__isnull=True) | Q(resolution_action=""))
|
||||
|
||||
def filter_status(self, queryset, name, value):
|
||||
"""Filter by status with case-insensitive matching."""
|
||||
if not value:
|
||||
return queryset
|
||||
# Normalize to uppercase for matching against RichChoice values
|
||||
normalized_value = value.upper()
|
||||
# Validate against registered choices
|
||||
valid_values = {choice.value for choice in get_choices("moderation_report_statuses", "moderation")}
|
||||
if normalized_value in valid_values:
|
||||
return queryset.filter(status=normalized_value)
|
||||
# If not valid, return empty queryset (invalid filter value)
|
||||
return queryset.none()
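For example (module paths and the "PENDING" value are assumed; the accepted values are whatever the moderation_report_statuses group registers), both spellings now resolve to the same queryset:

from apps.moderation.filters import ModerationReportFilter  # module paths assumed
from apps.moderation.models import ModerationReport

qs = ModerationReport.objects.all()
lower = ModerationReportFilter({"status": "pending"}, queryset=qs)
upper = ModerationReportFilter({"status": "PENDING"}, queryset=qs)
assert list(lower.qs) == list(upper.qs)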
|
||||
|
||||
|
||||
class ModerationQueueFilter(django_filters.FilterSet):
|
||||
"""Filter for ModerationQueue model."""
|
||||
|
||||
@@ -0,0 +1,96 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-11 18:06
|
||||
|
||||
import apps.core.choices.fields
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("moderation", "0009_add_claim_fields"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ModerationAuditLog",
|
||||
fields=[
|
||||
("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
|
||||
(
|
||||
"action",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="moderation_audit_actions",
|
||||
choices=[
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
("claimed", "Claimed"),
|
||||
("unclaimed", "Unclaimed"),
|
||||
("escalated", "Escalated"),
|
||||
("converted_to_edit", "Converted to Edit"),
|
||||
("status_changed", "Status Changed"),
|
||||
("notes_added", "Notes Added"),
|
||||
("auto_approved", "Auto Approved"),
|
||||
],
|
||||
db_index=True,
|
||||
domain="moderation",
|
||||
help_text="The action that was performed",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"previous_status",
|
||||
models.CharField(blank=True, help_text="Status before the action", max_length=50, null=True),
|
||||
),
|
||||
(
|
||||
"new_status",
|
||||
models.CharField(blank=True, help_text="Status after the action", max_length=50, null=True),
|
||||
),
|
||||
("notes", models.TextField(blank=True, help_text="Notes or comments about the action", null=True)),
|
||||
(
|
||||
"is_system_action",
|
||||
models.BooleanField(
|
||||
db_index=True, default=False, help_text="Whether this was an automated system action"
|
||||
),
|
||||
),
|
||||
("is_test_data", models.BooleanField(default=False, help_text="Whether this is test data")),
|
||||
(
|
||||
"created_at",
|
||||
models.DateTimeField(auto_now_add=True, db_index=True, help_text="When this action was performed"),
|
||||
),
|
||||
(
|
||||
"moderator",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="The moderator who performed the action (null for system actions)",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="moderation_audit_logs",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.ForeignKey(
|
||||
help_text="The submission this audit log entry is for",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="audit_logs",
|
||||
to="moderation.editsubmission",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Moderation Audit Log",
|
||||
"verbose_name_plural": "Moderation Audit Logs",
|
||||
"ordering": ["-created_at"],
|
||||
"indexes": [
|
||||
models.Index(fields=["submission", "created_at"], name="moderation__submiss_2f5e56_idx"),
|
||||
models.Index(fields=["moderator", "created_at"], name="moderation__moderat_591c14_idx"),
|
||||
models.Index(fields=["action", "created_at"], name="moderation__action_a98c47_idx"),
|
||||
],
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,99 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-12 23:00
|
||||
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("django_cloudflareimages_toolkit", "0001_initial"),
|
||||
("moderation", "0010_moderationauditlog"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="editsubmission",
|
||||
name="insert_insert",
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="editsubmission",
|
||||
name="update_update",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="editsubmission",
|
||||
name="caption",
|
||||
field=models.CharField(blank=True, help_text="Photo caption", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="editsubmission",
|
||||
name="date_taken",
|
||||
field=models.DateField(blank=True, help_text="Date the photo was taken", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="editsubmission",
|
||||
name="photo",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Photo for photo submissions",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="django_cloudflareimages_toolkit.cloudflareimage",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="editsubmissionevent",
|
||||
name="caption",
|
||||
field=models.CharField(blank=True, help_text="Photo caption", max_length=255),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="editsubmissionevent",
|
||||
name="date_taken",
|
||||
field=models.DateField(blank=True, help_text="Date the photo was taken", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="editsubmissionevent",
|
||||
name="photo",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Photo for photo submissions",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="django_cloudflareimages_toolkit.cloudflareimage",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="editsubmission",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "moderation_editsubmissionevent" ("caption", "changes", "claimed_at", "claimed_by_id", "content_type_id", "created_at", "date_taken", "handled_at", "handled_by_id", "id", "moderator_changes", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "photo_id", "reason", "source", "status", "submission_type", "updated_at", "user_id") VALUES (NEW."caption", NEW."changes", NEW."claimed_at", NEW."claimed_by_id", NEW."content_type_id", NEW."created_at", NEW."date_taken", NEW."handled_at", NEW."handled_by_id", NEW."id", NEW."moderator_changes", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."photo_id", NEW."reason", NEW."source", NEW."status", NEW."submission_type", NEW."updated_at", NEW."user_id"); RETURN NULL;',
|
||||
hash="e9aed25fe6389b113919e729543a9abe20d9f30c",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_2c796",
|
||||
table="moderation_editsubmission",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="editsubmission",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "moderation_editsubmissionevent" ("caption", "changes", "claimed_at", "claimed_by_id", "content_type_id", "created_at", "date_taken", "handled_at", "handled_by_id", "id", "moderator_changes", "notes", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "photo_id", "reason", "source", "status", "submission_type", "updated_at", "user_id") VALUES (NEW."caption", NEW."changes", NEW."claimed_at", NEW."claimed_by_id", NEW."content_type_id", NEW."created_at", NEW."date_taken", NEW."handled_at", NEW."handled_by_id", NEW."id", NEW."moderator_changes", NEW."notes", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."photo_id", NEW."reason", NEW."source", NEW."status", NEW."submission_type", NEW."updated_at", NEW."user_id"); RETURN NULL;',
|
||||
hash="070083ba4d2d459067d9c3a90356a759f6262a90",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_ab38f",
|
||||
table="moderation_editsubmission",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,64 @@
"""
Data migration to copy PhotoSubmission data to EditSubmission.

This migration copies all PhotoSubmission rows to EditSubmission with submission_type="PHOTO".
After this migration, the PhotoSubmission model can be safely removed.
"""

from django.db import migrations


def migrate_photo_submissions(apps, schema_editor):
    """Copy PhotoSubmission data to EditSubmission."""
    PhotoSubmission = apps.get_model("moderation", "PhotoSubmission")
    EditSubmission = apps.get_model("moderation", "EditSubmission")
    ContentType = apps.get_model("contenttypes", "ContentType")

    # Get EditSubmission content type for reference
    edit_submission_ct = ContentType.objects.get_for_model(EditSubmission)

    migrated = 0
    for photo_sub in PhotoSubmission.objects.all():
        # Create EditSubmission from PhotoSubmission
        EditSubmission.objects.create(
            user=photo_sub.user,
            content_type=photo_sub.content_type,
            object_id=photo_sub.object_id,
            submission_type="PHOTO",
            changes={},  # Photos don't have field changes
            reason="Photo submission",  # Default reason
            status=photo_sub.status,
            created_at=photo_sub.created_at,
            handled_by=photo_sub.handled_by,
            handled_at=photo_sub.handled_at,
            notes=photo_sub.notes,
            claimed_by=photo_sub.claimed_by,
            claimed_at=photo_sub.claimed_at,
            # Photo-specific fields
            photo=photo_sub.photo,
            caption=photo_sub.caption,
            date_taken=photo_sub.date_taken,
        )
        migrated += 1

    if migrated:
        print(f"Migrated {migrated} PhotoSubmission(s) to EditSubmission")


def reverse_migration(apps, schema_editor):
    """Remove migrated EditSubmissions with type PHOTO."""
    EditSubmission = apps.get_model("moderation", "EditSubmission")
    deleted, _ = EditSubmission.objects.filter(submission_type="PHOTO").delete()
    if deleted:
        print(f"Deleted {deleted} PHOTO EditSubmission(s)")


class Migration(migrations.Migration):

    dependencies = [
        ("moderation", "0011_add_photo_fields_to_editsubmission"),
    ]

    operations = [
        migrations.RunPython(migrate_photo_submissions, reverse_migration),
    ]
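The copy above runs row by row, which is fine at this table size. As a hedged sketch (not part of the commit), a follow-up RunPython step could assert the copy is complete; the function below assumes only the two models already referenced in the migration:

def check_photo_copy(apps, schema_editor):
    """Hedged sketch: verify every PhotoSubmission row was copied to a PHOTO EditSubmission."""
    PhotoSubmission = apps.get_model("moderation", "PhotoSubmission")
    EditSubmission = apps.get_model("moderation", "EditSubmission")
    source_count = PhotoSubmission.objects.count()
    copied_count = EditSubmission.objects.filter(submission_type="PHOTO").count()
    if copied_count < source_count:
        raise RuntimeError(f"Photo copy incomplete: {copied_count} of {source_count} rows migrated")

It could be wired in as migrations.RunPython(check_photo_copy, migrations.RunPython.noop) after the copy step.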
@@ -18,6 +18,7 @@ are registered via the callback configuration defined in each model's Meta class

from datetime import timedelta
from typing import Any
import uuid

import pghistory
from django.conf import settings
@@ -114,6 +115,25 @@ class EditSubmission(StateMachineMixin, TrackedModel):
        help_text="Moderator's edited version of the changes before approval",
    )

    # Photo submission fields (only used when submission_type="PHOTO")
    photo = models.ForeignKey(
        "django_cloudflareimages_toolkit.CloudflareImage",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        help_text="Photo for photo submissions",
    )
    caption = models.CharField(
        max_length=255,
        blank=True,
        help_text="Photo caption",
    )
    date_taken = models.DateField(
        null=True,
        blank=True,
        help_text="Date the photo was taken",
    )

    # Metadata
    reason = models.TextField(help_text="Why this edit/addition is needed")
    source = models.TextField(blank=True, help_text="Source of information (if applicable)")
@@ -190,6 +210,122 @@ class EditSubmission(StateMachineMixin, TrackedModel):
        """Get the final changes to apply (moderator changes if available, otherwise original changes)"""
        return self.moderator_changes or self.changes

    def _get_model_class_for_item_type(self, item_type: str):
        """
        Map item_type string to the corresponding Django model class.

        Args:
            item_type: Type string from frontend (e.g., 'manufacturer', 'park', 'ride_model')

        Returns:
            Model class for the item type
        """
        # Lazy imports to avoid circular dependencies
        from apps.parks.models import Company, Park
        from apps.rides.models import Ride, RideModel

        type_map = {
            # Company types (all map to Company model)
            'manufacturer': Company,
            'designer': Company,
            'operator': Company,
            'property_owner': Company,
            'company': Company,
            # Entity types
            'park': Park,
            'ride': Ride,
            'ride_model': RideModel,
        }

        model_class = type_map.get(item_type.lower())
        if not model_class:
            raise ValueError(f"Unknown item_type: {item_type}")
        return model_class

    def _process_composite_items(self, composite_items: list[dict[str, Any]]) -> dict[int, Any]:
        """
        Process composite submission items (dependencies) before the primary entity.

        Args:
            composite_items: List of dependency items from frontend's submissionItems array
                Each item has: item_type, action_type, item_data, order_index, depends_on

        Returns:
            Dictionary mapping order_index -> created entity ID for resolving temp references
        """
        from django.db import transaction

        # Sort by order_index to ensure proper dependency order
        sorted_items = sorted(composite_items, key=lambda x: x.get('order_index', 0))

        # Map of order_index -> created entity ID
        created_entities: dict[int, Any] = {}

        with transaction.atomic():
            for item in sorted_items:
                item_type = item.get('item_type', '')
                item_data = item.get('item_data', {})
                order_index = item.get('order_index', 0)

                if not item_type or not item_data:
                    continue

                # Get the model class for this item type
                model_class = self._get_model_class_for_item_type(item_type)

                # Clean up internal fields not needed for model creation
                clean_data = {}
                for key, value in item_data.items():
                    # Skip internal/temp fields
                    if key.startswith('_temp_') or key == 'images' or key == '_composite_items':
                        continue
                    # Skip fields with None or 'temp-' values
                    if value is None or (isinstance(value, str) and value.startswith('temp-')):
                        continue
                    clean_data[key] = value

                # Resolve _temp_*_ref fields to actual entity IDs from previously created entities
                for key, value in item_data.items():
                    if key.startswith('_temp_') and key.endswith('_ref'):
                        # Extract the field name: _temp_manufacturer_ref -> manufacturer_id
                        field_name = key[6:-4] + '_id'  # Remove '_temp_' prefix and '_ref' suffix
                        ref_order_index = value
                        if isinstance(ref_order_index, int) and ref_order_index in created_entities:
                            clean_data[field_name] = created_entities[ref_order_index]

                # Resolve foreign keys to model instances
                resolved_data = {}
                for field_name, value in clean_data.items():
                    try:
                        field = model_class._meta.get_field(field_name)
                        if isinstance(field, models.ForeignKey) and value is not None:
                            try:
                                related_obj = field.related_model.objects.get(pk=value)
                                resolved_data[field_name] = related_obj
                            except ObjectDoesNotExist:
                                # Skip invalid FK references
                                continue
                        else:
                            resolved_data[field_name] = value
                    except Exception:
                        # Field doesn't exist on model, still try to include it
                        resolved_data[field_name] = value

                # Create the entity
                try:
                    obj = model_class(**resolved_data)
                    obj.full_clean()
                    obj.save()
                    created_entities[order_index] = obj.pk
                except Exception as e:
                    # Log but continue - don't fail the whole submission for one dependency
                    import logging
                    logger = logging.getLogger(__name__)
                    logger.error(f"Failed to create composite item {item_type}: {e}")
                    continue

        return created_entities
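    # Illustrative only (not from the commit): the composite payload this method expects,
    # with hypothetical values. Item 0 creates a Company; item 1 references it through a
    # _temp_*_ref key that resolves to "manufacturer_id" once item 0 has been created.
    #
    #   composite_items = [
    #       {"item_type": "manufacturer", "action_type": "create", "order_index": 0,
    #        "item_data": {"name": "Example Coasters Ltd."}},
    #       {"item_type": "ride_model", "action_type": "create", "order_index": 1,
    #        "item_data": {"name": "Example Model", "_temp_manufacturer_ref": 0}},
    #   ]
    #
    # _process_composite_items(composite_items) would then return {0: <Company pk>, 1: <RideModel pk>}.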
|
||||
    def claim(self, user: UserType) -> None:
        """
        Claim this submission for review.
@@ -266,6 +402,28 @@ class EditSubmission(StateMachineMixin, TrackedModel):
            raise ValueError("Could not resolve model class")

        final_changes = self._get_final_changes()

        # Process composite items (dependencies) first if present
        created_entity_ids: dict[int, Any] = {}
        if '_composite_items' in final_changes:
            composite_items = final_changes.pop('_composite_items')
            if composite_items and isinstance(composite_items, list):
                created_entity_ids = self._process_composite_items(composite_items)

        # Resolve _temp_*_ref fields in the primary entity using created dependency IDs
        for key in list(final_changes.keys()):
            if key.startswith('_temp_') and key.endswith('_ref'):
                # Extract field name: _temp_manufacturer_ref -> manufacturer_id
                field_name = key[6:-4] + '_id'  # Remove '_temp_' and '_ref'
                ref_order_index = final_changes.pop(key)
                if isinstance(ref_order_index, int) and ref_order_index in created_entity_ids:
                    final_changes[field_name] = created_entity_ids[ref_order_index]

        # Remove any remaining internal fields
        keys_to_remove = [k for k in final_changes.keys() if k.startswith('_')]
        for key in keys_to_remove:
            final_changes.pop(key, None)

        resolved_changes = self._resolve_foreign_keys(final_changes)

        try:
@@ -295,6 +453,7 @@ class EditSubmission(StateMachineMixin, TrackedModel):

            return obj


        except Exception as e:
            # On error, record the issue and attempt rejection transition
            self.notes = f"Approval failed: {str(e)}"
@@ -900,3 +1059,82 @@ class PhotoSubmission(StateMachineMixin, TrackedModel):
        self.handled_at = timezone.now()
        self.notes = notes
        self.save()


class ModerationAuditLog(models.Model):
    """
    Audit log for moderation actions.

    Records all moderation activities including approvals, rejections,
    claims, escalations, and conversions for accountability and analytics.
    """

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    submission = models.ForeignKey(
        EditSubmission,
        on_delete=models.CASCADE,
        related_name="audit_logs",
        help_text="The submission this audit log entry is for",
    )
    moderator = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        blank=True,
        null=True,
        on_delete=models.SET_NULL,
        related_name="moderation_audit_logs",
        help_text="The moderator who performed the action (null for system actions)",
    )
    action = RichChoiceField(
        choice_group="moderation_audit_actions",
        domain="moderation",
        max_length=50,
        db_index=True,
        help_text="The action that was performed",
    )
    previous_status = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="Status before the action",
    )
    new_status = models.CharField(
        max_length=50,
        blank=True,
        null=True,
        help_text="Status after the action",
    )
    notes = models.TextField(
        blank=True,
        null=True,
        help_text="Notes or comments about the action",
    )
    is_system_action = models.BooleanField(
        default=False,
        db_index=True,
        help_text="Whether this was an automated system action",
    )
    is_test_data = models.BooleanField(
        default=False,
        help_text="Whether this is test data",
    )

    # Timestamps
    created_at = models.DateTimeField(
        auto_now_add=True,
        db_index=True,
        help_text="When this action was performed",
    )

    class Meta:
        ordering = ["-created_at"]
        verbose_name = "Moderation Audit Log"
        verbose_name_plural = "Moderation Audit Logs"
        indexes = [
            models.Index(fields=["submission", "created_at"]),
            models.Index(fields=["moderator", "created_at"]),
            models.Index(fields=["action", "created_at"]),
        ]

    def __str__(self) -> str:
        actor = self.moderator.username if self.moderator else "System"
        return f"{self.get_action_display()} by {actor} on {self.submission_id}"
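A minimal usage sketch (not part of this diff) of how a moderation action might be recorded with the model above; the import path, the caller's submission/moderator variables, and the status strings are assumptions:

from apps.moderation.models import ModerationAuditLog  # module path assumed from the app layout

def log_approval(submission, moderator):
    """Hedged example: write one audit row for an approval."""
    return ModerationAuditLog.objects.create(
        submission=submission,
        moderator=moderator,
        action="approved",            # value taken from the moderation_audit_actions choice group
        previous_status="PENDING",    # status strings assumed; use whatever the state machine reports
        new_status="APPROVED",
        notes="Approved via moderation queue",
    )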
|
||||
@@ -100,6 +100,10 @@ class EditSubmissionSerializer(serializers.ModelSerializer):
            "claimed_at",
            "created_at",
            "time_since_created",
            # Photo fields (used when submission_type="PHOTO")
            "photo",
            "caption",
            "date_taken",
        ]
        read_only_fields = [
            "id",
@@ -1062,3 +1066,45 @@ class PhotoSubmissionSerializer(serializers.ModelSerializer):
        else:
            minutes = diff.seconds // 60
            return f"{minutes} minutes ago"


# ============================================================================
# Moderation Audit Log Serializers
# ============================================================================


class ModerationAuditLogSerializer(serializers.ModelSerializer):
    """Serializer for moderation audit logs."""

    moderator = UserBasicSerializer(read_only=True)
    moderator_username = serializers.CharField(source="moderator.username", read_only=True, allow_null=True)
    submission_content_type = serializers.CharField(source="submission.content_type.model", read_only=True)
    action_display = serializers.CharField(source="get_action_display", read_only=True)

    class Meta:
        from .models import ModerationAuditLog

        model = ModerationAuditLog
        fields = [
            "id",
            "submission",
            "submission_content_type",
            "moderator",
            "moderator_username",
            "action",
            "action_display",
            "previous_status",
            "new_status",
            "notes",
            "is_system_action",
            "is_test_data",
            "created_at",
        ]
        read_only_fields = [
            "id",
            "created_at",
            "moderator",
            "moderator_username",
            "submission_content_type",
            "action_display",
        ]
|
||||
@@ -5,6 +5,7 @@ Following Django styleguide pattern for business logic encapsulation.

from typing import Any

from django.contrib.contenttypes.models import ContentType
from django.db import transaction
from django.db.models import QuerySet
from django.utils import timezone
@@ -340,9 +341,13 @@ class ModerationService:
            Dictionary with submission info and queue status
        """
        with transaction.atomic():
            # Create the photo submission
            submission = PhotoSubmission(
                content_object=content_object,
            # Create the photo submission using unified EditSubmission with PHOTO type
            submission = EditSubmission(
                content_type=ContentType.objects.get_for_model(content_object),
                object_id=content_object.pk,
                submission_type="PHOTO",
                changes={},  # Photos don't have field changes
                reason="Photo submission",
                photo=photo,
                caption=caption,
                date_taken=date_taken,
|
||||
@@ -95,7 +95,7 @@ def expire_stale_claims(lock_duration_minutes: int = None) -> dict:
                source="task",
            )

    # Process PhotoSubmissions with stale claims
    # Process PhotoSubmissions with stale claims (legacy model - until removed)
    stale_photo_ids = list(
        PhotoSubmission.objects.filter(
            status="CLAIMED",
@@ -132,6 +132,43 @@ def expire_stale_claims(lock_duration_minutes: int = None) -> dict:
                source="task",
            )

    # Also process EditSubmission with PHOTO type (new unified model)
    stale_photo_edit_ids = list(
        EditSubmission.objects.filter(
            submission_type="PHOTO",
            status="CLAIMED",
            claimed_at__lt=cutoff_time,
        ).values_list("id", flat=True)
    )

    for submission_id in stale_photo_edit_ids:
        result["edit_submissions"]["processed"] += 1  # Count with edit submissions
        try:
            with transaction.atomic():
                submission = EditSubmission.objects.select_for_update(skip_locked=True).filter(
                    id=submission_id,
                    status="CLAIMED",
                ).first()

                if submission:
                    _release_claim(submission)
                    result["edit_submissions"]["released"] += 1
                    logger.info(
                        "Released stale claim on PHOTO EditSubmission %s (claimed by %s at %s)",
                        submission_id,
                        submission.claimed_by,
                        submission.claimed_at,
                    )
        except Exception as e:
            result["edit_submissions"]["failed"] += 1
            error_msg = f"PHOTO EditSubmission {submission_id}: {str(e)}"
            result["failures"].append(error_msg)
            capture_and_log(
                e,
                f"Release stale claim on PHOTO EditSubmission {submission_id}",
                source="task",
            )

    total_released = result["edit_submissions"]["released"] + result["photo_submissions"]["released"]
    total_failed = result["edit_submissions"]["failed"] + result["photo_submissions"]["failed"]
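A hedged sketch (not in the commit) of invoking the task body directly and reading the counters the block above maintains; the module path is an assumption:

from apps.moderation.tasks import expire_stale_claims  # import path assumed

result = expire_stale_claims(lock_duration_minutes=30)
print(result["edit_submissions"]["released"], "edit-submission claims released")
print(result["photo_submissions"]["released"], "legacy photo-submission claims released")
if result["failures"]:
    print("failures:", result["failures"])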
|
||||
|
||||
@@ -2131,12 +2131,14 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):
    """
    ViewSet for managing photo submissions.

    Now queries EditSubmission with submission_type="PHOTO" for unified model.
    Includes claim/unclaim endpoints with concurrency protection using
    database row locking (select_for_update) to prevent race conditions.
    """

    queryset = PhotoSubmission.objects.all()
    serializer_class = PhotoSubmissionSerializer
    # Use EditSubmission filtered by PHOTO type instead of separate PhotoSubmission model
    queryset = EditSubmission.objects.filter(submission_type="PHOTO")
    serializer_class = EditSubmissionSerializer  # Use unified serializer
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    search_fields = ["caption", "notes"]
    ordering_fields = ["created_at", "status"]
@@ -2144,10 +2146,10 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):
    permission_classes = [CanViewModerationData]

    def get_queryset(self):
        queryset = super().get_queryset()
        status = self.request.query_params.get("status")
        if status:
            queryset = queryset.filter(status=status)
        queryset = EditSubmission.objects.filter(submission_type="PHOTO")
        status_param = self.request.query_params.get("status")
        if status_param:
            queryset = queryset.filter(status=status_param)

        # User filter
        user_id = self.request.query_params.get("user")
@@ -2168,8 +2170,9 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):

        with transaction.atomic():
            try:
                submission = PhotoSubmission.objects.select_for_update(nowait=True).get(pk=pk)
            except PhotoSubmission.DoesNotExist:
                # Use EditSubmission filtered by PHOTO type
                submission = EditSubmission.objects.filter(submission_type="PHOTO").select_for_update(nowait=True).get(pk=pk)
            except EditSubmission.DoesNotExist:
                return Response({"error": "Submission not found"}, status=status.HTTP_404_NOT_FOUND)
            except DatabaseError:
                return Response(
@@ -2198,9 +2201,10 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):
        log_business_event(
            logger,
            event_type="submission_claimed",
            message=f"PhotoSubmission {submission.id} claimed by {request.user.username}",
            message=f"Photo EditSubmission {submission.id} claimed by {request.user.username}",
            context={
                "model": "PhotoSubmission",
                "model": "EditSubmission",
                "submission_type": "PHOTO",
                "object_id": submission.id,
                "claimed_by": request.user.username,
            },
@@ -2767,3 +2771,55 @@ class ModerationStatsView(APIView):
        }

        return Response(stats_data)


# ============================================================================
# Moderation Audit Log ViewSet
# ============================================================================


class ModerationAuditLogViewSet(viewsets.ReadOnlyModelViewSet):
    """
    ViewSet for viewing moderation audit logs.

    Provides read-only access to moderation action history for auditing
    and accountability purposes.
    """

    from .models import ModerationAuditLog
    from .serializers import ModerationAuditLogSerializer

    queryset = ModerationAuditLog.objects.select_related(
        "submission", "submission__content_type", "moderator"
    ).all()
    serializer_class = ModerationAuditLogSerializer
    permission_classes = [IsAdminOrSuperuser]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_fields = ["action", "is_system_action", "is_test_data"]
    search_fields = ["notes"]
    ordering_fields = ["created_at", "action"]
    ordering = ["-created_at"]

    def get_queryset(self):
        queryset = super().get_queryset()

        # Filter by submission ID
        submission_id = self.request.query_params.get("submission_id")
        if submission_id:
            queryset = queryset.filter(submission_id=submission_id)

        # Filter by moderator ID
        moderator_id = self.request.query_params.get("moderator_id")
        if moderator_id:
            queryset = queryset.filter(moderator_id=moderator_id)

        # Date range filtering
        start_date = self.request.query_params.get("start_date")
        end_date = self.request.query_params.get("end_date")

        if start_date:
            queryset = queryset.filter(created_at__gte=start_date)
        if end_date:
            queryset = queryset.filter(created_at__lte=end_date)

        return queryset
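A hedged sketch of exercising the audit-log viewset above from a test, assuming it is routed at /api/moderation/audit-logs/ (the actual URL configuration is not shown in this diff) and that an admin user object is available:

from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)  # admin_user assumed; IsAdminOrSuperuser gates access
response = client.get(
    "/api/moderation/audit-logs/",
    {"action": "approved", "moderator_id": str(admin_user.pk), "start_date": "2026-01-01"},
)
assert response.status_code == 200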
|
||||
backend/apps/notifications/api/log_serializers.py (new file, 46 lines)
@@ -0,0 +1,46 @@
"""
Serializers for Notification Log API.
"""

from rest_framework import serializers

from apps.core.choices.serializers import RichChoiceSerializerField
from apps.notifications.models import NotificationLog


class NotificationLogSerializer(serializers.ModelSerializer):
    """Serializer for notification logs."""

    status = RichChoiceSerializerField(
        choice_group="notification_log_statuses",
        domain="notifications",
    )
    user_username = serializers.CharField(
        source="user.username",
        read_only=True,
        allow_null=True,
    )
    user_email = serializers.EmailField(
        source="user.email",
        read_only=True,
        allow_null=True,
    )

    class Meta:
        model = NotificationLog
        fields = [
            "id",
            "user",
            "user_username",
            "user_email",
            "workflow_id",
            "notification_type",
            "channel",
            "status",
            "payload",
            "error_message",
            "novu_transaction_id",
            "created_at",
            "updated_at",
        ]
        read_only_fields = ["id", "created_at", "updated_at", "user_username", "user_email"]
backend/apps/notifications/api/log_views.py (new file, 61 lines)
@@ -0,0 +1,61 @@
"""
ViewSet for Notification Log API.
"""

from django_filters.rest_framework import DjangoFilterBackend
from drf_spectacular.utils import extend_schema, extend_schema_view
from rest_framework import viewsets
from rest_framework.filters import OrderingFilter, SearchFilter
from rest_framework.permissions import IsAdminUser

from apps.notifications.models import NotificationLog

from .log_serializers import NotificationLogSerializer


@extend_schema_view(
    list=extend_schema(
        summary="List notification logs",
        description="Get all notification logs with optional filtering by status, channel, or workflow.",
        tags=["Admin - Notifications"],
    ),
    retrieve=extend_schema(
        summary="Get notification log",
        description="Get details of a specific notification log entry.",
        tags=["Admin - Notifications"],
    ),
)
class NotificationLogViewSet(viewsets.ReadOnlyModelViewSet):
    """
    ViewSet for viewing notification logs.

    Provides read-only access to notification delivery history.
    """

    queryset = NotificationLog.objects.select_related("user").all()
    serializer_class = NotificationLogSerializer
    permission_classes = [IsAdminUser]
    filter_backends = [DjangoFilterBackend, SearchFilter, OrderingFilter]
    filterset_fields = ["status", "channel", "workflow_id", "notification_type"]
    search_fields = ["workflow_id", "notification_type", "error_message"]
    ordering_fields = ["created_at", "status"]
    ordering = ["-created_at"]

    def get_queryset(self):
        queryset = super().get_queryset()

        # Filter by user ID if provided
        user_id = self.request.query_params.get("user_id")
        if user_id:
            queryset = queryset.filter(user_id=user_id)

        # Date range filtering
        start_date = self.request.query_params.get("start_date")
        end_date = self.request.query_params.get("end_date")

        if start_date:
            queryset = queryset.filter(created_at__gte=start_date)
        if end_date:
            queryset = queryset.filter(created_at__lte=end_date)

        return queryset
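Neither new viewset's URL wiring appears in this diff; a hedged sketch of how they might be registered, with the route prefixes below chosen only for illustration:

from rest_framework.routers import DefaultRouter

from apps.moderation.views import ModerationAuditLogViewSet          # module path assumed
from apps.notifications.api.log_views import NotificationLogViewSet  # path taken from the new file above

router = DefaultRouter()
router.register(r"moderation/audit-logs", ModerationAuditLogViewSet, basename="moderation-audit-log")
router.register(r"notifications/logs", NotificationLogViewSet, basename="notification-log")
urlpatterns = router.urls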
backend/apps/notifications/choices.py (new file, 185 lines)
@@ -0,0 +1,185 @@
|
||||
"""
|
||||
Rich Choice Objects for Notifications Domain
|
||||
|
||||
This module defines all choice objects for the notifications domain,
|
||||
using the RichChoices pattern for consistent UI rendering and validation.
|
||||
"""
|
||||
|
||||
from apps.core.choices import ChoiceCategory, RichChoice
|
||||
from apps.core.choices.registry import register_choices
|
||||
|
||||
# ============================================================================
|
||||
# Notification Log Status Choices
|
||||
# ============================================================================
|
||||
NOTIFICATION_LOG_STATUSES = [
|
||||
RichChoice(
|
||||
value="pending",
|
||||
label="Pending",
|
||||
description="Notification is queued for delivery",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "clock",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="sent",
|
||||
label="Sent",
|
||||
description="Notification has been sent",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "send",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="delivered",
|
||||
label="Delivered",
|
||||
description="Notification was successfully delivered",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="failed",
|
||||
label="Failed",
|
||||
description="Notification delivery failed",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 4,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# System Announcement Severity Choices
|
||||
# ============================================================================
|
||||
ANNOUNCEMENT_SEVERITIES = [
|
||||
RichChoice(
|
||||
value="info",
|
||||
label="Information",
|
||||
description="Informational announcement",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "info",
|
||||
"css_class": "bg-blue-100 text-blue-800 border-blue-200",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="warning",
|
||||
label="Warning",
|
||||
description="Warning announcement requiring attention",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "alert-triangle",
|
||||
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-200",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
RichChoice(
|
||||
value="critical",
|
||||
label="Critical",
|
||||
description="Critical announcement requiring immediate attention",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "alert-octagon",
|
||||
"css_class": "bg-red-100 text-red-800 border-red-200",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.PRIORITY,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Notification Level Choices (for in-app notifications)
|
||||
# ============================================================================
|
||||
NOTIFICATION_LEVELS = [
|
||||
RichChoice(
|
||||
value="info",
|
||||
label="Info",
|
||||
description="Informational notification",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "info",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.NOTIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="success",
|
||||
label="Success",
|
||||
description="Success notification",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.NOTIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="warning",
|
||||
label="Warning",
|
||||
description="Warning notification",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "alert-triangle",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 3,
|
||||
},
|
||||
category=ChoiceCategory.NOTIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="error",
|
||||
label="Error",
|
||||
description="Error notification",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 4,
|
||||
},
|
||||
category=ChoiceCategory.NOTIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def register_notifications_choices() -> None:
|
||||
"""Register all notifications domain choices with the global registry."""
|
||||
register_choices(
|
||||
name="notification_log_statuses",
|
||||
choices=NOTIFICATION_LOG_STATUSES,
|
||||
domain="notifications",
|
||||
description="Status options for notification logs",
|
||||
)
|
||||
register_choices(
|
||||
name="announcement_severities",
|
||||
choices=ANNOUNCEMENT_SEVERITIES,
|
||||
domain="notifications",
|
||||
description="Severity levels for system announcements",
|
||||
)
|
||||
register_choices(
|
||||
name="notification_levels",
|
||||
choices=NOTIFICATION_LEVELS,
|
||||
domain="notifications",
|
||||
description="Level options for in-app notifications",
|
||||
)
|
||||
|
||||
|
||||
# Auto-register choices when module is imported
|
||||
register_notifications_choices()
|
||||
@@ -0,0 +1,50 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-10 22:01
|
||||
|
||||
import apps.core.choices.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("notifications", "0002_add_notification_model"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="notification",
|
||||
name="level",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="notification_levels",
|
||||
choices=[("info", "Info"), ("success", "Success"), ("warning", "Warning"), ("error", "Error")],
|
||||
default="info",
|
||||
domain="notifications",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="notificationlog",
|
||||
name="status",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="notification_log_statuses",
|
||||
choices=[("pending", "Pending"), ("sent", "Sent"), ("delivered", "Delivered"), ("failed", "Failed")],
|
||||
default="pending",
|
||||
domain="notifications",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="systemannouncement",
|
||||
name="severity",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="announcement_severities",
|
||||
choices=[("info", "Information"), ("warning", "Warning"), ("critical", "Critical")],
|
||||
default="info",
|
||||
domain="notifications",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -14,6 +14,11 @@ Subscriber model is kept for backward compatibility but is optional.
from django.conf import settings
from django.db import models

from apps.core.choices.fields import RichChoiceField

# Import choices to ensure registration on app load
from . import choices  # noqa: F401


class Subscriber(models.Model):
    """
@@ -100,12 +105,6 @@ class NotificationLog(models.Model):
    Audit log of sent notifications.
    """

    class Status(models.TextChoices):
        PENDING = "pending", "Pending"
        SENT = "sent", "Sent"
        DELIVERED = "delivered", "Delivered"
        FAILED = "failed", "Failed"

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
@@ -115,10 +114,11 @@ class NotificationLog(models.Model):
    workflow_id = models.CharField(max_length=100, db_index=True)
    notification_type = models.CharField(max_length=50)
    channel = models.CharField(max_length=20)  # email, push, in_app, sms
    status = models.CharField(
    status = RichChoiceField(
        choice_group="notification_log_statuses",
        domain="notifications",
        max_length=20,
        choices=Status.choices,
        default=Status.PENDING,
        default="pending",
    )
    payload = models.JSONField(default=dict, blank=True)
    error_message = models.TextField(blank=True)
@@ -144,17 +144,13 @@ class SystemAnnouncement(models.Model):
    System-wide announcements.
    """

    class Severity(models.TextChoices):
        INFO = "info", "Information"
        WARNING = "warning", "Warning"
        CRITICAL = "critical", "Critical"

    title = models.CharField(max_length=255)
    message = models.TextField()
    severity = models.CharField(
    severity = RichChoiceField(
        choice_group="announcement_severities",
        domain="notifications",
        max_length=20,
        choices=Severity.choices,
        default=Severity.INFO,
        default="info",
    )
    action_url = models.URLField(blank=True)
    is_active = models.BooleanField(default=True)
@@ -184,12 +180,6 @@ class Notification(models.Model):
    supporting both in-app and email notification channels.
    """

    class Level(models.TextChoices):
        INFO = "info", "Info"
        SUCCESS = "success", "Success"
        WARNING = "warning", "Warning"
        ERROR = "error", "Error"

    # Who receives the notification
    recipient = models.ForeignKey(
        settings.AUTH_USER_MODEL,
@@ -207,10 +197,11 @@ class Notification(models.Model):
    # What happened
    verb = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    level = models.CharField(
    level = RichChoiceField(
        choice_group="notification_levels",
        domain="notifications",
        max_length=20,
        choices=Level.choices,
        default=Level.INFO,
        default="info",
    )
    # The object that was acted upon (generic foreign key)
    action_object_content_type = models.ForeignKey(

@@ -4,6 +4,8 @@ Notification serializers.

from rest_framework import serializers

from apps.core.choices.serializers import RichChoiceSerializerField

from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement


@@ -131,8 +133,9 @@ class CreateAnnouncementSerializer(serializers.Serializer):

    title = serializers.CharField(required=True, max_length=255)
    message = serializers.CharField(required=True)
    severity = serializers.ChoiceField(
        choices=["info", "warning", "critical"],
    severity = RichChoiceSerializerField(
        choice_group="announcement_severities",
        domain="notifications",
        default="info",
    )
    action_url = serializers.URLField(required=False, allow_blank=True)
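Because RichChoiceField keeps the same stored string values the old TextChoices enums used ("pending", "info", and so on), existing call sites that pass plain strings should continue to work. A hedged illustration only; the user variable and the field values below are made up and other required fields may apply:

from apps.notifications.models import Notification, NotificationLog

# Plain string values replace the old Level/Status enum members.
note = Notification.objects.create(recipient=user, verb="commented on your review", level="success")
log = NotificationLog.objects.create(
    user=user,
    workflow_id="review-reply",        # hypothetical workflow identifier
    notification_type="review_reply",  # hypothetical type
    channel="in_app",
    status="pending",
)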
|
||||
@@ -238,6 +238,186 @@ PARKS_COMPANY_ROLES = [
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Person/Entity Type Choices (for Company model)
|
||||
# ============================================================================
|
||||
PERSON_TYPES = [
|
||||
RichChoice(
|
||||
value="INDIVIDUAL",
|
||||
label="Individual",
|
||||
description="Single person or sole proprietor",
|
||||
metadata={"color": "blue", "icon": "user", "css_class": "bg-blue-100 text-blue-800", "sort_order": 1},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="FIRM",
|
||||
label="Firm",
|
||||
description="Professional services firm",
|
||||
metadata={"color": "indigo", "icon": "briefcase", "css_class": "bg-indigo-100 text-indigo-800", "sort_order": 2},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="ORGANIZATION",
|
||||
label="Organization",
|
||||
description="Non-profit or member organization",
|
||||
metadata={"color": "green", "icon": "users", "css_class": "bg-green-100 text-green-800", "sort_order": 3},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="CORPORATION",
|
||||
label="Corporation",
|
||||
description="Incorporated business entity",
|
||||
metadata={"color": "purple", "icon": "building", "css_class": "bg-purple-100 text-purple-800", "sort_order": 4},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="PARTNERSHIP",
|
||||
label="Partnership",
|
||||
description="Business partnership",
|
||||
metadata={"color": "orange", "icon": "handshake", "css_class": "bg-orange-100 text-orange-800", "sort_order": 5},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="GOVERNMENT",
|
||||
label="Government Entity",
|
||||
description="Government agency or public entity",
|
||||
metadata={"color": "gray", "icon": "landmark", "css_class": "bg-gray-100 text-gray-800", "sort_order": 6},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Company Status Choices
|
||||
# ============================================================================
|
||||
COMPANY_STATUSES = [
|
||||
RichChoice(
|
||||
value="ACTIVE",
|
||||
label="Active",
|
||||
description="Company is currently operating",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 1,
|
||||
"is_active": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="DEFUNCT",
|
||||
label="Defunct",
|
||||
description="Company no longer exists",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 2,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="MERGED",
|
||||
label="Merged",
|
||||
description="Company merged with another entity",
|
||||
metadata={
|
||||
"color": "purple",
|
||||
"icon": "git-merge",
|
||||
"css_class": "bg-purple-100 text-purple-800",
|
||||
"sort_order": 3,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="ACQUIRED",
|
||||
label="Acquired",
|
||||
description="Company was acquired by another entity",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "arrow-right-circle",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 4,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="RENAMED",
|
||||
label="Renamed",
|
||||
description="Company changed its name",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "edit",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 5,
|
||||
"is_active": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="DORMANT",
|
||||
label="Dormant",
|
||||
description="Company is inactive but not dissolved",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "pause-circle",
|
||||
"css_class": "bg-gray-100 text-gray-800",
|
||||
"sort_order": 6,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Date Precision Choices (for parks domain - founding dates, opening dates, etc.)
|
||||
# ============================================================================
|
||||
DATE_PRECISION = [
|
||||
RichChoice(
|
||||
value="exact",
|
||||
label="Exact Date",
|
||||
description="Date is known exactly",
|
||||
metadata={"color": "green", "icon": "calendar", "sort_order": 1, "format": "YYYY-MM-DD"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="month",
|
||||
label="Month and Year",
|
||||
description="Only month and year are known",
|
||||
metadata={"color": "blue", "icon": "calendar", "sort_order": 2, "format": "YYYY-MM"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="year",
|
||||
label="Year Only",
|
||||
description="Only the year is known",
|
||||
metadata={"color": "yellow", "icon": "calendar", "sort_order": 3, "format": "YYYY"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="decade",
|
||||
label="Decade",
|
||||
description="Only the decade is known",
|
||||
metadata={"color": "orange", "icon": "calendar", "sort_order": 4, "format": "YYYYs"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="century",
|
||||
label="Century",
|
||||
description="Only the century is known",
|
||||
metadata={"color": "gray", "icon": "calendar", "sort_order": 5, "format": "YYc"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="approximate",
|
||||
label="Approximate",
|
||||
description="Date is approximate/estimated",
|
||||
metadata={"color": "gray", "icon": "help-circle", "sort_order": 6, "format": "~YYYY"},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def register_parks_choices():
|
||||
"""Register all parks domain choices with the global registry"""
|
||||
@@ -266,6 +446,31 @@ def register_parks_choices():
|
||||
metadata={"domain": "parks", "type": "company_role"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="person_types",
|
||||
choices=PERSON_TYPES,
|
||||
domain="parks",
|
||||
description="Person/entity type classifications",
|
||||
metadata={"domain": "parks", "type": "person_type"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="company_statuses",
|
||||
choices=COMPANY_STATUSES,
|
||||
domain="parks",
|
||||
description="Company operational status options",
|
||||
metadata={"domain": "parks", "type": "company_status"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="date_precision",
|
||||
choices=DATE_PRECISION,
|
||||
domain="parks",
|
||||
description="Date precision options for parks domain",
|
||||
metadata={"domain": "parks", "type": "date_precision"},
|
||||
)
|
||||
|
||||
|
||||
# Auto-register choices when module is imported
|
||||
register_parks_choices()
|
||||
|
||||
|
||||
@@ -0,0 +1,218 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-10 22:01
|
||||
|
||||
import apps.core.choices.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0032_add_logo_image_id"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="founded_date_precision",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="date_precision",
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
domain="parks",
|
||||
help_text="Precision of the founding date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="person_type",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="person_types",
|
||||
choices=[
|
||||
("INDIVIDUAL", "Individual"),
|
||||
("FIRM", "Firm"),
|
||||
("ORGANIZATION", "Organization"),
|
||||
("CORPORATION", "Corporation"),
|
||||
("PARTNERSHIP", "Partnership"),
|
||||
("GOVERNMENT", "Government Entity"),
|
||||
],
|
||||
domain="parks",
|
||||
help_text="Type of entity (individual, firm, organization, etc.)",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="status",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="company_statuses",
|
||||
choices=[
|
||||
("ACTIVE", "Active"),
|
||||
("DEFUNCT", "Defunct"),
|
||||
("MERGED", "Merged"),
|
||||
("ACQUIRED", "Acquired"),
|
||||
("RENAMED", "Renamed"),
|
||||
("DORMANT", "Dormant"),
|
||||
],
|
||||
default="ACTIVE",
|
||||
domain="parks",
|
||||
help_text="Current operational status of the company",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="founded_date_precision",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="date_precision",
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
domain="parks",
|
||||
help_text="Precision of the founding date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="person_type",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="person_types",
|
||||
choices=[
|
||||
("INDIVIDUAL", "Individual"),
|
||||
("FIRM", "Firm"),
|
||||
("ORGANIZATION", "Organization"),
|
||||
("CORPORATION", "Corporation"),
|
||||
("PARTNERSHIP", "Partnership"),
|
||||
("GOVERNMENT", "Government Entity"),
|
||||
],
|
||||
domain="parks",
|
||||
help_text="Type of entity (individual, firm, organization, etc.)",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="status",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="company_statuses",
|
||||
choices=[
|
||||
("ACTIVE", "Active"),
|
||||
("DEFUNCT", "Defunct"),
|
||||
("MERGED", "Merged"),
|
||||
("ACQUIRED", "Acquired"),
|
||||
("RENAMED", "Renamed"),
|
||||
("DORMANT", "Dormant"),
|
||||
],
|
||||
default="ACTIVE",
|
||||
domain="parks",
|
||||
help_text="Current operational status of the company",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="closing_date_precision",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="date_precision",
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
default="exact",
|
||||
domain="parks",
|
||||
help_text="Precision of the closing date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="opening_date_precision",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="date_precision",
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
default="exact",
|
||||
domain="parks",
|
||||
help_text="Precision of the opening date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="closing_date_precision",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="date_precision",
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
default="exact",
|
||||
domain="parks",
|
||||
help_text="Precision of the closing date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="opening_date_precision",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
blank=True,
|
||||
choice_group="date_precision",
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
default="exact",
|
||||
domain="parks",
|
||||
help_text="Precision of the opening date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -26,34 +26,20 @@ class Company(TrackedModel):
|
||||
description = models.TextField(blank=True, help_text="Detailed company description")
|
||||
website = models.URLField(blank=True, help_text="Company website URL")
|
||||
|
||||
# Person/Entity Type (ported from legacy thrillwiki-87)
|
||||
PERSON_TYPES = [
|
||||
("INDIVIDUAL", "Individual"),
|
||||
("FIRM", "Firm"),
|
||||
("ORGANIZATION", "Organization"),
|
||||
("CORPORATION", "Corporation"),
|
||||
("PARTNERSHIP", "Partnership"),
|
||||
("GOVERNMENT", "Government Entity"),
|
||||
]
|
||||
person_type = models.CharField(
|
||||
# Person/Entity Type - using RichChoiceField
|
||||
person_type = RichChoiceField(
|
||||
choice_group="person_types",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
choices=PERSON_TYPES,
|
||||
blank=True,
|
||||
help_text="Type of entity (individual, firm, organization, etc.)",
|
||||
)
|
||||
|
||||
# Company Status (ported from legacy)
|
||||
COMPANY_STATUSES = [
|
||||
("ACTIVE", "Active"),
|
||||
("DEFUNCT", "Defunct"),
|
||||
("MERGED", "Merged"),
|
||||
("ACQUIRED", "Acquired"),
|
||||
("RENAMED", "Renamed"),
|
||||
("DORMANT", "Dormant"),
|
||||
]
|
||||
status = models.CharField(
|
||||
# Company Status - using RichChoiceField
|
||||
status = RichChoiceField(
|
||||
choice_group="company_statuses",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
choices=COMPANY_STATUSES,
|
||||
default="ACTIVE",
|
||||
help_text="Current operational status of the company",
|
||||
)
|
||||
@@ -61,17 +47,10 @@ class Company(TrackedModel):
|
||||
# Founding Information (enhanced from just founded_year)
|
||||
founded_year = models.PositiveIntegerField(blank=True, null=True, help_text="Year the company was founded")
|
||||
founded_date = models.DateField(blank=True, null=True, help_text="Full founding date if known")
|
||||
DATE_PRECISION_CHOICES = [
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
]
|
||||
founded_date_precision = models.CharField(
|
||||
founded_date_precision = RichChoiceField(
|
||||
choice_group="date_precision",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
choices=DATE_PRECISION_CHOICES,
|
||||
blank=True,
|
||||
help_text="Precision of the founding date",
|
||||
)
|
||||
|
||||
@@ -54,31 +54,19 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
|
||||
# Details
|
||||
opening_date = models.DateField(null=True, blank=True, help_text="Opening date")
|
||||
opening_date_precision = models.CharField(
|
||||
opening_date_precision = RichChoiceField(
|
||||
choice_group="date_precision",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
default="exact",
|
||||
blank=True,
|
||||
help_text="Precision of the opening date",
|
||||
)
|
||||
closing_date = models.DateField(null=True, blank=True, help_text="Closing date")
|
||||
closing_date_precision = models.CharField(
|
||||
closing_date_precision = RichChoiceField(
|
||||
choice_group="date_precision",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
choices=[
|
||||
("exact", "Exact Date"),
|
||||
("month", "Month and Year"),
|
||||
("year", "Year Only"),
|
||||
("decade", "Decade"),
|
||||
("century", "Century"),
|
||||
("approximate", "Approximate"),
|
||||
],
|
||||
default="exact",
|
||||
blank=True,
|
||||
help_text="Precision of the closing date",
|
||||
|
||||
backend/apps/reviews/choices.py (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
"""
|
||||
Rich Choice Objects for Reviews Domain
|
||||
|
||||
This module defines all choice objects for the reviews domain,
|
||||
using the RichChoices pattern for consistent UI rendering and validation.
|
||||
"""
|
||||
|
||||
from apps.core.choices import ChoiceCategory, RichChoice
|
||||
from apps.core.choices.registry import register_choices
|
||||
|
||||
# ============================================================================
|
||||
# Rating Value Choices (1-5 stars)
|
||||
# ============================================================================
|
||||
RATING_VALUES = [
|
||||
RichChoice(
|
||||
value="1",
|
||||
label="1 Star",
|
||||
description="Poor rating",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "star",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 1,
|
||||
"numeric_value": 1,
|
||||
"star_count": 1,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="2",
|
||||
label="2 Stars",
|
||||
description="Below average rating",
|
||||
metadata={
|
||||
"color": "orange",
|
||||
"icon": "star",
|
||||
"css_class": "bg-orange-100 text-orange-800",
|
||||
"sort_order": 2,
|
||||
"numeric_value": 2,
|
||||
"star_count": 2,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="3",
|
||||
label="3 Stars",
|
||||
description="Average rating",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "star",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 3,
|
||||
"numeric_value": 3,
|
||||
"star_count": 3,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="4",
|
||||
label="4 Stars",
|
||||
description="Good rating",
|
||||
metadata={
|
||||
"color": "lime",
|
||||
"icon": "star",
|
||||
"css_class": "bg-lime-100 text-lime-800",
|
||||
"sort_order": 4,
|
||||
"numeric_value": 4,
|
||||
"star_count": 4,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="5",
|
||||
label="5 Stars",
|
||||
description="Excellent rating",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "star",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 5,
|
||||
"numeric_value": 5,
|
||||
"star_count": 5,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def register_reviews_choices() -> None:
|
||||
"""Register all reviews domain choices with the global registry."""
|
||||
register_choices(
|
||||
name="rating_values",
|
||||
choices=RATING_VALUES,
|
||||
domain="reviews",
|
||||
description="Rating values for reviews (1-5 stars)",
|
||||
)
|
||||
|
||||
|
||||
# Auto-register choices when module is imported
|
||||
register_reviews_choices()
|
||||
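As a sketch (not part of the diff), the rating group registered above can presumably be read back through the same registry helper this commit uses in sub_types.py; attribute names beyond value/label follow that usage and are otherwise assumptions:

from apps.core.choices.registry import get_choice_group

group = get_choice_group("rating_values", domain="reviews")
if group:
    for choice in group.choices:
        # Prints "1 1 Star" through "5 5 Stars" once the module above has been imported.
        print(choice.value, choice.label)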
@@ -0,0 +1,66 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-12 01:01
|
||||
|
||||
import apps.core.choices.fields
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("rides", "0039_add_photographer_to_photos"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="RideSubType",
|
||||
fields=[
|
||||
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"name",
|
||||
models.CharField(help_text="Name of the ride sub-type (e.g., 'Flying Coaster')", max_length=100),
|
||||
),
|
||||
(
|
||||
"category",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="categories",
|
||||
choices=[
|
||||
("RC", "Roller Coaster"),
|
||||
("DR", "Dark Ride"),
|
||||
("FR", "Flat Ride"),
|
||||
("WR", "Water Ride"),
|
||||
("TR", "Transport Ride"),
|
||||
("OT", "Other"),
|
||||
],
|
||||
domain="rides",
|
||||
help_text="Ride category this sub-type belongs to",
|
||||
max_length=2,
|
||||
),
|
||||
),
|
||||
("description", models.TextField(blank=True, help_text="Description of this ride sub-type")),
|
||||
(
|
||||
"created_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="User who created this sub-type",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="created_ride_sub_types",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Ride Sub-Type",
|
||||
"verbose_name_plural": "Ride Sub-Types",
|
||||
"ordering": ["category", "name"],
|
||||
"abstract": False,
|
||||
"unique_together": {("category", "name")},
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -17,12 +17,14 @@ from .rankings import RankingSnapshot, RidePairComparison, RideRanking
|
||||
from .reviews import RideReview
|
||||
from .rides import Ride, RideModel, RollerCoasterStats
|
||||
from .stats import DarkRideStats, FlatRideStats, KiddieRideStats, TransportationStats, WaterRideStats
|
||||
from .sub_types import RideSubType
|
||||
|
||||
__all__ = [
|
||||
# Primary models
|
||||
"Ride",
|
||||
"RideModel",
|
||||
"RideNameHistory",
|
||||
"RideSubType",
|
||||
"RollerCoasterStats",
|
||||
"WaterRideStats",
|
||||
"DarkRideStats",
|
||||
|
||||
backend/apps/rides/models/sub_types.py (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
"""
|
||||
RideSubType model for categorizing rides by sub-type within each category.
|
||||
|
||||
This model replaces the legacy Supabase ride_sub_types table,
|
||||
providing a lookup table for ride sub-types (e.g., 'Flying Coaster',
|
||||
'Inverted Coaster' for roller coasters).
|
||||
"""
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import models
|
||||
|
||||
from apps.core.choices import RichChoiceField
|
||||
from apps.core.models import TrackedModel
|
||||
|
||||
|
||||
class RideSubType(TrackedModel):
|
||||
"""
|
||||
Lookup table for ride sub-types categorized by ride category.
|
||||
|
||||
Examples:
|
||||
- Roller Coaster: Flying Coaster, Inverted Coaster, Dive Coaster
|
||||
- Water Ride: Log Flume, River Rapids, Splash Battle
|
||||
- Dark Ride: Trackless, Omnimover, Boat Ride
|
||||
"""
|
||||
|
||||
name = models.CharField(
|
||||
max_length=100,
|
||||
help_text="Name of the ride sub-type (e.g., 'Flying Coaster')",
|
||||
)
|
||||
category = RichChoiceField(
|
||||
choice_group="categories",
|
||||
domain="rides",
|
||||
max_length=2,
|
||||
help_text="Ride category this sub-type belongs to",
|
||||
)
|
||||
description = models.TextField(
|
||||
blank=True,
|
||||
help_text="Description of this ride sub-type",
|
||||
)
|
||||
created_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="created_ride_sub_types",
|
||||
help_text="User who created this sub-type",
|
||||
)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
verbose_name = "Ride Sub-Type"
|
||||
verbose_name_plural = "Ride Sub-Types"
|
||||
ordering = ["category", "name"]
|
||||
unique_together = [["category", "name"]]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.name} ({self.get_category_display()})"
|
||||
|
||||
def get_category_display(self) -> str:
|
||||
"""Get human-readable category label."""
|
||||
from apps.core.choices.registry import get_choice_group
|
||||
|
||||
group = get_choice_group("categories", domain="rides")
|
||||
if group:
|
||||
for choice in group.choices:
|
||||
if choice.value == self.category:
|
||||
return choice.label
|
||||
return self.category
|
||||
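A minimal usage sketch for the model above (assumes migrations are applied and the rides choices are registered):

from apps.rides.models import RideSubType

# get_or_create respects the (category, name) unique_together constraint.
sub_type, created = RideSubType.objects.get_or_create(
    name="Flying Coaster",
    category="RC",  # Roller Coaster, per the "categories" choice group
)
print(str(sub_type))  # expected: "Flying Coaster (Roller Coaster)"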
@@ -6,7 +6,7 @@ from django.utils import timezone
|
||||
|
||||
from apps.core.utils import capture_and_log
|
||||
|
||||
from .models import Ride
|
||||
from .models import Ride, RideSubType
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -286,3 +286,59 @@ def track_ride_name_changes(sender, instance, **kwargs):
|
||||
except Exception as e:
|
||||
logger.exception(f"Failed to track name change for ride {instance.pk}: {e}")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Auto-Create Ride Sub-Types on Ride Save
|
||||
# =============================================================================
|
||||
|
||||
|
||||
@receiver(post_save, sender=Ride)
|
||||
def auto_create_ride_sub_type(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Automatically create a RideSubType entry when a ride is saved with a new sub-type value.
|
||||
|
||||
This integrates with the submission pipeline - when a ride submission with a new
|
||||
ride_sub_type value is approved, the sub-type automatically gets added to the
|
||||
lookup table for future autocomplete suggestions.
|
||||
|
||||
Args:
|
||||
sender: The Ride model class.
|
||||
instance: The Ride instance that was saved.
|
||||
created: Whether this is a new ride (not used, we check sub-types for all saves).
|
||||
"""
|
||||
# Skip if no ride_sub_type is set
|
||||
if not instance.ride_sub_type or not instance.ride_sub_type.strip():
|
||||
return
|
||||
|
||||
# Skip if no category is set (can't categorize the sub-type)
|
||||
if not instance.category:
|
||||
return
|
||||
|
||||
ride_sub_type_value = instance.ride_sub_type.strip()
|
||||
|
||||
try:
|
||||
# Check if this sub-type already exists for this category
|
||||
existing = RideSubType.objects.filter(
|
||||
name__iexact=ride_sub_type_value,
|
||||
category=instance.category
|
||||
).exists()
|
||||
|
||||
if not existing:
|
||||
# Create the new sub-type entry
|
||||
RideSubType.objects.create(
|
||||
name=ride_sub_type_value,
|
||||
category=instance.category,
|
||||
description=f"Auto-created from ride: {instance.name}",
|
||||
created_by=getattr(instance, 'created_by', None),
|
||||
)
|
||||
logger.info(
|
||||
f"Auto-created RideSubType '{ride_sub_type_value}' for category "
|
||||
f"'{instance.category}' from ride '{instance.name}'"
|
||||
)
|
||||
except Exception as e:
|
||||
# Non-critical error - log but don't fail the ride save
|
||||
logger.warning(
|
||||
f"Failed to auto-create RideSubType for ride {instance.pk}: {e}"
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -145,6 +145,179 @@ TICKET_CATEGORIES = [
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Report Type Choices (for user-submitted reports about content issues)
|
||||
# ============================================================================
|
||||
REPORT_TYPES = [
|
||||
RichChoice(
|
||||
value="inaccurate",
|
||||
label="Inaccurate Information",
|
||||
description="Information is factually incorrect",
|
||||
metadata={
|
||||
"color": "orange",
|
||||
"icon": "alert-circle",
|
||||
"css_class": "bg-orange-100 text-orange-800 border-orange-200",
|
||||
"sort_order": 1,
|
||||
"requires_evidence": True,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="inappropriate",
|
||||
label="Inappropriate Content",
|
||||
description="Content is offensive or inappropriate",
|
||||
metadata={
|
||||
"color": "red",
|
||||
"icon": "flag",
|
||||
"css_class": "bg-red-100 text-red-800 border-red-200",
|
||||
"sort_order": 2,
|
||||
"requires_evidence": False,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="spam",
|
||||
label="Spam",
|
||||
description="Content is spam or promotional",
|
||||
metadata={
|
||||
"color": "purple",
|
||||
"icon": "mail-x",
|
||||
"css_class": "bg-purple-100 text-purple-800 border-purple-200",
|
||||
"sort_order": 3,
|
||||
"requires_evidence": False,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="copyright",
|
||||
label="Copyright Violation",
|
||||
description="Content violates copyright",
|
||||
metadata={
|
||||
"color": "indigo",
|
||||
"icon": "shield-alert",
|
||||
"css_class": "bg-indigo-100 text-indigo-800 border-indigo-200",
|
||||
"sort_order": 4,
|
||||
"requires_evidence": True,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="duplicate",
|
||||
label="Duplicate Content",
|
||||
description="Content duplicates existing entry",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "copy",
|
||||
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-200",
|
||||
"sort_order": 5,
|
||||
"requires_evidence": True,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="other",
|
||||
label="Other",
|
||||
description="Other issue not covered above",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "help-circle",
|
||||
"css_class": "bg-gray-100 text-gray-800 border-gray-200",
|
||||
"sort_order": 6,
|
||||
"requires_evidence": False,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
# ============================================================================
|
||||
# Report Status Choices
|
||||
# ============================================================================
|
||||
REPORT_STATUSES = [
|
||||
RichChoice(
|
||||
value="pending",
|
||||
label="Pending",
|
||||
description="Report is awaiting review",
|
||||
metadata={
|
||||
"color": "yellow",
|
||||
"icon": "clock",
|
||||
"css_class": "bg-yellow-100 text-yellow-800 border-yellow-200",
|
||||
"sort_order": 1,
|
||||
"is_active": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="investigating",
|
||||
label="Investigating",
|
||||
description="Report is being investigated",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "search",
|
||||
"css_class": "bg-blue-100 text-blue-800 border-blue-200",
|
||||
"sort_order": 2,
|
||||
"is_active": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="resolved",
|
||||
label="Resolved",
|
||||
description="Report has been resolved with action taken",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800 border-green-200",
|
||||
"sort_order": 3,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="dismissed",
|
||||
label="Dismissed",
|
||||
description="Report was dismissed as invalid or duplicate",
|
||||
metadata={
|
||||
"color": "gray",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-gray-100 text-gray-800 border-gray-200",
|
||||
"sort_order": 4,
|
||||
"is_active": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Email Direction Choices
|
||||
# ============================================================================
|
||||
EMAIL_DIRECTIONS = [
|
||||
RichChoice(
|
||||
value="inbound",
|
||||
label="Inbound",
|
||||
description="Email received from user",
|
||||
metadata={
|
||||
"color": "blue",
|
||||
"icon": "arrow-down-left",
|
||||
"css_class": "bg-blue-100 text-blue-800 border-blue-200",
|
||||
"sort_order": 1,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="outbound",
|
||||
label="Outbound",
|
||||
description="Email sent to user",
|
||||
metadata={
|
||||
"color": "green",
|
||||
"icon": "arrow-up-right",
|
||||
"css_class": "bg-green-100 text-green-800 border-green-200",
|
||||
"sort_order": 2,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def register_support_choices() -> None:
|
||||
"""Register all support domain choices with the global registry."""
|
||||
@@ -160,7 +333,26 @@ def register_support_choices() -> None:
|
||||
domain="support",
|
||||
description="Category options for support tickets",
|
||||
)
|
||||
register_choices(
|
||||
"report_types",
|
||||
REPORT_TYPES,
|
||||
domain="support",
|
||||
description="Type options for user-submitted reports",
|
||||
)
|
||||
register_choices(
|
||||
"report_statuses",
|
||||
REPORT_STATUSES,
|
||||
domain="support",
|
||||
description="Status options for user-submitted reports",
|
||||
)
|
||||
register_choices(
|
||||
"email_directions",
|
||||
EMAIL_DIRECTIONS,
|
||||
domain="support",
|
||||
description="Direction options for email threads",
|
||||
)
|
||||
|
||||
|
||||
# Auto-register choices when module is imported
|
||||
register_support_choices()
|
||||
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-10 22:01
|
||||
|
||||
import apps.core.choices.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("support", "0004_alter_ticket_category_alter_ticket_status"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="report",
|
||||
name="report_type",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="report_types",
|
||||
choices=[
|
||||
("inaccurate", "Inaccurate Information"),
|
||||
("inappropriate", "Inappropriate Content"),
|
||||
("spam", "Spam"),
|
||||
("copyright", "Copyright Violation"),
|
||||
("duplicate", "Duplicate Content"),
|
||||
("other", "Other"),
|
||||
],
|
||||
db_index=True,
|
||||
domain="support",
|
||||
help_text="Type of issue being reported",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="report",
|
||||
name="status",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="report_statuses",
|
||||
choices=[
|
||||
("pending", "Pending"),
|
||||
("investigating", "Investigating"),
|
||||
("resolved", "Resolved"),
|
||||
("dismissed", "Dismissed"),
|
||||
],
|
||||
db_index=True,
|
||||
default="pending",
|
||||
domain="support",
|
||||
help_text="Current status of the report",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,147 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-11 20:42
|
||||
|
||||
import apps.core.choices.fields
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("support", "0005_alter_report_report_type_alter_report_status"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="EmailThread",
|
||||
fields=[
|
||||
("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"message_id",
|
||||
models.CharField(blank=True, help_text="Email message ID for threading", max_length=255),
|
||||
),
|
||||
("from_email", models.EmailField(help_text="Sender email address", max_length=254)),
|
||||
("to_email", models.EmailField(help_text="Recipient email address", max_length=254)),
|
||||
("subject", models.CharField(max_length=255)),
|
||||
("body_text", models.TextField(help_text="Plain text email body")),
|
||||
(
|
||||
"direction",
|
||||
apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="email_directions",
|
||||
choices=[("inbound", "Inbound"), ("outbound", "Outbound")],
|
||||
domain="support",
|
||||
help_text="Whether email is inbound or outbound",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Email Thread",
|
||||
"verbose_name_plural": "Email Threads",
|
||||
"ordering": ["created_at"],
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="admin_notes",
|
||||
field=models.TextField(blank=True, help_text="Internal notes for administrators"),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="archived_at",
|
||||
field=models.DateTimeField(blank=True, help_text="When the ticket was archived", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="archived_by",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Staff member who archived this ticket",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="archived_tickets",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="assigned_to",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Staff member assigned to this ticket",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="assigned_tickets",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="name",
|
||||
field=models.CharField(
|
||||
blank=True, help_text="Name of the submitter (for anonymous tickets)", max_length=255
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="resolved_at",
|
||||
field=models.DateTimeField(blank=True, help_text="When the ticket was resolved", null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="resolved_by",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Staff member who resolved this ticket",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="resolved_tickets",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ticket",
|
||||
name="ticket_number",
|
||||
field=models.CharField(blank=True, help_text="Human-readable ticket number", max_length=20, unique=True),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ticket",
|
||||
index=models.Index(fields=["status", "created_at"], name="support_tic_status_d0b46e_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ticket",
|
||||
index=models.Index(fields=["ticket_number"], name="support_tic_ticket__d87f40_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ticket",
|
||||
index=models.Index(fields=["archived_at"], name="support_tic_archive_8fe8c5_idx"),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="emailthread",
|
||||
name="sent_by",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Staff member who sent this email (for outbound)",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="sent_email_threads",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="emailthread",
|
||||
name="ticket",
|
||||
field=models.ForeignKey(
|
||||
help_text="Associated support ticket",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="email_threads",
|
||||
to="support.ticket",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 5.2.10 on 2026-01-11 21:19
|
||||
|
||||
import apps.core.state_machine.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("support", "0006_add_ticket_admin_fields_and_email_threads"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="ticket",
|
||||
name="status",
|
||||
field=apps.core.state_machine.fields.RichFSMField(
|
||||
allow_deprecated=False,
|
||||
choice_group="ticket_statuses",
|
||||
choices=[("open", "Open"), ("in_progress", "In Progress"), ("closed", "Closed")],
|
||||
db_index=True,
|
||||
default="open",
|
||||
domain="support",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,14 +1,27 @@
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.core.choices.fields import RichChoiceField
|
||||
from apps.core.history import TrackedModel
|
||||
from apps.core.state_machine import RichFSMField, StateMachineMixin
|
||||
|
||||
# Import choices to ensure registration on app load
|
||||
from . import choices # noqa: F401
|
||||
|
||||
|
||||
class Ticket(TrackedModel):
|
||||
class Ticket(StateMachineMixin, TrackedModel):
|
||||
"""
|
||||
Support ticket model with FSM-managed status transitions.
|
||||
|
||||
Status workflow:
|
||||
open -> in_progress -> closed
|
||||
-> open (reopen)
|
||||
"""
|
||||
|
||||
state_field_name = "status"
|
||||
|
||||
user = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
@@ -18,6 +31,13 @@ class Ticket(TrackedModel):
|
||||
help_text="User who submitted the ticket (optional)",
|
||||
)
|
||||
|
||||
# Submitter info (for anonymous/guest tickets)
|
||||
name = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
help_text="Name of the submitter (for anonymous tickets)",
|
||||
)
|
||||
|
||||
category = RichChoiceField(
|
||||
choice_group="ticket_categories",
|
||||
domain="support",
|
||||
@@ -30,7 +50,8 @@ class Ticket(TrackedModel):
|
||||
message = models.TextField()
|
||||
email = models.EmailField(help_text="Contact email", blank=True)
|
||||
|
||||
status = RichChoiceField(
|
||||
# FSM-managed status field
|
||||
status = RichFSMField(
|
||||
choice_group="ticket_statuses",
|
||||
domain="support",
|
||||
max_length=20,
|
||||
@@ -38,20 +59,250 @@ class Ticket(TrackedModel):
|
||||
db_index=True,
|
||||
)
|
||||
|
||||
# Human-readable ticket number (e.g., TKT-00001)
|
||||
ticket_number = models.CharField(
|
||||
max_length=20,
|
||||
unique=True,
|
||||
blank=True,
|
||||
help_text="Human-readable ticket number",
|
||||
)
|
||||
|
||||
# Admin management fields
|
||||
admin_notes = models.TextField(
|
||||
blank=True,
|
||||
help_text="Internal notes for administrators",
|
||||
)
|
||||
assigned_to = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="assigned_tickets",
|
||||
help_text="Staff member assigned to this ticket",
|
||||
)
|
||||
|
||||
# Resolution tracking
|
||||
resolved_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the ticket was resolved",
|
||||
)
|
||||
resolved_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="resolved_tickets",
|
||||
help_text="Staff member who resolved this ticket",
|
||||
)
|
||||
|
||||
# Archive functionality
|
||||
archived_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the ticket was archived",
|
||||
)
|
||||
archived_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="archived_tickets",
|
||||
help_text="Staff member who archived this ticket",
|
||||
)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
verbose_name = "Ticket"
|
||||
verbose_name_plural = "Tickets"
|
||||
ordering = ["-created_at"]
|
||||
indexes = [
|
||||
models.Index(fields=["status", "created_at"]),
|
||||
models.Index(fields=["ticket_number"]),
|
||||
models.Index(fields=["archived_at"]),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"[{self.get_category_display()}] {self.subject}"
|
||||
return f"[{self.ticket_number}] {self.subject}"
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
# If user is set but email is empty, autofill from user
|
||||
if self.user and not self.email:
|
||||
self.email = self.user.email
|
||||
# Generate ticket number if not set
|
||||
if not self.ticket_number:
|
||||
self.ticket_number = self._generate_ticket_number()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def _generate_ticket_number(self):
|
||||
"""Generate a unique ticket number like TKT-00001."""
|
||||
import uuid
|
||||
|
||||
# Use last 8 chars of a UUID for uniqueness
|
||||
suffix = uuid.uuid4().hex[:8].upper()
|
||||
return f"TKT-{suffix}"
|
||||
|
||||
# =========================================================================
|
||||
# FSM Transition Methods
|
||||
# =========================================================================
|
||||
|
||||
def start_progress(self, user=None) -> None:
|
||||
"""
|
||||
Start working on this ticket.
|
||||
Transition: open -> in_progress
|
||||
|
||||
Args:
|
||||
user: The staff member starting work on the ticket
|
||||
"""
|
||||
if self.status != "open":
|
||||
raise ValidationError(
|
||||
f"Cannot start progress: current status is {self.status}, expected open"
|
||||
)
|
||||
|
||||
self.status = "in_progress"
|
||||
if user and user.is_staff:
|
||||
self.assigned_to = user
|
||||
self.save()
|
||||
|
||||
def close(self, user=None, notes: str = "") -> None:
|
||||
"""
|
||||
Close/resolve this ticket.
|
||||
Transition: in_progress -> closed
|
||||
|
||||
Args:
|
||||
user: The staff member closing the ticket
|
||||
notes: Optional resolution notes
|
||||
"""
|
||||
if self.status not in ("open", "in_progress"):
|
||||
raise ValidationError(
|
||||
f"Cannot close: current status is {self.status}, expected open or in_progress"
|
||||
)
|
||||
|
||||
self.status = "closed"
|
||||
self.resolved_at = timezone.now()
|
||||
if user:
|
||||
self.resolved_by = user
|
||||
if notes:
|
||||
self.admin_notes = f"{self.admin_notes}\n\n[CLOSED] {notes}".strip()
|
||||
self.save()
|
||||
|
||||
def reopen(self, user=None, reason: str = "") -> None:
|
||||
"""
|
||||
Reopen a closed ticket.
|
||||
Transition: closed -> open
|
||||
|
||||
Args:
|
||||
user: The staff member reopening the ticket
|
||||
reason: Reason for reopening
|
||||
"""
|
||||
if self.status != "closed":
|
||||
raise ValidationError(
|
||||
f"Cannot reopen: current status is {self.status}, expected closed"
|
||||
)
|
||||
|
||||
self.status = "open"
|
||||
self.resolved_at = None
|
||||
self.resolved_by = None
|
||||
if reason:
|
||||
self.admin_notes = f"{self.admin_notes}\n\n[REOPENED] {reason}".strip()
|
||||
self.save()
|
||||
|
||||
def archive(self, user=None, reason: str = "") -> None:
|
||||
"""
|
||||
Archive this ticket.
|
||||
Can be called from any status.
|
||||
|
||||
Args:
|
||||
user: The staff member archiving the ticket
|
||||
reason: Reason for archiving
|
||||
"""
|
||||
if self.archived_at:
|
||||
raise ValidationError("Ticket is already archived")
|
||||
|
||||
self.archived_at = timezone.now()
|
||||
if user:
|
||||
self.archived_by = user
|
||||
if reason:
|
||||
self.admin_notes = f"{self.admin_notes}\n\n[ARCHIVED] {reason}".strip()
|
||||
self.save()
|
||||
|
||||
def unarchive(self, user=None) -> None:
|
||||
"""
|
||||
Restore an archived ticket.
|
||||
|
||||
Args:
|
||||
user: The staff member unarchiving the ticket
|
||||
"""
|
||||
if not self.archived_at:
|
||||
raise ValidationError("Ticket is not archived")
|
||||
|
||||
self.archived_at = None
|
||||
self.archived_by = None
|
||||
self.admin_notes = f"{self.admin_notes}\n\n[UNARCHIVED] by {user.username if user else 'system'}".strip()
|
||||
self.save()
|
||||
|
||||
# =========================================================================
|
||||
# Computed Properties
|
||||
# =========================================================================
|
||||
|
||||
@property
|
||||
def thread_id(self):
|
||||
"""Return the ID for email thread association."""
|
||||
return str(self.id)
|
||||
|
||||
@property
|
||||
def response_count(self):
|
||||
"""Return number of email thread responses."""
|
||||
return self.email_threads.count()
|
||||
|
||||
@property
|
||||
def last_admin_response_at(self):
|
||||
"""Return timestamp of last admin response."""
|
||||
last_outbound = self.email_threads.filter(direction="outbound").order_by("-created_at").first()
|
||||
return last_outbound.created_at if last_outbound else None
|
||||
|
||||
|
||||
class EmailThread(TrackedModel):
|
||||
"""Email thread entries for ticket conversations."""
|
||||
|
||||
ticket = models.ForeignKey(
|
||||
Ticket,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="email_threads",
|
||||
help_text="Associated support ticket",
|
||||
)
|
||||
message_id = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
help_text="Email message ID for threading",
|
||||
)
|
||||
from_email = models.EmailField(help_text="Sender email address")
|
||||
to_email = models.EmailField(help_text="Recipient email address")
|
||||
subject = models.CharField(max_length=255)
|
||||
body_text = models.TextField(help_text="Plain text email body")
|
||||
|
||||
direction = RichChoiceField(
|
||||
choice_group="email_directions",
|
||||
domain="support",
|
||||
max_length=10,
|
||||
help_text="Whether email is inbound or outbound",
|
||||
)
|
||||
sent_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="sent_email_threads",
|
||||
help_text="Staff member who sent this email (for outbound)",
|
||||
)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
verbose_name = "Email Thread"
|
||||
verbose_name_plural = "Email Threads"
|
||||
ordering = ["created_at"]
|
||||
|
||||
def __str__(self):
|
||||
return f"[{self.direction}] {self.subject}"
|
||||
|
||||
|
||||
class Report(TrackedModel):
|
||||
"""
|
||||
@@ -61,20 +312,6 @@ class Report(TrackedModel):
|
||||
(parks, rides, reviews, etc.) for moderator review.
|
||||
"""
|
||||
|
||||
class ReportType(models.TextChoices):
|
||||
INACCURATE = "inaccurate", "Inaccurate Information"
|
||||
INAPPROPRIATE = "inappropriate", "Inappropriate Content"
|
||||
SPAM = "spam", "Spam"
|
||||
COPYRIGHT = "copyright", "Copyright Violation"
|
||||
DUPLICATE = "duplicate", "Duplicate Content"
|
||||
OTHER = "other", "Other"
|
||||
|
||||
class Status(models.TextChoices):
|
||||
PENDING = "pending", "Pending"
|
||||
INVESTIGATING = "investigating", "Investigating"
|
||||
RESOLVED = "resolved", "Resolved"
|
||||
DISMISSED = "dismissed", "Dismissed"
|
||||
|
||||
# Reporter (optional for anonymous reports)
|
||||
reporter = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
@@ -99,20 +336,22 @@ class Report(TrackedModel):
|
||||
# It's a convenience for accessing the related object
|
||||
# content_object = GenericForeignKey("content_type", "object_id")
|
||||
|
||||
# Report details
|
||||
report_type = models.CharField(
|
||||
# Report details - now using RichChoiceField
|
||||
report_type = RichChoiceField(
|
||||
choice_group="report_types",
|
||||
domain="support",
|
||||
max_length=20,
|
||||
choices=ReportType.choices,
|
||||
db_index=True,
|
||||
help_text="Type of issue being reported",
|
||||
)
|
||||
reason = models.TextField(
|
||||
help_text="Detailed description of the issue",
|
||||
)
|
||||
status = models.CharField(
|
||||
status = RichChoiceField(
|
||||
choice_group="report_statuses",
|
||||
domain="support",
|
||||
max_length=20,
|
||||
choices=Status.choices,
|
||||
default=Status.PENDING,
|
||||
default="pending",
|
||||
db_index=True,
|
||||
help_text="Current status of the report",
|
||||
)
|
||||
@@ -151,5 +390,6 @@ class Report(TrackedModel):
|
||||
|
||||
@property
|
||||
def is_resolved(self) -> bool:
|
||||
return self.status in (self.Status.RESOLVED, self.Status.DISMISSED)
|
||||
return self.status in ("resolved", "dismissed")
|
||||
|
||||
|
||||
|
||||
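Taken together, the Ticket transition methods above form a small state machine. A hedged usage sketch (assumes at least one open ticket and a staff account exist):

from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError

from apps.support.models import Ticket

staff = get_user_model().objects.filter(is_staff=True).first()
ticket = Ticket.objects.filter(status="open").first()

ticket.start_progress(user=staff)        # open -> in_progress, assigns the ticket
ticket.close(user=staff, notes="Fixed")  # in_progress -> closed, stamps resolved_at/by

try:
    ticket.start_progress(user=staff)    # invalid from "closed"
except ValidationError as exc:
    print(exc)

ticket.reopen(user=staff, reason="User replied")  # closed -> open again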
@@ -1,31 +1,126 @@
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.accounts.serializers import UserSerializer
|
||||
from apps.core.choices.serializers import RichChoiceSerializerField
|
||||
|
||||
from .models import Ticket
|
||||
from .models import EmailThread, Ticket
|
||||
|
||||
|
||||
class SubmitterProfileSerializer(serializers.Serializer):
|
||||
"""Nested serializer for submitter profile data."""
|
||||
|
||||
display_name = serializers.CharField(source="profile.display_name", allow_null=True)
|
||||
created_at = serializers.DateTimeField(source="date_joined", allow_null=True)
|
||||
coaster_count = serializers.IntegerField(source="profile.coaster_credit_count", allow_null=True, default=0)
|
||||
ride_count = serializers.IntegerField(source="profile.ride_credit_count", allow_null=True, default=0)
|
||||
park_count = serializers.IntegerField(source="profile.park_credit_count", allow_null=True, default=0)
|
||||
review_count = serializers.IntegerField(source="profile.review_count", allow_null=True, default=0)
|
||||
avatar_url = serializers.CharField(source="profile.avatar_url", allow_null=True)
|
||||
|
||||
|
||||
class TicketSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for Ticket model with full frontend compatibility."""
|
||||
|
||||
# User fields
|
||||
user = UserSerializer(read_only=True)
|
||||
user_id = serializers.UUIDField(source="user.id", read_only=True, allow_null=True)
|
||||
submitter_username = serializers.CharField(source="user.username", read_only=True, allow_null=True)
|
||||
submitter_reputation = serializers.SerializerMethodField()
|
||||
submitter_profile = serializers.SerializerMethodField()
|
||||
|
||||
# Choice display fields
|
||||
category_display = serializers.CharField(source="get_category_display", read_only=True)
|
||||
status_display = serializers.CharField(source="get_status_display", read_only=True)
|
||||
|
||||
# Computed fields
|
||||
thread_id = serializers.CharField(read_only=True)
|
||||
response_count = serializers.IntegerField(read_only=True)
|
||||
last_admin_response_at = serializers.DateTimeField(read_only=True, allow_null=True)
|
||||
|
||||
# Resolution tracking (alias resolved_by username)
|
||||
resolved_by_username = serializers.CharField(source="resolved_by.username", read_only=True, allow_null=True)
|
||||
|
||||
# Admin fields
|
||||
assigned_to_username = serializers.CharField(source="assigned_to.username", read_only=True, allow_null=True)
|
||||
archived_by_username = serializers.CharField(source="archived_by.username", read_only=True, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
model = Ticket
|
||||
fields = [
|
||||
# Core fields
|
||||
"id",
|
||||
"user",
|
||||
"category",
|
||||
"category_display",
|
||||
"subject",
|
||||
"message",
|
||||
"email",
|
||||
"status",
|
||||
"status_display",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
# User/submitter fields
|
||||
"user",
|
||||
"user_id",
|
||||
"submitter_username",
|
||||
"submitter_reputation",
|
||||
"submitter_profile",
|
||||
"name",
|
||||
"email",
|
||||
# Ticket content
|
||||
"subject",
|
||||
"message",
|
||||
"category",
|
||||
"category_display",
|
||||
# Status
|
||||
"status",
|
||||
"status_display",
|
||||
# Ticket number
|
||||
"ticket_number",
|
||||
# Admin management
|
||||
"admin_notes",
|
||||
"assigned_to",
|
||||
"assigned_to_username",
|
||||
# Resolution
|
||||
"resolved_at",
|
||||
"resolved_by",
|
||||
"resolved_by_username",
|
||||
# Thread info
|
||||
"thread_id",
|
||||
"last_admin_response_at",
|
||||
"response_count",
|
||||
# Archive
|
||||
"archived_at",
|
||||
"archived_by",
|
||||
"archived_by_username",
|
||||
]
|
||||
read_only_fields = ["id", "status", "created_at", "updated_at", "user"]
|
||||
read_only_fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"user",
|
||||
"user_id",
|
||||
"submitter_username",
|
||||
"submitter_reputation",
|
||||
"submitter_profile",
|
||||
"ticket_number",
|
||||
"thread_id",
|
||||
"response_count",
|
||||
"last_admin_response_at",
|
||||
]
|
||||
|
||||
def get_submitter_reputation(self, obj):
|
||||
"""Get the submitter's reputation score."""
|
||||
if obj.user and hasattr(obj.user, "profile"):
|
||||
return getattr(obj.user.profile, "reputation", 0)
|
||||
return None
|
||||
|
||||
def get_submitter_profile(self, obj):
|
||||
"""Get a subset of profile data for display."""
|
||||
if not obj.user or not hasattr(obj.user, "profile"):
|
||||
return None
|
||||
profile = obj.user.profile
|
||||
return {
|
||||
"display_name": getattr(profile, "display_name", None),
|
||||
"created_at": obj.user.date_joined.isoformat() if obj.user.date_joined else None,
|
||||
"coaster_count": getattr(profile, "coaster_credit_count", 0),
|
||||
"ride_count": getattr(profile, "ride_credit_count", 0),
|
||||
"park_count": getattr(profile, "park_credit_count", 0),
|
||||
"review_count": getattr(profile, "review_count", 0),
|
||||
"avatar_url": getattr(profile, "avatar_url", None),
|
||||
}
|
||||
|
||||
def validate(self, data):
|
||||
# Ensure email is provided if user is anonymous
|
||||
@@ -35,6 +130,31 @@ class TicketSerializer(serializers.ModelSerializer):
|
||||
return data
|
||||
|
||||
|
||||
class EmailThreadSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for EmailThread model."""
|
||||
|
||||
# NOTE: The frontend expects a submission_id field, so we expose the ticket's id under that name

|
||||
submission_id = serializers.UUIDField(source="ticket.id", read_only=True)
|
||||
sent_by_username = serializers.CharField(source="sent_by.username", read_only=True, allow_null=True)
|
||||
|
||||
class Meta:
|
||||
model = EmailThread
|
||||
fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
"submission_id",
|
||||
"message_id",
|
||||
"from_email",
|
||||
"to_email",
|
||||
"subject",
|
||||
"body_text",
|
||||
"direction",
|
||||
"sent_by",
|
||||
"sent_by_username",
|
||||
]
|
||||
read_only_fields = ["id", "created_at", "submission_id"]
|
||||
|
||||
|
||||
class ReportSerializer(serializers.ModelSerializer):
|
||||
"""Serializer for Report model."""
|
||||
|
||||
@@ -134,14 +254,141 @@ class ReportCreateSerializer(serializers.ModelSerializer):
|
||||
class ReportResolveSerializer(serializers.Serializer):
|
||||
"""Serializer for resolving reports."""
|
||||
|
||||
from .models import Report
|
||||
|
||||
status = serializers.ChoiceField(
|
||||
choices=[
|
||||
(Report.Status.RESOLVED, "Resolved"),
|
||||
(Report.Status.DISMISSED, "Dismissed"),
|
||||
],
|
||||
default=Report.Status.RESOLVED,
|
||||
# Use RichChoiceSerializerField with only resolution statuses
|
||||
status = RichChoiceSerializerField(
|
||||
choice_group="report_statuses",
|
||||
domain="support",
|
||||
default="resolved",
|
||||
)
|
||||
notes = serializers.CharField(required=False, allow_blank=True)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Support Ticket Action Serializers
|
||||
# =============================================================================
|
||||
|
||||
|
||||
class SendReplySerializer(serializers.Serializer):
|
||||
"""
|
||||
Input serializer for send_reply action.
|
||||
|
||||
Validates the request body for sending an email reply to a ticket.
|
||||
Supports both snake_case and camelCase field names for frontend compatibility.
|
||||
"""
|
||||
|
||||
# Primary fields (required=False because we validate manually to support camelCase)
|
||||
reply_body = serializers.CharField(
|
||||
required=False,
|
||||
min_length=1,
|
||||
max_length=50000,
|
||||
help_text="The body text of the email reply",
|
||||
)
|
||||
new_status = RichChoiceSerializerField(
|
||||
choice_group="ticket_statuses",
|
||||
domain="support",
|
||||
required=False,
|
||||
allow_null=True,
|
||||
help_text="Optionally update the ticket status after sending reply",
|
||||
)
|
||||
|
||||
# camelCase aliases for frontend compatibility
|
||||
replyBody = serializers.CharField(required=False, write_only=True)
|
||||
newStatus = serializers.CharField(required=False, write_only=True, allow_null=True)
|
||||
|
||||
def validate(self, data: dict) -> dict:
|
||||
"""Normalize camelCase to snake_case and validate required fields."""
|
||||
# Normalize camelCase to snake_case
|
||||
if "replyBody" in data and data["replyBody"]:
|
||||
data["reply_body"] = data.pop("replyBody")
|
||||
elif "replyBody" in data:
|
||||
data.pop("replyBody")
|
||||
|
||||
if "newStatus" in data and data["newStatus"]:
|
||||
data["new_status"] = data.pop("newStatus")
|
||||
elif "newStatus" in data:
|
||||
data.pop("newStatus")
|
||||
|
||||
# Validate required fields
|
||||
if not data.get("reply_body"):
|
||||
raise serializers.ValidationError({"reply_body": "This field is required."})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class SendReplyResponseSerializer(serializers.Serializer):
|
||||
"""Response serializer for send_reply action."""
|
||||
|
||||
detail = serializers.CharField()
|
||||
thread_id = serializers.UUIDField()
|
||||
ticket_number = serializers.CharField()
|
||||
|
||||
|
||||
class MergeTicketsSerializer(serializers.Serializer):
|
||||
"""
|
||||
Input serializer for merge tickets action.
|
||||
|
||||
Validates the request body for merging multiple tickets into one.
|
||||
Supports both snake_case and camelCase field names for frontend compatibility.
|
||||
"""
|
||||
|
||||
# Primary fields (required=False because we validate manually to support camelCase)
|
||||
primary_ticket_id = serializers.UUIDField(
|
||||
required=False,
|
||||
help_text="UUID of the primary ticket that will absorb others",
|
||||
)
|
||||
merge_ticket_ids = serializers.ListField(
|
||||
child=serializers.UUIDField(),
|
||||
required=False,
|
||||
min_length=1,
|
||||
help_text="List of ticket UUIDs to merge into the primary",
|
||||
)
|
||||
merge_reason = serializers.CharField(
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
max_length=500,
|
||||
help_text="Optional reason for the merge",
|
||||
)
|
||||
|
||||
# camelCase aliases for frontend compatibility
|
||||
primaryTicketId = serializers.UUIDField(required=False, write_only=True)
|
||||
mergeTicketIds = serializers.ListField(
|
||||
child=serializers.UUIDField(),
|
||||
required=False,
|
||||
write_only=True,
|
||||
)
|
||||
mergeReason = serializers.CharField(required=False, write_only=True, allow_blank=True)
|
||||
|
||||
def validate(self, data: dict) -> dict:
|
||||
"""Normalize camelCase to snake_case and validate required fields."""
|
||||
# Normalize camelCase to snake_case
|
||||
if "primaryTicketId" in data and data["primaryTicketId"]:
|
||||
data["primary_ticket_id"] = data.pop("primaryTicketId")
|
||||
elif "primaryTicketId" in data:
|
||||
data.pop("primaryTicketId")
|
||||
|
||||
if "mergeTicketIds" in data and data["mergeTicketIds"]:
|
||||
data["merge_ticket_ids"] = data.pop("mergeTicketIds")
|
||||
elif "mergeTicketIds" in data:
|
||||
data.pop("mergeTicketIds")
|
||||
|
||||
if "mergeReason" in data and data["mergeReason"]:
|
||||
data["merge_reason"] = data.pop("mergeReason")
|
||||
elif "mergeReason" in data:
|
||||
data.pop("mergeReason")
|
||||
|
||||
# Validate required fields
|
||||
if not data.get("primary_ticket_id"):
|
||||
raise serializers.ValidationError({"primary_ticket_id": "This field is required."})
|
||||
if not data.get("merge_ticket_ids"):
|
||||
raise serializers.ValidationError({"merge_ticket_ids": "This field is required."})
|
||||
|
||||
return data
|
||||
|
||||
|
||||
class MergeTicketsResponseSerializer(serializers.Serializer):
|
||||
"""Response serializer for merge tickets action."""
|
||||
|
||||
detail = serializers.CharField()
|
||||
primaryTicketNumber = serializers.CharField()
|
||||
mergedCount = serializers.IntegerField()
|
||||
threadsConsolidated = serializers.IntegerField()
|
||||
|
||||
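A quick sketch of the camelCase normalization in SendReplySerializer (illustrative only, using the serializer as defined above):

serializer = SendReplySerializer(data={"replyBody": "Thanks, this is fixed now.", "newStatus": "closed"})
serializer.is_valid(raise_exception=True)
print(serializer.validated_data["reply_body"])      # "Thanks, this is fixed now."
print(serializer.validated_data.get("new_status"))  # "closed"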
@@ -1,13 +1,13 @@
|
||||
from django.urls import include, path
|
||||
from rest_framework.routers import DefaultRouter
|
||||
|
||||
from .views import ReportViewSet, TicketViewSet
|
||||
from .views import EmailThreadViewSet, ReportViewSet, TicketViewSet
|
||||
|
||||
router = DefaultRouter()
|
||||
router.register(r"tickets", TicketViewSet, basename="ticket")
|
||||
router.register(r"reports", ReportViewSet, basename="report")
|
||||
router.register(r"threads", EmailThreadViewSet, basename="email-thread")
|
||||
|
||||
urlpatterns = [
|
||||
path("", include(router.urls)),
|
||||
]
|
||||
|
||||
|
||||
@@ -4,8 +4,9 @@ from rest_framework import filters, permissions, status, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from .models import Report, Ticket
|
||||
from .models import EmailThread, Report, Ticket
|
||||
from .serializers import (
|
||||
EmailThreadSerializer,
|
||||
ReportCreateSerializer,
|
||||
ReportResolveSerializer,
|
||||
ReportSerializer,
|
||||
@@ -23,17 +24,26 @@ class TicketViewSet(viewsets.ModelViewSet):
|
||||
queryset = Ticket.objects.all()
|
||||
serializer_class = TicketSerializer
|
||||
permission_classes = [permissions.AllowAny] # We handle granular perms in get_queryset/perform_create
|
||||
filter_backends = [DjangoFilterBackend, filters.OrderingFilter]
|
||||
filterset_fields = ["status", "category"]
|
||||
ordering_fields = ["created_at", "status"]
|
||||
filter_backends = [DjangoFilterBackend, filters.OrderingFilter, filters.SearchFilter]
|
||||
filterset_fields = ["status", "category", "archived_at"]
|
||||
search_fields = ["name", "email", "subject", "ticket_number"]
|
||||
ordering_fields = ["created_at", "status", "ticket_number"]
|
||||
ordering = ["-created_at"]
|
||||
|
||||
def get_queryset(self):
|
||||
user = self.request.user
|
||||
qs = Ticket.objects.select_related(
|
||||
"user",
|
||||
"user__profile",
|
||||
"assigned_to",
|
||||
"resolved_by",
|
||||
"archived_by",
|
||||
).prefetch_related("email_threads")
|
||||
|
||||
if user.is_staff:
|
||||
return Ticket.objects.all()
|
||||
return qs
|
||||
if user.is_authenticated:
|
||||
return Ticket.objects.filter(user=user)
|
||||
return qs.filter(user=user)
|
||||
return Ticket.objects.none() # Guests can't list tickets
|
||||
|
||||
def perform_create(self, serializer):
|
||||
@@ -42,6 +52,337 @@ class TicketViewSet(viewsets.ModelViewSet):
|
||||
else:
|
||||
serializer.save()
|
||||
|
||||
@action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
|
||||
def send_reply(self, request, pk=None):
|
||||
"""
|
||||
Send an email reply to the ticket submitter.
|
||||
|
||||
Creates an EmailThread record and sends the email via ForwardEmail.
|
||||
Optionally updates the ticket status.
|
||||
"""
|
||||
from typing import Any
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.sites.models import Site
|
||||
|
||||
from django_forwardemail.services import EmailService
|
||||
|
||||
from .serializers import SendReplySerializer
|
||||
|
||||
# Validate input
|
||||
serializer = SendReplySerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
validated_data: dict[str, Any] = serializer.validated_data
|
||||
|
||||
ticket = self.get_object()
|
||||
|
||||
if not ticket.email:
|
||||
return Response(
|
||||
{"detail": "Ticket has no email address to reply to"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
reply_body: str = validated_data["reply_body"]
|
||||
new_status: str | None = validated_data.get("new_status")
|
||||
|
||||
# Build email subject with ticket number for threading
|
||||
subject = f"Re: [{ticket.ticket_number}] {ticket.subject}"
|
||||
|
||||
# Get the support from email with proper formatting
|
||||
from_email: str = getattr(settings, "DEFAULT_FROM_EMAIL", "ThrillWiki Support <support@thrillwiki.com>")
|
||||
|
||||
try:
|
||||
# Get the current site for ForwardEmail configuration
|
||||
# ForwardEmail requires a Site object, not RequestSite
|
||||
try:
|
||||
site = Site.objects.get_current()
|
||||
except Site.DoesNotExist:
|
||||
site = Site.objects.first()
|
||||
if site is None:
|
||||
return Response(
|
||||
{"detail": "No site configured for email sending"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
# Send email via ForwardEmail service
|
||||
EmailService.send_email(
|
||||
to=ticket.email,
|
||||
subject=subject,
|
||||
text=reply_body,
|
||||
from_email=from_email,
|
||||
reply_to=from_email, # Ensure replies come back to support
|
||||
site=site,
|
||||
)
|
||||
|
||||
# Create EmailThread record for audit trail
|
||||
email_thread = EmailThread.objects.create(
|
||||
ticket=ticket,
|
||||
from_email=from_email,
|
||||
to_email=ticket.email,
|
||||
subject=subject,
|
||||
body_text=reply_body,
|
||||
direction="outbound",
|
||||
sent_by=request.user,
|
||||
)
|
||||
|
||||
# Update ticket status if provided
|
||||
if new_status and new_status != ticket.status:
|
||||
ticket.status = new_status
|
||||
if new_status in ("resolved", "closed"):
|
||||
ticket.resolved_at = timezone.now()
|
||||
ticket.resolved_by = request.user
|
||||
ticket.save()
|
||||
|
||||
return Response({
|
||||
"detail": "Reply sent successfully",
|
||||
"thread_id": str(email_thread.id),
|
||||
"ticket_number": ticket.ticket_number,
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
# Log the error for debugging
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.exception("Failed to send ticket reply email")
|
||||
|
||||
return Response(
|
||||
{"detail": f"Failed to send email: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
)
|
||||
|
||||
@action(detail=False, methods=["post"], permission_classes=[permissions.IsAdminUser])
|
||||
def merge(self, request):
|
||||
"""
|
||||
Merge multiple tickets into a primary ticket.
|
||||
|
||||
Moves all EmailThread records to the primary ticket and archives merged tickets.
|
||||
"""
|
||||
from typing import Any
|
||||
from uuid import UUID
|
||||
|
||||
from .serializers import MergeTicketsSerializer
|
||||
|
||||
# Validate input
|
||||
serializer = MergeTicketsSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
validated_data: dict[str, Any] = serializer.validated_data
|
||||
|
||||
primary_id: UUID = validated_data["primary_ticket_id"]
|
||||
merge_ids: list[UUID] = validated_data["merge_ticket_ids"]
|
||||
reason: str = validated_data.get("merge_reason", "")
|
||||
|
||||
# Get primary ticket
|
||||
try:
|
||||
primary_ticket = Ticket.objects.get(pk=primary_id)
|
||||
except Ticket.DoesNotExist:
|
||||
return Response(
|
||||
{"detail": f"Primary ticket {primary_id} not found"},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
# Get tickets to merge (exclude the primary if it was accidentally included)
|
||||
tickets_to_merge = Ticket.objects.filter(pk__in=merge_ids).exclude(pk=primary_id)
|
||||
if tickets_to_merge.count() == 0:
|
||||
return Response(
|
||||
{"detail": "No valid tickets found to merge"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
merged_count = 0
|
||||
threads_consolidated = 0
|
||||
merged_ticket_numbers: list[str] = []
|
||||
|
||||
for ticket in tickets_to_merge:
|
||||
# Move all email threads to primary ticket
|
||||
thread_count = EmailThread.objects.filter(ticket=ticket).update(ticket=primary_ticket)
|
||||
threads_consolidated += thread_count
|
||||
|
||||
# Record the merged ticket number
|
||||
merged_ticket_numbers.append(ticket.ticket_number)
|
||||
|
||||
# Archive the merged ticket with merge history
|
||||
ticket.archived_at = timezone.now()
|
||||
ticket.archived_by = request.user
|
||||
existing_notes = ticket.admin_notes or ""
|
||||
ticket.admin_notes = (
|
||||
f"{existing_notes}\n\n"
|
||||
f"[MERGED] Merged into {primary_ticket.ticket_number} by {request.user.username} "
|
||||
f"on {timezone.now().isoformat()}. Reason: {reason or 'Not specified'}"
|
||||
).strip()
|
||||
ticket.save()
|
||||
|
||||
merged_count += 1
|
||||
|
||||
# Update primary ticket with merge history
|
||||
existing_merged = primary_ticket.admin_notes or ""
|
||||
merge_note = (
|
||||
f"\n\n[MERGE HISTORY] Absorbed tickets: {', '.join(merged_ticket_numbers)} "
|
||||
f"({threads_consolidated} threads consolidated) by {request.user.username}"
|
||||
)
|
||||
primary_ticket.admin_notes = existing_merged + merge_note
|
||||
primary_ticket.save()
|
||||
|
||||
return Response({
|
||||
"detail": "Tickets merged successfully",
|
||||
"primaryTicketNumber": primary_ticket.ticket_number,
|
||||
"mergedCount": merged_count,
|
||||
"threadsConsolidated": threads_consolidated,
|
||||
})
|
||||
|
||||
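For reference, a client call against the merge action might look like the sketch below. The payload keys come from MergeTicketsSerializer as used above and the response keys from the Response dict; the URL prefix and the token-style auth header are placeholders, since neither the router registration nor the auth scheme is part of this diff.

    # Hypothetical admin request to the merge action (route and auth are assumptions).
    import requests

    resp = requests.post(
        "https://example.com/api/tickets/merge/",  # assumed route; depends on router registration
        json={
            "primary_ticket_id": "<uuid-of-ticket-to-keep>",
            "merge_ticket_ids": ["<uuid-1>", "<uuid-2>"],  # these get absorbed and archived
            "merge_reason": "Duplicate reports of the same outage",
        },
        headers={"Authorization": "Token <admin-token>"},  # admin-only endpoint
    )
    # On success: {"detail": ..., "primaryTicketNumber": ..., "mergedCount": 2, "threadsConsolidated": N}
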
    # =========================================================================
    # FSM Transition Endpoints
    # =========================================================================

    @action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
    def start_progress(self, request, pk=None):
        """
        Start working on a ticket.
        Transition: open -> in_progress
        """
        from django.core.exceptions import ValidationError as DjangoValidationError

        ticket = self.get_object()
        try:
            ticket.start_progress(user=request.user)
            return Response({
                "detail": "Ticket marked as in progress",
                "ticketNumber": ticket.ticket_number,
                "status": ticket.status,
            })
        except DjangoValidationError as e:
            return Response(
                {"detail": str(e.message if hasattr(e, 'message') else e)},
                status=status.HTTP_400_BAD_REQUEST,
            )

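start_progress and the endpoints that follow all delegate to transition methods on the Ticket model and convert a DjangoValidationError into a 400 response. The model side is not part of this diff; below is a minimal sketch, assuming the transition methods guard on the current status and raise ValidationError when the move is illegal.

    # Minimal sketch of a guarded transition; the real Ticket model and its
    # StateMachineMixin-based methods live elsewhere in the codebase.
    from django.core.exceptions import ValidationError

    class TicketSketch:
        def __init__(self):
            self.status = "open"

        def start_progress(self, user):
            # Guard: only an open ticket may move to in_progress.
            if self.status != "open":
                raise ValidationError(f"Cannot start progress from status '{self.status}'")
            self.status = "in_progress"
            # A real model would also record the acting user and call self.save().
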
    @action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser], url_path="close")
    def close_ticket(self, request, pk=None):
        """
        Close/resolve a ticket.
        Transition: open|in_progress -> closed
        """
        from django.core.exceptions import ValidationError as DjangoValidationError

        ticket = self.get_object()
        notes = request.data.get("notes", "")

        try:
            ticket.close(user=request.user, notes=notes)
            return Response({
                "detail": "Ticket closed successfully",
                "ticketNumber": ticket.ticket_number,
                "status": ticket.status,
                "resolvedAt": ticket.resolved_at.isoformat() if ticket.resolved_at else None,
            })
        except DjangoValidationError as e:
            return Response(
                {"detail": str(e.message if hasattr(e, 'message') else e)},
                status=status.HTTP_400_BAD_REQUEST,
            )

    @action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
    def reopen(self, request, pk=None):
        """
        Reopen a closed ticket.
        Transition: closed -> open
        """
        from django.core.exceptions import ValidationError as DjangoValidationError

        ticket = self.get_object()
        reason = request.data.get("reason", "")

        try:
            ticket.reopen(user=request.user, reason=reason)
            return Response({
                "detail": "Ticket reopened",
                "ticketNumber": ticket.ticket_number,
                "status": ticket.status,
            })
        except DjangoValidationError as e:
            return Response(
                {"detail": str(e.message if hasattr(e, 'message') else e)},
                status=status.HTTP_400_BAD_REQUEST,
            )

    @action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
    def archive(self, request, pk=None):
        """Archive a ticket."""
        from django.core.exceptions import ValidationError as DjangoValidationError

        ticket = self.get_object()
        reason = request.data.get("reason", "")

        try:
            ticket.archive(user=request.user, reason=reason)
            return Response({
                "detail": "Ticket archived",
                "ticketNumber": ticket.ticket_number,
                "archivedAt": ticket.archived_at.isoformat() if ticket.archived_at else None,
            })
        except DjangoValidationError as e:
            return Response(
                {"detail": str(e.message if hasattr(e, 'message') else e)},
                status=status.HTTP_400_BAD_REQUEST,
            )

    @action(detail=True, methods=["post"], permission_classes=[permissions.IsAdminUser])
    def unarchive(self, request, pk=None):
        """Restore an archived ticket."""
        from django.core.exceptions import ValidationError as DjangoValidationError

        ticket = self.get_object()

        try:
            ticket.unarchive(user=request.user)
            return Response({
                "detail": "Ticket unarchived",
                "ticketNumber": ticket.ticket_number,
            })
        except DjangoValidationError as e:
            return Response(
                {"detail": str(e.message if hasattr(e, 'message') else e)},
                status=status.HTTP_400_BAD_REQUEST,
            )

    @action(detail=True, methods=["get"], permission_classes=[permissions.IsAdminUser])
    def available_transitions(self, request, pk=None):
        """
        Get available transitions for a ticket.
        Uses StateMachineMixin to return FSM-aware transition metadata.
        """
        ticket = self.get_object()
        transitions = ticket.get_available_user_transitions(request.user)

        return Response({
            "ticketNumber": ticket.ticket_number,
            "currentStatus": ticket.status,
            "currentStatusDisplay": ticket.get_status_display(),
            "availableTransitions": transitions,
        })

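For illustration, an available_transitions response could look like the dict below. The top-level keys mirror the Response above; the example values and the shape of each transition entry are assumptions, since get_available_user_transitions comes from StateMachineMixin and is not shown in this diff.

    # Hypothetical payload for an open ticket (transition-entry shape is assumed).
    example_response = {
        "ticketNumber": "TW-1042",
        "currentStatus": "open",
        "currentStatusDisplay": "Open",
        "availableTransitions": [
            {"name": "start_progress", "target": "in_progress"},
            {"name": "close", "target": "closed"},
        ],
    }
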
class EmailThreadViewSet(viewsets.ModelViewSet):
    """
    ViewSet for email thread entries.
    Staff only for full access.
    """

    queryset = EmailThread.objects.select_related("ticket", "sent_by").all()
    serializer_class = EmailThreadSerializer
    permission_classes = [permissions.IsAdminUser]
    filter_backends = [DjangoFilterBackend, filters.OrderingFilter]
    filterset_fields = ["ticket", "direction"]
    ordering_fields = ["created_at"]
    ordering = ["created_at"]

    def get_queryset(self):
        # Support filtering by submission_id (which is ticket_id in our model)
        qs = super().get_queryset()
        submission_id = self.request.query_params.get("submission_id")
        if submission_id:
            qs = qs.filter(ticket_id=submission_id)
        return qs

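As a usage sketch, the submission_id filter above can be exercised as shown below; the route prefix and auth header are placeholders, and select_related("ticket", "sent_by") on the base queryset keeps each page of results to a single query.

    # Hypothetical staff request listing the email thread for one ticket
    # (route prefix and auth are assumptions).
    import requests

    threads = requests.get(
        "https://example.com/api/email-threads/",
        params={"submission_id": "<ticket-uuid>", "ordering": "created_at"},
        headers={"Authorization": "Token <admin-token>"},
    ).json()
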
class ReportViewSet(viewsets.ModelViewSet):
    """

@@ -73,11 +73,11 @@ CACHE_MIDDLEWARE_KEY_PREFIX = "thrillwiki_dev"
# =============================================================================
# Use ForwardEmail for actual sending, or console for debugging

# Console backend for debugging (uncomment to use):
# EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
# Console backend for debugging (recommended for local development):
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"

# ForwardEmail backend for actual email sending:
EMAIL_BACKEND = "django_forwardemail.backends.ForwardEmailBackend"
# ForwardEmail backend for actual email sending (uncomment for production testing):
# EMAIL_BACKEND = "django_forwardemail.backends.ForwardEmailBackend"

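With the console backend active, outgoing mail is printed to the runserver terminal instead of being delivered, which makes it easy to inspect outgoing messages without a ForwardEmail API key. A quick check from `python manage.py shell` (addresses are placeholders):

    # Run inside `python manage.py shell`; with the console backend the message
    # is written to stdout rather than sent.
    from django.core.mail import send_mail

    send_mail(
        subject="Console backend smoke test",
        message="If this shows up in the terminal, the console backend is active.",
        from_email="ThrillWiki Support <support@thrillwiki.com>",  # same default the ticket reply view falls back to
        recipient_list=["dev@example.com"],  # placeholder address
    )
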
# =============================================================================
# Security Settings (Relaxed for Development)
@@ -87,6 +87,9 @@ SECURE_SSL_REDIRECT = False
SESSION_COOKIE_SECURE = False
CSRF_COOKIE_SECURE = False

# Email verification - optional for local development to allow passkey login
ACCOUNT_EMAIL_VERIFICATION = "optional"

# =============================================================================
# Development Apps
# =============================================================================

@@ -101,6 +101,7 @@ CORS_ALLOW_HEADERS = [
    "x-csrftoken",
    "x-requested-with",
    "x-api-version",
    "x-session-token",  # Required for allauth headless app client
]

# HTTP methods allowed for CORS requests

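The x-session-token entry matters because allauth's headless "app" client authenticates with a session token header rather than a cookie, so a frontend running on another origin must be allowed to send it. A hedged sketch of such a call; the endpoint path is a placeholder and the token would come from an earlier headless login response:

    # Hypothetical cross-origin call from an app client (path and token are placeholders).
    import requests

    session_info = requests.get(
        "https://example.com/_allauth/app/v1/auth/session",  # assumed allauth headless route
        headers={
            "X-Session-Token": "<token from login response>",
            "X-API-Version": "1",  # also allow-listed above
        },
    ).json()
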
174  scripts/lint_choices.py  Executable file
@@ -0,0 +1,174 @@
#!/usr/bin/env python
"""
RichChoiceField Enforcement Lint Script

This script checks for prohibited choice patterns in the codebase.
Exit code 0 = no violations found, exit code 1 = violations found.

Usage:
    python scripts/lint_choices.py
    python scripts/lint_choices.py --fix  # Show fix suggestions

Add to CI:
    python scripts/lint_choices.py || exit 1
"""

import argparse
import re
import sys
from pathlib import Path

# Patterns to detect prohibited choice usage
PROHIBITED_PATTERNS = [
    # TextChoices / IntegerChoices class definitions
    (r'class\s+\w+\s*\(\s*models\.(TextChoices|IntegerChoices)\s*\)',
     'models.TextChoices/IntegerChoices class definition'),

    # Inline tuple choices in CharField
    (r'choices\s*=\s*\[\s*\(\s*["\']',
     'Inline tuple choices'),

    # Direct reference to .choices attribute
    (r'choices\s*=\s*\w+\.choices',
     'Reference to inner TextChoices.choices'),
]

# Directories/files to exclude
EXCLUDE_PATTERNS = [
    '*/migrations/*',
    '*/.venv/*',
    '*/node_modules/*',
    '*/__pycache__/*',
    '*.pyc',
    'lint_choices.py',  # Exclude this script
]

# Files allowed to define TextChoices (infrastructure only)
ALLOWED_EXCEPTION_FILES = [
    # Core choice infrastructure files can reference these patterns
    'apps/core/choices/',
]


def should_exclude(path: Path) -> bool:
    """Check if path should be excluded from linting."""
    path_str = str(path)
    for pattern in EXCLUDE_PATTERNS:
        if pattern.startswith('*/'):
            if pattern[2:].rstrip('/*') in path_str:
                return True
        elif pattern.endswith('/*'):
            if path_str.startswith(pattern[:-2]):
                return True
        elif pattern in path_str:
            return True
    return False


def is_exception_file(path: Path) -> bool:
    """Check if file is an allowed exception."""
    path_str = str(path)
    for exception in ALLOWED_EXCEPTION_FILES:
        if exception in path_str:
            return True
    return False


def scan_file(filepath: Path) -> list[tuple[int, str, str]]:
    """Scan a file for prohibited patterns. Returns list of (line_num, line, pattern_name)."""
    violations = []

    if should_exclude(filepath) or is_exception_file(filepath):
        return violations

    try:
        content = filepath.read_text(encoding='utf-8')
        lines = content.split('\n')

        for i, line in enumerate(lines, 1):
            # Skip comments
            stripped = line.strip()
            if stripped.startswith('#'):
                continue

            for pattern, description in PROHIBITED_PATTERNS:
                if re.search(pattern, line):
                    violations.append((i, line.strip(), description))
                    break  # Only report one violation per line

    except Exception as e:
        print(f"Warning: Could not read {filepath}: {e}", file=sys.stderr)

    return violations


def scan_directory(root_dir: Path) -> dict[Path, list]:
    """Scan all Python files in directory for violations."""
    all_violations = {}

    for filepath in root_dir.rglob('*.py'):
        violations = scan_file(filepath)
        if violations:
            all_violations[filepath] = violations

    return all_violations


def print_violations(violations: dict, show_fix: bool = False):
    """Print violations in a readable format."""
    total = 0

    for filepath, file_violations in sorted(violations.items()):
        print(f"\n\033[1;31m{filepath}\033[0m")
        for line_num, line_content, description in file_violations:
            print(f" Line {line_num}: [{description}]")
            print(f" {line_content[:80]}{'...' if len(line_content) > 80 else ''}")
            total += 1

            if show_fix:
                print(f" \033[1;33mFix:\033[0m Use RichChoiceField(choice_group='...', domain='...')")

    return total


def main():
    parser = argparse.ArgumentParser(description='Lint for prohibited choice patterns')
    parser.add_argument('--fix', action='store_true', help='Show fix suggestions')
    parser.add_argument('path', nargs='?', default='apps', help='Path to scan (default: apps)')
    args = parser.parse_args()

    # Find backend directory
    script_dir = Path(__file__).parent
    backend_dir = script_dir.parent
    if (backend_dir / 'apps').exists():
        scan_path = backend_dir / args.path
    elif (backend_dir / 'backend' / 'apps').exists():
        scan_path = backend_dir / 'backend' / args.path
    else:
        print("Error: Could not find apps directory", file=sys.stderr)
        sys.exit(1)

    print(f"Scanning {scan_path} for prohibited choice patterns...")
    print("=" * 60)

    violations = scan_directory(scan_path)

    if violations:
        total = print_violations(violations, show_fix=args.fix)
        print("\n" + "=" * 60)
        print(f"\033[1;31mFound {total} violation(s) in {len(violations)} file(s)\033[0m")
        print("\nProhibited patterns:")
        print("  - models.TextChoices / models.IntegerChoices classes")
        print("  - Inline choices=[(value, label), ...]")
        print("  - References to InnerClass.choices")
        print("\nRequired pattern:")
        print("  RichChoiceField(choice_group='group_name', domain='domain_name')")
        print("\nSee: .agent/workflows/enforce-richchoice.md for migration guide")
        sys.exit(1)
    else:
        print("\n\033[1;32m✓ No prohibited choice patterns found!\033[0m")
        sys.exit(0)


if __name__ == '__main__':
    main()
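As a quick sanity check of what the script flags, the first prohibited regex can be exercised directly; the choice_group/domain values in the compliant line are invented placeholders taken only from the script's own fix hint.

    # Demonstrates the TextChoices regex from PROHIBITED_PATTERNS (values are illustrative).
    import re

    pattern = r'class\s+\w+\s*\(\s*models\.(TextChoices|IntegerChoices)\s*\)'
    flagged = "class TicketStatus(models.TextChoices):"
    compliant = "status = RichChoiceField(choice_group='ticket_status', domain='support')"

    print(bool(re.search(pattern, flagged)))    # True  - reported as a violation
    print(bool(re.search(pattern, compliant)))  # False - passes the lint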