feat: Implement a new notifications application, add admin API views for dashboard metrics, introduce scheduled tasks, and update API routing and project configurations.

This commit is contained in:
pacnpal
2026-01-05 09:50:00 -05:00
parent 1c6e219662
commit a801813dcf
27 changed files with 3829 additions and 131 deletions

View File

@@ -0,0 +1 @@
# Admin API module

View File

@@ -0,0 +1,37 @@
"""
URL routes for the admin dashboard API.

Each route maps one dashboard widget to its backing APIView in ``views``.
"""
from django.urls import path

from . import views

app_name = "admin_api"

urlpatterns = [
    # OSM cache statistics widget.
    path(
        "osm-usage-stats/",
        views.OSMUsageStatsView.as_view(),
        name="osm_usage_stats",
    ),
    # Rate limiting metrics widget.
    path(
        "rate-limit-metrics/",
        views.RateLimitMetricsView.as_view(),
        name="rate_limit_metrics",
    ),
    # Admin CRUD operations on arbitrary entities.
    path(
        "database-manager/",
        views.DatabaseManagerView.as_view(),
        name="database_manager",
    ),
    # Read-only Celery scheduled-task status.
    path(
        "tasks/status/",
        views.CeleryTaskStatusView.as_view(),
        name="task_status",
    ),
]

View File

@@ -0,0 +1,710 @@
"""
Admin API views for dashboard functionality.
These views provide endpoints for:
- OSM cache statistics
- Rate limiting metrics
- Database manager operations
- Celery task status
"""
import logging
from datetime import timedelta
from typing import Any
from django.apps import apps
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.db import transaction
from django.db.models import Count, Q
from django.utils import timezone
from rest_framework import status
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.core.utils import capture_and_log
logger = logging.getLogger(__name__)
User = get_user_model()
class OSMUsageStatsView(APIView):
    """
    GET /admin/osm-usage-stats/

    Serve OSM/location cache usage statistics for the admin dashboard.
    Results are memoized in the Django cache for five minutes.
    """

    permission_classes = [IsAdminUser]

    def get(self, request):
        """Return OSM/location cache usage statistics."""
        try:
            # Serve the memoized payload when one is available.
            memoized = cache.get("osm_usage_stats")
            if memoized:
                return Response(memoized)

            window_start = timezone.now() - timedelta(hours=24)

            # The maps app may not ship the cache model in every deployment.
            try:
                LocationQueryCache = apps.get_model("maps", "LocationQueryCache")
            except LookupError:
                LocationQueryCache = None

            if LocationQueryCache is not None:
                total = LocationQueryCache.objects.count()
                recent = LocationQueryCache.objects.filter(
                    created_at__gte=window_start
                ).count()
                hits = LocationQueryCache.objects.filter(
                    access_count__gt=1
                ).count()
                # Anything in the window that wasn't a repeat access counts
                # as both a cache miss and an upstream API call.
                misses = max(0, recent - hits)
                stats = {
                    "timeWindow": "24h",
                    "totalSearches": recent,
                    "cacheHits": hits,
                    "cacheMisses": misses,
                    "apiCalls": misses,
                    "errors": 0,
                    "cacheHitRate": (
                        round(hits / total * 100, 2) if total > 0 else 0
                    ),
                    "avgResponseTime": 0,  # Would need request logging
                    "totalCachedQueries": total,
                    "totalCacheAccesses": hits,
                    "hourlyData": [],
                    "apiCallsSaved": hits,
                    "estimatedCost": {
                        "callsMade": misses,
                        "callsSaved": hits,
                        "savings": f"${hits * 0.001:.2f}",  # Estimated
                    },
                }
            else:
                # No cache model installed: report an all-zero payload.
                stats = {
                    "timeWindow": "24h",
                    "totalSearches": 0,
                    "cacheHits": 0,
                    "cacheMisses": 0,
                    "apiCalls": 0,
                    "errors": 0,
                    "cacheHitRate": 0,
                    "avgResponseTime": 0,
                    "totalCachedQueries": 0,
                    "totalCacheAccesses": 0,
                    "hourlyData": [],
                    "apiCallsSaved": 0,
                    "estimatedCost": {
                        "callsMade": 0,
                        "callsSaved": 0,
                        "savings": "$0.00",
                    },
                }

            # Memoize for five minutes before recomputing.
            cache.set("osm_usage_stats", stats, 300)
            return Response(stats)
        except Exception as e:
            capture_and_log(e, "OSM usage stats - error", source="api")
            return Response(
                {"detail": "Failed to fetch OSM usage stats"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class RateLimitMetricsView(APIView):
    """
    POST /admin/rate-limit-metrics/

    Return rate limiting metrics for the admin dashboard. The payload's
    ``action`` field selects which metric set is returned.
    """

    permission_classes = [IsAdminUser]

    # Actions that have no backing log table yet; each returns an empty list.
    _EMPTY_LIST_ACTIONS = ("recent", "function", "user", "ip")

    def post(self, request):
        """Return rate limit metrics based on action."""
        try:
            payload = request.data
            action = payload.get("action", "stats")
            time_window = payload.get("timeWindow", 60000)  # ms
            limit = payload.get("limit", 100)  # currently unused

            # Convert the window from milliseconds to seconds; kept for the
            # future log-table query even though nothing consumes it yet.
            window_seconds = time_window / 1000 if time_window else 60
            cutoff = timezone.now() - timedelta(seconds=window_seconds)

            if action == "stats":
                # Aggregate statistics. A real implementation would query a
                # rate limit log table; until then everything is zeroed.
                aggregate = {
                    "totalRequests": 0,
                    "allowedRequests": 0,
                    "blockedRequests": 0,
                    "blockRate": 0,
                    "uniqueIPs": 0,
                    "uniqueUsers": 0,
                    "topBlockedIPs": [],
                    "topBlockedUsers": [],
                    "tierDistribution": {
                        "anonymous": 0,
                        "authenticated": 0,
                        "premium": 0,
                        "admin": 0,
                    },
                }
                return Response(aggregate)

            if action in self._EMPTY_LIST_ACTIONS:
                # Per-event / per-target metrics are not logged yet.
                return Response([])

            return Response(
                {"detail": f"Unknown action: {action}"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_and_log(e, "Rate limit metrics - error", source="api")
            return Response(
                {"detail": "Failed to fetch rate limit metrics"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class DatabaseManagerView(APIView):
    """
    POST /admin/database-manager/
    Handle admin CRUD operations for entities.

    The payload selects an ``operation`` (create/update/delete/restore,
    their bulk variants, or get-dependencies) and an ``entityType`` that is
    resolved to a Django model via ``ENTITY_MODEL_MAP``.
    """
    permission_classes = [IsAdminUser]
    # Map entity types to Django models as (app_label, model_name) pairs.
    ENTITY_MODEL_MAP = {
        "parks": ("parks", "Park"),
        "rides": ("rides", "Ride"),
        "companies": ("companies", "Company"),
        "reviews": ("reviews", "Review"),
        "blog_posts": ("blog", "BlogPost"),
        "photos": ("media", "Photo"),
        "lists": ("lists", "List"),
        "profiles": ("accounts", "UserProfile"),
    }
    def post(self, request):
        """Dispatch to appropriate handler based on operation."""
        # Bind before the try so the except handler can always format its
        # log message. Previously, a failure before the first assignment
        # raised NameError inside the handler and masked the real error.
        operation = None
        try:
            operation = request.data.get("operation")
            entity_type = request.data.get("entityType")
            entity_id = request.data.get("entityId")
            data = request.data.get("data", {})
            change_reason = request.data.get("changeReason", "Admin operation")
            if not operation:
                return Response(
                    {"detail": "operation is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            if not entity_type:
                return Response(
                    {"detail": "entityType is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Resolve the entity type to a concrete model class.
            model_info = self.ENTITY_MODEL_MAP.get(entity_type)
            if not model_info:
                return Response(
                    {"detail": f"Unknown entity type: {entity_type}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            try:
                Model = apps.get_model(model_info[0], model_info[1])
            except LookupError:
                return Response(
                    {"detail": f"Model not found for {entity_type}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Dispatch table: operation name -> handler method.
            handlers = {
                "create": self._handle_create,
                "update": self._handle_update,
                "delete": self._handle_delete,
                "restore": self._handle_restore,
                "permanent-delete": self._handle_permanent_delete,
                "bulk-update-status": self._handle_bulk_update_status,
                "bulk-delete": self._handle_bulk_delete,
                "bulk-restore": self._handle_bulk_restore,
                "bulk-permanent-delete": self._handle_bulk_permanent_delete,
                "get-dependencies": self._handle_get_dependencies,
            }
            handler = handlers.get(operation)
            if not handler:
                return Response(
                    {"detail": f"Unknown operation: {operation}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            return handler(Model, entity_type, entity_id, data, change_reason, request)
        except Exception as e:
            capture_and_log(
                e, f"Database manager - {operation} error", source="api"
            )
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
    def _handle_create(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Create a new entity from the raw ``data`` mapping."""
        with transaction.atomic():
            instance = Model.objects.create(**data)
        return Response(
            {
                "success": True,
                "data": {"id": str(instance.pk)},
                "message": f"{entity_type} created successfully",
            }
        )
    def _handle_update(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Update an existing entity; unknown field names are ignored."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for update"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            try:
                instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            # Only set attributes that exist on the model instance.
            for key, value in data.items():
                if hasattr(instance, key):
                    setattr(instance, key, value)
            instance.save()
        return Response(
            {
                "success": True,
                "data": {"id": str(instance.pk)},
                "message": f"{entity_type} updated successfully",
            }
        )
    def _handle_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Soft delete an entity, falling back to a hard delete."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for delete"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            try:
                instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            # Prefer soft delete, probing the common field conventions in
            # order: status flag, boolean flag, timestamp.
            if hasattr(instance, "status"):
                instance.status = "deleted"
                instance.save()
            elif hasattr(instance, "is_deleted"):
                instance.is_deleted = True
                instance.save()
            elif hasattr(instance, "deleted_at"):
                instance.deleted_at = timezone.now()
                instance.save()
            else:
                # Hard delete if no soft delete field exists.
                instance.delete()
        return Response(
            {
                "success": True,
                "data": {"id": str(entity_id)},
                "message": f"{entity_type} deleted successfully",
            }
        )
    def _handle_restore(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Restore a soft-deleted entity to ``data['status']`` (default draft)."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for restore"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        new_status = data.get("status", "draft")
        with transaction.atomic():
            try:
                # Prefer all_objects so soft-deleted rows are reachable; fall
                # back to the default manager when the model has no such
                # manager. DoesNotExist from either lookup maps to 404
                # (previously the fallback's DoesNotExist escaped to the
                # generic 500 handler because sibling except clauses do not
                # catch exceptions raised inside each other).
                try:
                    instance = Model.all_objects.get(pk=entity_id)
                except AttributeError:
                    instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            if hasattr(instance, "status"):
                instance.status = new_status
                instance.save()
            elif hasattr(instance, "is_deleted"):
                instance.is_deleted = False
                instance.save()
            elif hasattr(instance, "deleted_at"):
                instance.deleted_at = None
                instance.save()
        return Response(
            {
                "success": True,
                "data": {"id": str(entity_id)},
                "message": f"{entity_type} restored successfully",
            }
        )
    def _handle_permanent_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Permanently delete an entity, including soft-deleted ones."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for permanent-delete"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            try:
                # Reach soft-deleted rows via all_objects when available.
                try:
                    instance = Model.all_objects.get(pk=entity_id)
                except AttributeError:
                    instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            instance.delete()
        return Response(
            {
                "success": True,
                "data": {"id": str(entity_id)},
                "message": f"{entity_type} permanently deleted",
            }
        )
    def _handle_bulk_update_status(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk update the ``status`` field of multiple entities."""
        entity_ids = data.get("entityIds", [])
        new_status = data.get("status")
        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        if not new_status:
            return Response(
                {"detail": "status is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            updated = Model.objects.filter(pk__in=entity_ids).update(status=new_status)
        return Response(
            {
                "success": True,
                "bulk": {
                    "successCount": updated,
                    "failedCount": len(entity_ids) - updated,
                },
                "message": f"Updated {updated} {entity_type}",
            }
        )
    def _handle_bulk_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk soft delete multiple entities via status or is_deleted."""
        entity_ids = data.get("entityIds", [])
        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            if hasattr(Model, "status"):
                updated = Model.objects.filter(pk__in=entity_ids).update(
                    status="deleted"
                )
            else:
                updated = Model.objects.filter(pk__in=entity_ids).update(
                    is_deleted=True
                )
        return Response(
            {
                "success": True,
                "bulk": {
                    "successCount": updated,
                    "failedCount": len(entity_ids) - updated,
                },
                "message": f"Deleted {updated} {entity_type}",
            }
        )
    def _handle_bulk_restore(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk restore soft-deleted entities to a given status."""
        entity_ids = data.get("entityIds", [])
        new_status = data.get("status", "draft")
        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            try:
                updated = Model.all_objects.filter(pk__in=entity_ids).update(
                    status=new_status
                )
            except AttributeError:
                updated = Model.objects.filter(pk__in=entity_ids).update(
                    status=new_status
                )
        return Response(
            {
                "success": True,
                "bulk": {
                    "successCount": updated,
                    "failedCount": len(entity_ids) - updated,
                },
                "message": f"Restored {updated} {entity_type}",
            }
        )
    def _handle_bulk_permanent_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk permanently delete entities, including soft-deleted ones."""
        entity_ids = data.get("entityIds", [])
        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        with transaction.atomic():
            try:
                deleted, _ = Model.all_objects.filter(pk__in=entity_ids).delete()
            except AttributeError:
                deleted, _ = Model.objects.filter(pk__in=entity_ids).delete()
        return Response(
            {
                "success": True,
                "bulk": {
                    "successCount": deleted,
                    "failedCount": len(entity_ids) - deleted,
                },
                "message": f"Permanently deleted {deleted} {entity_type}",
            }
        )
    def _handle_get_dependencies(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """List counts of related objects that reference an entity."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            instance = Model.objects.get(pk=entity_id)
        except Model.DoesNotExist:
            return Response(
                {"detail": f"{entity_type} not found"},
                status=status.HTTP_404_NOT_FOUND,
            )
        # Walk all reverse/many relations and count non-empty ones.
        dependencies = []
        for rel in instance._meta.get_fields():
            if rel.one_to_many or rel.one_to_one or rel.many_to_many:
                try:
                    related_name = rel.get_accessor_name()
                    related_manager = getattr(instance, related_name, None)
                    if related_manager and hasattr(related_manager, "count"):
                        count = related_manager.count()
                        if count > 0:
                            dependencies.append(
                                {
                                    "type": rel.related_model._meta.verbose_name_plural,
                                    "count": count,
                                }
                            )
                except Exception:
                    # Best-effort: skip relations that cannot be counted.
                    pass
        return Response(
            {
                "success": True,
                "dependencies": dependencies,
                "hasDependencies": len(dependencies) > 0,
            }
        )
class CeleryTaskStatusView(APIView):
    """
    GET /admin/tasks/status/

    Read-only view over the known Celery Beat tasks. ``?task=<name>``
    narrows the response to a single task.
    """

    permission_classes = [IsAdminUser]

    # List of known scheduled tasks and their human-readable schedules.
    SCHEDULED_TASKS = [
        {
            "name": "process_scheduled_deletions",
            "display_name": "Process Scheduled Deletions",
            "schedule": "daily at midnight",
        },
        {
            "name": "process_closing_entities",
            "display_name": "Process Closing Entities",
            "schedule": "daily at midnight",
        },
        {
            "name": "process_expired_bans",
            "display_name": "Process Expired Bans",
            "schedule": "every 15 minutes",
        },
        {
            "name": "cleanup_orphaned_images",
            "display_name": "Cleanup Orphaned Images",
            "schedule": "weekly on Sunday",
        },
        {
            "name": "cleanup_old_versions",
            "display_name": "Cleanup Old Versions",
            "schedule": "weekly on Sunday",
        },
        {
            "name": "data_retention_cleanup",
            "display_name": "Data Retention Cleanup",
            "schedule": "daily at 3 AM",
        },
    ]

    def get(self, request):
        """Return the status of all (or one) scheduled task."""
        try:
            requested = request.query_params.get("task")
            statuses = []
            for info in self.SCHEDULED_TASKS:
                # Tasks record their last run in the cache under this key.
                last_run = cache.get(f"task_last_run_{info['name']}", {})
                entry = {
                    "name": info["name"],
                    "displayName": info["display_name"],
                    "schedule": info["schedule"],
                    "lastRun": last_run.get("timestamp"),
                    "lastResult": last_run.get("result", "unknown"),
                    "lastDuration": last_run.get("duration"),
                    "status": "scheduled",
                }
                # Short-circuit when the caller asked for this exact task.
                if requested and requested == info["name"]:
                    return Response(entry)
                statuses.append(entry)
            if requested:
                # A task filter was given but matched nothing.
                return Response(
                    {"detail": f"Unknown task: {requested}"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            return Response(
                {
                    "tasks": statuses,
                    "totalTasks": len(statuses),
                }
            )
        except Exception as e:
            capture_and_log(e, "Celery task status - error", source="api")
            return Response(
                {"detail": "Failed to fetch task status"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -38,8 +38,7 @@ urlpatterns = [
path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
# JWT token management
path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
# Social authentication endpoints (dj-rest-auth)
path("social/", include("dj_rest_auth.registration.urls")),
# Note: dj_rest_auth removed - using custom social auth views below
path(
"password/reset/",
PasswordResetAPIView.as_view(),

View File

@@ -1,7 +1,11 @@
"""URL routes for the images API."""
from django.urls import path

from . import views

app_name = "images"

urlpatterns = [
    # NOTE(review): a stale duplicate "generate-upload-url/" entry (with the
    # old hyphenated reverse name and a direct class import) was removed.
    # Django resolves the first matching pattern, so the duplicate shadowed
    # this route and left its reverse() name dead.
    path(
        "generate-upload-url/",
        views.GenerateUploadURLView.as_view(),
        name="generate_upload_url",
    ),
    path("delete/", views.DeleteImageView.as_view(), name="delete_image"),
    path("og-image/", views.GenerateOGImageView.as_view(), name="og_image"),
]

View File

@@ -1,6 +1,7 @@
import logging
import requests
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
@@ -30,3 +31,109 @@ class GenerateUploadURLView(APIView):
except Exception as e:
capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
class DeleteImageView(APIView):
    """
    POST /images/delete/

    Remove an image from Cloudflare Images by its ``image_id``.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        image_id = request.data.get("image_id")
        if not image_id:
            return Response(
                {"detail": "image_id is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            # Pull the Cloudflare credentials from settings.
            account = getattr(settings, "CLOUDFLARE_IMAGES_ACCOUNT_ID", None)
            token = getattr(settings, "CLOUDFLARE_IMAGES_API_TOKEN", None)
            if not account or not token:
                # Without credentials we report success so development
                # environments keep working.
                logger.warning("Cloudflare Images not configured, mock deleting image")
                return Response({"success": True, "mock": True})

            endpoint = (
                f"https://api.cloudflare.com/client/v4/accounts/{account}"
                f"/images/v1/{image_id}"
            )
            cf_response = requests.delete(
                endpoint,
                headers={"Authorization": f"Bearer {token}"},
                timeout=10,
            )
            # 404 means the image is already gone, which counts as success.
            if cf_response.status_code in (200, 404):
                return Response({"success": True})
            logger.error(f"Cloudflare delete failed: {cf_response.text}")
            return Response(
                {"detail": "Failed to delete image"},
                status=status.HTTP_502_BAD_GATEWAY,
            )
        except requests.RequestException as e:
            capture_and_log(e, "Delete image - Cloudflare API error", source="api")
            return Response(
                {"detail": "Failed to delete image"},
                status=status.HTTP_502_BAD_GATEWAY,
            )
        except Exception as e:
            capture_and_log(e, "Delete image - unexpected error", source="api")
            return Response(
                {"detail": "An unexpected error occurred"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class GenerateOGImageView(APIView):
    """
    POST /images/og-image/
    Generate an Open Graph image URL for social sharing.

    Public endpoint: OG images are fetched by social-network crawlers that
    cannot authenticate.
    """
    permission_classes = []  # Public endpoint

    def post(self, request):
        title = request.data.get("title", "")
        description = request.data.get("description", "")
        entity_type = request.data.get("entity_type", "")
        image_url = request.data.get("image_url", "")
        if not title:
            return Response(
                {"detail": "title is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            from urllib.parse import quote

            # This is a placeholder for OG image generation.
            # In production, you would:
            # 1. Use an image generation service (Cloudinary, imgix, etc.)
            # 2. Or use a headless browser service (Puppeteer, Playwright)
            # 3. Or use a dedicated OG image service
            # For now, return a template URL or placeholder.
            base_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")
            # URL-encode the title: spaces, '&' and '#' in a raw title would
            # otherwise corrupt the generated query string.
            encoded_title = quote(title[:100])
            og_image_url = f"{base_url}/api/v1/images/og-preview/?title={encoded_title}"
            return Response({
                "success": True,
                "og_image_url": og_image_url,
                "title": title,
                "description": description[:200] if description else "",
                "entity_type": entity_type,
                "note": "Placeholder - configure OG image service for production",
            })
        except Exception as e:
            capture_and_log(e, "Generate OG image", source="api")
            # Do not echo raw exception text to this public endpoint.
            return Response(
                {"detail": "An unexpected error occurred"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -30,4 +30,7 @@ urlpatterns = [
views.MapCacheAPIView.as_view(),
name="map_cache_invalidate",
),
# Location detection and enrichment
path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
]

View File

@@ -999,3 +999,245 @@ MapSearchView = MapSearchAPIView
# Backwards-compatible aliases — presumably kept so existing imports of the
# short *View names keep resolving after the rename to *APIView; TODO confirm
# against callers before removing.
MapBoundsView = MapBoundsAPIView
MapStatsView = MapStatsAPIView
MapCacheView = MapCacheAPIView
# =============================================================================
# Location Detection / Enrichment Endpoints
# =============================================================================
@extend_schema_view(
    post=extend_schema(
        summary="Detect user location from IP",
        description="Detect the user's approximate location based on their IP address.",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "ip_address": {
                        "type": "string",
                        "description": "IP address to geolocate. If not provided, uses request IP.",
                    }
                },
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number"},
                    "longitude": {"type": "number"},
                    "city": {"type": "string"},
                    "region": {"type": "string"},
                    "country": {"type": "string"},
                    "timezone": {"type": "string"},
                },
            }
        },
        tags=["Maps"],
    ),
)
class DetectLocationView(APIView):
    """
    POST /maps/detect-location/
    Detect user's location based on IP address using a geolocation service.
    """
    permission_classes = [AllowAny]

    @staticmethod
    def _is_non_routable(ip_address: str) -> bool:
        """Return True for addresses a public geolocation API cannot resolve.

        Covers empty/malformed values, "localhost", and every loopback,
        private (10/8, 172.16/12, 192.168/16) and link-local range for both
        IPv4 and IPv6. The previous check only recognized 127.0.0.1, ::1,
        "localhost" and 192.168.*, so other internal addresses were sent to
        the external service and failed.
        """
        import ipaddress
        if not ip_address or ip_address == "localhost":
            return True
        try:
            parsed = ipaddress.ip_address(ip_address)
        except ValueError:
            # Malformed input: treat as non-routable rather than forwarding
            # junk to the geolocation API.
            return True
        return parsed.is_private or parsed.is_loopback or parsed.is_link_local

    def post(self, request):
        try:
            # Get IP address from the payload, falling back to the request.
            ip_address = request.data.get("ip_address")
            if not ip_address:
                x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
                if x_forwarded_for:
                    # First hop in the X-Forwarded-For chain is the client.
                    ip_address = x_forwarded_for.split(",")[0].strip()
                else:
                    ip_address = request.META.get("REMOTE_ADDR", "")
            # Development / internal traffic gets a fixed fallback location.
            if self._is_non_routable(ip_address):
                return Response(
                    {
                        "latitude": 40.7128,
                        "longitude": -74.006,
                        "city": "New York",
                        "region": "New York",
                        "country": "US",
                        "country_name": "United States",
                        "timezone": "America/New_York",
                        "detected": False,
                        "reason": "localhost_fallback",
                    }
                )
            # Use IP geolocation service (ipapi.co, ipinfo.io, etc.)
            import httpx
            try:
                response = httpx.get(
                    f"https://ipapi.co/{ip_address}/json/",
                    timeout=5.0,
                    headers={"User-Agent": "ThrillWiki/1.0"},
                )
                if response.status_code == 200:
                    data = response.json()
                    return Response(
                        {
                            "latitude": data.get("latitude"),
                            "longitude": data.get("longitude"),
                            "city": data.get("city", ""),
                            "region": data.get("region", ""),
                            "country": data.get("country_code", ""),
                            "country_name": data.get("country_name", ""),
                            "timezone": data.get("timezone", ""),
                            "detected": True,
                        }
                    )
            except httpx.HTTPError as e:
                logger.warning(f"IP geolocation failed: {e}")
            # Fallback response when the upstream lookup fails or non-200s.
            return Response(
                {
                    "latitude": None,
                    "longitude": None,
                    "city": "",
                    "region": "",
                    "country": "",
                    "country_name": "",
                    "timezone": "",
                    "detected": False,
                    "reason": "geolocation_failed",
                }
            )
        except Exception as e:
            capture_and_log(e, "Detect location from IP", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
@extend_schema_view(
    post=extend_schema(
        summary="Enrich location with geocoding",
        description="Enrich location data with reverse geocoding (coordinates to address).",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number", "required": True},
                    "longitude": {"type": "number", "required": True},
                },
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "formatted_address": {"type": "string"},
                    "street_address": {"type": "string"},
                    "city": {"type": "string"},
                    "state": {"type": "string"},
                    "postal_code": {"type": "string"},
                    "country": {"type": "string"},
                },
            }
        },
        tags=["Maps"],
    ),
)
class EnrichLocationView(APIView):
    """
    POST /maps/enrich-location/
    Enrich location with reverse geocoding (coordinates to address).

    Accepts ``latitude``/``longitude`` in the JSON body and returns address
    components from OpenStreetMap's Nominatim service. On upstream failure
    (or any non-200 response) it degrades to an empty payload with
    ``enriched: False`` instead of erroring.
    """
    permission_classes = [AllowAny]
    def post(self, request):
        try:
            latitude = request.data.get("latitude")
            longitude = request.data.get("longitude")
            # Both coordinates are mandatory; 0 is a valid value, so check
            # against None explicitly rather than truthiness.
            if latitude is None or longitude is None:
                return Response(
                    {"detail": "latitude and longitude are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Coerce to float; clients may send strings.
            try:
                lat = float(latitude)
                lng = float(longitude)
            except (TypeError, ValueError):
                return Response(
                    {"detail": "Invalid latitude or longitude"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Use reverse geocoding service
            import httpx
            try:
                # Using Nominatim (OpenStreetMap) - free, no API key required
                # NOTE(review): Nominatim's usage policy requires a valid
                # User-Agent (sent below) and limits request rates — confirm
                # call volume stays within policy.
                response = httpx.get(
                    "https://nominatim.openstreetmap.org/reverse",
                    params={
                        "lat": lat,
                        "lon": lng,
                        "format": "json",
                        "addressdetails": 1,
                    },
                    timeout=5.0,
                    headers={"User-Agent": "ThrillWiki/1.0"},
                )
                if response.status_code == 200:
                    data = response.json()
                    address = data.get("address", {})
                    return Response(
                        {
                            "formatted_address": data.get("display_name", ""),
                            "street_address": address.get("road", ""),
                            "house_number": address.get("house_number", ""),
                            # Nominatim reports the locality under different
                            # keys depending on settlement size.
                            "city": (
                                address.get("city")
                                or address.get("town")
                                or address.get("village")
                                or ""
                            ),
                            "state": address.get("state", ""),
                            "postal_code": address.get("postcode", ""),
                            "country": address.get("country", ""),
                            "country_code": address.get("country_code", "").upper(),
                            "enriched": True,
                        }
                    )
            except httpx.HTTPError as e:
                logger.warning(f"Reverse geocoding failed: {e}")
            # Fallback response: upstream failed or returned non-200.
            return Response(
                {
                    "formatted_address": "",
                    "street_address": "",
                    "city": "",
                    "state": "",
                    "postal_code": "",
                    "country": "",
                    "country_code": "",
                    "enriched": False,
                    "reason": "geocoding_failed",
                }
            )
        except Exception as e:
            capture_and_log(e, "Enrich location", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -106,8 +106,11 @@ urlpatterns = [
path("media/", include("apps.media.urls")),
path("blog/", include("apps.blog.urls")),
path("support/", include("apps.support.urls")),
path("notifications/", include("apps.notifications.urls")),
path("errors/", include("apps.core.urls.errors")),
path("images/", include("apps.api.v1.images.urls")),
# Admin dashboard API endpoints
path("admin/", include("apps.api.v1.admin.urls")),
# Cloudflare Images Toolkit API endpoints
path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
# Include router URLs (for rankings and any other router-registered endpoints)

View File

@@ -3,3 +3,22 @@ Core tasks package for ThrillWiki.
This package contains all Celery tasks for the core application.
"""
# Re-export the scheduled maintenance tasks so callers (and Celery's task
# autodiscovery) can import them directly from ``apps.core.tasks``.
from apps.core.tasks.scheduled import (
    cleanup_old_versions,
    cleanup_orphaned_images,
    data_retention_cleanup,
    process_closing_entities,
    process_expired_bans,
    process_scheduled_deletions,
)

# Public API of this package.
__all__ = [
    "process_scheduled_deletions",
    "process_closing_entities",
    "process_expired_bans",
    "cleanup_orphaned_images",
    "cleanup_old_versions",
    "data_retention_cleanup",
]

View File

@@ -0,0 +1,417 @@
"""
Scheduled Celery tasks for ThrillWiki.
These tasks are run on a schedule via Celery Beat for maintenance operations.
"""
import logging
from datetime import timedelta
from celery import shared_task
from django.contrib.auth import get_user_model
from django.db import transaction
from django.utils import timezone
from apps.core.utils import capture_and_log
logger = logging.getLogger(__name__)
User = get_user_model()
@shared_task(name="core.process_scheduled_deletions")
def process_scheduled_deletions() -> dict:
    """
    Process accounts whose deletion grace period has expired.

    Pending ``AccountDeletionRequest`` rows whose scheduled time has passed
    are anonymized (submissions are kept) and marked completed.

    Returns:
        dict: Summary with counts of processed, succeeded, and failed
        deletions plus up to ten failure messages and a timestamp.
    """
    from apps.accounts.models import AccountDeletionRequest

    logger.info("Starting scheduled account deletions processing")
    now = timezone.now()
    counts = {"processed": 0, "succeeded": 0, "failed": 0}
    failures = []
    try:
        # Deletion requests whose grace period has elapsed.
        due_requests = AccountDeletionRequest.objects.filter(
            status="pending",
            scheduled_deletion_at__lte=now,
        ).select_related("user")
        for deletion_request in due_requests:
            counts["processed"] += 1
            try:
                with transaction.atomic():
                    account = deletion_request.user
                    original_username = account.username
                    # Flag the request as in-flight before touching the user.
                    deletion_request.status = "processing"
                    deletion_request.save()
                    # Anonymize rather than delete, so the user's
                    # submissions remain attached to a stub account.
                    account.username = f"deleted_{account.id}"
                    account.email = f"deleted_{account.id}@deleted.thrillwiki.com"
                    account.first_name = ""
                    account.last_name = ""
                    account.is_active = False
                    account.save()
                    # Record completion on the request itself.
                    deletion_request.status = "completed"
                    deletion_request.completed_at = timezone.now()
                    deletion_request.save()
                    counts["succeeded"] += 1
                    logger.info(f"Successfully processed deletion for user {original_username}")
            except Exception as e:
                counts["failed"] += 1
                failures.append(f"User {deletion_request.user_id}: {str(e)}")
                capture_and_log(e, f"Process scheduled deletion for user {deletion_request.user_id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process scheduled deletions", source="task")
    result = {
        "processed": counts["processed"],
        "succeeded": counts["succeeded"],
        "failed": counts["failed"],
        "failures": failures[:10],  # Limit failure list
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(
        f"Completed scheduled deletions: {counts['processed']} processed, {counts['succeeded']} succeeded, {counts['failed']} failed"
    )
    return result
@shared_task(name="core.process_closing_entities")
def process_closing_entities() -> dict:
    """
    Transition parks and rides whose closing date has arrived.

    Entities in CLOSING status with a ``closing_date`` on or before today are
    moved to their ``post_closing_status`` (falling back to CLOSED when that
    attribute is absent or blank).

    Returns:
        dict: Per-entity-type counters, e.g.
        ``{"parks": {"processed": ..., "succeeded": ..., "failed": ...}, "rides": {...}}``.
    """
    from apps.parks.models import Park
    from apps.rides.models import Ride

    logger.info("Starting closing entities processing")
    today = timezone.now().date()
    results = {
        kind: {"processed": 0, "succeeded": 0, "failed": 0}
        for kind in ("parks", "rides")
    }

    # Automated transitions are attributed to the dedicated "system" account,
    # falling back to any staff member when it does not exist.
    try:
        system_user = User.objects.get(username="system")
    except User.DoesNotExist:
        system_user = User.objects.filter(is_staff=True).first()

    # --- Parks ---
    try:
        overdue_parks = Park.objects.filter(status="CLOSING", closing_date__lte=today)
        for park in overdue_parks:
            results["parks"]["processed"] += 1
            try:
                with transaction.atomic():
                    # Fall back to CLOSED when post_closing_status is absent or blank.
                    park.status = getattr(park, "post_closing_status", "CLOSED") or "CLOSED"
                    park.save(update_fields=["status", "updated_at"])
                results["parks"]["succeeded"] += 1
                logger.info(f"Transitioned park {park.name} to {park.status}")
            except Exception as e:
                results["parks"]["failed"] += 1
                capture_and_log(e, f"Process closing park {park.id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process closing parks", source="task")

    # --- Rides (rides.check_overdue_closings also covers these; kept for completeness) ---
    try:
        overdue_rides = Ride.objects.filter(status="CLOSING", closing_date__lte=today)
        for ride in overdue_rides:
            results["rides"]["processed"] += 1
            try:
                with transaction.atomic():
                    if hasattr(ride, "apply_post_closing_status") and system_user:
                        # Prefer the model's own transition helper when available.
                        ride.apply_post_closing_status(user=system_user)
                    else:
                        ride.status = getattr(ride, "post_closing_status", "CLOSED") or "CLOSED"
                        ride.save(update_fields=["status", "updated_at"])
                results["rides"]["succeeded"] += 1
                logger.info(f"Transitioned ride {ride.name} to {ride.status}")
            except Exception as e:
                results["rides"]["failed"] += 1
                capture_and_log(e, f"Process closing ride {ride.id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process closing rides", source="task")

    logger.info(f"Completed closing entities: Parks {results['parks']}, Rides {results['rides']}")
    return results
@shared_task(name="core.process_expired_bans")
def process_expired_bans() -> dict:
    """
    Lift user bans whose expiry time has passed.

    Active bans with ``expires_at`` set and in the past are deactivated; the
    banned user account is reactivated when no other active bans remain.

    Returns:
        dict: ``processed``/``succeeded``/``failed`` counts plus a timestamp,
        or ``{"skipped": True, "reason": ...}`` when the UserBan model is
        unavailable.
    """
    logger.info("Starting expired bans processing")
    now = timezone.now()
    processed = 0
    succeeded = 0
    failed = 0
    try:
        # Imported inside the try block so that a missing model (the handler
        # below explicitly supports deployments where UserBan does not exist
        # yet) is caught and reported as a skip instead of crashing the task.
        # Previously this import was above the try, so that fallback branch
        # was unreachable for import errors.
        from apps.accounts.models import UserBan

        expired_bans = UserBan.objects.filter(
            is_active=True,
            expires_at__isnull=False,
            expires_at__lte=now,
        ).select_related("user")
        for ban in expired_bans:
            processed += 1
            try:
                with transaction.atomic():
                    ban.is_active = False
                    ban.save(update_fields=["is_active", "updated_at"])
                    # Reactivate user if this was their only active ban
                    active_bans = UserBan.objects.filter(user=ban.user, is_active=True).count()
                    if active_bans == 0 and not ban.user.is_active:
                        ban.user.is_active = True
                        ban.user.save(update_fields=["is_active"])
                succeeded += 1
                logger.info(f"Lifted expired ban for user {ban.user.username}")
            except Exception as e:
                failed += 1
                capture_and_log(e, f"Process expired ban {ban.id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process expired bans", source="task")
        # Model may not exist yet
        if "UserBan" in str(e):
            logger.info("UserBan model not found, skipping expired bans processing")
            return {"skipped": True, "reason": "UserBan model not found"}
    result = {
        "processed": processed,
        "succeeded": succeeded,
        "failed": failed,
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(f"Completed expired bans: {processed} processed, {succeeded} succeeded, {failed} failed")
    return result
@shared_task(name="core.cleanup_orphaned_images")
def cleanup_orphaned_images() -> dict:
    """
    Clean up orphaned images.

    Placeholder pass: images referenced by no entity and older than the
    retention period would be deleted once an image-storage strategy is
    wired in. Currently performs no work and reports zeroed counters.

    Returns:
        dict: Zeroed counters, a timestamp, and an explanatory note.
    """
    logger.info("Starting orphaned images cleanup")
    # Intended future flow for Cloudflare Images:
    #   1. Query all images from Cloudflare
    #   2. Compare against images referenced in the database
    #   3. Delete orphaned images
    summary = {
        "processed": 0,
        "deleted": 0,
        "skipped": 0,
        "timestamp": timezone.now().isoformat(),
        "note": "Placeholder implementation - configure based on image storage",
    }
    logger.info("Completed orphaned images cleanup")
    return summary
@shared_task(name="core.cleanup_old_versions")
def cleanup_old_versions() -> dict:
    """
    Clean up old entity versions from pghistory.

    Keeps the most recent N versions per tracked object and deletes older
    rows to bound database growth; only rows older than ``MIN_AGE_DAYS``
    are ever eligible for deletion.

    Returns:
        dict: Summary with deleted count, cutoff date, retention limit,
        and completion timestamp.
    """
    logger.info("Starting old versions cleanup")
    # Configuration
    MAX_VERSIONS_PER_ENTITY = 50  # newest rows retained per pgh_obj_id
    MIN_AGE_DAYS = 90  # Only delete versions older than this
    deleted_count = 0
    cutoff_date = timezone.now() - timedelta(days=MIN_AGE_DAYS)
    try:
        # pghistory stores events in pgh_* tables
        # We need to identify which models have history tracking
        from django.db import connection
        with connection.cursor() as cursor:
            # Get list of pghistory event tables
            cursor.execute(
                """
                SELECT table_name
                FROM information_schema.tables
                WHERE table_schema = 'public'
                AND table_name LIKE 'pgh_%event'
                """
            )
            event_tables = [row[0] for row in cursor.fetchall()]
            for table_name in event_tables:
                try:
                    # Delete old versions beyond the retention limit
                    # This is a simplified approach - a more sophisticated one
                    # would keep the most recent N per entity
                    # NOTE: {table_name} is interpolated into the SQL, but it
                    # comes from information_schema (not user input), so
                    # injection is not a practical concern here.
                    cursor.execute(
                        f"""
                        DELETE FROM {table_name}
                        WHERE pgh_created_at < %s
                        AND pgh_id NOT IN (
                            SELECT pgh_id FROM (
                                SELECT pgh_id,
                                ROW_NUMBER() OVER (PARTITION BY pgh_obj_id ORDER BY pgh_created_at DESC) as rn
                                FROM {table_name}
                            ) ranked
                            WHERE rn <= %s
                        )
                        """,
                        [cutoff_date, MAX_VERSIONS_PER_ENTITY],
                    )
                    # rowcount reports rows affected by the DELETE just run.
                    deleted_in_table = cursor.rowcount
                    deleted_count += deleted_in_table
                    if deleted_in_table > 0:
                        logger.info(f"Deleted {deleted_in_table} old versions from {table_name}")
                except Exception as e:
                    # Best-effort per table: a failure on one pgh table must
                    # not abort cleanup of the remaining tables.
                    logger.warning(f"Error cleaning up {table_name}: {e}")
    except Exception as e:
        capture_and_log(e, "Cleanup old versions", source="task")
    result = {
        "deleted": deleted_count,
        "cutoff_date": cutoff_date.isoformat(),
        "max_versions_per_entity": MAX_VERSIONS_PER_ENTITY,
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(f"Completed old versions cleanup: {deleted_count} versions deleted")
    return result
@shared_task(name="core.data_retention_cleanup")
def data_retention_cleanup() -> dict:
    """
    Clean up data per retention policy (GDPR compliance).

    Runs three independent best-effort passes — expired sessions, stale JWT
    blacklist tokens, and old profile audit logs — each logging and
    continuing on failure.

    Returns:
        dict: Per-category deletion counts plus a completion timestamp.
    """
    logger.info("Starting data retention cleanup")
    counts = {"sessions": 0, "tokens": 0, "audit_logs": 0, "temp_data": 0}

    # Pass 1: expired Django sessions.
    try:
        from django.contrib.sessions.models import Session

        stale_sessions = Session.objects.filter(expire_date__lt=timezone.now())
        counts["sessions"] = stale_sessions.count()
        stale_sessions.delete()
        logger.info(f"Deleted {counts['sessions']} expired sessions")
    except Exception as e:
        logger.warning(f"Session cleanup error: {e}")

    # Pass 2: blacklist tokens that expired more than 30 days ago.
    try:
        from rest_framework_simplejwt.token_blacklist.models import OutstandingToken

        token_cutoff = timezone.now() - timedelta(days=30)
        stale_tokens = OutstandingToken.objects.filter(expires_at__lt=token_cutoff)
        counts["tokens"] = stale_tokens.count()
        stale_tokens.delete()
        logger.info(f"Deleted {counts['tokens']} expired tokens")
    except Exception as e:
        logger.warning(f"Token cleanup error: {e}")

    # Pass 3: profile audit logs older than one year.
    try:
        from apps.accounts.models import ProfileAuditLog

        audit_cutoff = timezone.now() - timedelta(days=365)
        stale_logs = ProfileAuditLog.objects.filter(created_at__lt=audit_cutoff)
        counts["audit_logs"] = stale_logs.count()
        stale_logs.delete()
        logger.info(f"Deleted {counts['audit_logs']} old audit logs")
    except Exception as e:
        logger.warning(f"Audit log cleanup error: {e}")

    result = {**counts, "timestamp": timezone.now().isoformat()}
    logger.info(f"Completed data retention cleanup: {result}")
    return result

View File

@@ -0,0 +1,10 @@
"""
Notifications app for ThrillWiki.
Provides notification management including:
- Subscriber management (Novu integration)
- Notification preferences
- Notification triggering and logging
"""
# NOTE: the former ``default_app_config`` assignment was removed. It was
# deprecated in Django 3.2 and removed in Django 4.1; Django auto-discovers
# ``apps.notifications.apps.NotificationsConfig``, so no declaration is needed.

View File

@@ -0,0 +1,38 @@
"""
Notifications admin configuration.
"""
from django.contrib import admin
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
@admin.register(Subscriber)
class SubscriberAdmin(admin.ModelAdmin):
    """Admin listing for notification subscriber profiles."""

    list_display = ["user", "novu_subscriber_id", "email", "created_at"]
    search_fields = ["user__username", "novu_subscriber_id", "email"]
    # Timestamps are auto-managed; never editable in the admin.
    readonly_fields = ["created_at", "updated_at"]
@admin.register(NotificationPreference)
class NotificationPreferenceAdmin(admin.ModelAdmin):
    """Admin listing for per-user notification preferences."""

    list_display = ["user", "is_opted_out", "updated_at"]
    list_filter = ["is_opted_out"]
    search_fields = ["user__username"]
    # Timestamps are auto-managed; never editable in the admin.
    readonly_fields = ["created_at", "updated_at"]
@admin.register(NotificationLog)
class NotificationLogAdmin(admin.ModelAdmin):
    """Read-mostly admin view of the notification delivery audit trail."""

    list_display = ["workflow_id", "user", "channel", "status", "created_at"]
    list_filter = ["status", "channel", "workflow_id"]
    search_fields = ["user__username", "workflow_id", "novu_transaction_id"]
    # Timestamps are auto-managed; never editable in the admin.
    readonly_fields = ["created_at", "updated_at"]
@admin.register(SystemAnnouncement)
class SystemAnnouncementAdmin(admin.ModelAdmin):
    """Admin management of site-wide announcements."""

    list_display = ["title", "severity", "is_active", "created_by", "created_at"]
    list_filter = ["severity", "is_active"]
    search_fields = ["title", "message"]
    # Creation timestamp is auto-set; not editable.
    readonly_fields = ["created_at"]

View File

@@ -0,0 +1,18 @@
"""
Notifications app configuration.
This app provides Django-native notification functionality for ThrillWiki,
including in-app notifications, email notifications, and user preferences.
"""
from django.apps import AppConfig
class NotificationsConfig(AppConfig):
    """Configuration for the ThrillWiki notifications app."""

    # 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = "django.db.models.BigAutoField"
    # Dotted path Django uses to locate the app package.
    name = "apps.notifications"
    # Human-readable name shown in the Django admin.
    verbose_name = "Notifications"

View File

@@ -0,0 +1,159 @@
# Generated by Django 5.2.9 on 2026-01-05 13:50
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial notifications schema: preferences, subscriber profiles,
    announcements, and the delivery audit log.

    Auto-generated by Django 5.2.9 — do not hand-edit; create a new
    migration for schema changes.
    """

    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name="NotificationPreference",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                (
                    "channel_preferences",
                    models.JSONField(
                        blank=True, default=dict, help_text="Preferences per channel (email, push, in_app, sms)"
                    ),
                ),
                (
                    "workflow_preferences",
                    models.JSONField(blank=True, default=dict, help_text="Preferences per notification workflow"),
                ),
                (
                    "frequency_settings",
                    models.JSONField(blank=True, default=dict, help_text="Digest and frequency settings"),
                ),
                ("is_opted_out", models.BooleanField(default=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="novu_notification_prefs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Preference",
                "verbose_name_plural": "Notification Preferences",
            },
        ),
        migrations.CreateModel(
            name="Subscriber",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("novu_subscriber_id", models.CharField(db_index=True, max_length=255, unique=True)),
                ("first_name", models.CharField(blank=True, max_length=100)),
                ("last_name", models.CharField(blank=True, max_length=100)),
                ("email", models.EmailField(blank=True, max_length=254)),
                ("phone", models.CharField(blank=True, max_length=20)),
                ("avatar", models.URLField(blank=True)),
                ("locale", models.CharField(default="en", max_length=10)),
                ("data", models.JSONField(blank=True, default=dict, help_text="Custom subscriber data")),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_subscriber",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Subscriber",
                "verbose_name_plural": "Notification Subscribers",
            },
        ),
        migrations.CreateModel(
            name="SystemAnnouncement",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("title", models.CharField(max_length=255)),
                ("message", models.TextField()),
                (
                    "severity",
                    models.CharField(
                        choices=[("info", "Information"), ("warning", "Warning"), ("critical", "Critical")],
                        default="info",
                        max_length=20,
                    ),
                ),
                ("action_url", models.URLField(blank=True)),
                ("is_active", models.BooleanField(default=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("expires_at", models.DateTimeField(blank=True, null=True)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="announcements_created",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "System Announcement",
                "verbose_name_plural": "System Announcements",
                "ordering": ["-created_at"],
            },
        ),
        migrations.CreateModel(
            name="NotificationLog",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("workflow_id", models.CharField(db_index=True, max_length=100)),
                ("notification_type", models.CharField(max_length=50)),
                ("channel", models.CharField(max_length=20)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("sent", "Sent"),
                            ("delivered", "Delivered"),
                            ("failed", "Failed"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                ("payload", models.JSONField(blank=True, default=dict)),
                ("error_message", models.TextField(blank=True)),
                ("novu_transaction_id", models.CharField(blank=True, db_index=True, max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="notification_logs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Log",
                "verbose_name_plural": "Notification Logs",
                "ordering": ["-created_at"],
                "indexes": [
                    models.Index(fields=["user", "-created_at"], name="notificatio_user_id_57d53d_idx"),
                    models.Index(fields=["workflow_id", "-created_at"], name="notificatio_workflo_e1a025_idx"),
                ],
            },
        ),
    ]

View File

@@ -0,0 +1,93 @@
# Generated by Django 5.2.9 on 2026-01-05 14:36
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the Django-native Notification model (generic FKs to actor,
    action object, and target) and marks the Novu subscriber id as legacy.

    Auto-generated by Django 5.2.9 — do not hand-edit; create a new
    migration for schema changes.
    """

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("notifications", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AlterField(
            model_name="subscriber",
            name="novu_subscriber_id",
            field=models.CharField(
                db_index=True, help_text="Legacy Novu subscriber ID (deprecated)", max_length=255, unique=True
            ),
        ),
        migrations.CreateModel(
            name="Notification",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("verb", models.CharField(max_length=255)),
                ("description", models.TextField(blank=True)),
                (
                    "level",
                    models.CharField(
                        choices=[("info", "Info"), ("success", "Success"), ("warning", "Warning"), ("error", "Error")],
                        default="info",
                        max_length=20,
                    ),
                ),
                ("action_object_id", models.PositiveIntegerField(blank=True, null=True)),
                ("target_id", models.PositiveIntegerField(blank=True, null=True)),
                ("data", models.JSONField(blank=True, default=dict)),
                ("unread", models.BooleanField(db_index=True, default=True)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("read_at", models.DateTimeField(blank=True, null=True)),
                (
                    "action_object_content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_action_objects",
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "actor",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="notifications_sent",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "recipient",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="in_app_notifications",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "target_content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_targets",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification",
                "verbose_name_plural": "Notifications",
                "ordering": ["-timestamp"],
                "indexes": [
                    models.Index(fields=["recipient", "-timestamp"], name="notificatio_recipie_b8fa2a_idx"),
                    models.Index(fields=["recipient", "unread"], name="notificatio_recipie_8bedf2_idx"),
                ],
            },
        ),
    ]

View File

@@ -0,0 +1,298 @@
"""
Notifications models.
Provides models for:
- Subscriber: User notification profile (legacy, kept for compatibility)
- NotificationPreference: User notification preferences
- NotificationLog: Audit trail of sent notifications
- SystemAnnouncement: System-wide announcements
Note: Now using django-notifications-hq for the core notification system.
Subscriber model is kept for backward compatibility but is optional.
"""
from django.conf import settings
from django.db import models
class Subscriber(models.Model):
    """
    User notification profile.

    Note: This model is kept for backward compatibility. The new
    django-notifications-hq system uses User directly for notifications.
    This can be used for storing additional notification-related user data.
    """
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="notification_subscriber",
    )
    # Legacy field - kept for migration compatibility
    novu_subscriber_id = models.CharField(
        max_length=255,
        unique=True,
        db_index=True,
        help_text="Legacy Novu subscriber ID (deprecated)"
    )
    # Optional contact/display details.
    first_name = models.CharField(max_length=100, blank=True)
    last_name = models.CharField(max_length=100, blank=True)
    email = models.EmailField(blank=True)
    phone = models.CharField(max_length=20, blank=True)
    avatar = models.URLField(blank=True)
    # Language code used for notification content; defaults to English.
    locale = models.CharField(max_length=10, default="en")
    data = models.JSONField(default=dict, blank=True, help_text="Custom subscriber data")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Subscriber"
        verbose_name_plural = "Notification Subscribers"

    def __str__(self):
        return f"Subscriber({self.user.username})"
class NotificationPreference(models.Model):
    """
    User notification preferences across channels and workflows.

    Preference payloads are free-form JSON; ``is_opted_out`` is a global
    opt-out flag.
    """
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="novu_notification_prefs",  # Renamed to avoid conflict with User.notification_preferences JSONField
    )
    # Channel preferences
    channel_preferences = models.JSONField(
        default=dict,
        blank=True,
        help_text="Preferences per channel (email, push, in_app, sms)",
    )
    # Workflow-specific preferences
    workflow_preferences = models.JSONField(
        default=dict,
        blank=True,
        help_text="Preferences per notification workflow",
    )
    # Frequency settings
    frequency_settings = models.JSONField(
        default=dict,
        blank=True,
        help_text="Digest and frequency settings",
    )
    # Global opt-out
    is_opted_out = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Preference"
        verbose_name_plural = "Notification Preferences"

    def __str__(self):
        return f"Preferences({self.user.username})"
class NotificationLog(models.Model):
    """
    Audit log of sent notifications.

    One row per delivery attempt; ``status`` tracks the lifecycle from
    pending through sent/delivered/failed.
    """
    class Status(models.TextChoices):
        PENDING = "pending", "Pending"
        SENT = "sent", "Sent"
        DELIVERED = "delivered", "Delivered"
        FAILED = "failed", "Failed"

    # Nullable with SET_NULL so log rows survive user deletion.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="notification_logs",
    )
    workflow_id = models.CharField(max_length=100, db_index=True)
    notification_type = models.CharField(max_length=50)
    channel = models.CharField(max_length=20)  # email, push, in_app, sms
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.PENDING,
    )
    payload = models.JSONField(default=dict, blank=True)
    error_message = models.TextField(blank=True)
    # Transaction id from the legacy Novu integration, when present.
    novu_transaction_id = models.CharField(max_length=255, blank=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Log"
        verbose_name_plural = "Notification Logs"
        ordering = ["-created_at"]
        indexes = [
            models.Index(fields=["user", "-created_at"]),
            models.Index(fields=["workflow_id", "-created_at"]),
        ]

    def __str__(self):
        return f"Log({self.workflow_id}, {self.status})"
class SystemAnnouncement(models.Model):
    """
    System-wide announcements.
    """
    class Severity(models.TextChoices):
        INFO = "info", "Information"
        WARNING = "warning", "Warning"
        CRITICAL = "critical", "Critical"

    title = models.CharField(max_length=255)
    message = models.TextField()
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        default=Severity.INFO,
    )
    # Optional call-to-action link shown with the announcement.
    action_url = models.URLField(blank=True)
    is_active = models.BooleanField(default=True)
    # SET_NULL keeps announcements when the authoring user is deleted.
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="announcements_created",
    )
    created_at = models.DateTimeField(auto_now_add=True)
    # Optional display cutoff; nothing at the model level deactivates
    # announcements automatically when this passes.
    expires_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        verbose_name = "System Announcement"
        verbose_name_plural = "System Announcements"
        ordering = ["-created_at"]

    def __str__(self):
        return f"{self.title} ({self.severity})"
class Notification(models.Model):
    """
    In-app notification model.

    This is a Django-native implementation for storing user notifications,
    supporting both in-app and email notification channels. Follows an
    actor/verb/action-object/target shape, with the two object references
    stored as manual generic foreign keys (ContentType + id pairs).
    """
    class Level(models.TextChoices):
        INFO = "info", "Info"
        SUCCESS = "success", "Success"
        WARNING = "warning", "Warning"
        ERROR = "error", "Error"

    # Who receives the notification
    recipient = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="in_app_notifications",  # Renamed to avoid clash with accounts.UserNotification
    )
    # Who triggered the notification (can be null for system notifications)
    actor = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="notifications_sent",
    )
    # What happened
    verb = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    level = models.CharField(
        max_length=20,
        choices=Level.choices,
        default=Level.INFO,
    )
    # The object that was acted upon (generic foreign key)
    action_object_content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="notification_action_objects",
    )
    action_object_id = models.PositiveIntegerField(blank=True, null=True)
    # The target of the action (generic foreign key)
    target_content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="notification_targets",
    )
    target_id = models.PositiveIntegerField(blank=True, null=True)
    # Additional data
    data = models.JSONField(default=dict, blank=True)
    # Status
    unread = models.BooleanField(default=True, db_index=True)
    # Timestamps
    timestamp = models.DateTimeField(auto_now_add=True)
    read_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        verbose_name = "Notification"
        verbose_name_plural = "Notifications"
        ordering = ["-timestamp"]
        indexes = [
            models.Index(fields=["recipient", "-timestamp"]),
            models.Index(fields=["recipient", "unread"]),
        ]

    def __str__(self):
        return f"{self.verb} -> {self.recipient}"

    def mark_as_read(self):
        """Mark this notification as read (idempotent: no-op when already read)."""
        if self.unread:
            from django.utils import timezone
            self.unread = False
            self.read_at = timezone.now()
            self.save(update_fields=["unread", "read_at"])

    @property
    def action_object(self):
        """Get the action object instance, or None when no reference is set.

        NOTE(review): get_object_for_this_type performs a .get(), so this
        presumably raises DoesNotExist when the referenced row was deleted —
        confirm callers handle that.
        """
        if self.action_object_content_type and self.action_object_id:
            return self.action_object_content_type.get_object_for_this_type(
                pk=self.action_object_id
            )
        return None

    @property
    def target(self):
        """Get the target instance, or None when no reference is set."""
        if self.target_content_type and self.target_id:
            return self.target_content_type.get_object_for_this_type(pk=self.target_id)
        return None
class NotificationManager(models.Manager):
    """Custom manager for Notification model."""

    def unread(self):
        """Return only unread notifications."""
        return self.filter(unread=True)

    def read(self):
        """Return only read notifications."""
        return self.filter(unread=False)

    def mark_all_as_read(self):
        """Mark all unread notifications as read via a bulk UPDATE.

        Unlike Notification.mark_as_read, this does not call save() per
        instance; returns the number of rows updated.
        """
        from django.utils import timezone
        return self.filter(unread=True).update(unread=False, read_at=timezone.now())
# Add custom manager to Notification model
# NOTE(review): assigning a Manager instance after class creation bypasses
# Django's normal manager registration (contribute_to_class / Meta manager
# bookkeeping); the model attribute is patched in manually below, and
# _default_manager presumably still points at the auto-created manager.
# Consider declaring ``objects = NotificationManager()`` inside the class
# body instead — confirm before relying on manager methods in queries.
Notification.objects = NotificationManager()
Notification.objects.model = Notification

View File

@@ -0,0 +1,156 @@
"""
Notification serializers.
"""
from rest_framework import serializers
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
class SubscriberSerializer(serializers.ModelSerializer):
    """Serializer for Subscriber model."""

    # Expose the legacy Novu identifier under the public name "subscriber_id".
    subscriber_id = serializers.CharField(source="novu_subscriber_id", read_only=True)

    class Meta:
        model = Subscriber
        fields = [
            "subscriber_id",
            "first_name",
            "last_name",
            "email",
            "phone",
            "avatar",
            "locale",
            "data",
            "created_at",
            "updated_at",
        ]
        read_only_fields = ["subscriber_id", "created_at", "updated_at"]
class CreateSubscriberSerializer(serializers.Serializer):
    """Serializer for creating a new subscriber.

    Only ``subscriber_id`` is mandatory; all other fields have blank/empty
    defaults.
    """
    subscriber_id = serializers.CharField(required=True)
    first_name = serializers.CharField(required=False, allow_blank=True, default="")
    last_name = serializers.CharField(required=False, allow_blank=True, default="")
    email = serializers.EmailField(required=False, allow_blank=True)
    phone = serializers.CharField(required=False, allow_blank=True, default="")
    avatar = serializers.URLField(required=False, allow_blank=True)
    locale = serializers.CharField(required=False, default="en")
    data = serializers.JSONField(required=False, default=dict)
class UpdateSubscriberSerializer(serializers.Serializer):
    """Serializer for updating a subscriber.

    Partial-update shape: only ``subscriber_id`` is required; omitted fields
    are left unchanged (no defaults are applied).
    """
    subscriber_id = serializers.CharField(required=True)
    first_name = serializers.CharField(required=False, allow_blank=True)
    last_name = serializers.CharField(required=False, allow_blank=True)
    email = serializers.EmailField(required=False, allow_blank=True)
    phone = serializers.CharField(required=False, allow_blank=True)
    avatar = serializers.URLField(required=False, allow_blank=True)
    locale = serializers.CharField(required=False)
    data = serializers.JSONField(required=False)
class NotificationPreferenceSerializer(serializers.ModelSerializer):
    """Serializer for NotificationPreference model."""

    class Meta:
        model = NotificationPreference
        # JSON preference blobs are writable; the timestamp is server-managed.
        fields = [
            "channel_preferences",
            "workflow_preferences",
            "frequency_settings",
            "is_opted_out",
            "updated_at",
        ]
        read_only_fields = ["updated_at"]
class UpdatePreferencesSerializer(serializers.Serializer):
    """Serializer for updating notification preferences.

    ``preferences`` is an opaque JSON payload interpreted by the view/service.
    """
    user_id = serializers.CharField(required=True)
    preferences = serializers.JSONField(required=True)
class TriggerNotificationSerializer(serializers.Serializer):
    """Serializer for triggering a notification.

    ``payload`` and ``overrides`` default to empty objects when omitted.
    """
    workflow_id = serializers.CharField(required=True)
    subscriber_id = serializers.CharField(required=True)
    payload = serializers.JSONField(required=False, default=dict)
    overrides = serializers.JSONField(required=False, default=dict)
class ModeratorSubmissionNotificationSerializer(serializers.Serializer):
    """Serializer for moderator submission notifications. All fields required."""
    submission_id = serializers.CharField(required=True)
    submission_type = serializers.CharField(required=True)
    submitter_name = serializers.CharField(required=True)
    action = serializers.CharField(required=True)
class ModeratorReportNotificationSerializer(serializers.Serializer):
    """Serializer for moderator report notifications.

    Report identity/context fields are required; ``entity_preview`` and
    ``reported_at`` are optional extras.
    """
    report_id = serializers.CharField(required=True)
    report_type = serializers.CharField(required=True)
    reported_entity_type = serializers.CharField(required=True)
    reported_entity_id = serializers.CharField(required=True)
    reporter_name = serializers.CharField(required=True)
    reason = serializers.CharField(required=True)
    entity_preview = serializers.CharField(required=False, allow_blank=True)
    reported_at = serializers.DateTimeField(required=False)
class SystemAnnouncementSerializer(serializers.ModelSerializer):
    """Serializer for system announcements."""

    class Meta:
        model = SystemAnnouncement
        fields = [
            "id",
            "title",
            "message",
            "severity",
            "action_url",
            "is_active",
            "created_at",
            "expires_at",
        ]
        # Primary key and creation timestamp are server-assigned.
        read_only_fields = ["id", "created_at"]
class CreateAnnouncementSerializer(serializers.Serializer):
    """Serializer for creating system announcements.

    Severity defaults to "info"; ``action_url`` is optional. Note that
    ``expires_at`` is not accepted here — expiring announcements must be
    edited after creation.
    """
    title = serializers.CharField(required=True, max_length=255)
    message = serializers.CharField(required=True)
    severity = serializers.ChoiceField(
        choices=["info", "warning", "critical"],
        default="info",
    )
    action_url = serializers.URLField(required=False, allow_blank=True)
class NotificationLogSerializer(serializers.ModelSerializer):
    """Serializer for notification logs (read-oriented audit view)."""

    class Meta:
        model = NotificationLog
        fields = [
            "id",
            "workflow_id",
            "notification_type",
            "channel",
            "status",
            "payload",
            "error_message",
            "created_at",
        ]
        read_only_fields = ["id", "created_at"]

View File

@@ -0,0 +1,571 @@
"""
Django-native notification service.
This service provides a fully Django-native notification system. Supports:
- In-app notifications
- Email notifications (via Django email backend)
- Real-time notifications (ready for Django Channels integration)
"""
import logging
from typing import Any
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.mail import send_mail
from django.db.models import QuerySet
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.html import strip_tags
from .models import Notification, NotificationLog, NotificationPreference, SystemAnnouncement
logger = logging.getLogger(__name__)
User = get_user_model()
class NotificationService:
    """
    Django-native notification service.

    Creates in-app ``Notification`` rows, optionally sends email through the
    Django email backend, and records every attempt (sent or failed) in
    ``NotificationLog``. This replaces the previous Novu-based service with a
    fully Django-native approach.
    """

    # Workflow identifiers, stored in ``Notification.data["workflow_id"]`` and
    # used to key per-workflow user preferences and audit-log rows.
    WORKFLOW_SUBMISSION_STATUS = "submission_status"
    WORKFLOW_MODERATION_ALERT = "moderation_alert"
    WORKFLOW_SYSTEM_ANNOUNCEMENT = "system_announcement"
    WORKFLOW_ADMIN_ALERT = "admin_alert"
    WORKFLOW_WELCOME = "welcome"
    WORKFLOW_COMMENT_REPLY = "comment_reply"
    WORKFLOW_MENTION = "mention"
    WORKFLOW_FOLLOW = "follow"

    def __init__(self):
        # All three settings are optional; fall back to project defaults.
        self.from_email = getattr(
            settings, "DEFAULT_FROM_EMAIL", "noreply@thrillwiki.com"
        )
        self.site_name = getattr(settings, "SITE_NAME", "ThrillWiki")
        self.site_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")

    def send_notification(
        self,
        recipient: User,
        actor: User | None,
        verb: str,
        action_object: Any = None,
        target: Any = None,
        description: str = "",
        level: str = "info",
        data: dict | None = None,
        send_email: bool = True,
        email_template: str | None = None,
    ) -> bool:
        """
        Send a notification to a single user.

        Args:
            recipient: The user to notify.
            actor: The user who performed the action (None for system events).
            verb: Description of the action (e.g. "approved your submission").
            action_object: The object that was acted upon.
            target: The target of the action.
            description: Additional description text.
            level: Notification level (info, success, warning, error).
            data: Extra data stored with the notification; ``data["workflow_id"]``
                also keys the audit log and email preference checks.
            send_email: Whether to also send an email notification.
            email_template: Optional template path for the email body.

        Returns:
            True if the in-app notification was created; False if the user is
            opted out or an error occurred (errors are logged, never raised).
        """
        workflow_id = data.get("workflow_id", "general") if data else "general"
        try:
            # Respect a global opt-out before doing any work.
            if self._is_user_opted_out(recipient):
                logger.debug("User %s opted out of notifications", recipient.id)
                return False

            notification_data = {
                "recipient": recipient,
                "actor": actor,
                "verb": verb,
                "description": description,
                "level": level,
                "data": data or {},
            }
            # Generic foreign keys are optional; attach them only when given.
            if action_object:
                notification_data["action_object_content_type"] = (
                    ContentType.objects.get_for_model(action_object)
                )
                notification_data["action_object_id"] = action_object.pk
            if target:
                notification_data["target_content_type"] = (
                    ContentType.objects.get_for_model(target)
                )
                notification_data["target_id"] = target.pk

            Notification.objects.create(**notification_data)

            # Audit trail for the in-app channel.
            self._log_notification(
                user=recipient,
                workflow_id=workflow_id,
                notification_type=level,
                channel="in_app",
                status=NotificationLog.Status.SENT,
                payload=data or {},
            )

            # Email is best-effort and gated by per-user channel preferences.
            if send_email and self._should_send_email(recipient, data):
                self._send_email_notification(
                    recipient=recipient,
                    verb=verb,
                    actor=actor,
                    action_object=action_object,
                    target=target,
                    description=description,
                    template=email_template,
                    data=data,
                )
            return True
        except Exception as e:
            logger.exception(
                "Failed to send notification to %s: %s", recipient.id, e
            )
            self._log_notification(
                user=recipient,
                workflow_id=workflow_id,
                notification_type=level,
                channel="in_app",
                status=NotificationLog.Status.FAILED,
                payload=data or {},
                error_message=str(e),
            )
            return False

    def send_to_group(
        self,
        recipients: QuerySet | list,
        actor: User | None,
        verb: str,
        action_object: Any = None,
        target: Any = None,
        description: str = "",
        level: str = "info",
        data: dict | None = None,
        send_email: bool = False,
    ) -> dict:
        """
        Send the same notification to multiple users.

        Returns:
            Dict with "success", "failed" and "skipped" counts.
        """
        results = {"success": 0, "failed": 0, "skipped": 0}
        for recipient in recipients:
            # Opted-out users are counted as skipped rather than failed.
            # (send_notification checks again; this keeps the counts apart.)
            if self._is_user_opted_out(recipient):
                results["skipped"] += 1
                continue
            success = self.send_notification(
                recipient=recipient,
                actor=actor,
                verb=verb,
                action_object=action_object,
                target=target,
                description=description,
                level=level,
                data=data,
                send_email=send_email,
            )
            if success:
                results["success"] += 1
            else:
                results["failed"] += 1
        return results

    def notify_moderators(
        self,
        verb: str,
        action_object: Any = None,
        description: str = "",
        data: dict | None = None,
    ) -> dict:
        """
        Send a notification (with email) to all moderators.

        Staff status is used as the moderator check; swap in a dedicated
        permission check if one exists.
        """
        # Note: the redundant local get_user_model() re-import was removed;
        # the module-level User is the same model.
        moderators = User.objects.filter(
            is_active=True,
            is_staff=True,  # Or use a specific permission check
        ).exclude(
            # NOTE(review): assumes a reverse relation named
            # "novu_notification_prefs" on the user model — confirm it exists.
            novu_notification_prefs__is_opted_out=True
        )
        return self.send_to_group(
            recipients=moderators,
            actor=None,
            verb=verb,
            action_object=action_object,
            description=description,
            level="info",
            data={**(data or {}), "workflow_id": self.WORKFLOW_MODERATION_ALERT},
            send_email=True,
        )

    def notify_admins(
        self,
        verb: str,
        description: str = "",
        level: str = "warning",
        data: dict | None = None,
    ) -> dict:
        """Send a notification (with email) to all active superusers."""
        admins = User.objects.filter(is_superuser=True, is_active=True)
        return self.send_to_group(
            recipients=admins,
            actor=None,
            verb=verb,
            description=description,
            level=level,
            data={**(data or {}), "workflow_id": self.WORKFLOW_ADMIN_ALERT},
            send_email=True,
        )

    def send_system_announcement(
        self,
        title: str,
        message: str,
        severity: str = "info",
        action_url: str = "",
        target_users: QuerySet | None = None,
        created_by: User | None = None,
    ) -> SystemAnnouncement:
        """
        Create a ``SystemAnnouncement`` row and broadcast it.

        Emails are only sent for warning/critical severities. When
        ``target_users`` is None the announcement goes to every active user
        (synchronously — consider a task for large user bases).
        """
        announcement = SystemAnnouncement.objects.create(
            title=title,
            message=message,
            severity=severity,
            action_url=action_url,
            created_by=created_by,
            is_active=True,
        )
        recipients = target_users or User.objects.filter(is_active=True)
        self.send_to_group(
            recipients=recipients,
            actor=created_by,
            verb=f"System announcement: {title}",
            action_object=announcement,
            description=message,
            level=severity,
            data={
                "workflow_id": self.WORKFLOW_SYSTEM_ANNOUNCEMENT,
                "announcement_id": str(announcement.id),
                "action_url": action_url,
            },
            send_email=severity in ["warning", "critical"],
        )
        return announcement

    def get_user_notifications(
        self,
        user: User,
        unread_only: bool = False,
        limit: int = 50,
    ):
        """Return up to ``limit`` notifications for ``user``, newest ordering
        per the model's default."""
        qs = Notification.objects.filter(recipient=user)
        if unread_only:
            # NOTE(review): relies on a custom queryset method ``unread()``
            # on the Notification manager — confirm it is defined.
            qs = qs.unread()
        return qs[:limit]

    def mark_as_read(self, user: User, notification_id: int | None = None):
        """Mark one notification (by id) or all of the user's as read."""
        if notification_id:
            try:
                notification = Notification.objects.get(
                    recipient=user, id=notification_id
                )
                notification.mark_as_read()
            except Notification.DoesNotExist:
                # Silently ignore unknown ids — idempotent from the caller's
                # point of view.
                pass
        else:
            # NOTE(review): relies on a custom queryset method
            # ``mark_all_as_read()`` — confirm it is defined.
            Notification.objects.filter(recipient=user).mark_all_as_read()

    def get_unread_count(self, user: User) -> int:
        """Return the number of unread notifications for ``user``."""
        return Notification.objects.filter(recipient=user, unread=True).count()

    def _is_user_opted_out(self, user: User) -> bool:
        """True when the user has a preference row with a global opt-out."""
        try:
            prefs = NotificationPreference.objects.get(user=user)
            return prefs.is_opted_out
        except NotificationPreference.DoesNotExist:
            # No preference row means the user has not opted out.
            return False

    def _should_send_email(self, user: User, data: dict | None) -> bool:
        """Check channel- and workflow-level email preferences (default: send)."""
        try:
            prefs = NotificationPreference.objects.get(user=user)
            # Channel-wide switch first.
            channel_prefs = prefs.channel_preferences or {}
            email_enabled = channel_prefs.get("email", True)
            if not email_enabled:
                return False
            # Then the per-workflow override, when a workflow is known.
            if data and "workflow_id" in data:
                workflow_prefs = prefs.workflow_preferences or {}
                workflow_email = workflow_prefs.get(data["workflow_id"], {}).get(
                    "email", True
                )
                return workflow_email
            return True
        except NotificationPreference.DoesNotExist:
            # Default to sending email if no preferences set.
            return True

    def _send_email_notification(
        self,
        recipient: User,
        verb: str,
        actor: User | None,
        action_object: Any,
        target: Any,
        description: str,
        template: str | None,
        data: dict | None,
    ):
        """Send a best-effort email; failures are logged, never raised."""
        try:
            context = {
                "recipient": recipient,
                "actor": actor,
                "verb": verb,
                "action_object": action_object,
                "target": target,
                "description": description,
                "site_name": self.site_name,
                "site_url": self.site_url,
                "data": data or {},
            }
            if template:
                html_content = render_to_string(template, context)
                text_content = strip_tags(html_content)
            else:
                # Default simple email built from verb/description.
                actor_name = actor.username if actor else self.site_name
                text_content = description or f"{actor_name} {verb}"
                html_content = f"<p>{text_content}</p>"
                if data and data.get("action_url"):
                    html_content += (
                        f'<p><a href="{data["action_url"]}">View details</a></p>'
                    )
            # One subject for both branches. (A dead, immediately-overwritten
            # subject assignment in the default branch was removed.)
            subject = f"[{self.site_name}] {verb[:50]}"
            send_mail(
                subject=subject,
                message=text_content,
                from_email=self.from_email,
                recipient_list=[recipient.email],
                html_message=html_content,
                fail_silently=True,
            )
            # NOTE(review): fail_silently=True means a backend failure is
            # still recorded as SENT here — acceptable for best-effort email.
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type="email",
                channel="email",
                status=NotificationLog.Status.SENT,
                payload=data or {},
            )
        except Exception as e:
            logger.exception("Failed to send email to %s: %s", recipient.email, e)
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type="email",
                channel="email",
                status=NotificationLog.Status.FAILED,
                payload=data or {},
                error_message=str(e),
            )

    def _log_notification(
        self,
        user: User,
        workflow_id: str,
        notification_type: str,
        channel: str,
        status: str,
        payload: dict,
        error_message: str = "",
    ):
        """Append one row to the NotificationLog audit trail."""
        NotificationLog.objects.create(
            user=user,
            workflow_id=workflow_id,
            notification_type=notification_type,
            channel=channel,
            status=status,
            payload=payload,
            error_message=error_message,
        )


# Module-level singleton used throughout the project.
notification_service = NotificationService()
# ============================================================================
# Backward compatibility - keep old NovuService interface but delegate to native
# ============================================================================
class NovuServiceSync:
    """
    Backward-compatible wrapper that delegates to the native notification
    service.

    Maintains the old Novu API signatures for existing callers while using
    the Django-native implementation under the hood.
    """

    def __init__(self):
        self._service = notification_service

    @property
    def is_configured(self) -> bool:
        """Always True: the Django-native system needs no external config."""
        return True

    def create_subscriber(self, subscriber_id: str, **kwargs) -> dict[str, Any]:
        """No-op: django-notifications-hq addresses User rows directly."""
        logger.info(
            "Subscriber creation not needed for django-notifications-hq: %s",
            subscriber_id,
        )
        return {"subscriberId": subscriber_id, "status": "native"}

    def update_subscriber(self, subscriber_id: str, **kwargs) -> dict[str, Any]:
        """No-op: subscriber state lives on the User/Subscriber models."""
        logger.info(
            "Subscriber update not needed for django-notifications-hq: %s",
            subscriber_id,
        )
        return {"subscriberId": subscriber_id, "status": "native"}

    def trigger_notification(
        self,
        workflow_id: str,
        subscriber_id: str,
        payload: dict | None = None,
        overrides: dict | None = None,
    ) -> dict[str, Any]:
        """
        Trigger a notification through the native service.

        Args:
            workflow_id: Workflow identifier stored with the notification.
            subscriber_id: Primary key of the recipient user.
            payload: Optional dict; "message" becomes the verb, "description"
                the description, and the whole payload is stored as data.
            overrides: Accepted for API compatibility; unused natively.

        Returns:
            {"status": "sent"|"failed", ...}
        """
        payload = payload or {}
        try:
            user = User.objects.get(pk=subscriber_id)
        except (User.DoesNotExist, ValueError, TypeError):
            # ValueError/TypeError: subscriber_id is not coercible to the pk
            # type (e.g. a non-numeric string for an integer pk). Previously
            # this escaped the handler and broke the failure-dict contract.
            logger.error("User not found for notification: %s", subscriber_id)
            return {"status": "failed", "error": "User not found"}
        verb = payload.get("message", f"Notification: {workflow_id}")
        description = payload.get("description", "")
        success = self._service.send_notification(
            recipient=user,
            actor=None,
            verb=verb,
            description=description,
            data={**payload, "workflow_id": workflow_id},
        )
        return {
            "status": "sent" if success else "failed",
            "workflow_id": workflow_id,
        }

    def trigger_topic_notification(
        self,
        workflow_id: str,
        topic_key: str,
        payload: dict | None = None,
    ) -> dict[str, Any]:
        """
        Trigger a topic notification — mapped to a group notification.

        Only the "moderators" and "admins" topics are recognized; any other
        key is logged and produces zero deliveries.
        """
        logger.info("Topic notification: %s -> %s", workflow_id, topic_key)
        payload = payload or {}
        if topic_key == "moderators":
            result = self._service.notify_moderators(
                verb=payload.get("message", "New moderation task"),
                data={**payload, "workflow_id": workflow_id},
            )
        elif topic_key == "admins":
            result = self._service.notify_admins(
                verb=payload.get("message", "Admin notification"),
                data={**payload, "workflow_id": workflow_id},
            )
        else:
            logger.warning("Unknown topic key: %s", topic_key)
            result = {"success": 0, "failed": 0, "skipped": 0}
        return {
            "status": "sent",
            "workflow_id": workflow_id,
            "result": result,
        }

    def update_preferences(
        self,
        subscriber_id: str,
        preferences: dict[str, Any],
    ) -> dict[str, Any]:
        """Persist notification preferences for the user identified by pk."""
        try:
            user = User.objects.get(pk=subscriber_id)
        except (User.DoesNotExist, ValueError, TypeError):
            # Same pk-coercion guard as trigger_notification.
            return {"status": "failed", "error": "User not found"}
        prefs, _ = NotificationPreference.objects.get_or_create(user=user)
        # Only touch the keys the caller actually supplied.
        if "channel_preferences" in preferences:
            prefs.channel_preferences = preferences["channel_preferences"]
        if "workflow_preferences" in preferences:
            prefs.workflow_preferences = preferences["workflow_preferences"]
        if "is_opted_out" in preferences:
            prefs.is_opted_out = preferences["is_opted_out"]
        prefs.save()
        return {"status": "updated"}


# Keep old name for backward compatibility.
novu_service = NovuServiceSync()

View File

@@ -0,0 +1,76 @@
"""
Notification URL configuration.
Note: Now using django-notifications-hq for native Django notifications.
Legacy Novu endpoints are kept for backward compatibility.
"""
from django.urls import path
from .views import (
AdminAlertView,
AdminCriticalErrorView,
CreateSubscriberView,
NotificationListView,
NotificationMarkReadView,
NotificationUnreadCountView,
NotifyModeratorsReportView,
NotifyModeratorsSubmissionView,
NotifyUserSubmissionStatusView,
SystemAnnouncementView,
TriggerNotificationView,
UpdatePreferencesView,
UpdateSubscriberView,
)
# Namespace for reversing, e.g. reverse("notifications:unread_count").
app_name = "notifications"

urlpatterns = [
    # ========== Native Notification Endpoints ==========
    # List notifications for current user
    path("", NotificationListView.as_view(), name="list"),
    # Mark notification(s) as read
    path("mark-read/", NotificationMarkReadView.as_view(), name="mark_read"),
    # Get unread count
    path("unread-count/", NotificationUnreadCountView.as_view(), name="unread_count"),
    # ========== Legacy/Compatibility Endpoints ==========
    # Subscriber management (legacy - kept for backward compatibility)
    path("subscribers/", CreateSubscriberView.as_view(), name="create_subscriber"),
    path("subscribers/update/", UpdateSubscriberView.as_view(), name="update_subscriber"),
    # Preferences (GET returns them, POST updates them)
    path("preferences/", UpdatePreferencesView.as_view(), name="preferences"),
    # Trigger an arbitrary notification workflow
    path("trigger/", TriggerNotificationView.as_view(), name="trigger"),
    # Moderator notifications
    path(
        "moderators/submission/",
        NotifyModeratorsSubmissionView.as_view(),
        name="moderators_submission",
    ),
    path(
        "moderators/report/",
        NotifyModeratorsReportView.as_view(),
        name="moderators_report",
    ),
    # User notifications
    path(
        "user/submission-status/",
        NotifyUserSubmissionStatusView.as_view(),
        name="user_submission_status",
    ),
    # System notifications (admin-only view)
    path(
        "system/announcement/",
        SystemAnnouncementView.as_view(),
        name="system_announcement",
    ),
    # Admin notifications
    path("admin/alert/", AdminAlertView.as_view(), name="admin_alert"),
    path(
        "admin/critical-error/",
        AdminCriticalErrorView.as_view(),
        name="admin_critical_error",
    ),
]

View File

@@ -0,0 +1,617 @@
"""
Notification views.
Provides REST API endpoints for:
- Subscriber management (legacy compatibility)
- Preference updates
- Notification triggering
- Moderator notifications
- System announcements
- User notification list and management
Note: Now using django-notifications-hq for native Django notifications.
The novu_service import provides backward compatibility.
"""
import logging
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.core.utils import capture_and_log
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
from .serializers import (
CreateAnnouncementSerializer,
CreateSubscriberSerializer,
ModeratorReportNotificationSerializer,
ModeratorSubmissionNotificationSerializer,
NotificationPreferenceSerializer,
SystemAnnouncementSerializer,
TriggerNotificationSerializer,
UpdatePreferencesSerializer,
UpdateSubscriberSerializer,
)
from .services import novu_service, notification_service
logger = logging.getLogger(__name__)
User = get_user_model()
class CreateSubscriberView(APIView):
    """
    POST /notifications/subscribers/

    Create or update a Novu subscriber.

    Legacy endpoint kept for backward compatibility: the local ``Subscriber``
    row is always written, and the change is mirrored to Novu only when the
    service reports itself configured (the native shim makes this a no-op).
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = CreateSubscriberSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        subscriber_id = data["subscriber_id"]
        try:
            # Update or create local subscriber record (one per user).
            subscriber, created = Subscriber.objects.update_or_create(
                user=request.user,
                defaults={
                    "novu_subscriber_id": subscriber_id,
                    "first_name": data.get("first_name", ""),
                    "last_name": data.get("last_name", ""),
                    # Fall back to the account email when none was supplied.
                    "email": data.get("email") or request.user.email,
                    "phone": data.get("phone", ""),
                    "avatar": data.get("avatar", ""),
                    "locale": data.get("locale", "en"),
                    "data": data.get("data", {}),
                },
            )
            # Sync to Novu if configured
            if novu_service.is_configured:
                novu_service.create_subscriber(
                    subscriber_id=subscriber_id,
                    email=subscriber.email,
                    first_name=subscriber.first_name,
                    last_name=subscriber.last_name,
                    phone=subscriber.phone,
                    avatar=subscriber.avatar,
                    locale=subscriber.locale,
                    data=subscriber.data,
                )
            # 201 on first creation, 200 on subsequent updates.
            return Response(
                {"subscriberId": subscriber_id, "created": created},
                status=status.HTTP_201_CREATED if created else status.HTTP_200_OK,
            )
        except Exception as e:
            capture_and_log(e, "Create notification subscriber", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class UpdateSubscriberView(APIView):
    """
    POST /notifications/subscribers/update/

    Update a Novu subscriber.

    Updates the caller's local ``Subscriber`` row and mirrors the change to
    Novu when the integration is configured. Returns 404 when the caller has
    no subscriber record yet.
    """

    permission_classes = [IsAuthenticated]

    # Local fields that may be mirrored from the request payload.
    _MUTABLE_FIELDS = (
        "first_name",
        "last_name",
        "email",
        "phone",
        "avatar",
        "locale",
        "data",
    )

    def post(self, request):
        serializer = UpdateSubscriberSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        payload = serializer.validated_data
        subscriber_id = payload["subscriber_id"]

        try:
            record = Subscriber.objects.filter(user=request.user).first()
            if record is None:
                return Response(
                    {"detail": "Subscriber not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            # Copy over only the fields the client actually supplied.
            for name in self._MUTABLE_FIELDS:
                if name in payload:
                    setattr(record, name, payload[name])
            record.save()

            # Mirror the update to Novu, minus the identifier itself.
            if novu_service.is_configured:
                remote_fields = {
                    key: value
                    for key, value in payload.items()
                    if key != "subscriber_id"
                }
                novu_service.update_subscriber(subscriber_id, **remote_fields)

            return Response({"success": True})
        except Exception as exc:
            capture_and_log(exc, "Update notification subscriber", source="api")
            return Response(
                {"detail": str(exc)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class UpdatePreferencesView(APIView):
    """
    POST /notifications/preferences/ — update notification preferences.
    GET  /notifications/preferences/ — fetch the current user's preferences.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        """Persist the posted preference blob locally, then mirror to Novu."""
        serializer = UpdatePreferencesSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        preferences = data["preferences"]
        try:
            # Update local preferences; the client sends camelCase keys which
            # are mapped onto the snake_case model fields here.
            pref, created = NotificationPreference.objects.update_or_create(
                user=request.user,
                defaults={
                    "channel_preferences": preferences.get("channelPreferences", {}),
                    "workflow_preferences": preferences.get("workflowPreferences", {}),
                    "frequency_settings": preferences.get("frequencySettings", {}),
                },
            )
            # Sync to Novu only when a subscriber mapping already exists.
            if novu_service.is_configured:
                subscriber = Subscriber.objects.filter(user=request.user).first()
                if subscriber:
                    novu_service.update_preferences(subscriber.novu_subscriber_id, preferences)
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Update notification preferences", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def get(self, request):
        """Get current user's notification preferences."""
        try:
            pref = NotificationPreference.objects.filter(user=request.user).first()
            if not pref:
                # No row yet: return an empty preference shape, not a 404.
                return Response(
                    {
                        "channelPreferences": {},
                        "workflowPreferences": {},
                        "frequencySettings": {},
                    }
                )
            return Response(NotificationPreferenceSerializer(pref).data)
        except Exception as e:
            capture_and_log(e, "Get notification preferences", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class TriggerNotificationView(APIView):
    """
    POST /notifications/trigger/

    Trigger a notification workflow for an arbitrary subscriber.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = TriggerNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        try:
            # Create the audit-log row first; its status is filled in below.
            log = NotificationLog.objects.create(
                user=request.user,
                workflow_id=data["workflow_id"],
                notification_type="trigger",
                channel="all",
                payload=data.get("payload", {}),
            )
            # Trigger via Novu (the native shim always reports configured).
            if novu_service.is_configured:
                result = novu_service.trigger_notification(
                    workflow_id=data["workflow_id"],
                    subscriber_id=data["subscriber_id"],
                    payload=data.get("payload"),
                    overrides=data.get("overrides"),
                )
                # NOTE(review): the native shim never returns "transactionId",
                # so this assigns the empty-string fallback.
                log.novu_transaction_id = result.get("transactionId", "")
                log.status = NotificationLog.Status.SENT
            else:
                log.status = NotificationLog.Status.SENT  # Mock success
            log.save()
            return Response({"success": True, "transactionId": log.novu_transaction_id})
        except Exception as e:
            capture_and_log(e, "Trigger notification", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotifyModeratorsSubmissionView(APIView):
    """
    POST /notifications/moderators/submission/

    Notify moderators about a new submission.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = ModeratorSubmissionNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        try:
            # Record the attempt in the audit log (marked SENT optimistically,
            # before the trigger below runs).
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="moderator-submission-notification",
                notification_type="moderator_submission",
                channel="in_app",
                payload=data,
                status=NotificationLog.Status.SENT,
            )
            # Fan out to the moderator topic.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="moderator-submission-notification",
                    topic_key="moderators",
                    payload=data,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Notify moderators (submission)", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotifyModeratorsReportView(APIView):
    """
    POST /notifications/moderators/report/

    Notify moderators about a new report.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = ModeratorReportNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        payload = serializer.validated_data

        try:
            # Audit-log entry first; it is the local record of the attempt
            # even when the external trigger is skipped.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="moderator-report-notification",
                notification_type="moderator_report",
                channel="in_app",
                payload=payload,
                status=NotificationLog.Status.SENT,
            )

            # Fan out to every subscriber of the "moderators" topic.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="moderator-report-notification",
                    topic_key="moderators",
                    payload=payload,
                )

            return Response({"success": True})
        except Exception as exc:
            capture_and_log(exc, "Notify moderators (report)", source="api")
            return Response(
                {"detail": str(exc)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotifyUserSubmissionStatusView(APIView):
    """
    POST /notifications/user/submission-status/

    Notify a user about their submission status change.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        # NOTE(review): request.data is forwarded unvalidated (no serializer),
        # so the payload shape depends entirely on the caller.
        data = request.data
        try:
            # Default to notifying the requesting user when no target given.
            subscriber_id = data.get("subscriber_id") or str(request.user.id)
            # Log the notification
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="submission-status-update",
                notification_type="submission_status",
                channel="email",
                payload=data,
                status=NotificationLog.Status.SENT,
            )
            # Trigger notification
            if novu_service.is_configured:
                novu_service.trigger_notification(
                    workflow_id="submission-status-update",
                    subscriber_id=subscriber_id,
                    payload=data,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Notify user submission status", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class SystemAnnouncementView(APIView):
    """
    POST /notifications/system/announcement/

    Send a system-wide announcement (admin only).
    """

    permission_classes = [IsAdminUser]

    def post(self, request):
        serializer = CreateAnnouncementSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        try:
            # Persist the announcement so it can be listed/deactivated later.
            announcement = SystemAnnouncement.objects.create(
                title=data["title"],
                message=data["message"],
                severity=data.get("severity", "info"),
                action_url=data.get("action_url", ""),
                created_by=request.user,
            )
            # Broadcast to the all-users topic.
            # NOTE(review): the native shim only recognizes the "moderators"
            # and "admins" topics; "users" falls through to its unknown-topic
            # branch and delivers nothing — confirm intended.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="system-announcement",
                    topic_key="users",
                    payload={
                        "title": announcement.title,
                        "message": announcement.message,
                        "severity": announcement.severity,
                        "actionUrl": announcement.action_url,
                    },
                )
            return Response(
                {
                    "success": True,
                    "announcementId": str(announcement.id),
                },
                status=status.HTTP_201_CREATED,
            )
        except Exception as e:
            capture_and_log(e, "System announcement", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class AdminAlertView(APIView):
    """
    POST /notifications/admin/alert/

    Send alert to admins.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        alert_payload = request.data
        try:
            # Persist an audit-trail entry for the alert before fanning out.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="admin-alert",
                notification_type="admin_alert",
                channel="email",
                payload=alert_payload,
                status=NotificationLog.Status.SENT,
            )

            # Deliver to the admin topic when the backend is available.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="admin-alert",
                    topic_key="admins",
                    payload=alert_payload,
                )

            return Response({"success": True})
        except Exception as exc:
            capture_and_log(exc, "Admin alert", source="api")
            return Response(
                {"detail": str(exc)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class AdminCriticalErrorView(APIView):
    """
    POST /notifications/admin/critical-error/

    Send critical error alert to admins.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        # NOTE(review): request.data is forwarded unvalidated.
        data = request.data
        try:
            # Log the alert
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="admin-critical-error",
                notification_type="critical_error",
                channel="email",
                payload=data,
                status=NotificationLog.Status.SENT,
            )
            # Trigger to admin topic with urgent priority
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="admin-critical-error",
                    topic_key="admins",
                    payload=data,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Admin critical error", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
# ============================================================================
# Native Notification Views (django-notifications-hq)
# ============================================================================
class NotificationListView(APIView):
    """
    GET /notifications/

    Get list of notifications for the current user.

    Query parameters:
        unread_only: "true" to return only unread notifications.
        limit: maximum number of rows (default 50, clamped to 1..100).
    """

    permission_classes = [IsAuthenticated]

    DEFAULT_LIMIT = 50
    MAX_LIMIT = 100

    def get(self, request):
        try:
            unread_only = request.query_params.get("unread_only", "false").lower() == "true"
            # Parse limit defensively: a malformed value previously raised
            # ValueError and surfaced as a 500, and a negative value produced
            # a negative queryset slice. Fall back to the default and clamp.
            try:
                limit = int(request.query_params.get("limit", self.DEFAULT_LIMIT))
            except (TypeError, ValueError):
                limit = self.DEFAULT_LIMIT
            limit = max(1, min(limit, self.MAX_LIMIT))

            notifications = notification_service.get_user_notifications(
                user=request.user,
                unread_only=unread_only,
                limit=limit,
            )

            # Flatten model instances into JSON-safe dicts.
            notification_list = [
                {
                    "id": notif.id,
                    "actor": str(notif.actor) if notif.actor else None,
                    "verb": notif.verb,
                    "description": notif.description or "",
                    "target": str(notif.target) if notif.target else None,
                    "actionObject": str(notif.action_object) if notif.action_object else None,
                    "level": notif.level,
                    "unread": notif.unread,
                    "data": notif.data or {},
                    "timestamp": notif.timestamp.isoformat(),
                }
                for notif in notifications
            ]
            return Response({
                "notifications": notification_list,
                "unreadCount": notification_service.get_unread_count(request.user),
            })
        except Exception as e:
            capture_and_log(e, "Get notifications", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotificationMarkReadView(APIView):
    """
    POST /notifications/mark-read/

    Mark notification(s) as read.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        try:
            # A null/absent id means "mark everything read" downstream.
            target_id = request.data.get("notification_id")
            notification_service.mark_as_read(
                user=request.user,
                notification_id=target_id,
            )
            return Response(
                {
                    "success": True,
                    "unreadCount": notification_service.get_unread_count(request.user),
                }
            )
        except Exception as exc:
            capture_and_log(exc, "Mark notification read", source="api")
            return Response(
                {"detail": str(exc)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotificationUnreadCountView(APIView):
    """
    GET /notifications/unread-count/

    Get count of unread notifications.
    """

    permission_classes = [IsAuthenticated]

    def get(self, request):
        try:
            unread = notification_service.get_unread_count(request.user)
            return Response({"unreadCount": unread})
        except Exception as exc:
            capture_and_log(exc, "Get unread count", source="api")
            return Response(
                {"detail": str(exc)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -66,6 +66,31 @@ app.conf.update(
"task": "rides.check_overdue_closings",
"schedule": 86400.0, # Daily at midnight
},
# ====== New scheduled tasks ======
"process-scheduled-deletions": {
"task": "core.process_scheduled_deletions",
"schedule": 86400.0, # Daily
},
"process-closing-entities": {
"task": "core.process_closing_entities",
"schedule": 86400.0, # Daily
},
"process-expired-bans": {
"task": "core.process_expired_bans",
"schedule": 3600.0, # Hourly
},
"cleanup-orphaned-images": {
"task": "core.cleanup_orphaned_images",
"schedule": 604800.0, # Weekly
},
"cleanup-old-versions": {
"task": "core.cleanup_old_versions",
"schedule": 2592000.0, # Monthly (30 days)
},
"data-retention-cleanup": {
"task": "core.data_retention_cleanup",
"schedule": 86400.0, # Daily
},
},
# Task result settings
result_expires=3600, # 1 hour

View File

@@ -73,8 +73,7 @@ THIRD_PARTY_APPS = [
"rest_framework.authtoken",
"rest_framework_simplejwt", # JWT authentication
"rest_framework_simplejwt.token_blacklist", # JWT token blacklist
"dj_rest_auth", # REST authentication with JWT support
"dj_rest_auth.registration", # REST registration support
# Note: dj_rest_auth removed - using custom auth views in apps.api.v1.auth
"drf_spectacular", # OpenAPI 3.0 documentation
"corsheaders", # CORS headers for API
"pghistory", # django-pghistory
@@ -102,6 +101,8 @@ THIRD_PARTY_APPS = [
"django_celery_beat", # Celery beat scheduler
"django_celery_results", # Celery result backend
"django_extensions", # Django Extensions for enhanced development tools
# Note: django-notifications-hq is installed but not in INSTALLED_APPS
# to avoid app label conflict. We use a custom implementation instead.
]
LOCAL_APPS = [
@@ -117,6 +118,7 @@ LOCAL_APPS = [
"apps.media",
"apps.blog",
"apps.support",
"apps.notifications", # Notification service
]
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS

View File

@@ -34,7 +34,7 @@ ACCOUNT_LOGIN_METHODS = {"email", "username"}
# Email verification settings
ACCOUNT_EMAIL_VERIFICATION = config("ACCOUNT_EMAIL_VERIFICATION", default="mandatory")
ACCOUNT_EMAIL_REQUIRED = True
# Note: ACCOUNT_EMAIL_REQUIRED is handled by ACCOUNT_SIGNUP_FIELDS above (email* = required)
ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_CHANGE = True
ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_RESEND = True

View File

@@ -24,7 +24,6 @@ dependencies = [
# Authentication & Security
# =============================================================================
"django-allauth>=65.3.0",
"dj-rest-auth>=7.0.0",
"djangorestframework-simplejwt>=5.5.1",
"pyjwt>=2.10.1",
"cryptography>=44.0.0",
@@ -58,7 +57,6 @@ dependencies = [
# Database & History Tracking
# =============================================================================
"django-pghistory>=3.5.2",
"django-fsm>=2.8.1",
"django-fsm-log>=3.1.0",
# =============================================================================
# Monitoring & Observability
@@ -79,6 +77,9 @@ dependencies = [
"django-turnstile>=0.1.2",
"fido2>=2.0.0",
"qrcode[pil]>=8.2",
"httpx>=0.28.1",
"django-fsm-2>=4.1.0",
"django-notifications-hq>=1.8.3",
]
[dependency-groups]