Compare commits

..

3 Commits

34 changed files with 5996 additions and 153 deletions

View File

@@ -112,6 +112,8 @@ urlpatterns = [
    path("profile/avatar/delete/", views.delete_avatar, name="delete_avatar"),
    # Login history endpoint
    path("login-history/", views.get_login_history, name="get_login_history"),
    # Email change cancellation endpoint
    path("email-change/cancel/", views.cancel_email_change, name="cancel_email_change"),
    # Magic Link (Login by Code) endpoints
    path("magic-link/request/", views_magic_link.request_magic_link, name="request_magic_link"),
    path("magic-link/verify/", views_magic_link.verify_magic_link, name="verify_magic_link"),

View File

@@ -1640,3 +1640,95 @@ def get_login_history(request):
            "count": len(results),
        }
    )
@extend_schema(
    operation_id="cancel_email_change",
    summary="Cancel pending email change",
    description=(
        # Fixed: the code clears User.pending_email, not a "new_email" field;
        # the published schema should describe the actual behavior.
        "Cancel a pending email change request. This will clear the pending_email "
        "field and prevent the email change from being completed."
    ),
    responses={
        200: {
            "description": "Email change cancelled or no pending change found",
            "example": {
                "detail": "Email change cancelled",
                "had_pending_change": True,
                "cancelled_email": "newemail@example.com",
            },
        },
        401: {
            "description": "Authentication required",
            "example": {"detail": "Authentication required"},
        },
    },
    tags=["Account Management"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def cancel_email_change(request):
    """
    Cancel a pending email change request.

    Clears ``user.pending_email`` so the previously requested address can no
    longer be verified. The endpoint is idempotent: it returns 200 whether or
    not a pending change existed, with ``had_pending_change`` indicating which
    case applied.

    **Authentication Required**: User must be logged in.

    Returns:
        200 with ``success``/``detail``/``had_pending_change``/``cancelled_email``,
        or 500 with ``success: False`` and an ``error`` message on failure.
    """
    try:
        user = request.user
        # Check if user has a pending email change
        pending_email = user.pending_email
        if pending_email:
            # Clear the pending email; persist only the changed column.
            user.pending_email = None
            user.save(update_fields=["pending_email"])
            # Lazy %-formatting defers string building until the record is
            # actually emitted (same rendered message as before).
            logger.info(
                "User %s cancelled email change to %s",
                user.username,
                pending_email,
                extra={
                    "user": user.username,
                    "user_id": user.user_id,
                    "cancelled_email": pending_email,
                    "action": "email_change_cancelled",
                },
            )
            return Response(
                {
                    "success": True,
                    "detail": "Email change cancelled",
                    "had_pending_change": True,
                    "cancelled_email": pending_email,
                },
                status=status.HTTP_200_OK,
            )
        # No pending change, but still success (idempotent)
        return Response(
            {
                "success": True,
                "detail": "No pending email change found",
                "had_pending_change": False,
                "cancelled_email": None,
            },
            status=status.HTTP_200_OK,
        )
    except Exception as e:
        capture_and_log(
            e,
            f"Cancel email change for user {request.user.username}",
            source="api",
            request=request,
        )
        return Response(
            {
                "success": False,
                "error": f"Error cancelling email change: {str(e)}",
            },
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

View File

@@ -0,0 +1 @@
# Admin API module

View File

@@ -0,0 +1,55 @@
"""
Admin API URL configuration.
Provides endpoints for admin dashboard functionality.
"""
from django.urls import path
from . import views
app_name = "admin_api"
urlpatterns = [
# OSM Cache Stats
path(
"osm-usage-stats/",
views.OSMUsageStatsView.as_view(),
name="osm_usage_stats",
),
# Rate Limit Metrics
path(
"rate-limit-metrics/",
views.RateLimitMetricsView.as_view(),
name="rate_limit_metrics",
),
# Database Manager (admin CRUD operations)
path(
"database-manager/",
views.DatabaseManagerView.as_view(),
name="database_manager",
),
# Celery Task Status (read-only)
path(
"tasks/status/",
views.CeleryTaskStatusView.as_view(),
name="task_status",
),
# Anomaly Detection
path(
"anomalies/detect/",
views.DetectAnomaliesView.as_view(),
name="detect_anomalies",
),
# Metrics Collection
path(
"metrics/collect/",
views.CollectMetricsView.as_view(),
name="collect_metrics",
),
# Pipeline Integrity Scan
path(
"pipeline/integrity-scan/",
views.PipelineIntegrityScanView.as_view(),
name="pipeline_integrity_scan",
),
]

File diff suppressed because it is too large Load Diff

View File

@@ -24,6 +24,7 @@ from .views import (
LogoutAPIView, LogoutAPIView,
PasswordChangeAPIView, PasswordChangeAPIView,
PasswordResetAPIView, PasswordResetAPIView,
ProcessOAuthProfileAPIView,
ResendVerificationAPIView, ResendVerificationAPIView,
SignupAPIView, SignupAPIView,
SocialAuthStatusAPIView, SocialAuthStatusAPIView,
@@ -38,8 +39,7 @@ urlpatterns = [
path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"), path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
# JWT token management # JWT token management
path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"), path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
# Social authentication endpoints (dj-rest-auth) # Note: dj_rest_auth removed - using custom social auth views below
path("social/", include("dj_rest_auth.registration.urls")),
path( path(
"password/reset/", "password/reset/",
PasswordResetAPIView.as_view(), PasswordResetAPIView.as_view(),
@@ -81,6 +81,11 @@ urlpatterns = [
SocialAuthStatusAPIView.as_view(), SocialAuthStatusAPIView.as_view(),
name="auth-social-status", name="auth-social-status",
), ),
path(
"social/process-profile/",
ProcessOAuthProfileAPIView.as_view(),
name="auth-social-process-profile",
),
path("status/", AuthStatusAPIView.as_view(), name="auth-status"), path("status/", AuthStatusAPIView.as_view(), name="auth-status"),
# Email verification endpoints # Email verification endpoints
path( path(

View File

@@ -6,6 +6,8 @@ login, signup, logout, password management, social authentication,
user profiles, and top lists. user profiles, and top lists.
""" """
import logging
from typing import cast # added 'cast' from typing import cast # added 'cast'
from django.contrib.auth import authenticate, get_user_model, login, logout from django.contrib.auth import authenticate, get_user_model, login, logout
@@ -71,6 +73,7 @@ except Exception:
TurnstileMixin = FallbackTurnstileMixin TurnstileMixin = FallbackTurnstileMixin
UserModel = get_user_model() UserModel = get_user_model()
logger = logging.getLogger(__name__)
# Helper: safely obtain underlying HttpRequest (used by Django auth) # Helper: safely obtain underlying HttpRequest (used by Django auth)
@@ -831,7 +834,529 @@ The ThrillWiki Team
# Don't reveal whether email exists # Don't reveal whether email exists
return Response({"detail": "If the email exists, a verification email has been sent", "success": True}) return Response({"detail": "If the email exists, a verification email has been sent", "success": True})
# Note: User Profile, Top List, and Top List Item ViewSets are now handled # Note: User Profile, Top List, and Top List Item ViewSets are now handled
# by the dedicated accounts app at backend/apps/api/v1/accounts/views.py # by the dedicated accounts app at backend/apps/api/v1/accounts/views.py
# to avoid duplication and maintain clean separation of concerns. # to avoid duplication and maintain clean separation of concerns.
@extend_schema_view(
    post=extend_schema(
        summary="Process OAuth profile",
        description="Process OAuth profile data during social authentication flow.",
        request={
            "type": "object",
            "properties": {
                "provider": {"type": "string", "description": "OAuth provider (e.g., google, discord)"},
                "profile": {
                    "type": "object",
                    "description": "Profile data from OAuth provider",
                    "properties": {
                        "id": {"type": "string"},
                        "email": {"type": "string", "format": "email"},
                        "name": {"type": "string"},
                        "avatar_url": {"type": "string", "format": "uri"},
                    },
                },
                "access_token": {"type": "string", "description": "OAuth access token"},
            },
            "required": ["provider", "profile"],
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "success": {"type": "boolean"},
                    "action": {"type": "string", "enum": ["created", "updated", "linked"]},
                    "user": {"type": "object"},
                    "profile_synced": {"type": "boolean"},
                },
            },
            400: "Bad Request",
            401: "Unauthorized",
            403: "Account suspended",
        },
        tags=["Social Authentication"],
    ),
)
class ProcessOAuthProfileAPIView(APIView):
    """
    API endpoint to process OAuth profile data.

    This endpoint is called AFTER the OAuth flow is complete to:
    1. Check if user is banned (SECURITY CRITICAL)
    2. Extract avatar from OAuth provider
    3. Download and upload avatar to Cloudflare Images
    4. Sync display name from OAuth provider
    5. Update username if it's a generic UUID-based username

    Called with an empty body - uses the authenticated session.
    Full parity with Supabase Edge Function: process-oauth-profile
    BULLETPROOFED: Comprehensive validation, sanitization, and error handling.
    """

    permission_classes = [IsAuthenticated]

    # Security constants
    MAX_AVATAR_SIZE = 10 * 1024 * 1024  # 10MB hard cap on downloaded avatar bytes
    AVATAR_DOWNLOAD_TIMEOUT = 10.0  # seconds
    AVATAR_UPLOAD_TIMEOUT = 30.0  # seconds
    MAX_USERNAME_LENGTH = 150
    MIN_USERNAME_LENGTH = 3
    ALLOWED_USERNAME_CHARS = set("abcdefghijklmnopqrstuvwxyz0123456789_")
    # Rate limiting for avatar uploads (prevent abuse)
    AVATAR_UPLOAD_COOLDOWN = 60  # seconds between uploads

    def post(self, request: Request) -> Response:
        """
        Process the authenticated user's most recent linked OAuth account:
        ban check, avatar import to Cloudflare, display-name sync, and
        username upgrade. Always responds 200 with an ``action`` field for
        non-error outcomes (rate_limited / skipped / processed).
        """
        import re
        import httpx
        from django.db import transaction
        from django.core.cache import cache
        try:
            user = request.user
            # ================================================================
            # STEP 0: Validate user object exists and is valid
            # ================================================================
            if not user or not hasattr(user, 'user_id'):
                logger.error("ProcessOAuthProfile called with invalid user object")
                return Response({
                    "success": False,
                    "error": "Invalid user session",
                }, status=status.HTTP_401_UNAUTHORIZED)
            user_id_str = str(user.user_id)
            # ================================================================
            # STEP 1: CRITICAL - Check ban status FIRST
            # ================================================================
            is_banned = getattr(user, 'is_banned', False)
            # Also check via profile if applicable
            if not is_banned:
                try:
                    from apps.accounts.models import UserProfile
                    profile_check = UserProfile.objects.filter(user=user).first()
                    if profile_check and getattr(profile_check, 'is_banned', False):
                        is_banned = True
                except Exception:
                    # Best-effort secondary check; a missing profile model must
                    # not block the primary ban flag above.
                    pass
            if is_banned:
                ban_reason = getattr(user, 'ban_reason', None) or "Policy violation"
                # Sanitize ban reason for response
                safe_ban_reason = str(ban_reason)[:200] if ban_reason else None
                logger.warning(
                    f"Banned user attempted OAuth profile update",
                    extra={"user_id": user_id_str, "ban_reason": safe_ban_reason}
                )
                return Response({
                    "error": "Account suspended",
                    "message": (
                        f"Your account has been suspended. Reason: {safe_ban_reason}"
                        if safe_ban_reason
                        else "Your account has been suspended. Contact support for assistance."
                    ),
                    "ban_reason": safe_ban_reason,
                }, status=status.HTTP_403_FORBIDDEN)
            # ================================================================
            # STEP 2: Check rate limiting for avatar uploads
            # ================================================================
            rate_limit_key = f"oauth_profile:avatar:{user_id_str}"
            if cache.get(rate_limit_key):
                # Deliberately a 200 "success" so clients don't retry-loop.
                return Response({
                    "success": True,
                    "action": "rate_limited",
                    "message": "Please wait before updating your profile again",
                    "avatar_uploaded": False,
                    "profile_updated": False,
                })
            # ================================================================
            # STEP 3: Get OAuth provider info from social accounts
            # ================================================================
            try:
                from allauth.socialaccount.models import SocialAccount
            except ImportError:
                logger.error("django-allauth not installed")
                return Response({
                    "success": False,
                    "error": "Social authentication not configured",
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
            social_accounts = SocialAccount.objects.filter(user=user)
            if not social_accounts.exists():
                return Response({
                    "success": True,
                    "action": "skipped",
                    "message": "No OAuth accounts linked",
                })
            # Get the most recent social account
            social_account = social_accounts.order_by("-date_joined").first()
            if not social_account:
                return Response({
                    "success": True,
                    "action": "skipped",
                    "message": "No valid OAuth account found",
                })
            provider = social_account.provider or "unknown"
            extra_data = social_account.extra_data or {}
            # Validate extra_data is a dict
            if not isinstance(extra_data, dict):
                logger.warning(f"Invalid extra_data type for user {user_id_str}: {type(extra_data)}")
                extra_data = {}
            # ================================================================
            # STEP 4: Extract profile data based on provider (with sanitization)
            # ================================================================
            avatar_url = None
            display_name = None
            username_base = None
            if provider == "google":
                avatar_url = self._sanitize_url(extra_data.get("picture"))
                display_name = self._sanitize_display_name(extra_data.get("name"))
                email = extra_data.get("email", "")
                if email and isinstance(email, str):
                    # Username candidate is the local part of the Google email.
                    username_base = self._sanitize_username(email.split("@")[0])
            elif provider == "discord":
                discord_data = extra_data
                discord_id = discord_data.get("id") or discord_data.get("sub")
                display_name = self._sanitize_display_name(
                    discord_data.get("global_name")
                    or discord_data.get("full_name")
                    or discord_data.get("name")
                )
                # Discord avatar URL construction with validation
                avatar_hash = discord_data.get("avatar")
                if discord_id and avatar_hash and isinstance(discord_id, str) and isinstance(avatar_hash, str):
                    # Validate discord_id is numeric
                    if discord_id.isdigit():
                        # Validate avatar_hash is alphanumeric
                        if re.match(r'^[a-zA-Z0-9_]+$', avatar_hash):
                            avatar_url = f"https://cdn.discordapp.com/avatars/{discord_id}/{avatar_hash}.png?size=256"
                if not avatar_url:
                    avatar_url = self._sanitize_url(
                        discord_data.get("avatar_url") or discord_data.get("picture")
                    )
                raw_username = discord_data.get("username") or discord_data.get("name", "")
                if raw_username and isinstance(raw_username, str):
                    # Strip legacy "#1234" discriminator before sanitizing.
                    username_base = self._sanitize_username(raw_username.split("#")[0])
                if not username_base and discord_id:
                    username_base = f"discord_{str(discord_id)[:8]}"
            else:
                # Generic provider handling
                avatar_url = self._sanitize_url(
                    extra_data.get("picture")
                    or extra_data.get("avatar_url")
                    or extra_data.get("avatar")
                )
                display_name = self._sanitize_display_name(
                    extra_data.get("name") or extra_data.get("display_name")
                )
            # ================================================================
            # STEP 5: Get or create user profile (with transaction)
            # ================================================================
            from apps.accounts.models import UserProfile
            with transaction.atomic():
                profile, profile_created = UserProfile.objects.select_for_update().get_or_create(
                    user=user
                )
                # Check if profile already has an avatar
                if profile.avatar_id:
                    return Response({
                        "success": True,
                        "action": "skipped",
                        "message": "Avatar already exists",
                        "avatar_uploaded": False,
                        "profile_updated": False,
                    })
            # ================================================================
            # STEP 6: Download and upload avatar to Cloudflare (outside transaction)
            # ================================================================
            avatar_uploaded = False
            if avatar_url:
                try:
                    # Validate URL scheme
                    if not avatar_url.startswith(('https://', 'http://')):
                        logger.warning(f"Invalid avatar URL scheme: {avatar_url[:50]}")
                    else:
                        # Download avatar from provider
                        download_response = httpx.get(
                            avatar_url,
                            timeout=self.AVATAR_DOWNLOAD_TIMEOUT,
                            follow_redirects=True,
                            headers={
                                "User-Agent": "ThrillWiki/1.0",
                                "Accept": "image/*",
                            },
                        )
                        if download_response.status_code == 200:
                            image_data = download_response.content
                            content_type = download_response.headers.get("content-type", "")
                            # Validate content type
                            if not content_type.startswith("image/"):
                                logger.warning(f"Invalid content type for avatar: {content_type}")
                            # Validate file size
                            elif len(image_data) > self.MAX_AVATAR_SIZE:
                                logger.warning(
                                    f"Avatar too large for user {user_id_str}: {len(image_data)} bytes"
                                )
                            # Validate minimum size (avoid empty images)
                            elif len(image_data) < 100:
                                logger.warning(f"Avatar too small for user {user_id_str}")
                            else:
                                avatar_uploaded = self._upload_to_cloudflare(
                                    image_data, user_id_str, provider, profile
                                )
                        else:
                            logger.warning(
                                f"Avatar download failed: {download_response.status_code}",
                                extra={"user_id": user_id_str, "provider": provider}
                            )
                except httpx.TimeoutException:
                    logger.warning(f"Avatar download timeout for user {user_id_str}")
                except httpx.HTTPError as download_error:
                    logger.warning(f"Failed to download avatar: {download_error}")
                except Exception as e:
                    # Avatar import is best-effort: any failure here must not
                    # abort the rest of the profile sync.
                    logger.warning(f"Unexpected avatar error: {e}")
            # Set rate limit after successful processing
            if avatar_uploaded:
                cache.set(rate_limit_key, True, self.AVATAR_UPLOAD_COOLDOWN)
            # ================================================================
            # STEP 7: Update display name if not set (with validation)
            # ================================================================
            profile_updated = False
            if display_name and not getattr(user, "display_name", None):
                try:
                    user.display_name = display_name
                    user.save(update_fields=["display_name"])
                    profile_updated = True
                except Exception as e:
                    logger.warning(f"Failed to update display name: {e}")
            # ================================================================
            # STEP 8: Update username if it's a generic UUID-based username
            # ================================================================
            current_username = getattr(user, "username", "") or ""
            # "user_" prefix marks auto-generated placeholder usernames.
            if username_base and current_username.startswith("user_"):
                try:
                    new_username = self._ensure_unique_username(username_base, user.user_id)
                    if new_username and new_username != current_username:
                        user.username = new_username
                        user.save(update_fields=["username"])
                        profile_updated = True
                        logger.info(
                            f"Username updated from {current_username} to {new_username}",
                            extra={"user_id": user_id_str}
                        )
                except Exception as e:
                    logger.warning(f"Failed to update username: {e}")
            return Response({
                "success": True,
                "action": "processed",
                "provider": provider,
                "avatar_uploaded": avatar_uploaded,
                "profile_updated": profile_updated,
                "message": "OAuth profile processed successfully",
            })
        except Exception as e:
            capture_and_log(e, "Process OAuth profile", source="api", request=request)
            return Response({
                "success": False,
                "error": "Failed to process OAuth profile",
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def _sanitize_url(self, url) -> str | None:
        """Sanitize and validate URL. Returns None for non-strings, non-http(s)
        schemes, or URLs containing obviously dangerous substrings."""
        if not url or not isinstance(url, str):
            return None
        url = url.strip()[:2000]  # Limit length
        # Basic URL validation
        if not url.startswith(('https://', 'http://')):
            return None
        # Block obviously malicious patterns
        dangerous_patterns = ['javascript:', 'data:', 'file:', '<script', 'onclick']
        for pattern in dangerous_patterns:
            if pattern.lower() in url.lower():
                return None
        return url

    def _sanitize_display_name(self, name) -> str | None:
        """Sanitize display name: strip, cap at 100 chars, drop control
        characters, collapse whitespace. Returns None if nothing remains."""
        if not name or not isinstance(name, str):
            return None
        import re
        # Strip and limit length
        name = name.strip()[:100]
        # Remove control characters
        name = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', name)
        # Remove excessive whitespace
        name = ' '.join(name.split())
        # Must have at least 1 character
        if len(name) < 1:
            return None
        return name

    def _sanitize_username(self, username) -> str | None:
        """Sanitize username: lowercase, keep only [a-z0-9_], enforce
        MIN/MAX_USERNAME_LENGTH. Returns None when too short after cleanup."""
        if not username or not isinstance(username, str):
            return None
        import re
        # Lowercase and remove non-allowed characters
        username = username.lower().strip()
        username = re.sub(r'[^a-z0-9_]', '', username)
        # Enforce length limits
        if len(username) < self.MIN_USERNAME_LENGTH:
            return None
        username = username[:self.MAX_USERNAME_LENGTH]
        return username

    def _upload_to_cloudflare(self, image_data: bytes, user_id: str, provider: str, profile) -> bool:
        """Upload image to Cloudflare Images with error handling.

        Requests a direct-upload URL, POSTs the bytes, then links a new
        CloudflareImage record to the given profile. Returns True only when
        the upload and the DB link both succeed; all failures log and
        return False (never raise).
        """
        import httpx
        from django.db import transaction
        try:
            from django_cloudflareimages_toolkit.models import CloudflareImage
            from django_cloudflareimages_toolkit.services import CloudflareImagesService
            cf_service = CloudflareImagesService()
            # Request direct upload URL
            upload_result = cf_service.get_direct_upload_url(
                metadata={
                    "type": "avatar",
                    "user_id": user_id,
                    "provider": provider,
                }
            )
            if not upload_result or "upload_url" not in upload_result:
                logger.warning("Failed to get Cloudflare upload URL")
                return False
            upload_url = upload_result["upload_url"]
            cloudflare_id = upload_result.get("id") or upload_result.get("cloudflare_id")
            if not cloudflare_id:
                logger.warning("No Cloudflare ID in upload result")
                return False
            # Upload image to Cloudflare
            # NOTE(review): content is always labeled image/png even when the
            # provider served JPEG/WebP — confirm Cloudflare tolerates this.
            files = {"file": ("avatar.png", image_data, "image/png")}
            upload_response = httpx.post(
                upload_url,
                files=files,
                timeout=self.AVATAR_UPLOAD_TIMEOUT,
            )
            if upload_response.status_code not in [200, 201]:
                logger.warning(f"Cloudflare upload failed: {upload_response.status_code}")
                return False
            # Create CloudflareImage record and link to profile
            with transaction.atomic():
                cf_image = CloudflareImage.objects.create(
                    cloudflare_id=cloudflare_id,
                    is_uploaded=True,
                    metadata={
                        "type": "avatar",
                        "user_id": user_id,
                        "provider": provider,
                    }
                )
                profile.avatar = cf_image
                profile.save(update_fields=["avatar"])
            logger.info(
                f"Avatar uploaded successfully",
                extra={"user_id": user_id, "provider": provider, "cloudflare_id": cloudflare_id}
            )
            return True
        except ImportError:
            logger.warning("django-cloudflareimages-toolkit not available")
            return False
        except Exception as cf_error:
            logger.warning(f"Cloudflare upload error: {cf_error}")
            return False

    def _ensure_unique_username(self, base_username: str, user_id: str, max_attempts: int = 10) -> str | None:
        """
        Ensure username is unique by appending numbers if needed.
        Returns None if no valid username can be generated.

        Tries base, then base_1, base_2, ... up to max_attempts, excluding
        the caller's own row from the uniqueness query. A DB error or
        exhausted attempts falls through to a "user_<id-prefix>" fallback.
        """
        if not base_username:
            return None
        username = base_username.lower()[:self.MAX_USERNAME_LENGTH]
        # Validate characters
        if not all(c in self.ALLOWED_USERNAME_CHARS for c in username):
            return None
        attempt = 0
        while attempt < max_attempts:
            try:
                existing = UserModel.objects.filter(username=username).exclude(user_id=user_id).exists()
                if not existing:
                    return username
            except Exception:
                break
            attempt += 1
            # Ensure we don't exceed max length with suffix
            suffix = f"_{attempt}"
            max_base = self.MAX_USERNAME_LENGTH - len(suffix)
            username = f"{base_username.lower()[:max_base]}{suffix}"
        # Fallback to UUID-based username
        return f"user_{str(user_id)[:8]}"

View File

@@ -1,7 +1,11 @@
from django.urls import path from django.urls import path
from .views import GenerateUploadURLView from . import views
app_name = "images"
urlpatterns = [ urlpatterns = [
path("generate-upload-url/", GenerateUploadURLView.as_view(), name="generate-upload-url"), path("generate-upload-url/", views.GenerateUploadURLView.as_view(), name="generate_upload_url"),
path("delete/", views.DeleteImageView.as_view(), name="delete_image"),
path("og-image/", views.GenerateOGImageView.as_view(), name="og_image"),
] ]

View File

@@ -1,6 +1,7 @@
import logging import logging
import requests import requests
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured from django.core.exceptions import ImproperlyConfigured
from rest_framework import status from rest_framework import status
from rest_framework.permissions import IsAuthenticated from rest_framework.permissions import IsAuthenticated
@@ -30,3 +31,109 @@ class GenerateUploadURLView(APIView):
except Exception as e: except Exception as e:
capture_and_log(e, 'Generate upload URL - unexpected error', source='api') capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
class DeleteImageView(APIView):
    """
    POST /images/delete/

    Remove an image from Cloudflare Images. A Cloudflare 404 counts as
    success (already deleted), and missing credentials produce a mock
    success so local development keeps working.
    """
    permission_classes = [IsAuthenticated]

    def post(self, request):
        image_id = request.data.get("image_id")
        if not image_id:
            return Response(
                {"detail": "image_id is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            # Resolve Cloudflare credentials from settings.
            account_id = getattr(settings, "CLOUDFLARE_IMAGES_ACCOUNT_ID", None)
            api_token = getattr(settings, "CLOUDFLARE_IMAGES_API_TOKEN", None)
            if not (account_id and api_token):
                logger.warning("Cloudflare Images not configured, mock deleting image")
                return Response({"success": True, "mock": True})
            # Issue the delete against the Cloudflare Images v1 API.
            endpoint = f"https://api.cloudflare.com/client/v4/accounts/{account_id}/images/v1/{image_id}"
            cf_response = requests.delete(
                endpoint,
                headers={"Authorization": f"Bearer {api_token}"},
                timeout=10,
            )
            # 404 means the image was already removed upstream.
            if cf_response.status_code in (200, 404):
                return Response({"success": True})
            logger.error(f"Cloudflare delete failed: {cf_response.text}")
            return Response(
                {"detail": "Failed to delete image"},
                status=status.HTTP_502_BAD_GATEWAY,
            )
        except requests.RequestException as e:
            capture_and_log(e, "Delete image - Cloudflare API error", source="api")
            return Response(
                {"detail": "Failed to delete image"},
                status=status.HTTP_502_BAD_GATEWAY,
            )
        except Exception as e:
            capture_and_log(e, "Delete image - unexpected error", source="api")
            return Response(
                {"detail": "An unexpected error occurred"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class GenerateOGImageView(APIView):
    """
    POST /images/og-image/
    Generate an Open Graph image URL for social sharing.

    Request body:
        title (str, required): page title rendered on the OG image.
        description (str, optional): supporting text, truncated to 200 chars.
        entity_type (str, optional): entity kind the page represents.
        image_url (str, optional): background image URL (currently unused).

    Returns a placeholder ``og_image_url``; a real image-generation service
    should replace this before production use.
    """
    permission_classes = []  # Public endpoint

    def post(self, request):
        from urllib.parse import quote

        title = request.data.get("title", "")
        description = request.data.get("description", "")
        entity_type = request.data.get("entity_type", "")
        image_url = request.data.get("image_url", "")
        if not title:
            return Response(
                {"detail": "title is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            # This is a placeholder for OG image generation
            # In production, you would:
            # 1. Use an image generation service (Cloudinary, imgix, etc.)
            # 2. Or use a headless browser service (Puppeteer, Playwright)
            # 3. Or use a dedicated OG image service
            # For now, return a template URL or placeholder
            base_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")
            # FIX: URL-encode the title so spaces, '&', '#', '?' etc. cannot
            # break the query string or inject extra parameters.
            og_image_url = f"{base_url}/api/v1/images/og-preview/?title={quote(title[:100])}"
            return Response({
                "success": True,
                "og_image_url": og_image_url,
                "title": title,
                "description": description[:200] if description else "",
                "entity_type": entity_type,
                "note": "Placeholder - configure OG image service for production",
            })
        except Exception as e:
            capture_and_log(e, "Generate OG image", source="api")
            # NOTE(review): str(e) can leak internal details to clients;
            # kept for backward compatibility — consider a generic message.
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -30,4 +30,8 @@ urlpatterns = [
        views.MapCacheAPIView.as_view(),
        name="map_cache_invalidate",
    ),
    # Location detection and enrichment
    path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
    path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
    path("search-location/", views.SearchLocationView.as_view(), name="search_location"),
]

View File

@@ -999,3 +999,630 @@ MapSearchView = MapSearchAPIView
MapBoundsView = MapBoundsAPIView MapBoundsView = MapBoundsAPIView
MapStatsView = MapStatsAPIView MapStatsView = MapStatsAPIView
MapCacheView = MapCacheAPIView MapCacheView = MapCacheAPIView
# =============================================================================
# Location Detection / Enrichment Endpoints
# =============================================================================
@extend_schema_view(
    post=extend_schema(
        summary="Detect user location from IP",
        description="Detect the user's approximate location based on their IP address.",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "ip_address": {
                        "type": "string",
                        "description": "IP address to geolocate. If not provided, uses request IP.",
                    }
                },
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number"},
                    "longitude": {"type": "number"},
                    "city": {"type": "string"},
                    "region": {"type": "string"},
                    "country": {"type": "string"},
                    "timezone": {"type": "string"},
                },
            }
        },
        tags=["Maps"],
    ),
)
class DetectLocationView(APIView):
    """
    POST /maps/detect-location/
    Detect user's location based on IP address using a geolocation service.

    Always returns 200 on the happy path; ``detected`` is False with a
    ``reason`` when the IP is local or the geolocation lookup fails.
    """
    permission_classes = [AllowAny]

    def post(self, request):
        """Resolve an IP (from the payload or the request) to coordinates via ipapi.co."""
        try:
            # Get IP address from request or payload
            ip_address = request.data.get("ip_address")
            if not ip_address:
                # Get client IP from request
                # NOTE(review): X-Forwarded-For is client-controllable unless a
                # trusted proxy strips it — confirm deployment config.
                x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
                if x_forwarded_for:
                    ip_address = x_forwarded_for.split(",")[0].strip()
                else:
                    ip_address = request.META.get("REMOTE_ADDR", "")
            # For localhost/development, return a default location
            if ip_address in ("127.0.0.1", "::1", "localhost") or ip_address.startswith("192.168."):
                return Response(
                    {
                        # Fixed fallback: New York City coordinates.
                        "latitude": 40.7128,
                        "longitude": -74.006,
                        "city": "New York",
                        "region": "New York",
                        "country": "US",
                        "country_name": "United States",
                        "timezone": "America/New_York",
                        "detected": False,
                        "reason": "localhost_fallback",
                    }
                )
            # Use IP geolocation service (ipapi.co, ipinfo.io, etc.)
            import httpx
            try:
                response = httpx.get(
                    f"https://ipapi.co/{ip_address}/json/",
                    timeout=5.0,
                    headers={"User-Agent": "ThrillWiki/1.0"},
                )
                if response.status_code == 200:
                    # NOTE(review): ipapi.co can return 200 with an error
                    # payload (e.g. rate-limited); fields would then be None.
                    data = response.json()
                    return Response(
                        {
                            "latitude": data.get("latitude"),
                            "longitude": data.get("longitude"),
                            "city": data.get("city", ""),
                            "region": data.get("region", ""),
                            "country": data.get("country_code", ""),
                            "country_name": data.get("country_name", ""),
                            "timezone": data.get("timezone", ""),
                            "detected": True,
                        }
                    )
            except httpx.HTTPError as e:
                logger.warning(f"IP geolocation failed: {e}")
            # Fallback response
            return Response(
                {
                    "latitude": None,
                    "longitude": None,
                    "city": "",
                    "region": "",
                    "country": "",
                    "country_name": "",
                    "timezone": "",
                    "detected": False,
                    "reason": "geolocation_failed",
                }
            )
        except Exception as e:
            capture_and_log(e, "Detect location from IP", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
@extend_schema_view(
    post=extend_schema(
        summary="Enrich location with geocoding",
        description="Enrich location data with reverse geocoding (coordinates to address).",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number", "required": True},
                    "longitude": {"type": "number", "required": True},
                },
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "formatted_address": {"type": "string"},
                    "street_address": {"type": "string"},
                    "city": {"type": "string"},
                    "state": {"type": "string"},
                    "postal_code": {"type": "string"},
                    "country": {"type": "string"},
                },
            }
        },
        tags=["Maps"],
    ),
)
class EnrichLocationView(APIView):
    """
    POST /maps/enrich-location/
    Enrich location with reverse geocoding (coordinates to address).

    Returns 400 for missing/non-numeric coordinates; otherwise 200 with
    ``enriched`` indicating whether the Nominatim lookup succeeded.
    """
    permission_classes = [AllowAny]

    def post(self, request):
        """Reverse-geocode (latitude, longitude) into address fields via Nominatim."""
        try:
            latitude = request.data.get("latitude")
            longitude = request.data.get("longitude")
            if latitude is None or longitude is None:
                return Response(
                    {"detail": "latitude and longitude are required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            try:
                lat = float(latitude)
                lng = float(longitude)
            except (TypeError, ValueError):
                return Response(
                    {"detail": "Invalid latitude or longitude"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Use reverse geocoding service
            import httpx
            try:
                # Using Nominatim (OpenStreetMap) - free, no API key required
                # NOTE(review): Nominatim's usage policy limits request rate
                # and requires an identifying User-Agent — verify compliance
                # before exposing this publicly at scale.
                response = httpx.get(
                    "https://nominatim.openstreetmap.org/reverse",
                    params={
                        "lat": lat,
                        "lon": lng,
                        "format": "json",
                        "addressdetails": 1,
                    },
                    timeout=5.0,
                    headers={"User-Agent": "ThrillWiki/1.0"},
                )
                if response.status_code == 200:
                    data = response.json()
                    address = data.get("address", {})
                    return Response(
                        {
                            "formatted_address": data.get("display_name", ""),
                            "street_address": address.get("road", ""),
                            "house_number": address.get("house_number", ""),
                            # Nominatim uses city/town/village depending on
                            # the place's size; take the first present.
                            "city": (
                                address.get("city")
                                or address.get("town")
                                or address.get("village")
                                or ""
                            ),
                            "state": address.get("state", ""),
                            "postal_code": address.get("postcode", ""),
                            "country": address.get("country", ""),
                            "country_code": address.get("country_code", "").upper(),
                            "enriched": True,
                        }
                    )
            except httpx.HTTPError as e:
                logger.warning(f"Reverse geocoding failed: {e}")
            # Fallback response
            return Response(
                {
                    "formatted_address": "",
                    "street_address": "",
                    "city": "",
                    "state": "",
                    "postal_code": "",
                    "country": "",
                    "country_code": "",
                    "enriched": False,
                    "reason": "geocoding_failed",
                }
            )
        except Exception as e:
            capture_and_log(e, "Enrich location", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
@extend_schema_view(
    post=extend_schema(
        summary="Search for a location by text",
        description="Forward geocoding - convert a text query (address, city name, etc.) to coordinates.",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Location search query (address, city, place name, etc.)",
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of results to return (default: 5)",
                    },
                    "country": {
                        "type": "string",
                        "description": "ISO 3166-1 alpha-2 country code to restrict search",
                    },
                },
                "required": ["query"],
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "results": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "latitude": {"type": "number"},
                                "longitude": {"type": "number"},
                                "formatted_address": {"type": "string"},
                                "city": {"type": "string"},
                                "state": {"type": "string"},
                                "country": {"type": "string"},
                                "importance": {"type": "number"},
                            },
                        },
                    },
                    "query": {"type": "string"},
                    "count": {"type": "integer"},
                },
            },
            400: {"description": "Missing or invalid query parameter"},
        },
        tags=["Maps"],
    ),
)
class SearchLocationView(APIView):
    """
    POST /maps/search-location/

    Forward geocoding - search for locations by text query.
    Full parity with Supabase Edge Function: search-location

    Features:
    - Query caching with SHA-256 hash (7-day expiration)
    - Rate limiting (30 requests per minute per IP, fixed window)
    - Usage logging for monitoring
    - Cache headers (X-Cache: HIT/MISS)
    """

    permission_classes = [AllowAny]

    # Rate limit settings matching original
    RATE_LIMIT_REQUESTS = 30
    RATE_LIMIT_PERIOD = 60  # 1 minute
    CACHE_EXPIRATION = 7 * 24 * 60 * 60  # 7 days in seconds

    def _hash_query(self, query: str) -> str:
        """Hash a normalized (stripped, lowercased) query for cache lookup (SHA-256)."""
        import hashlib

        normalized = query.strip().lower()
        return hashlib.sha256(normalized.encode()).hexdigest()

    def _get_client_ip(self, request) -> str:
        """Get the client IP, preferring proxy headers over REMOTE_ADDR."""
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            # First entry in X-Forwarded-For is the originating client.
            return x_forwarded_for.split(',')[0].strip()
        return request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR') or 'unknown'

    def _check_rate_limit(self, client_ip: str) -> tuple[bool, int]:
        """
        Check whether the client is within the fixed rate-limit window.

        Returns (is_allowed, current_count).

        BUG FIX: the previous implementation re-set the counter with a fresh
        TTL on every request (cache.set), which slid the window forward while
        a client stayed active - the counter could effectively never expire.
        cache.add() pins the TTL at the first request of the window, and
        cache.incr() increments atomically on backends that support it.
        """
        from django.core.cache import cache

        rate_limit_key = f"search_location:rate:{client_ip}"
        # add() stores the key only when absent, fixing the window start.
        if cache.add(rate_limit_key, 1, self.RATE_LIMIT_PERIOD):
            return True, 1
        try:
            current_count = cache.incr(rate_limit_key)
        except ValueError:
            # Key expired between add() and incr(); start a new window.
            cache.set(rate_limit_key, 1, self.RATE_LIMIT_PERIOD)
            return True, 1
        if current_count > self.RATE_LIMIT_REQUESTS:
            return False, current_count
        return True, current_count

    def _get_cached_result(self, query_hash: str):
        """Return the cached payload for this query hash, or None on a miss."""
        from django.core.cache import cache

        cache_key = f"search_location:query:{query_hash}"
        cached_data = cache.get(cache_key)
        if cached_data:
            # Track popularity in a separate access-count key.
            access_key = f"search_location:access:{query_hash}"
            access_count = cache.get(access_key, 0)
            cache.set(access_key, access_count + 1, self.CACHE_EXPIRATION)
        return cached_data

    def _set_cached_result(self, query: str, query_hash: str, results: list):
        """Cache the Nominatim results for 7 days and seed the access counter."""
        from django.core.cache import cache

        cache_key = f"search_location:query:{query_hash}"
        cache_data = {
            "query": query,
            "results": results,
            "result_count": len(results),
        }
        cache.set(cache_key, cache_data, self.CACHE_EXPIRATION)
        # Initialize access count
        access_key = f"search_location:access:{query_hash}"
        cache.set(access_key, 1, self.CACHE_EXPIRATION)

    def _log_usage(self, query: str, cache_hit: bool, api_called: bool,
                   response_time_ms: int = None, result_count: int = None,
                   client_ip: str = None, user_id: str = None,
                   error: str = None, status_code: int = None):
        """Log one request's outcome for monitoring (structured logger for now;
        can be enhanced to write to the DB)."""
        logger.info(
            "OpenStreetMap API usage",
            extra={
                "query": query[:100],
                "cache_hit": cache_hit,
                "api_called": api_called,
                "response_time_ms": response_time_ms,
                "result_count": result_count,
                "client_ip": client_ip,
                "user_id": user_id,
                "error": error,
                "status_code": status_code,
            }
        )

    def post(self, request):
        """
        Handle a forward-geocoding search.

        Pipeline: sanitize input -> validate -> rate limit -> cache lookup
        -> Nominatim call -> cache store -> respond. Successful responses
        return the raw Nominatim result array (the frontend handles both
        formats), with X-Cache: HIT/MISS headers.
        """
        import time
        import re

        start_time = time.time()
        client_ip = self._get_client_ip(request)

        user_id = None
        try:
            # Safely get user ID (request.user may be anonymous or absent).
            if request.user and request.user.is_authenticated:
                user_id = str(getattr(request.user, 'user_id', request.user.id))
        except Exception:
            pass

        try:
            # ================================================================
            # STEP 0: Sanitize and validate input
            # ================================================================
            raw_query = request.data.get("query", "")
            if not isinstance(raw_query, str):
                raw_query = str(raw_query) if raw_query else ""

            # Sanitize query: strip, cap length, remove control characters.
            query = raw_query.strip()[:500]
            query = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', query)

            # Validate limit: clamp to 1..10, default 5.
            try:
                limit = min(int(request.data.get("limit", 5)), 10)
                limit = max(limit, 1)  # At least 1
            except (ValueError, TypeError):
                limit = 5

            # Sanitize country code (2-letter ISO code, lowercased).
            raw_country = request.data.get("country", "")
            country_code = ""
            if raw_country and isinstance(raw_country, str):
                country_code = re.sub(r'[^a-zA-Z]', '', raw_country)[:2].lower()

            # ================================================================
            # STEP 1: Validate query (original: min 3 characters)
            # ================================================================
            if not query:
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query="",
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Query is required",
                    status_code=400
                )
                return Response(
                    {"error": "Query is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if len(query) < 3:  # Match original: min 3 characters
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Query must be at least 3 characters",
                    status_code=400
                )
                return Response(
                    {"error": "Query must be at least 3 characters"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # ================================================================
            # STEP 2: Check rate limit (30 req/min per IP)
            # ================================================================
            is_allowed, current_count = self._check_rate_limit(client_ip)
            if not is_allowed:
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Rate limit exceeded",
                    status_code=429
                )
                return Response(
                    {"error": "Rate limit exceeded. Please try again later."},
                    status=status.HTTP_429_TOO_MANY_REQUESTS,
                    headers={
                        "Retry-After": str(self.RATE_LIMIT_PERIOD),
                        "X-RateLimit-Limit": str(self.RATE_LIMIT_REQUESTS),
                        "X-RateLimit-Remaining": "0",
                    }
                )

            # ================================================================
            # STEP 3: Check cache
            # ================================================================
            query_hash = self._hash_query(query)
            cached = self._get_cached_result(query_hash)
            if cached:
                response_time = int((time.time() - start_time) * 1000)
                results = cached.get("results", [])
                self._log_usage(
                    query=query,
                    cache_hit=True,
                    api_called=False,
                    response_time_ms=response_time,
                    result_count=len(results),
                    client_ip=client_ip,
                    user_id=user_id,
                    status_code=200
                )
                # Return raw array like original (frontend handles both formats)
                response = Response(
                    results,
                    status=status.HTTP_200_OK,
                )
                response["X-Cache"] = "HIT"
                response["Cache-Control"] = "public, max-age=3600"
                return response

            # ================================================================
            # STEP 4: Cache miss - call Nominatim API
            # ================================================================
            import httpx

            try:
                params = {
                    "q": query,
                    "format": "json",
                    "addressdetails": 1,
                    "limit": limit,
                }
                if country_code:
                    # Already lowercased during sanitization above.
                    params["countrycodes"] = country_code

                api_response = httpx.get(
                    "https://nominatim.openstreetmap.org/search",
                    params=params,
                    timeout=10.0,
                    headers={"User-Agent": "ThrillWiki/1.0 (https://thrillwiki.com)"},
                )

                if api_response.status_code != 200:
                    # Mirror the upstream status to the client.
                    logger.warning(
                        f"Nominatim API error: {api_response.status_code}",
                        extra={"status": api_response.status_code}
                    )
                    return Response(
                        {"error": "Location search failed", "status": api_response.status_code},
                        status=api_response.status_code,
                    )

                data = api_response.json()
                response_time = int((time.time() - start_time) * 1000)

                # ================================================================
                # STEP 5: Cache the results (background-like, but sync in Django)
                # ================================================================
                try:
                    self._set_cached_result(query, query_hash, data)
                except Exception as cache_error:
                    # A cache failure must not fail the request.
                    logger.warning(f"Failed to cache result: {cache_error}")

                # Log usage
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=True,
                    response_time_ms=response_time,
                    result_count=len(data) if isinstance(data, list) else 0,
                    client_ip=client_ip,
                    user_id=user_id,
                    status_code=200
                )

                # Return raw array like original Nominatim response
                response = Response(
                    data,
                    status=status.HTTP_200_OK,
                )
                response["X-Cache"] = "MISS"
                response["Cache-Control"] = "public, max-age=3600"
                return response

            except httpx.HTTPError as e:
                logger.warning(f"Forward geocoding failed: {e}")
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=True,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error=str(e),
                    status_code=500
                )
                return Response(
                    {"error": "Failed to fetch location data"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )

        except ValueError as e:
            return Response(
                {"error": f"Invalid parameter: {str(e)}"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_and_log(e, "Search location", source="api")
            return Response(
                {"error": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -56,36 +56,26 @@ class CompanyDetailOutputSerializer(serializers.Serializer):
name = serializers.CharField() name = serializers.CharField()
slug = serializers.CharField() slug = serializers.CharField()
roles = serializers.ListField(child=serializers.CharField()) roles = serializers.ListField(child=serializers.CharField())
description = serializers.CharField() description = serializers.CharField(allow_blank=True)
website = serializers.URLField(required=False, allow_blank=True) website = serializers.URLField(required=False, allow_blank=True, allow_null=True)
# Entity type and status (ported from legacy)
person_type = serializers.CharField(required=False, allow_blank=True)
status = serializers.CharField()
# Founding information # Founding information
founded_year = serializers.IntegerField(allow_null=True) founded_date = serializers.DateField(allow_null=True, required=False)
founded_date = serializers.DateField(allow_null=True)
founded_date_precision = serializers.CharField(required=False, allow_blank=True)
# Image URLs # Counts (from model)
logo_url = serializers.URLField(required=False, allow_blank=True) rides_count = serializers.IntegerField(required=False, default=0)
banner_image_url = serializers.URLField(required=False, allow_blank=True) coasters_count = serializers.IntegerField(required=False, default=0)
card_image_url = serializers.URLField(required=False, allow_blank=True)
# Frontend URL
# Rating and review aggregates url = serializers.URLField(required=False, allow_blank=True, allow_null=True)
average_rating = serializers.DecimalField(max_digits=3, decimal_places=2, allow_null=True)
review_count = serializers.IntegerField()
# Counts
parks_count = serializers.IntegerField()
rides_count = serializers.IntegerField()
# Metadata # Metadata
created_at = serializers.DateTimeField() created_at = serializers.DateTimeField()
updated_at = serializers.DateTimeField() updated_at = serializers.DateTimeField()
class CompanyCreateInputSerializer(serializers.Serializer): class CompanyCreateInputSerializer(serializers.Serializer):
"""Input serializer for creating companies.""" """Input serializer for creating companies."""

View File

@@ -106,8 +106,11 @@ urlpatterns = [
path("media/", include("apps.media.urls")), path("media/", include("apps.media.urls")),
path("blog/", include("apps.blog.urls")), path("blog/", include("apps.blog.urls")),
path("support/", include("apps.support.urls")), path("support/", include("apps.support.urls")),
path("notifications/", include("apps.notifications.urls")),
path("errors/", include("apps.core.urls.errors")), path("errors/", include("apps.core.urls.errors")),
path("images/", include("apps.api.v1.images.urls")), path("images/", include("apps.api.v1.images.urls")),
# Admin dashboard API endpoints
path("admin/", include("apps.api.v1.admin.urls")),
# Cloudflare Images Toolkit API endpoints # Cloudflare Images Toolkit API endpoints
path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")), path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
# Include router URLs (for rankings and any other router-registered endpoints) # Include router URLs (for rankings and any other router-registered endpoints)

View File

@@ -3,3 +3,22 @@ Core tasks package for ThrillWiki.
This package contains all Celery tasks for the core application. This package contains all Celery tasks for the core application.
""" """
from apps.core.tasks.scheduled import (
cleanup_old_versions,
cleanup_orphaned_images,
data_retention_cleanup,
process_closing_entities,
process_expired_bans,
process_scheduled_deletions,
)
__all__ = [
"process_scheduled_deletions",
"process_closing_entities",
"process_expired_bans",
"cleanup_orphaned_images",
"cleanup_old_versions",
"data_retention_cleanup",
]

View File

@@ -0,0 +1,417 @@
"""
Scheduled Celery tasks for ThrillWiki.
These tasks are run on a schedule via Celery Beat for maintenance operations.
"""
import logging
from datetime import timedelta
from celery import shared_task
from django.contrib.auth import get_user_model
from django.db import transaction
from django.utils import timezone
from apps.core.utils import capture_and_log
logger = logging.getLogger(__name__)
User = get_user_model()
@shared_task(name="core.process_scheduled_deletions")
def process_scheduled_deletions() -> dict:
    """
    Process scheduled account deletions.

    Users who requested account deletion and whose grace period has expired
    will have their accounts permanently deleted. Deletion is implemented as
    in-place anonymization (username/email replaced, account deactivated) so
    the user's submissions are preserved.

    Returns:
        dict: Summary with counts of processed, succeeded, and failed
        deletions, up to 10 failure messages, and an ISO timestamp.
    """
    # Imported lazily so the task module can be imported before the Django
    # app registry is fully ready.
    from apps.accounts.models import AccountDeletionRequest

    logger.info("Starting scheduled account deletions processing")

    cutoff_time = timezone.now()
    processed = 0
    succeeded = 0
    failed = 0
    failures = []

    try:
        # Get deletion requests that are past their scheduled time
        pending_deletions = AccountDeletionRequest.objects.filter(
            status="pending",
            scheduled_deletion_at__lte=cutoff_time,
        ).select_related("user")

        for request in pending_deletions:
            processed += 1
            try:
                # One transaction per request so a single failure rolls back
                # only that user's changes, not the whole batch.
                with transaction.atomic():
                    user = request.user
                    username = user.username

                    # Mark request as processing
                    request.status = "processing"
                    request.save()

                    # Anonymize user data (keep submissions)
                    user.username = f"deleted_{user.id}"
                    user.email = f"deleted_{user.id}@deleted.thrillwiki.com"
                    user.first_name = ""
                    user.last_name = ""
                    user.is_active = False
                    user.save()

                    # Mark deletion as complete
                    request.status = "completed"
                    request.completed_at = timezone.now()
                    request.save()

                    succeeded += 1
                    logger.info(f"Successfully processed deletion for user {username}")
            except Exception as e:
                failed += 1
                error_msg = f"User {request.user_id}: {str(e)}"
                failures.append(error_msg)
                capture_and_log(e, f"Process scheduled deletion for user {request.user_id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process scheduled deletions", source="task")

    result = {
        "processed": processed,
        "succeeded": succeeded,
        "failed": failed,
        "failures": failures[:10],  # Limit failure list
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(
        f"Completed scheduled deletions: {processed} processed, {succeeded} succeeded, {failed} failed"
    )
    return result
@shared_task(name="core.process_closing_entities")
def process_closing_entities() -> dict:
    """
    Process parks and rides that have reached their closing date.

    Entities in CLOSING status with a closing_date in the past will be
    transitioned to their post_closing_status (typically CLOSED or SBNO).

    Returns:
        dict: Per-entity-type summary:
        ``{"parks": {processed, succeeded, failed}, "rides": {...}}``
    """
    # Imported lazily so the task module can load before the app registry
    # is fully ready.
    from apps.parks.models import Park
    from apps.rides.models import Ride

    logger.info("Starting closing entities processing")

    today = timezone.now().date()
    results = {"parks": {"processed": 0, "succeeded": 0, "failed": 0}, "rides": {"processed": 0, "succeeded": 0, "failed": 0}}

    # Get system user for automated transitions; fall back to any staff user
    # if no dedicated "system" account exists.
    try:
        system_user = User.objects.get(username="system")
    except User.DoesNotExist:
        system_user = User.objects.filter(is_staff=True).first()

    # Process parks
    try:
        closing_parks = Park.objects.filter(
            status="CLOSING",
            closing_date__lte=today,
        )
        for park in closing_parks:
            results["parks"]["processed"] += 1
            try:
                with transaction.atomic():
                    # Transition to closed status
                    park.status = getattr(park, "post_closing_status", "CLOSED") or "CLOSED"
                    # assumes the model has an auto_now "updated_at" field - TODO confirm
                    park.save(update_fields=["status", "updated_at"])
                    results["parks"]["succeeded"] += 1
                    logger.info(f"Transitioned park {park.name} to {park.status}")
            except Exception as e:
                results["parks"]["failed"] += 1
                capture_and_log(e, f"Process closing park {park.id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process closing parks", source="task")

    # Process rides (already handled by rides.check_overdue_closings, but included for completeness)
    try:
        closing_rides = Ride.objects.filter(
            status="CLOSING",
            closing_date__lte=today,
        )
        for ride in closing_rides:
            results["rides"]["processed"] += 1
            try:
                with transaction.atomic():
                    # Prefer the model's own transition helper when available
                    # (it may record who performed the transition).
                    if hasattr(ride, "apply_post_closing_status") and system_user:
                        ride.apply_post_closing_status(user=system_user)
                    else:
                        ride.status = getattr(ride, "post_closing_status", "CLOSED") or "CLOSED"
                        ride.save(update_fields=["status", "updated_at"])
                    results["rides"]["succeeded"] += 1
                    logger.info(f"Transitioned ride {ride.name} to {ride.status}")
            except Exception as e:
                results["rides"]["failed"] += 1
                capture_and_log(e, f"Process closing ride {ride.id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process closing rides", source="task")

    logger.info(f"Completed closing entities: Parks {results['parks']}, Rides {results['rides']}")
    return results
@shared_task(name="core.process_expired_bans")
def process_expired_bans() -> dict:
    """
    Process expired user bans.

    Users with temporary bans that have expired will have their ban lifted;
    a user is reactivated only once they have no remaining active bans.

    Returns:
        dict: Summary with counts and a timestamp, or
        ``{"skipped": True, "reason": ...}`` when the UserBan model is not
        available yet.
    """
    logger.info("Starting expired bans processing")

    now = timezone.now()
    processed = 0
    succeeded = 0
    failed = 0

    try:
        # BUG FIX: this import must live inside the try block. It previously
        # ran before the try, so an ImportError for a missing UserBan model
        # escaped the handler below and the "model not found" fallback was
        # unreachable.
        from apps.accounts.models import UserBan

        expired_bans = UserBan.objects.filter(
            is_active=True,
            expires_at__isnull=False,
            expires_at__lte=now,
        ).select_related("user")

        for ban in expired_bans:
            processed += 1
            try:
                with transaction.atomic():
                    ban.is_active = False
                    ban.save(update_fields=["is_active", "updated_at"])

                    # Reactivate user if this was their only active ban
                    active_bans = UserBan.objects.filter(user=ban.user, is_active=True).count()
                    if active_bans == 0 and not ban.user.is_active:
                        ban.user.is_active = True
                        ban.user.save(update_fields=["is_active"])

                    succeeded += 1
                    logger.info(f"Lifted expired ban for user {ban.user.username}")
            except Exception as e:
                failed += 1
                capture_and_log(e, f"Process expired ban {ban.id}", source="task")
    except Exception as e:
        capture_and_log(e, "Process expired bans", source="task")
        # Model may not exist yet - ImportError messages mention the name.
        if "UserBan" in str(e):
            logger.info("UserBan model not found, skipping expired bans processing")
            return {"skipped": True, "reason": "UserBan model not found"}

    result = {
        "processed": processed,
        "succeeded": succeeded,
        "failed": failed,
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(f"Completed expired bans: {processed} processed, {succeeded} succeeded, {failed} failed")
    return result
@shared_task(name="core.cleanup_orphaned_images")
def cleanup_orphaned_images() -> dict:
    """
    Clean up orphaned images.

    Images that are not associated with any entity and are older than the
    retention period will be deleted.

    Returns:
        dict: Summary with counts
    """
    logger.info("Starting orphaned images cleanup")

    # Placeholder - the real implementation depends on the image storage
    # strategy. For Cloudflare Images it would:
    #   1. Query all images from Cloudflare
    #   2. Compare against images referenced in the database
    #   3. Delete orphaned images
    summary = {
        "processed": 0,
        "deleted": 0,
        "skipped": 0,
        "timestamp": timezone.now().isoformat(),
        "note": "Placeholder implementation - configure based on image storage",
    }

    logger.info("Completed orphaned images cleanup")
    return summary
@shared_task(name="core.cleanup_old_versions")
def cleanup_old_versions() -> dict:
    """
    Clean up old entity versions from pghistory.

    Keeps the most recent N versions per entity and deletes older ones to
    manage database size. Only versions older than MIN_AGE_DAYS are
    candidates for deletion.

    Returns:
        dict: Summary with the number of deleted rows, the cutoff date,
        the retention limit, and a timestamp.
    """
    logger.info("Starting old versions cleanup")

    # Configuration
    MAX_VERSIONS_PER_ENTITY = 50
    MIN_AGE_DAYS = 90  # Only delete versions older than this

    deleted_count = 0
    cutoff_date = timezone.now() - timedelta(days=MIN_AGE_DAYS)

    try:
        # pghistory stores events in pgh_* tables
        # We need to identify which models have history tracking
        from django.db import connection

        with connection.cursor() as cursor:
            # Get list of pghistory event tables
            cursor.execute(
                """
                SELECT table_name
                FROM information_schema.tables
                WHERE table_schema = 'public'
                AND table_name LIKE 'pgh_%event'
                """
            )
            event_tables = [row[0] for row in cursor.fetchall()]

            for table_name in event_tables:
                try:
                    # Delete old versions beyond the retention limit
                    # This is a simplified approach - a more sophisticated one
                    # would keep the most recent N per entity
                    # NOTE(review): table_name is interpolated via f-string, but
                    # it originates from information_schema (trusted catalog),
                    # not user input, so it is not an injection vector here.
                    cursor.execute(
                        f"""
                        DELETE FROM {table_name}
                        WHERE pgh_created_at < %s
                        AND pgh_id NOT IN (
                            SELECT pgh_id FROM (
                                SELECT pgh_id,
                                ROW_NUMBER() OVER (PARTITION BY pgh_obj_id ORDER BY pgh_created_at DESC) as rn
                                FROM {table_name}
                            ) ranked
                            WHERE rn <= %s
                        )
                        """,
                        [cutoff_date, MAX_VERSIONS_PER_ENTITY],
                    )
                    deleted_in_table = cursor.rowcount
                    deleted_count += deleted_in_table
                    if deleted_in_table > 0:
                        logger.info(f"Deleted {deleted_in_table} old versions from {table_name}")
                except Exception as e:
                    # Best-effort: a failure on one table must not stop the rest.
                    logger.warning(f"Error cleaning up {table_name}: {e}")
    except Exception as e:
        capture_and_log(e, "Cleanup old versions", source="task")

    result = {
        "deleted": deleted_count,
        "cutoff_date": cutoff_date.isoformat(),
        "max_versions_per_entity": MAX_VERSIONS_PER_ENTITY,
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(f"Completed old versions cleanup: {deleted_count} versions deleted")
    return result
@shared_task(name="core.data_retention_cleanup")
def data_retention_cleanup() -> dict:
    """
    Clean up data per retention policy (GDPR compliance).

    Handles:
    - Session cleanup
    - Expired token cleanup
    - Old audit log cleanup
    - Temporary data cleanup

    Each phase is best-effort: a failure (including a missing optional
    dependency or model) is logged and the remaining phases still run.

    Returns:
        dict: Summary with counts
    """
    logger.info("Starting data retention cleanup")

    results = {
        "sessions": 0,
        "tokens": 0,
        "audit_logs": 0,
        "temp_data": 0,
    }

    # Phase 1: expired Django sessions.
    try:
        from django.contrib.sessions.models import Session

        stale_sessions = Session.objects.filter(expire_date__lt=timezone.now())
        results["sessions"] = stale_sessions.count()
        stale_sessions.delete()
        logger.info(f"Deleted {results['sessions']} expired sessions")
    except Exception as e:
        logger.warning(f"Session cleanup error: {e}")

    # Phase 2: JWT tokens whose expiry is more than 30 days in the past.
    try:
        from rest_framework_simplejwt.token_blacklist.models import OutstandingToken

        token_cutoff = timezone.now() - timedelta(days=30)
        stale_tokens = OutstandingToken.objects.filter(expires_at__lt=token_cutoff)
        results["tokens"] = stale_tokens.count()
        stale_tokens.delete()
        logger.info(f"Deleted {results['tokens']} expired tokens")
    except Exception as e:
        logger.warning(f"Token cleanup error: {e}")

    # Phase 3: profile audit logs older than one year.
    try:
        from apps.accounts.models import ProfileAuditLog

        audit_cutoff = timezone.now() - timedelta(days=365)
        stale_logs = ProfileAuditLog.objects.filter(created_at__lt=audit_cutoff)
        results["audit_logs"] = stale_logs.count()
        stale_logs.delete()
        logger.info(f"Deleted {results['audit_logs']} old audit logs")
    except Exception as e:
        logger.warning(f"Audit log cleanup error: {e}")

    result = {
        **results,
        "timestamp": timezone.now().isoformat(),
    }
    logger.info(f"Completed data retention cleanup: {result}")
    return result

View File

@@ -15,6 +15,7 @@ from apps.core.views.views import FSMTransitionView
from .sse import ModerationSSETestView, ModerationSSEView from .sse import ModerationSSETestView, ModerationSSEView
from .views import ( from .views import (
BulkOperationViewSet, BulkOperationViewSet,
ConvertSubmissionToEditView,
EditSubmissionViewSet, EditSubmissionViewSet,
ModerationActionViewSet, ModerationActionViewSet,
ModerationQueueViewSet, ModerationQueueViewSet,
@@ -189,6 +190,8 @@ urlpatterns = [
*sse_patterns, *sse_patterns,
# Include all router URLs (API endpoints) # Include all router URLs (API endpoints)
path("api/", include(router.urls)), path("api/", include(router.urls)),
# Standalone convert-to-edit endpoint (frontend calls /moderation/api/edit-submissions/ POST)
path("api/edit-submissions/", ConvertSubmissionToEditView.as_view(), name="convert-to-edit"),
# FSM transition convenience endpoints # FSM transition convenience endpoints
] + fsm_transition_patterns ] + fsm_transition_patterns

View File

@@ -1516,6 +1516,116 @@ class EditSubmissionViewSet(viewsets.ModelViewSet):
except Exception as e: except Exception as e:
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
@action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin], url_path="convert-to-edit")
def convert_to_edit(self, request, pk=None):
    """
    Convert a pending entity submission to an edit suggestion.

    This is used when a new entity submission should be merged with
    an existing entity rather than creating a new one.

    Request body:
        target_entity_type: str - The type of entity to merge into (e.g., 'park', 'ride')
        target_entity_id: int - The ID of the existing entity
        merge_fields: list[str] - Optional list of fields to merge (defaults to all)
        notes: str - Optional moderator notes

    Returns:
        200: Submission successfully converted
        400: Invalid request or conversion not possible
        404: Submission or target entity not found
    """
    from django.contrib.contenttypes.models import ContentType

    submission = self.get_object()
    user = request.user

    # Validate submission state: only unprocessed submissions can be converted.
    if submission.status not in ["PENDING", "CLAIMED"]:
        return Response(
            {"error": f"Cannot convert submission in {submission.status} state"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Get request data
    target_entity_type = request.data.get("target_entity_type")
    target_entity_id = request.data.get("target_entity_id")
    merge_fields = request.data.get("merge_fields", [])
    notes = request.data.get("notes", "")

    if not target_entity_type or not target_entity_id:
        return Response(
            {"error": "target_entity_type and target_entity_id are required"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    # Map entity types to their Django app labels; anything unlisted lives in
    # "rides" (same outcome as the original if/override chain).
    app_label = {"park": "parks", "company": "core"}.get(target_entity_type, "rides")

    # Look up the target entity. Any failure (unknown content type, bad ID,
    # missing row) is reported as a 404.
    # FIX: the original caught `(ContentType.DoesNotExist, Exception)`, which
    # is redundant - Exception already subsumes ContentType.DoesNotExist.
    try:
        content_type = ContentType.objects.get(app_label=app_label, model=target_entity_type)
        model_class = content_type.model_class()
        target_entity = model_class.objects.get(pk=target_entity_id)
    except Exception:
        return Response(
            {"error": f"Target entity not found: {target_entity_type}#{target_entity_id}"},
            status=status.HTTP_404_NOT_FOUND,
        )

    # Store the conversion metadata alongside the submission's change set so
    # the approval flow knows this should merge into an existing entity.
    conversion_data = {
        "converted_from": "new_entity_submission",
        "target_entity_type": target_entity_type,
        "target_entity_id": target_entity_id,
        "target_entity_name": str(target_entity),
        "merge_fields": merge_fields,
        "converted_by": user.username,
        "converted_at": timezone.now().isoformat(),
        "notes": notes,
    }

    # Update the submission
    if hasattr(submission, "changes") and isinstance(submission.changes, dict):
        submission.changes["_conversion_metadata"] = conversion_data
    else:
        # Create changes dict if it doesn't exist
        submission.changes = {"_conversion_metadata": conversion_data}

    # Append a moderator note describing the conversion (if the model has one).
    if hasattr(submission, "moderator_notes"):
        existing_notes = submission.moderator_notes or ""
        submission.moderator_notes = (
            f"{existing_notes}\n\n[Converted to edit] {notes}".strip()
            if notes
            else f"{existing_notes}\n\n[Converted to edit for {target_entity_type} #{target_entity_id}]".strip()
        )

    submission.save()

    # Log the conversion to the audit trail.
    log_business_event(
        logger,
        event_type="submission_converted_to_edit",
        message=f"EditSubmission {submission.id} converted to edit for {target_entity_type}#{target_entity_id}",
        context={
            "model": "EditSubmission",
            "object_id": submission.id,
            "target_entity_type": target_entity_type,
            "target_entity_id": target_entity_id,
            "converted_by": user.username,
        },
        request=request,
    )

    return Response({
        "success": True,
        "message": f"Submission converted to edit for {target_entity_type} #{target_entity_id}",
        "submission": self.get_serializer(submission).data,
        "conversion_metadata": conversion_data,
    })
class PhotoSubmissionViewSet(viewsets.ModelViewSet): class PhotoSubmissionViewSet(viewsets.ModelViewSet):
""" """
@@ -1667,3 +1777,365 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):
return Response(self.get_serializer(submission).data) return Response(self.get_serializer(submission).data)
except Exception as e: except Exception as e:
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
# ============================================================================
# Standalone Convert Submission to Edit View
# ============================================================================
from rest_framework.views import APIView
class ConvertSubmissionToEditView(APIView):
    """
    POST /api/moderation/api/convert-to-edit/

    Convert a CREATE submission to an EDIT by linking it to an existing entity.
    Full parity with Supabase Edge Function: convert-submission-to-edit.

    Workflow:
      1. Validate the caller is authenticated and the parameters are sane
         (required fields, UUID formats, whitelisted conversionType).
      2. Fetch the submission and verify it is claimed by the requesting
         moderator (staff/superusers may override).
      3. Validate state (pending/claimed) and type (only CREATE converts).
      4. Resolve the target entity via its ContentType (falling back to a
         scan of common models when the submission has no content_type).
      5. Atomically flip submission_type to EDIT, link the entity, and record
         conversion metadata in ``changes`` and ``notes``.
      6. Log to the audit trail (best effort — logging failures never fail
         the request).

    Request body:
        {
            "submissionId": "...",      # EditSubmission ID (UUID)
            "itemId": "...",            # Optional; echoed back (Supabase compat)
            "existingEntityId": "...",  # Existing entity to link to (UUID)
            "conversionType": "..."     # Optional: 'automatic' | 'manual' | 'duplicate_detected'
        }

    Returns (200):
        {
            "success": true,
            "itemId": "...",
            "submissionId": "...",
            "existingEntityId": "...",
            "existingEntityName": "...",
            "message": "..."
        }
    """

    permission_classes = [IsModeratorOrAdmin]

    # Validation constants
    MAX_NOTE_LENGTH = 5000
    ALLOWED_CONVERSION_TYPES = {"automatic", "manual", "duplicate_detected"}
    VALID_STATES = {"PENDING", "CLAIMED", "pending", "partially_approved", "claimed"}

    def post(self, request):
        from django.db import transaction
        from django.contrib.contenttypes.models import ContentType
        import uuid
        try:
            # ================================================================
            # STEP 0: Validate user is authenticated
            # ================================================================
            user = request.user
            if not user or not user.is_authenticated:
                return Response(
                    {"success": False, "message": "Authentication required"},
                    status=status.HTTP_401_UNAUTHORIZED,
                )
            # ================================================================
            # STEP 1: Extract and validate request parameters
            # ================================================================
            submission_id = request.data.get("submissionId")
            item_id = request.data.get("itemId")  # For Supabase compatibility
            existing_entity_id = request.data.get("existingEntityId")
            conversion_type = request.data.get("conversionType", "automatic")
            # Validate required parameters
            if not submission_id:
                return Response(
                    {"success": False, "message": "submissionId is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            if not existing_entity_id:
                return Response(
                    {"success": False, "message": "existingEntityId is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Validate UUID formats (non-string values are assumed to already
            # be UUID instances from upstream parsing).
            try:
                if isinstance(submission_id, str):
                    submission_uuid = uuid.UUID(submission_id)
                else:
                    submission_uuid = submission_id
            except (ValueError, AttributeError):
                return Response(
                    {"success": False, "message": "Invalid submissionId format"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            try:
                if isinstance(existing_entity_id, str):
                    entity_uuid = uuid.UUID(existing_entity_id)
                else:
                    entity_uuid = existing_entity_id
            except (ValueError, AttributeError):
                return Response(
                    {"success": False, "message": "Invalid existingEntityId format"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Sanitize conversion_type; anything unexpected degrades to 'automatic'.
            if not isinstance(conversion_type, str):
                conversion_type = "automatic"
            conversion_type = conversion_type.strip().lower()[:50]
            if conversion_type not in self.ALLOWED_CONVERSION_TYPES:
                conversion_type = "automatic"
            # ================================================================
            # STEP 2: Fetch the submission for validation
            # ================================================================
            # BUGFIX: the original wrapped this read in transaction.atomic()
            # around select_for_update(), but the row lock was released the
            # moment the atomic block exited — before any of the checks in
            # STEPS 3-7 ran — so it protected nothing and only implied a
            # guarantee that did not exist. The authoritative locked fetch
            # (with a re-check of submission_type) happens in STEP 8; a plain
            # read is sufficient here.
            try:
                submission = EditSubmission.objects.get(pk=submission_uuid)
            except EditSubmission.DoesNotExist:
                return Response(
                    {"success": False, "message": "Submission not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            except Exception as e:
                logger.warning(f"Failed to fetch submission {submission_id}: {e}")
                return Response(
                    {"success": False, "message": "Failed to fetch submission"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )
            # ================================================================
            # STEP 3: Verify submission is locked by requesting moderator
            # ================================================================
            claimed_by_id = getattr(submission, 'claimed_by_id', None)
            user_id = getattr(user, 'id', None)
            if claimed_by_id != user_id:
                # Additional check: allow admins to override
                if not getattr(user, 'is_staff', False) and not getattr(user, 'is_superuser', False):
                    return Response(
                        {"success": False, "message": "You must claim this submission before converting it"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                logger.info(
                    f"Admin override: {user.username} converting submission claimed by user {claimed_by_id}",
                    extra={"submission_id": str(submission_uuid), "admin_user": user.username}
                )
            # ================================================================
            # STEP 4: Validate submission state
            # ================================================================
            current_status = getattr(submission, 'status', 'unknown')
            if current_status not in self.VALID_STATES:
                return Response(
                    {"success": False, "message": f"Submission must be pending or claimed to convert (current: {current_status})"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # ================================================================
            # STEP 5: Validate submission_type is CREATE
            # ================================================================
            current_type = getattr(submission, 'submission_type', '')
            if current_type != "CREATE":
                return Response(
                    {
                        "success": False,
                        "message": f"Item is already set to '{current_type}', cannot convert"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # ================================================================
            # STEP 6: Determine entity type from submission's content_type
            # ================================================================
            target_entity_type = None
            target_entity_name = None
            target_entity_slug = None
            target_entity = None
            if submission.content_type:
                target_entity_type = submission.content_type.model
            # Also try to get from changes if available
            if not target_entity_type and isinstance(submission.changes, dict):
                target_entity_type = submission.changes.get("entity_type")
            # ================================================================
            # STEP 7: Look up the existing entity
            # ================================================================
            # Maps lowercased model names to their Django app labels; unknown
            # types default to "core".
            app_label_map = {
                "park": "parks",
                "ride": "rides",
                "company": "core",
                "ridemodel": "rides",
                "manufacturer": "core",
                "operator": "core",
            }
            if target_entity_type:
                try:
                    app_label = app_label_map.get(target_entity_type.lower(), "core")
                    content_type = ContentType.objects.get(app_label=app_label, model=target_entity_type.lower())
                    model_class = content_type.model_class()
                    if model_class is None:
                        raise ValueError(f"No model class for {target_entity_type}")
                    target_entity = model_class.objects.filter(pk=entity_uuid).first()
                    if not target_entity:
                        return Response(
                            {"success": False, "message": f"Existing {target_entity_type} not found with ID {existing_entity_id}"},
                            status=status.HTTP_404_NOT_FOUND,
                        )
                    # Cap the display name to keep metadata/log payloads bounded.
                    target_entity_name = str(getattr(target_entity, 'name', target_entity))[:200]
                    target_entity_slug = getattr(target_entity, 'slug', None)
                except ContentType.DoesNotExist:
                    return Response(
                        {"success": False, "message": f"Unknown entity type: {target_entity_type}"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                except Exception as e:
                    logger.warning(f"Failed to look up entity {target_entity_type}/{existing_entity_id}: {e}")
                    return Response(
                        {"success": False, "message": "Existing entity not found"},
                        status=status.HTTP_404_NOT_FOUND,
                    )
            else:
                # No content_type on the submission: probe common models by PK.
                for model_name, app_label in [("park", "parks"), ("ride", "rides"), ("company", "core")]:
                    try:
                        content_type = ContentType.objects.get(app_label=app_label, model=model_name)
                        model_class = content_type.model_class()
                        if model_class is None:
                            continue
                        target_entity = model_class.objects.filter(pk=entity_uuid).first()
                        if target_entity:
                            target_entity_type = model_name
                            target_entity_name = str(getattr(target_entity, 'name', target_entity))[:200]
                            target_entity_slug = getattr(target_entity, 'slug', None)
                            break
                    except Exception:
                        continue
                if not target_entity:
                    return Response(
                        {"success": False, "message": "Existing entity not found in any known model"},
                        status=status.HTTP_404_NOT_FOUND,
                    )
            # ================================================================
            # STEP 8: Update submission with atomic transaction
            # ================================================================
            try:
                with transaction.atomic():
                    # Re-fetch with lock to ensure no concurrent modifications
                    submission = EditSubmission.objects.select_for_update().get(pk=submission_uuid)
                    # Double-check state hasn't changed since the unlocked read
                    if submission.submission_type != "CREATE":
                        return Response(
                            {"success": False, "message": "Submission was already converted"},
                            status=status.HTTP_409_CONFLICT,
                        )
                    # Update submission_type
                    submission.submission_type = "EDIT"
                    # Link to existing entity via object_id
                    submission.object_id = entity_uuid
                    # Store conversion metadata in changes
                    if not isinstance(submission.changes, dict):
                        submission.changes = {}
                    submission.changes["_conversion_metadata"] = {
                        "converted_from": "new_entity_submission",
                        "original_action_type": "create",
                        "target_entity_type": target_entity_type,
                        "target_entity_id": str(entity_uuid),
                        "target_entity_name": target_entity_name,
                        "target_entity_slug": target_entity_slug,
                        "conversion_type": conversion_type,
                        "converted_by": user.username,
                        "converted_by_id": str(getattr(user, 'user_id', user.id)),
                        "converted_at": timezone.now().isoformat(),
                    }
                    # Add moderator note (with length limit)
                    existing_notes = (submission.notes or "")[:self.MAX_NOTE_LENGTH]
                    conversion_note = f"[Converted CREATE to EDIT] for {target_entity_type}: {target_entity_name}"
                    if target_entity_slug:
                        conversion_note += f" ({target_entity_slug})"
                    conversion_note += f". Conversion type: {conversion_type}"
                    new_notes = f"{existing_notes}\n\n{conversion_note}".strip()
                    submission.notes = new_notes[:self.MAX_NOTE_LENGTH]
                    submission.save(update_fields=["submission_type", "object_id", "changes", "notes"])
            except Exception as e:
                logger.error(f"Failed to update submission {submission_uuid}: {e}")
                return Response(
                    {"success": False, "message": "Failed to update submission"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )
            # ================================================================
            # STEP 9: Log to audit trail (outside transaction for reliability)
            # ================================================================
            try:
                log_business_event(
                    logger,
                    event_type="submission_converted_to_edit",
                    message=f"EditSubmission {submission.id} converted from CREATE to EDIT for {target_entity_type}#{entity_uuid}",
                    context={
                        "model": "EditSubmission",
                        "object_id": str(submission.id),
                        "item_id": str(item_id) if item_id else None,
                        "target_entity_type": target_entity_type,
                        "target_entity_id": str(entity_uuid),
                        "target_entity_name": target_entity_name,
                        "converted_by": user.username,
                        "conversion_type": conversion_type,
                    },
                    request=request,
                )
            except Exception as log_error:
                # Don't fail the request if logging fails
                logger.warning(f"Failed to log conversion event: {log_error}")
            # ================================================================
            # STEP 10: Return success response matching original format
            # ================================================================
            return Response({
                "success": True,
                "itemId": str(item_id) if item_id else str(submission.id),
                "submissionId": str(submission.id),
                "existingEntityId": str(entity_uuid),
                "existingEntityName": target_entity_name,
                "message": f"Converted submission item to EDIT for existing {target_entity_type}: {target_entity_name}",
            })
        except Exception as e:
            capture_and_log(e, "Convert submission to edit", source="moderation", request=request)
            return Response(
                {"success": False, "message": "Internal server error"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

View File

@@ -0,0 +1,10 @@
"""
Notifications app for ThrillWiki.
Provides notification management including:
- Subscriber management (Novu integration)
- Notification preferences
- Notification triggering and logging
"""
default_app_config = "apps.notifications.apps.NotificationsConfig"

View File

@@ -0,0 +1,38 @@
"""
Notifications admin configuration.
"""
from django.contrib import admin
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
@admin.register(Subscriber)
class SubscriberAdmin(admin.ModelAdmin):
    """Admin for legacy notification Subscriber profiles."""
    # Changelist columns and search over the owning user / legacy Novu ID.
    list_display = ["user", "novu_subscriber_id", "email", "created_at"]
    search_fields = ["user__username", "novu_subscriber_id", "email"]
    # Timestamps are auto-managed (auto_now/auto_now_add) on the model.
    readonly_fields = ["created_at", "updated_at"]
@admin.register(NotificationPreference)
class NotificationPreferenceAdmin(admin.ModelAdmin):
    """Admin for per-user notification preferences."""
    list_display = ["user", "is_opted_out", "updated_at"]
    # Quick filter for globally opted-out users.
    list_filter = ["is_opted_out"]
    search_fields = ["user__username"]
    # Timestamps are auto-managed on the model.
    readonly_fields = ["created_at", "updated_at"]
@admin.register(NotificationLog)
class NotificationLogAdmin(admin.ModelAdmin):
    """Read-oriented admin for the notification delivery audit log."""
    list_display = ["workflow_id", "user", "channel", "status", "created_at"]
    list_filter = ["status", "channel", "workflow_id"]
    search_fields = ["user__username", "workflow_id", "novu_transaction_id"]
    # Timestamps are auto-managed on the model.
    readonly_fields = ["created_at", "updated_at"]
@admin.register(SystemAnnouncement)
class SystemAnnouncementAdmin(admin.ModelAdmin):
    """Admin for creating and curating system-wide announcements."""
    list_display = ["title", "severity", "is_active", "created_by", "created_at"]
    list_filter = ["severity", "is_active"]
    search_fields = ["title", "message"]
    # created_at is auto_now_add on the model.
    readonly_fields = ["created_at"]

View File

@@ -0,0 +1,18 @@
"""
Notifications app configuration.
This app provides Django-native notification functionality for ThrillWiki,
including in-app notifications, email notifications, and user preferences.
"""
from django.apps import AppConfig
class NotificationsConfig(AppConfig):
    """Configuration for the ThrillWiki notifications app."""
    # BigAutoField matches the default PK type used by this app's migrations.
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.notifications"
    verbose_name = "Notifications"

View File

@@ -0,0 +1,159 @@
# Generated by Django 5.2.9 on 2026-01-05 13:50
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the notifications app (auto-generated).

    Creates NotificationPreference, Subscriber, SystemAnnouncement and
    NotificationLog. NOTE(review): auto-generated migration — do not hand-edit
    field definitions; add a follow-up migration for schema changes.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name="NotificationPreference",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                (
                    "channel_preferences",
                    models.JSONField(
                        blank=True, default=dict, help_text="Preferences per channel (email, push, in_app, sms)"
                    ),
                ),
                (
                    "workflow_preferences",
                    models.JSONField(blank=True, default=dict, help_text="Preferences per notification workflow"),
                ),
                (
                    "frequency_settings",
                    models.JSONField(blank=True, default=dict, help_text="Digest and frequency settings"),
                ),
                ("is_opted_out", models.BooleanField(default=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="novu_notification_prefs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Preference",
                "verbose_name_plural": "Notification Preferences",
            },
        ),
        migrations.CreateModel(
            name="Subscriber",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("novu_subscriber_id", models.CharField(db_index=True, max_length=255, unique=True)),
                ("first_name", models.CharField(blank=True, max_length=100)),
                ("last_name", models.CharField(blank=True, max_length=100)),
                ("email", models.EmailField(blank=True, max_length=254)),
                ("phone", models.CharField(blank=True, max_length=20)),
                ("avatar", models.URLField(blank=True)),
                ("locale", models.CharField(default="en", max_length=10)),
                ("data", models.JSONField(blank=True, default=dict, help_text="Custom subscriber data")),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.OneToOneField(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_subscriber",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Subscriber",
                "verbose_name_plural": "Notification Subscribers",
            },
        ),
        migrations.CreateModel(
            name="SystemAnnouncement",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("title", models.CharField(max_length=255)),
                ("message", models.TextField()),
                (
                    "severity",
                    models.CharField(
                        choices=[("info", "Information"), ("warning", "Warning"), ("critical", "Critical")],
                        default="info",
                        max_length=20,
                    ),
                ),
                ("action_url", models.URLField(blank=True)),
                ("is_active", models.BooleanField(default=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("expires_at", models.DateTimeField(blank=True, null=True)),
                (
                    "created_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="announcements_created",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "System Announcement",
                "verbose_name_plural": "System Announcements",
                "ordering": ["-created_at"],
            },
        ),
        migrations.CreateModel(
            name="NotificationLog",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("workflow_id", models.CharField(db_index=True, max_length=100)),
                ("notification_type", models.CharField(max_length=50)),
                ("channel", models.CharField(max_length=20)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("sent", "Sent"),
                            ("delivered", "Delivered"),
                            ("failed", "Failed"),
                        ],
                        default="pending",
                        max_length=20,
                    ),
                ),
                ("payload", models.JSONField(blank=True, default=dict)),
                ("error_message", models.TextField(blank=True)),
                ("novu_transaction_id", models.CharField(blank=True, db_index=True, max_length=255)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "user",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="notification_logs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification Log",
                "verbose_name_plural": "Notification Logs",
                "ordering": ["-created_at"],
                "indexes": [
                    models.Index(fields=["user", "-created_at"], name="notificatio_user_id_57d53d_idx"),
                    models.Index(fields=["workflow_id", "-created_at"], name="notificatio_workflo_e1a025_idx"),
                ],
            },
        ),
    ]

View File

@@ -0,0 +1,93 @@
# Generated by Django 5.2.9 on 2026-01-05 14:36
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Second notifications migration (auto-generated).

    Marks Subscriber.novu_subscriber_id as legacy and adds the in-app
    Notification model. NOTE(review): auto-generated migration — do not
    hand-edit; add a follow-up migration for schema changes.
    """
    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        ("notifications", "0001_initial"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AlterField(
            model_name="subscriber",
            name="novu_subscriber_id",
            field=models.CharField(
                db_index=True, help_text="Legacy Novu subscriber ID (deprecated)", max_length=255, unique=True
            ),
        ),
        migrations.CreateModel(
            name="Notification",
            fields=[
                ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                ("verb", models.CharField(max_length=255)),
                ("description", models.TextField(blank=True)),
                (
                    "level",
                    models.CharField(
                        choices=[("info", "Info"), ("success", "Success"), ("warning", "Warning"), ("error", "Error")],
                        default="info",
                        max_length=20,
                    ),
                ),
                ("action_object_id", models.PositiveIntegerField(blank=True, null=True)),
                ("target_id", models.PositiveIntegerField(blank=True, null=True)),
                ("data", models.JSONField(blank=True, default=dict)),
                ("unread", models.BooleanField(db_index=True, default=True)),
                ("timestamp", models.DateTimeField(auto_now_add=True)),
                ("read_at", models.DateTimeField(blank=True, null=True)),
                (
                    "action_object_content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_action_objects",
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "actor",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="notifications_sent",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "recipient",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="in_app_notifications",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "target_content_type",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="notification_targets",
                        to="contenttypes.contenttype",
                    ),
                ),
            ],
            options={
                "verbose_name": "Notification",
                "verbose_name_plural": "Notifications",
                "ordering": ["-timestamp"],
                "indexes": [
                    models.Index(fields=["recipient", "-timestamp"], name="notificatio_recipie_b8fa2a_idx"),
                    models.Index(fields=["recipient", "unread"], name="notificatio_recipie_8bedf2_idx"),
                ],
            },
        ),
    ]

View File

@@ -0,0 +1,298 @@
"""
Notifications models.
Provides models for:
- Subscriber: User notification profile (legacy, kept for compatibility)
- NotificationPreference: User notification preferences
- NotificationLog: Audit trail of sent notifications
- SystemAnnouncement: System-wide announcements
Note: Now using django-notifications-hq for the core notification system.
Subscriber model is kept for backward compatibility but is optional.
"""
from django.conf import settings
from django.db import models
class Subscriber(models.Model):
    """
    User notification profile.
    Note: This model is kept for backward compatibility. The new
    django-notifications-hq system uses User directly for notifications.
    This can be used for storing additional notification-related user data.
    """
    # One subscriber record per user.
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="notification_subscriber",
    )
    # Legacy field - kept for migration compatibility
    novu_subscriber_id = models.CharField(
        max_length=255,
        unique=True,
        db_index=True,
        help_text="Legacy Novu subscriber ID (deprecated)"
    )
    # Optional contact/profile fields.
    first_name = models.CharField(max_length=100, blank=True)
    last_name = models.CharField(max_length=100, blank=True)
    email = models.EmailField(blank=True)
    phone = models.CharField(max_length=20, blank=True)
    avatar = models.URLField(blank=True)
    locale = models.CharField(max_length=10, default="en")
    # Free-form extension point for extra subscriber attributes.
    data = models.JSONField(default=dict, blank=True, help_text="Custom subscriber data")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Subscriber"
        verbose_name_plural = "Notification Subscribers"

    def __str__(self):
        return f"Subscriber({self.user.username})"
class NotificationPreference(models.Model):
    """
    User notification preferences across channels and workflows.

    All preference payloads are free-form JSON dicts; empty dict means
    "no overrides" for that category.
    """
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="novu_notification_prefs",  # Renamed to avoid conflict with User.notification_preferences JSONField
    )
    # Channel preferences
    channel_preferences = models.JSONField(
        default=dict,
        blank=True,
        help_text="Preferences per channel (email, push, in_app, sms)",
    )
    # Workflow-specific preferences
    workflow_preferences = models.JSONField(
        default=dict,
        blank=True,
        help_text="Preferences per notification workflow",
    )
    # Frequency settings
    frequency_settings = models.JSONField(
        default=dict,
        blank=True,
        help_text="Digest and frequency settings",
    )
    # Global opt-out: overrides all channel/workflow preferences.
    is_opted_out = models.BooleanField(default=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Preference"
        verbose_name_plural = "Notification Preferences"

    def __str__(self):
        return f"Preferences({self.user.username})"
class NotificationLog(models.Model):
    """
    Audit log of sent notifications.

    Rows survive user deletion (user is SET_NULL) so the audit trail
    stays intact.
    """

    class Status(models.TextChoices):
        PENDING = "pending", "Pending"
        SENT = "sent", "Sent"
        DELIVERED = "delivered", "Delivered"
        FAILED = "failed", "Failed"

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="notification_logs",
    )
    workflow_id = models.CharField(max_length=100, db_index=True)
    notification_type = models.CharField(max_length=50)
    channel = models.CharField(max_length=20)  # email, push, in_app, sms
    status = models.CharField(
        max_length=20,
        choices=Status.choices,
        default=Status.PENDING,
    )
    # Payload sent to the delivery backend; error_message populated on failure.
    payload = models.JSONField(default=dict, blank=True)
    error_message = models.TextField(blank=True)
    # Correlation ID from the legacy Novu provider, if any.
    novu_transaction_id = models.CharField(max_length=255, blank=True, db_index=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Notification Log"
        verbose_name_plural = "Notification Logs"
        ordering = ["-created_at"]
        indexes = [
            models.Index(fields=["user", "-created_at"]),
            models.Index(fields=["workflow_id", "-created_at"]),
        ]

    def __str__(self):
        return f"Log({self.workflow_id}, {self.status})"
class SystemAnnouncement(models.Model):
    """
    System-wide announcements.

    Announcements are shown while is_active is True; expires_at, when set,
    presumably bounds display time — enforced by consumers, not this model.
    """

    class Severity(models.TextChoices):
        INFO = "info", "Information"
        WARNING = "warning", "Warning"
        CRITICAL = "critical", "Critical"

    title = models.CharField(max_length=255)
    message = models.TextField()
    severity = models.CharField(
        max_length=20,
        choices=Severity.choices,
        default=Severity.INFO,
    )
    # Optional call-to-action link shown with the announcement.
    action_url = models.URLField(blank=True)
    is_active = models.BooleanField(default=True)
    # Author survives as NULL if the creating user is deleted.
    created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="announcements_created",
    )
    created_at = models.DateTimeField(auto_now_add=True)
    expires_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        verbose_name = "System Announcement"
        verbose_name_plural = "System Announcements"
        ordering = ["-created_at"]

    def __str__(self):
        return f"{self.title} ({self.severity})"
class Notification(models.Model):
    """
    In-app notification model.
    This is a Django-native implementation for storing user notifications,
    supporting both in-app and email notification channels.
    """

    class Level(models.TextChoices):
        INFO = "info", "Info"
        SUCCESS = "success", "Success"
        WARNING = "warning", "Warning"
        ERROR = "error", "Error"

    # Who receives the notification
    recipient = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="in_app_notifications",  # Renamed to avoid clash with accounts.UserNotification
    )
    # Who triggered the notification (can be null for system notifications)
    actor = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="notifications_sent",
    )
    # What happened
    verb = models.CharField(max_length=255)
    description = models.TextField(blank=True)
    level = models.CharField(
        max_length=20,
        choices=Level.choices,
        default=Level.INFO,
    )
    # The object that was acted upon (generic foreign key)
    action_object_content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="notification_action_objects",
    )
    action_object_id = models.PositiveIntegerField(blank=True, null=True)
    # The target of the action (generic foreign key)
    target_content_type = models.ForeignKey(
        "contenttypes.ContentType",
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="notification_targets",
    )
    target_id = models.PositiveIntegerField(blank=True, null=True)
    # Additional data
    data = models.JSONField(default=dict, blank=True)
    # Status: unread until mark_as_read() flips it and stamps read_at.
    unread = models.BooleanField(default=True, db_index=True)
    # Timestamps
    timestamp = models.DateTimeField(auto_now_add=True)
    read_at = models.DateTimeField(null=True, blank=True)

    class Meta:
        verbose_name = "Notification"
        verbose_name_plural = "Notifications"
        ordering = ["-timestamp"]
        indexes = [
            models.Index(fields=["recipient", "-timestamp"]),
            models.Index(fields=["recipient", "unread"]),
        ]

    def __str__(self):
        return f"{self.verb} -> {self.recipient}"

    def mark_as_read(self):
        """Mark this notification as read (no-op if already read).

        Persists only the two changed columns via update_fields.
        """
        if self.unread:
            from django.utils import timezone
            self.unread = False
            self.read_at = timezone.now()
            self.save(update_fields=["unread", "read_at"])

    @property
    def action_object(self):
        """Get the action object instance, or None if not set.

        NOTE(review): raises the target model's DoesNotExist if the row
        referenced by the generic FK has been deleted.
        """
        if self.action_object_content_type and self.action_object_id:
            return self.action_object_content_type.get_object_for_this_type(
                pk=self.action_object_id
            )
        return None

    @property
    def target(self):
        """Get the target instance, or None if not set.

        NOTE(review): same DoesNotExist caveat as ``action_object``.
        """
        if self.target_content_type and self.target_id:
            return self.target_content_type.get_object_for_this_type(pk=self.target_id)
        return None
class NotificationManager(models.Manager):
    """Custom manager for the Notification model.

    Adds read/unread filters and a bulk mark-all-as-read helper on top of
    the default manager behavior.
    """

    def unread(self):
        """Return a queryset of only unread notifications."""
        return self.filter(unread=True)

    def read(self):
        """Return a queryset of only read notifications."""
        return self.filter(unread=False)

    def mark_all_as_read(self):
        """Mark every unread notification as read.

        Returns the number of rows updated (from QuerySet.update).
        """
        from django.utils import timezone
        return self.filter(unread=True).update(unread=False, read_at=timezone.now())


# Attach the custom manager as Notification.objects.
# BUGFIX: the original did ``Notification.objects = NotificationManager()``
# followed by ``Notification.objects.model = Notification``. That raw
# attribute assignment bypasses Django's manager binding: the manager is
# never registered with Notification._meta, so any machinery that inspects
# the model's managers would not see it. contribute_to_class() performs the
# full, supported hookup (sets .model, installs the descriptor, registers
# the manager with _meta) in one call.
NotificationManager().contribute_to_class(Notification, "objects")

View File

@@ -0,0 +1,156 @@
"""
Notification serializers.
"""
from rest_framework import serializers
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
class SubscriberSerializer(serializers.ModelSerializer):
    """Read serializer for Subscriber model."""
    # Expose the legacy Novu ID under the public name "subscriber_id".
    subscriber_id = serializers.CharField(source="novu_subscriber_id", read_only=True)

    class Meta:
        model = Subscriber
        fields = [
            "subscriber_id",
            "first_name",
            "last_name",
            "email",
            "phone",
            "avatar",
            "locale",
            "data",
            "created_at",
            "updated_at",
        ]
        read_only_fields = ["subscriber_id", "created_at", "updated_at"]
class CreateSubscriberSerializer(serializers.Serializer):
    """Input serializer for creating a new subscriber (only subscriber_id is required)."""
    subscriber_id = serializers.CharField(required=True)
    first_name = serializers.CharField(required=False, allow_blank=True, default="")
    last_name = serializers.CharField(required=False, allow_blank=True, default="")
    email = serializers.EmailField(required=False, allow_blank=True)
    phone = serializers.CharField(required=False, allow_blank=True, default="")
    avatar = serializers.URLField(required=False, allow_blank=True)
    locale = serializers.CharField(required=False, default="en")
    # DRF calls the ``dict`` callable per-validation, so the default is safe.
    data = serializers.JSONField(required=False, default=dict)
class UpdateSubscriberSerializer(serializers.Serializer):
    """Input serializer for partially updating a subscriber.

    Unlike CreateSubscriberSerializer, no defaults: omitted fields are
    simply absent from validated_data.
    """
    subscriber_id = serializers.CharField(required=True)
    first_name = serializers.CharField(required=False, allow_blank=True)
    last_name = serializers.CharField(required=False, allow_blank=True)
    email = serializers.EmailField(required=False, allow_blank=True)
    phone = serializers.CharField(required=False, allow_blank=True)
    avatar = serializers.URLField(required=False, allow_blank=True)
    locale = serializers.CharField(required=False)
    data = serializers.JSONField(required=False)
class NotificationPreferenceSerializer(serializers.ModelSerializer):
    """Serializer for NotificationPreference model."""

    class Meta:
        model = NotificationPreference
        fields = [
            "channel_preferences",
            "workflow_preferences",
            "frequency_settings",
            "is_opted_out",
            "updated_at",
        ]
        read_only_fields = ["updated_at"]
class UpdatePreferencesSerializer(serializers.Serializer):
    """Input serializer for updating notification preferences (free-form JSON payload)."""
    user_id = serializers.CharField(required=True)
    preferences = serializers.JSONField(required=True)
class TriggerNotificationSerializer(serializers.Serializer):
    """Input serializer for triggering a notification workflow."""
    workflow_id = serializers.CharField(required=True)
    subscriber_id = serializers.CharField(required=True)
    # Workflow payload and per-channel overrides; both optional.
    payload = serializers.JSONField(required=False, default=dict)
    overrides = serializers.JSONField(required=False, default=dict)
class ModeratorSubmissionNotificationSerializer(serializers.Serializer):
    """Input serializer for moderator submission notifications (all fields required)."""
    submission_id = serializers.CharField(required=True)
    submission_type = serializers.CharField(required=True)
    submitter_name = serializers.CharField(required=True)
    action = serializers.CharField(required=True)
class ModeratorReportNotificationSerializer(serializers.Serializer):
    """Serializer for moderator report notifications.

    NOTE: ``reported_at`` is a DateTimeField, so ``validated_data`` holds a
    ``datetime`` object (not a string) — callers that persist the validated
    payload as JSON must serialize it first.
    """

    report_id = serializers.CharField(required=True)
    report_type = serializers.CharField(required=True)
    reported_entity_type = serializers.CharField(required=True)
    reported_entity_id = serializers.CharField(required=True)
    reporter_name = serializers.CharField(required=True)
    reason = serializers.CharField(required=True)
    # Optional short excerpt of the reported content.
    entity_preview = serializers.CharField(required=False, allow_blank=True)
    reported_at = serializers.DateTimeField(required=False)
class SystemAnnouncementSerializer(serializers.ModelSerializer):
    """Read serializer for system announcements.

    ``id`` and ``created_at`` are model-managed and therefore read-only.
    """

    class Meta:
        model = SystemAnnouncement
        fields = [
            "id",
            "title",
            "message",
            "severity",
            "action_url",
            "is_active",
            "created_at",
            "expires_at",
        ]
        read_only_fields = ["id", "created_at"]
class CreateAnnouncementSerializer(serializers.Serializer):
    """Input serializer for creating system announcements."""

    title = serializers.CharField(required=True, max_length=255)
    message = serializers.CharField(required=True)
    # Severity controls presentation and (in the service layer) whether
    # email delivery is attempted; defaults to the least severe level.
    severity = serializers.ChoiceField(
        choices=["info", "warning", "critical"],
        default="info",
    )
    action_url = serializers.URLField(required=False, allow_blank=True)
class NotificationLogSerializer(serializers.ModelSerializer):
    """Read serializer for notification audit-log entries."""

    class Meta:
        model = NotificationLog
        fields = [
            "id",
            "workflow_id",
            "notification_type",
            "channel",
            "status",
            "payload",
            "error_message",
            "created_at",
        ]
        read_only_fields = ["id", "created_at"]

View File

@@ -0,0 +1,571 @@
"""
Django-native notification service.
This service provides a fully Django-native notification system. Supports:
- In-app notifications
- Email notifications (via Django email backend)
- Real-time notifications (ready for Django Channels integration)
"""
import logging
from typing import Any
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.core.mail import send_mail
from django.db.models import QuerySet
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.html import strip_tags
from .models import Notification, NotificationLog, NotificationPreference, SystemAnnouncement
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
# Resolve the active user model once at import time.
User = get_user_model()
class NotificationService:
    """
    Django-native notification service using django-notifications-hq.

    This replaces the Novu-based service with a fully Django-native approach:
    in-app notifications are persisted via the ``Notification`` model, emails
    go through Django's email backend, and every delivery attempt is recorded
    in ``NotificationLog``.
    """

    # Notification workflow types. Stored in ``data["workflow_id"]`` and used
    # to look up per-workflow delivery preferences.
    WORKFLOW_SUBMISSION_STATUS = "submission_status"
    WORKFLOW_MODERATION_ALERT = "moderation_alert"
    WORKFLOW_SYSTEM_ANNOUNCEMENT = "system_announcement"
    WORKFLOW_ADMIN_ALERT = "admin_alert"
    WORKFLOW_WELCOME = "welcome"
    WORKFLOW_COMMENT_REPLY = "comment_reply"
    WORKFLOW_MENTION = "mention"
    WORKFLOW_FOLLOW = "follow"

    def __init__(self):
        # Settings fall back to sensible site defaults when unset.
        self.from_email = getattr(
            settings, "DEFAULT_FROM_EMAIL", "noreply@thrillwiki.com"
        )
        self.site_name = getattr(settings, "SITE_NAME", "ThrillWiki")
        self.site_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")

    def send_notification(
        self,
        recipient: User,
        actor: User | None,
        verb: str,
        action_object: Any = None,
        target: Any = None,
        description: str = "",
        level: str = "info",
        data: dict | None = None,
        send_email: bool = True,
        email_template: str | None = None,
    ) -> bool:
        """
        Send a notification to a user.

        Creates an in-app ``Notification`` row, writes an audit-log entry,
        and (preferences permitting) also sends an email.

        Args:
            recipient: The user to notify
            actor: The user who performed the action (can be None for system notifications)
            verb: Description of the action (e.g., "approved your submission")
            action_object: The object that was acted upon
            target: The target of the action
            description: Additional description text
            level: Notification level (info, success, warning, error)
            data: Additional data to store with the notification
            send_email: Whether to also send an email notification
            email_template: Template path for email (optional)

        Returns:
            True if notification was sent successfully; False when the user
            opted out or any exception occurred (logged, never raised).
        """
        try:
            # Check user preferences: a global opt-out short-circuits delivery.
            if self._is_user_opted_out(recipient):
                logger.debug(f"User {recipient.id} opted out of notifications")
                return False
            # Create in-app notification using our native model.
            notification_data = {
                "recipient": recipient,
                "actor": actor,
                "verb": verb,
                "description": description,
                "level": level,
                "data": data or {},
            }
            # Add generic foreign key for action_object if provided.
            if action_object:
                notification_data["action_object_content_type"] = ContentType.objects.get_for_model(action_object)
                notification_data["action_object_id"] = action_object.pk
            # Add generic foreign key for target if provided.
            if target:
                notification_data["target_content_type"] = ContentType.objects.get_for_model(target)
                notification_data["target_id"] = target.pk
            Notification.objects.create(**notification_data)
            # Log the in-app delivery to the audit trail.
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type=level,
                channel="in_app",
                status=NotificationLog.Status.SENT,
                payload=data or {},
            )
            # Optionally send email (channel + per-workflow preferences apply).
            if send_email and self._should_send_email(recipient, data):
                self._send_email_notification(
                    recipient=recipient,
                    verb=verb,
                    actor=actor,
                    action_object=action_object,
                    target=target,
                    description=description,
                    template=email_template,
                    data=data,
                )
            return True
        except Exception as e:
            # Never propagate: record the failure and report False instead.
            logger.exception(f"Failed to send notification to {recipient.id}: {e}")
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type=level,
                channel="in_app",
                status=NotificationLog.Status.FAILED,
                payload=data or {},
                error_message=str(e),
            )
            return False

    def send_to_group(
        self,
        recipients: QuerySet | list,
        actor: User | None,
        verb: str,
        action_object: Any = None,
        target: Any = None,
        description: str = "",
        level: str = "info",
        data: dict | None = None,
        send_email: bool = False,
    ) -> dict:
        """
        Send a notification to multiple users, one at a time.

        Opted-out users are counted as "skipped" (send_notification would
        also skip them, but counting here distinguishes them from failures).

        Returns:
            Dict with success/failure/skipped counts.
        """
        results = {"success": 0, "failed": 0, "skipped": 0}
        for recipient in recipients:
            if self._is_user_opted_out(recipient):
                results["skipped"] += 1
                continue
            success = self.send_notification(
                recipient=recipient,
                actor=actor,
                verb=verb,
                action_object=action_object,
                target=target,
                description=description,
                level=level,
                data=data,
                send_email=send_email,
            )
            if success:
                results["success"] += 1
            else:
                results["failed"] += 1
        return results

    def notify_moderators(
        self,
        verb: str,
        action_object: Any = None,
        description: str = "",
        data: dict | None = None,
    ) -> dict:
        """
        Send a notification (in-app + email) to all moderators.

        Returns the success/failed/skipped counts from ``send_to_group``.
        """
        from django.contrib.auth import get_user_model

        User = get_user_model()
        # Get users with moderator permissions.
        # NOTE(review): is_staff is used as a proxy for "moderator" — confirm
        # this matches the project's permission model.
        moderators = User.objects.filter(
            is_active=True,
            is_staff=True,  # Or use a specific permission check
        ).exclude(
            # presumably the reverse accessor of NotificationPreference.user;
            # excludes globally opted-out users up front.
            novu_notification_prefs__is_opted_out=True
        )
        return self.send_to_group(
            recipients=moderators,
            actor=None,
            verb=verb,
            action_object=action_object,
            description=description,
            level="info",
            data={**(data or {}), "workflow_id": self.WORKFLOW_MODERATION_ALERT},
            send_email=True,
        )

    def notify_admins(
        self,
        verb: str,
        description: str = "",
        level: str = "warning",
        data: dict | None = None,
    ) -> dict:
        """
        Send a notification (in-app + email) to all active superusers.
        """
        admins = User.objects.filter(is_superuser=True, is_active=True)
        return self.send_to_group(
            recipients=admins,
            actor=None,
            verb=verb,
            description=description,
            level=level,
            data={**(data or {}), "workflow_id": self.WORKFLOW_ADMIN_ALERT},
            send_email=True,
        )

    def send_system_announcement(
        self,
        title: str,
        message: str,
        severity: str = "info",
        action_url: str = "",
        target_users: QuerySet | None = None,
        created_by: User | None = None,
    ) -> SystemAnnouncement:
        """
        Create and broadcast a system announcement.

        Emails are only sent for "warning"/"critical" severities.

        NOTE(review): an *empty* target_users queryset is falsy, so it falls
        back to broadcasting to ALL active users — confirm that is intended.
        """
        # Create the persistent announcement record first.
        announcement = SystemAnnouncement.objects.create(
            title=title,
            message=message,
            severity=severity,
            action_url=action_url,
            created_by=created_by,
            is_active=True,
        )
        # Notify users.
        recipients = target_users or User.objects.filter(is_active=True)
        self.send_to_group(
            recipients=recipients,
            actor=created_by,
            verb=f"System announcement: {title}",
            action_object=announcement,
            description=message,
            level=severity,
            data={
                "workflow_id": self.WORKFLOW_SYSTEM_ANNOUNCEMENT,
                "announcement_id": str(announcement.id),
                "action_url": action_url,
            },
            send_email=severity in ["warning", "critical"],
        )
        return announcement

    def get_user_notifications(
        self,
        user: User,
        unread_only: bool = False,
        limit: int = 50,
    ):
        """
        Get up to ``limit`` notifications for a user, optionally unread only.

        NOTE(review): ``.unread()`` assumes a custom queryset method on the
        Notification manager (django-notifications-hq style) — confirm.
        """
        qs = Notification.objects.filter(recipient=user)
        if unread_only:
            qs = qs.unread()
        return qs[:limit]

    def mark_as_read(self, user: User, notification_id: int | None = None):
        """
        Mark one notification as read, or all of the user's notifications
        when ``notification_id`` is None. Unknown ids are silently ignored.
        """
        if notification_id:
            try:
                notification = Notification.objects.get(recipient=user, id=notification_id)
                notification.mark_as_read()
            except Notification.DoesNotExist:
                pass
        else:
            # Mark all as read (custom queryset method, see get_user_notifications).
            Notification.objects.filter(recipient=user).mark_all_as_read()

    def get_unread_count(self, user: User) -> int:
        """
        Get count of unread notifications for ``user``.
        """
        return Notification.objects.filter(recipient=user, unread=True).count()

    def _is_user_opted_out(self, user: User) -> bool:
        """Check the global opt-out flag; missing preferences mean opted in."""
        try:
            prefs = NotificationPreference.objects.get(user=user)
            return prefs.is_opted_out
        except NotificationPreference.DoesNotExist:
            return False

    def _should_send_email(self, user: User, data: dict | None) -> bool:
        """Check if email should be sent based on user preferences.

        Channel preference is checked first, then the per-workflow email
        flag when ``data`` carries a workflow_id. Missing preferences or
        keys default to True (send).
        """
        try:
            prefs = NotificationPreference.objects.get(user=user)
            # Check channel preferences.
            channel_prefs = prefs.channel_preferences or {}
            email_enabled = channel_prefs.get("email", True)
            if not email_enabled:
                return False
            # Check workflow-specific preferences.
            if data and "workflow_id" in data:
                workflow_prefs = prefs.workflow_preferences or {}
                workflow_email = workflow_prefs.get(data["workflow_id"], {}).get("email", True)
                return workflow_email
            return True
        except NotificationPreference.DoesNotExist:
            # Default to sending email if no preferences set.
            return True

    def _send_email_notification(
        self,
        recipient: User,
        verb: str,
        actor: User | None,
        action_object: Any,
        target: Any,
        description: str,
        template: str | None,
        data: dict | None,
    ):
        """Send an email notification; failures are logged, never raised."""
        try:
            # Build template context.
            context = {
                "recipient": recipient,
                "actor": actor,
                "verb": verb,
                "action_object": action_object,
                "target": target,
                "description": description,
                "site_name": self.site_name,
                "site_url": self.site_url,
                "data": data or {},
            }
            # Render email: template when given, otherwise a minimal default.
            if template:
                html_content = render_to_string(template, context)
                text_content = strip_tags(html_content)
            else:
                # Default simple email.
                actor_name = actor.username if actor else self.site_name
                # NOTE(review): this subject is dead — it is unconditionally
                # overwritten below before send_mail is called.
                subject = f"{actor_name} {verb}"
                text_content = description or f"{actor_name} {verb}"
                html_content = f"<p>{text_content}</p>"
                if data and data.get("action_url"):
                    html_content += f'<p><a href="{data["action_url"]}">View details</a></p>'
            subject = f"[{self.site_name}] {verb[:50]}"
            send_mail(
                subject=subject,
                message=text_content,
                from_email=self.from_email,
                recipient_list=[recipient.email],
                html_message=html_content,
                fail_silently=True,  # delivery errors must not break the caller
            )
            # Log email notification.
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type="email",
                channel="email",
                status=NotificationLog.Status.SENT,
                payload=data or {},
            )
        except Exception as e:
            logger.exception(f"Failed to send email to {recipient.email}: {e}")
            self._log_notification(
                user=recipient,
                workflow_id=data.get("workflow_id", "general") if data else "general",
                notification_type="email",
                channel="email",
                status=NotificationLog.Status.FAILED,
                payload=data or {},
                error_message=str(e),
            )

    def _log_notification(
        self,
        user: User,
        workflow_id: str,
        notification_type: str,
        channel: str,
        status: str,
        payload: dict,
        error_message: str = "",
    ):
        """Write one delivery-attempt row to the NotificationLog audit trail."""
        NotificationLog.objects.create(
            user=user,
            workflow_id=workflow_id,
            notification_type=notification_type,
            channel=channel,
            status=status,
            payload=payload,
            error_message=error_message,
        )
# Singleton instance shared by the whole process (the service is stateless
# beyond settings read at construction time).
notification_service = NotificationService()
# ============================================================================
# Backward compatibility - keep old NovuService interface but delegate to native
# ============================================================================
class NovuServiceSync:
    """
    Backward-compatible wrapper that delegates to the new notification service.

    This maintains the old Novu API signature (subscriber management, workflow
    triggers, topic broadcasts, preference updates) for existing code while
    using the new Django-native implementation underneath.
    """

    def __init__(self):
        self._service = notification_service

    @property
    def is_configured(self) -> bool:
        """Always configured since we're using Django-native system."""
        return True

    def create_subscriber(self, subscriber_id: str, **kwargs) -> dict[str, Any]:
        """Create subscriber - now a no-op as django-notifications-hq uses User directly."""
        logger.info(f"Subscriber creation not needed for django-notifications-hq: {subscriber_id}")
        return {"subscriberId": subscriber_id, "status": "native"}

    def update_subscriber(self, subscriber_id: str, **kwargs) -> dict[str, Any]:
        """Update subscriber - now a no-op."""
        logger.info(f"Subscriber update not needed for django-notifications-hq: {subscriber_id}")
        return {"subscriberId": subscriber_id, "status": "native"}

    def trigger_notification(
        self,
        workflow_id: str,
        subscriber_id: str,
        payload: dict | None = None,
        overrides: dict | None = None,
    ) -> dict[str, Any]:
        """Trigger a notification using the new native service.

        Args:
            workflow_id: Logical workflow identifier (stored in data).
            subscriber_id: Primary key of the recipient User.
            payload: Optional dict; "message" becomes the verb, "description"
                the description, and the whole payload is stored as data.
            overrides: Accepted for Novu API compatibility; unused here.

        Returns:
            {"status": "sent"/"failed", ...}; never raises on lookup failure.
        """
        try:
            user = User.objects.get(pk=subscriber_id)
        except (User.DoesNotExist, ValueError, TypeError):
            # ValueError/TypeError: subscriber_id cannot be coerced to the
            # User pk type (legacy Novu subscriber ids are arbitrary strings,
            # which would otherwise crash an integer-pk lookup).
            logger.error(f"User not found for notification: {subscriber_id}")
            return {"status": "failed", "error": "User not found"}

        verb = payload.get("message", f"Notification: {workflow_id}") if payload else f"Notification: {workflow_id}"
        description = payload.get("description", "") if payload else ""
        success = self._service.send_notification(
            recipient=user,
            actor=None,
            verb=verb,
            description=description,
            data={**(payload or {}), "workflow_id": workflow_id},
        )
        return {
            "status": "sent" if success else "failed",
            "workflow_id": workflow_id,
        }

    def trigger_topic_notification(
        self,
        workflow_id: str,
        topic_key: str,
        payload: dict | None = None,
    ) -> dict[str, Any]:
        """Trigger topic notification - maps known topic keys to user groups.

        "moderators" and "admins" are supported; any other key is logged and
        yields zero deliveries.
        """
        logger.info(f"Topic notification: {workflow_id} -> {topic_key}")
        # Map topic keys to user groups.
        if topic_key == "moderators":
            result = self._service.notify_moderators(
                verb=payload.get("message", "New moderation task") if payload else "New moderation task",
                data={**(payload or {}), "workflow_id": workflow_id},
            )
        elif topic_key == "admins":
            result = self._service.notify_admins(
                verb=payload.get("message", "Admin notification") if payload else "Admin notification",
                data={**(payload or {}), "workflow_id": workflow_id},
            )
        else:
            logger.warning(f"Unknown topic key: {topic_key}")
            result = {"success": 0, "failed": 0, "skipped": 0}
        return {
            "status": "sent",
            "workflow_id": workflow_id,
            "result": result,
        }

    def update_preferences(
        self,
        subscriber_id: str,
        preferences: dict[str, Any],
    ) -> dict[str, Any]:
        """Update notification preferences for the user identified by pk.

        Only the keys present in ``preferences`` are applied; unknown keys
        are ignored. Returns a status dict and never raises on bad ids.
        """
        try:
            user = User.objects.get(pk=subscriber_id)
        except (User.DoesNotExist, ValueError, TypeError):
            # Same rationale as trigger_notification: tolerate non-pk ids.
            return {"status": "failed", "error": "User not found"}

        prefs, _ = NotificationPreference.objects.get_or_create(user=user)
        if "channel_preferences" in preferences:
            prefs.channel_preferences = preferences["channel_preferences"]
        if "workflow_preferences" in preferences:
            prefs.workflow_preferences = preferences["workflow_preferences"]
        if "is_opted_out" in preferences:
            prefs.is_opted_out = preferences["is_opted_out"]
        prefs.save()
        return {"status": "updated"}
# Keep old name for backward compatibility: existing callers import
# ``novu_service`` and use the legacy Novu-shaped API.
novu_service = NovuServiceSync()

View File

@@ -0,0 +1,76 @@
"""
Notification URL configuration.
Note: Now using django-notifications-hq for native Django notifications.
Legacy Novu endpoints are kept for backward compatibility.
"""
from django.urls import path
from .views import (
AdminAlertView,
AdminCriticalErrorView,
CreateSubscriberView,
NotificationListView,
NotificationMarkReadView,
NotificationUnreadCountView,
NotifyModeratorsReportView,
NotifyModeratorsSubmissionView,
NotifyUserSubmissionStatusView,
SystemAnnouncementView,
TriggerNotificationView,
UpdatePreferencesView,
UpdateSubscriberView,
)
# Namespace for reversing, e.g. reverse("notifications:unread_count").
app_name = "notifications"

urlpatterns = [
    # ========== Native Notification Endpoints ==========
    # List notifications for current user
    path("", NotificationListView.as_view(), name="list"),
    # Mark notification(s) as read
    path("mark-read/", NotificationMarkReadView.as_view(), name="mark_read"),
    # Get unread count
    path("unread-count/", NotificationUnreadCountView.as_view(), name="unread_count"),
    # ========== Legacy/Compatibility Endpoints ==========
    # Subscriber management (legacy - kept for backward compatibility)
    path("subscribers/", CreateSubscriberView.as_view(), name="create_subscriber"),
    path("subscribers/update/", UpdateSubscriberView.as_view(), name="update_subscriber"),
    # Preferences (GET returns current, POST updates)
    path("preferences/", UpdatePreferencesView.as_view(), name="preferences"),
    # Trigger notifications
    path("trigger/", TriggerNotificationView.as_view(), name="trigger"),
    # Moderator notifications
    path(
        "moderators/submission/",
        NotifyModeratorsSubmissionView.as_view(),
        name="moderators_submission",
    ),
    path(
        "moderators/report/",
        NotifyModeratorsReportView.as_view(),
        name="moderators_report",
    ),
    # User notifications
    path(
        "user/submission-status/",
        NotifyUserSubmissionStatusView.as_view(),
        name="user_submission_status",
    ),
    # System notifications
    path(
        "system/announcement/",
        SystemAnnouncementView.as_view(),
        name="system_announcement",
    ),
    # Admin notifications
    path("admin/alert/", AdminAlertView.as_view(), name="admin_alert"),
    path(
        "admin/critical-error/",
        AdminCriticalErrorView.as_view(),
        name="admin_critical_error",
    ),
]

View File

@@ -0,0 +1,617 @@
"""
Notification views.
Provides REST API endpoints for:
- Subscriber management (legacy compatibility)
- Preference updates
- Notification triggering
- Moderator notifications
- System announcements
- User notification list and management
Note: Now using django-notifications-hq for native Django notifications.
The novu_service import provides backward compatibility.
"""
import logging
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.permissions import IsAdminUser, IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.core.utils import capture_and_log
from .models import NotificationLog, NotificationPreference, Subscriber, SystemAnnouncement
from .serializers import (
CreateAnnouncementSerializer,
CreateSubscriberSerializer,
ModeratorReportNotificationSerializer,
ModeratorSubmissionNotificationSerializer,
NotificationPreferenceSerializer,
SystemAnnouncementSerializer,
TriggerNotificationSerializer,
UpdatePreferencesSerializer,
UpdateSubscriberSerializer,
)
from .services import novu_service, notification_service
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
# Resolve the active user model once at import time.
User = get_user_model()
class CreateSubscriberView(APIView):
    """
    POST /notifications/subscribers/

    Create (or refresh) the Novu subscriber record for the current user and
    mirror it to the notification backend when one is configured.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = CreateSubscriberSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        payload = serializer.validated_data
        subscriber_id = payload["subscriber_id"]
        try:
            # One subscriber record per authenticated user.
            record, was_created = Subscriber.objects.update_or_create(
                user=request.user,
                defaults={
                    "novu_subscriber_id": subscriber_id,
                    "first_name": payload.get("first_name", ""),
                    "last_name": payload.get("last_name", ""),
                    # Fall back to the account email when none was supplied.
                    "email": payload.get("email") or request.user.email,
                    "phone": payload.get("phone", ""),
                    "avatar": payload.get("avatar", ""),
                    "locale": payload.get("locale", "en"),
                    "data": payload.get("data", {}),
                },
            )
            # Mirror the stored (not raw) values to the backend.
            if novu_service.is_configured:
                novu_service.create_subscriber(
                    subscriber_id=subscriber_id,
                    email=record.email,
                    first_name=record.first_name,
                    last_name=record.last_name,
                    phone=record.phone,
                    avatar=record.avatar,
                    locale=record.locale,
                    data=record.data,
                )
            http_status = status.HTTP_201_CREATED if was_created else status.HTTP_200_OK
            return Response(
                {"subscriberId": subscriber_id, "created": was_created},
                status=http_status,
            )
        except Exception as e:
            capture_and_log(e, "Create notification subscriber", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class UpdateSubscriberView(APIView):
    """
    POST /notifications/subscribers/update/

    Partially update the current user's subscriber record; 404 when the
    user has no record yet.
    """

    permission_classes = [IsAuthenticated]

    # Fields that may be patched on the local Subscriber record.
    _MUTABLE_FIELDS = ("first_name", "last_name", "email", "phone", "avatar", "locale", "data")

    def post(self, request):
        serializer = UpdateSubscriberSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        payload = serializer.validated_data
        subscriber_id = payload["subscriber_id"]
        try:
            record = Subscriber.objects.filter(user=request.user).first()
            if record is None:
                return Response(
                    {"detail": "Subscriber not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            # Apply only the fields that were actually supplied.
            for name in self._MUTABLE_FIELDS:
                if name in payload:
                    setattr(record, name, payload[name])
            record.save()
            # Forward the same partial update to the backend when configured.
            if novu_service.is_configured:
                changes = {k: v for k, v in payload.items() if k != "subscriber_id"}
                novu_service.update_subscriber(subscriber_id, **changes)
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Update notification subscriber", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class UpdatePreferencesView(APIView):
    """
    POST /notifications/preferences/ — update the current user's preferences.
    GET  /notifications/preferences/ — return them (empty maps when unset).

    NOTE(review): POST does not handle an opt-out flag even though the
    service layer (update_preferences) supports one — confirm whether
    opt-out is meant to be settable through this endpoint.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = UpdatePreferencesSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        preferences = data["preferences"]
        try:
            # Update local preferences. Missing keys reset the corresponding
            # map to {} (update_or_create always writes all defaults).
            pref, created = NotificationPreference.objects.update_or_create(
                user=request.user,
                defaults={
                    "channel_preferences": preferences.get("channelPreferences", {}),
                    "workflow_preferences": preferences.get("workflowPreferences", {}),
                    "frequency_settings": preferences.get("frequencySettings", {}),
                },
            )
            # Sync to Novu (only possible when a subscriber record exists).
            if novu_service.is_configured:
                subscriber = Subscriber.objects.filter(user=request.user).first()
                if subscriber:
                    novu_service.update_preferences(subscriber.novu_subscriber_id, preferences)
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Update notification preferences", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def get(self, request):
        """Get current user's notification preferences."""
        try:
            pref = NotificationPreference.objects.filter(user=request.user).first()
            if not pref:
                # No stored preferences yet: report empty maps, not 404.
                return Response(
                    {
                        "channelPreferences": {},
                        "workflowPreferences": {},
                        "frequencySettings": {},
                    }
                )
            return Response(NotificationPreferenceSerializer(pref).data)
        except Exception as e:
            capture_and_log(e, "Get notification preferences", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class TriggerNotificationView(APIView):
    """
    POST /notifications/trigger/

    Trigger a notification workflow for one subscriber and record the
    attempt in the audit log.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = TriggerNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        data = serializer.validated_data
        try:
            # Create the audit-log row first so a backend failure still
            # leaves a trace; status is updated below.
            log = NotificationLog.objects.create(
                user=request.user,
                workflow_id=data["workflow_id"],
                notification_type="trigger",
                channel="all",
                payload=data.get("payload", {}),
            )
            # Trigger via the configured backend.
            if novu_service.is_configured:
                result = novu_service.trigger_notification(
                    workflow_id=data["workflow_id"],
                    subscriber_id=data["subscriber_id"],
                    payload=data.get("payload"),
                    overrides=data.get("overrides"),
                )
                log.novu_transaction_id = result.get("transactionId", "")
                log.status = NotificationLog.Status.SENT
            else:
                # Deliberate mock success when no backend is configured,
                # so callers behave identically in dev environments.
                log.status = NotificationLog.Status.SENT  # Mock success
            log.save()
            return Response({"success": True, "transactionId": log.novu_transaction_id})
        except Exception as e:
            capture_and_log(e, "Trigger notification", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotifyModeratorsSubmissionView(APIView):
    """
    POST /notifications/moderators/submission/

    Record and (when a backend is configured) broadcast a new-submission
    alert to the moderators topic.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = ModeratorSubmissionNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        details = serializer.validated_data
        try:
            # Audit-log the alert regardless of backend availability.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="moderator-submission-notification",
                notification_type="moderator_submission",
                channel="in_app",
                payload=details,
                status=NotificationLog.Status.SENT,
            )
            # Fan out to the moderators topic when possible.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="moderator-submission-notification",
                    topic_key="moderators",
                    payload=details,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Notify moderators (submission)", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotifyModeratorsReportView(APIView):
    """
    POST /notifications/moderators/report/

    Record and (when a backend is configured) broadcast a new-report alert
    to the moderators topic.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        serializer = ModeratorReportNotificationSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        # validated_data may contain a datetime (``reported_at`` is a
        # DateTimeField). JSONField payloads must be JSON-serializable, so
        # coerce temporal values to ISO-8601 strings before persisting or
        # forwarding — otherwise NotificationLog.save() raises TypeError.
        data = {
            key: (value.isoformat() if hasattr(value, "isoformat") else value)
            for key, value in serializer.validated_data.items()
        }
        try:
            # Log the notification.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="moderator-report-notification",
                notification_type="moderator_report",
                channel="in_app",
                payload=data,
                status=NotificationLog.Status.SENT,
            )
            # Trigger to moderator topic.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="moderator-report-notification",
                    topic_key="moderators",
                    payload=data,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Notify moderators (report)", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotifyUserSubmissionStatusView(APIView):
    """
    POST /notifications/user/submission-status/

    Notify a user (defaulting to the caller) that their submission's
    status changed. The raw request body is used as the payload.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        body = request.data
        try:
            # Default recipient is the authenticated caller.
            target_subscriber = body.get("subscriber_id") or str(request.user.id)
            # Audit-log the attempt before delivery.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="submission-status-update",
                notification_type="submission_status",
                channel="email",
                payload=body,
                status=NotificationLog.Status.SENT,
            )
            if novu_service.is_configured:
                novu_service.trigger_notification(
                    workflow_id="submission-status-update",
                    subscriber_id=target_subscriber,
                    payload=body,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Notify user submission status", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class SystemAnnouncementView(APIView):
    """
    POST /notifications/system/announcement/

    Create a persistent system-wide announcement and broadcast it to the
    "users" topic. Admin only.
    """

    permission_classes = [IsAdminUser]

    def post(self, request):
        serializer = CreateAnnouncementSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        fields = serializer.validated_data
        try:
            # Persist the announcement record first.
            announcement = SystemAnnouncement.objects.create(
                title=fields["title"],
                message=fields["message"],
                severity=fields.get("severity", "info"),
                action_url=fields.get("action_url", ""),
                created_by=request.user,
            )
            # Broadcast to all users when a backend is configured.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="system-announcement",
                    topic_key="users",
                    payload={
                        "title": announcement.title,
                        "message": announcement.message,
                        "severity": announcement.severity,
                        "actionUrl": announcement.action_url,
                    },
                )
            response_body = {
                "success": True,
                "announcementId": str(announcement.id),
            }
            return Response(response_body, status=status.HTTP_201_CREATED)
        except Exception as e:
            capture_and_log(e, "System announcement", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class AdminAlertView(APIView):
    """
    POST /notifications/admin/alert/

    Record and forward an alert to the admins topic. The raw request body
    is used as the payload.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        body = request.data
        try:
            # Audit-log the alert before forwarding.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="admin-alert",
                notification_type="admin_alert",
                channel="email",
                payload=body,
                status=NotificationLog.Status.SENT,
            )
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="admin-alert",
                    topic_key="admins",
                    payload=body,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Admin alert", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class AdminCriticalErrorView(APIView):
    """
    POST /notifications/admin/critical-error/

    Record and forward a critical-error alert to the admins topic. The raw
    request body is used as the payload.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        body = request.data
        try:
            # Audit-log the alert before forwarding.
            NotificationLog.objects.create(
                user=request.user,
                workflow_id="admin-critical-error",
                notification_type="critical_error",
                channel="email",
                payload=body,
                status=NotificationLog.Status.SENT,
            )
            # Forward to the admins topic with the critical-error workflow.
            if novu_service.is_configured:
                novu_service.trigger_topic_notification(
                    workflow_id="admin-critical-error",
                    topic_key="admins",
                    payload=body,
                )
            return Response({"success": True})
        except Exception as e:
            capture_and_log(e, "Admin critical error", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
# ============================================================================
# Native Notification Views (django-notifications-hq)
# ============================================================================
class NotificationListView(APIView):
    """
    GET /notifications/

    List the current user's notifications.

    Query params:
        unread_only: "true" to restrict to unread notifications.
        limit: max number of rows (integer, clamped to 0..100, default 50).
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):  # pragma: no cover - not routed; kept minimal
        raise NotImplementedError

    def get(self, request):
        # Validate the limit before entering the broad handler so a bad
        # query string yields 400, not a logged 500.
        raw_limit = request.query_params.get("limit", 50)
        try:
            # Clamp to [0, 100]: Django querysets reject negative slices.
            limit = max(0, min(int(raw_limit), 100))
        except (TypeError, ValueError):
            return Response(
                {"detail": "limit must be an integer"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        try:
            unread_only = request.query_params.get("unread_only", "false").lower() == "true"
            notifications = notification_service.get_user_notifications(
                user=request.user,
                unread_only=unread_only,
                limit=limit,
            )
            # Serialize notifications into a JSON-friendly shape.
            notification_list = []
            for notif in notifications:
                notification_list.append({
                    "id": notif.id,
                    "actor": str(notif.actor) if notif.actor else None,
                    "verb": notif.verb,
                    "description": notif.description or "",
                    "target": str(notif.target) if notif.target else None,
                    "actionObject": str(notif.action_object) if notif.action_object else None,
                    "level": notif.level,
                    "unread": notif.unread,
                    "data": notif.data or {},
                    "timestamp": notif.timestamp.isoformat(),
                })
            return Response({
                "notifications": notification_list,
                "unreadCount": notification_service.get_unread_count(request.user),
            })
        except Exception as e:
            capture_and_log(e, "Get notifications", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotificationMarkReadView(APIView):
    """
    POST /notifications/mark-read/

    Mark a single notification (when ``notification_id`` is supplied) or
    all of the caller's notifications as read; returns the new unread count.
    """

    permission_classes = [IsAuthenticated]

    def post(self, request):
        target_id = request.data.get("notification_id")
        try:
            notification_service.mark_as_read(
                user=request.user,
                notification_id=target_id,
            )
            payload = {
                "success": True,
                "unreadCount": notification_service.get_unread_count(request.user),
            }
            return Response(payload)
        except Exception as e:
            capture_and_log(e, "Mark notification read", source="api")
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
class NotificationUnreadCountView(APIView):
    """
    GET /notifications/unread-count/
    Get count of unread notifications.

    Returns {"unreadCount": <int>} for the authenticated user, or a 500
    with the error detail if the service call fails.
    """
    permission_classes = [IsAuthenticated]

    def get(self, request):
        try:
            unread_total = notification_service.get_unread_count(request.user)
        except Exception as exc:
            capture_and_log(exc, "Get unread count", source="api")
            return Response(
                {"detail": str(exc)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
        return Response({"unreadCount": unread_total})

View File

@@ -66,6 +66,31 @@ app.conf.update(
"task": "rides.check_overdue_closings", "task": "rides.check_overdue_closings",
"schedule": 86400.0, # Daily at midnight "schedule": 86400.0, # Daily at midnight
}, },
# ====== New scheduled tasks ======
"process-scheduled-deletions": {
"task": "core.process_scheduled_deletions",
"schedule": 86400.0, # Daily
},
"process-closing-entities": {
"task": "core.process_closing_entities",
"schedule": 86400.0, # Daily
},
"process-expired-bans": {
"task": "core.process_expired_bans",
"schedule": 3600.0, # Hourly
},
"cleanup-orphaned-images": {
"task": "core.cleanup_orphaned_images",
"schedule": 604800.0, # Weekly
},
"cleanup-old-versions": {
"task": "core.cleanup_old_versions",
"schedule": 2592000.0, # Monthly (30 days)
},
"data-retention-cleanup": {
"task": "core.data_retention_cleanup",
"schedule": 86400.0, # Daily
},
}, },
# Task result settings # Task result settings
result_expires=3600, # 1 hour result_expires=3600, # 1 hour

View File

@@ -73,8 +73,7 @@ THIRD_PARTY_APPS = [
"rest_framework.authtoken", "rest_framework.authtoken",
"rest_framework_simplejwt", # JWT authentication "rest_framework_simplejwt", # JWT authentication
"rest_framework_simplejwt.token_blacklist", # JWT token blacklist "rest_framework_simplejwt.token_blacklist", # JWT token blacklist
"dj_rest_auth", # REST authentication with JWT support # Note: dj_rest_auth removed - using custom auth views in apps.api.v1.auth
"dj_rest_auth.registration", # REST registration support
"drf_spectacular", # OpenAPI 3.0 documentation "drf_spectacular", # OpenAPI 3.0 documentation
"corsheaders", # CORS headers for API "corsheaders", # CORS headers for API
"pghistory", # django-pghistory "pghistory", # django-pghistory
@@ -102,6 +101,8 @@ THIRD_PARTY_APPS = [
"django_celery_beat", # Celery beat scheduler "django_celery_beat", # Celery beat scheduler
"django_celery_results", # Celery result backend "django_celery_results", # Celery result backend
"django_extensions", # Django Extensions for enhanced development tools "django_extensions", # Django Extensions for enhanced development tools
# Note: django-notifications-hq is installed but not in INSTALLED_APPS
# to avoid app label conflict. We use a custom implementation instead.
] ]
LOCAL_APPS = [ LOCAL_APPS = [
@@ -117,6 +118,7 @@ LOCAL_APPS = [
"apps.media", "apps.media",
"apps.blog", "apps.blog",
"apps.support", "apps.support",
"apps.notifications", # Notification service
] ]
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS

View File

@@ -34,7 +34,7 @@ ACCOUNT_LOGIN_METHODS = {"email", "username"}
# Email verification settings # Email verification settings
ACCOUNT_EMAIL_VERIFICATION = config("ACCOUNT_EMAIL_VERIFICATION", default="mandatory") ACCOUNT_EMAIL_VERIFICATION = config("ACCOUNT_EMAIL_VERIFICATION", default="mandatory")
ACCOUNT_EMAIL_REQUIRED = True # Note: ACCOUNT_EMAIL_REQUIRED is handled by ACCOUNT_SIGNUP_FIELDS above (email* = required)
ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_CHANGE = True ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_CHANGE = True
ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_RESEND = True ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_RESEND = True

View File

@@ -24,7 +24,6 @@ dependencies = [
# Authentication & Security # Authentication & Security
# ============================================================================= # =============================================================================
"django-allauth>=65.3.0", "django-allauth>=65.3.0",
"dj-rest-auth>=7.0.0",
"djangorestframework-simplejwt>=5.5.1", "djangorestframework-simplejwt>=5.5.1",
"pyjwt>=2.10.1", "pyjwt>=2.10.1",
"cryptography>=44.0.0", "cryptography>=44.0.0",
@@ -58,7 +57,6 @@ dependencies = [
# Database & History Tracking # Database & History Tracking
# ============================================================================= # =============================================================================
"django-pghistory>=3.5.2", "django-pghistory>=3.5.2",
"django-fsm>=2.8.1",
"django-fsm-log>=3.1.0", "django-fsm-log>=3.1.0",
# ============================================================================= # =============================================================================
# Monitoring & Observability # Monitoring & Observability
@@ -79,6 +77,9 @@ dependencies = [
"django-turnstile>=0.1.2", "django-turnstile>=0.1.2",
"fido2>=2.0.0", "fido2>=2.0.0",
"qrcode[pil]>=8.2", "qrcode[pil]>=8.2",
"httpx>=0.28.1",
"django-fsm-2>=4.1.0",
"django-notifications-hq>=1.8.3",
] ]
[dependency-groups] [dependency-groups]
@@ -90,6 +91,7 @@ dev = [
"rope>=1.14.0", "rope>=1.14.0",
"ruff>=0.9.2", "ruff>=0.9.2",
"pyright>=1.1.405", "pyright>=1.1.405",
"factory-boy>=3.3.3",
] ]
test = [ test = [
"pytest>=8.3.5", "pytest>=8.3.5",

93
docs/ENDPOINT_MAPPING.md Normal file
View File

@@ -0,0 +1,93 @@
# Supabase → Django Endpoint Mapping
## Overview
This document maps all Supabase endpoints to their Django REST Framework equivalents.
---
## Edge Function Mappings
### Images
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `upload-image` | `/api/v1/images/generate-upload-url/` | POST | ✅ |
| `delete-image` | `/api/v1/images/delete/` | POST | ✅ |
| `generate-og-image` | `/api/v1/images/og-image/` | POST | ✅ |
### Location/Maps
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `detect-location` | `/api/v1/maps/detect-location/` | POST | ✅ |
| `enrich-location` | `/api/v1/maps/enrich-location/` | POST | ✅ |
| `search-location` | `/api/v1/maps/search-location/` | POST | ✅ |
### Notifications (Django Native)
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `get-notifications` | `/api/v1/notifications/` | GET | ✅ |
| `mark-notification-read` | `/api/v1/notifications/mark-read/` | POST | ✅ |
| `get-unread-count` | `/api/v1/notifications/unread-count/` | GET | ✅ |
### Moderation
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `process-bulk-approval` | `/api/v1/moderation/api/bulk-operations/` | POST | ✅ |
| `claim-submission` | `/api/v1/moderation/api/queue/` | POST | ✅ |
| `convert-submission-to-edit` | `/api/v1/moderation/api/edit-submissions/` | POST | ✅ |
### Auth/MFA
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `mfa-unenroll` | `/api/v1/auth/mfa/totp/deactivate/` | POST | ✅ |
| `process-oauth-profile` | `/api/v1/auth/social/process-profile/` | POST | ✅ |
### Account Management
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `cancel-account-deletion` | `/api/v1/accounts/delete-account/cancel/` | POST | ✅ |
| `confirm-account-deletion` | `/api/v1/accounts/delete-account/verify/` | POST | ✅ |
| `cancel-email-change` | `/api/v1/accounts/email-change/cancel/` | POST | ✅ |
| `export-user-data` | `/api/v1/accounts/data-export/` | POST | ✅ |
### Admin Dashboard
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `detect-anomalies` | `/api/v1/admin/anomalies/detect/` | POST | ✅ |
| `collect-metrics` | `/api/v1/admin/metrics/collect/` | POST | ✅ |
| `pipeline-integrity-scan` | `/api/v1/admin/pipeline/integrity-scan/` | POST | ✅ |
| `task-status` | `/api/v1/admin/tasks/status/` | GET | ✅ |
---
## Table Mappings
| Supabase Table | Django Endpoint |
|----------------|-----------------|
| `parks` | `/api/v1/parks/` |
| `rides` | `/api/v1/rides/` |
| `companies` | `/api/v1/companies/` |
| `ride_models` | `/api/v1/rides/models/` |
| `profiles` | `/api/v1/accounts/profiles/` |
| `reviews` | `/api/v1/reviews/` |
| `photos` | `/api/v1/media/photos/` |
| `content_submissions` | `/api/v1/moderation/submissions/` |
| `ride_credits` | `/api/v1/accounts/credits/` |
---
## Scheduled Tasks (Celery)
| Supabase Function | Celery Task |
|-------------------|-------------|
| `process-scheduled-deletions` | `apps.core.tasks.scheduled` |
| `process-closing-entities` | `apps.core.tasks.scheduled` |
| `cleanup-orphaned-images` | `apps.core.tasks.scheduled` |
---
## Deprecated
| Function | Status |
|----------|--------|
| `migrate-novu-users` | 🚫 Replaced by Django native |
| `novu-webhook` | 🚫 Replaced by Django native |

338
uv.lock generated
View File

@@ -20,6 +20,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" }, { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" },
] ]
[[package]]
name = "anyio"
version = "4.12.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
]
[[package]] [[package]]
name = "asgiref" name = "asgiref"
version = "3.11.0" version = "3.11.0"
@@ -122,7 +134,7 @@ wheels = [
[[package]] [[package]]
name = "celery" name = "celery"
version = "5.6.0" version = "5.6.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "billiard" }, { name = "billiard" },
@@ -130,24 +142,23 @@ dependencies = [
{ name = "click-didyoumean" }, { name = "click-didyoumean" },
{ name = "click-plugins" }, { name = "click-plugins" },
{ name = "click-repl" }, { name = "click-repl" },
{ name = "exceptiongroup" },
{ name = "kombu" }, { name = "kombu" },
{ name = "python-dateutil" }, { name = "python-dateutil" },
{ name = "tzlocal" }, { name = "tzlocal" },
{ name = "vine" }, { name = "vine" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/ad/5f/b681ae3c89290d2ea6562ea96b40f5af6f6fc5f7743e2cd1a19e47721548/celery-5.6.0.tar.gz", hash = "sha256:641405206042d52ae460e4e9751a2e31b06cf80ab836fcf92e0b9311d7ea8113", size = 1712522, upload-time = "2025-11-30T17:39:46.282Z" } sdist = { url = "https://files.pythonhosted.org/packages/8f/9d/3d13596519cfa7207a6f9834f4b082554845eb3cd2684b5f8535d50c7c44/celery-5.6.2.tar.gz", hash = "sha256:4a8921c3fcf2ad76317d3b29020772103581ed2454c4c042cc55dcc43585009b", size = 1718802, upload-time = "2026-01-04T12:35:58.012Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/01/4e/53a125038d6a814491a0ae3457435c13cf8821eb602292cf9db37ce35f62/celery-5.6.0-py3-none-any.whl", hash = "sha256:33cf01477b175017fc8f22c5ee8a65157591043ba8ca78a443fe703aa910f581", size = 444561, upload-time = "2025-11-30T17:39:44.314Z" }, { url = "https://files.pythonhosted.org/packages/dd/bd/9ecd619e456ae4ba73b6583cc313f26152afae13e9a82ac4fe7f8856bfd1/celery-5.6.2-py3-none-any.whl", hash = "sha256:3ffafacbe056951b629c7abcf9064c4a2366de0bdfc9fdba421b97ebb68619a5", size = 445502, upload-time = "2026-01-04T12:35:55.894Z" },
] ]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2025.11.12" version = "2026.1.4"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
] ]
[[package]] [[package]]
@@ -296,63 +307,63 @@ wheels = [
[[package]] [[package]]
name = "coverage" name = "coverage"
version = "7.13.0" version = "7.13.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" } sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/cc/bce226595eb3bf7d13ccffe154c3c487a22222d87ff018525ab4dd2e9542/coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf", size = 218297, upload-time = "2025-12-08T13:13:10.977Z" }, { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" },
{ url = "https://files.pythonhosted.org/packages/3b/9f/73c4d34600aae03447dff3d7ad1d0ac649856bfb87d1ca7d681cfc913f9e/coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a", size = 218673, upload-time = "2025-12-08T13:13:12.562Z" }, { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" },
{ url = "https://files.pythonhosted.org/packages/63/ab/8fa097db361a1e8586535ae5073559e6229596b3489ec3ef2f5b38df8cb2/coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74", size = 249652, upload-time = "2025-12-08T13:13:13.909Z" }, { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" },
{ url = "https://files.pythonhosted.org/packages/90/3a/9bfd4de2ff191feb37ef9465855ca56a6f2f30a3bca172e474130731ac3d/coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6", size = 252251, upload-time = "2025-12-08T13:13:15.553Z" }, { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" },
{ url = "https://files.pythonhosted.org/packages/df/61/b5d8105f016e1b5874af0d7c67542da780ccd4a5f2244a433d3e20ceb1ad/coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b", size = 253492, upload-time = "2025-12-08T13:13:16.849Z" }, { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" },
{ url = "https://files.pythonhosted.org/packages/f3/b8/0fad449981803cc47a4694768b99823fb23632150743f9c83af329bb6090/coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232", size = 249850, upload-time = "2025-12-08T13:13:18.142Z" }, { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" },
{ url = "https://files.pythonhosted.org/packages/9a/e9/8d68337c3125014d918cf4327d5257553a710a2995a6a6de2ac77e5aa429/coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971", size = 251633, upload-time = "2025-12-08T13:13:19.56Z" }, { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" },
{ url = "https://files.pythonhosted.org/packages/55/14/d4112ab26b3a1bc4b3c1295d8452dcf399ed25be4cf649002fb3e64b2d93/coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d", size = 249586, upload-time = "2025-12-08T13:13:20.883Z" }, { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" },
{ url = "https://files.pythonhosted.org/packages/2c/a9/22b0000186db663b0d82f86c2f1028099ae9ac202491685051e2a11a5218/coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137", size = 249412, upload-time = "2025-12-08T13:13:22.22Z" }, { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" },
{ url = "https://files.pythonhosted.org/packages/a1/2e/42d8e0d9e7527fba439acdc6ed24a2b97613b1dc85849b1dd935c2cffef0/coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511", size = 251191, upload-time = "2025-12-08T13:13:23.899Z" }, { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" },
{ url = "https://files.pythonhosted.org/packages/a4/af/8c7af92b1377fd8860536aadd58745119252aaaa71a5213e5a8e8007a9f5/coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1", size = 220829, upload-time = "2025-12-08T13:13:25.182Z" }, { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" },
{ url = "https://files.pythonhosted.org/packages/58/f9/725e8bf16f343d33cbe076c75dc8370262e194ff10072c0608b8e5cf33a3/coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a", size = 221640, upload-time = "2025-12-08T13:13:26.836Z" }, { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" },
{ url = "https://files.pythonhosted.org/packages/8a/ff/e98311000aa6933cc79274e2b6b94a2fe0fe3434fca778eba82003675496/coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6", size = 220269, upload-time = "2025-12-08T13:13:28.116Z" }, { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" },
{ url = "https://files.pythonhosted.org/packages/cf/cf/bbaa2e1275b300343ea865f7d424cc0a2e2a1df6925a070b2b2d5d765330/coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a", size = 218990, upload-time = "2025-12-08T13:13:29.463Z" }, { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" },
{ url = "https://files.pythonhosted.org/packages/21/1d/82f0b3323b3d149d7672e7744c116e9c170f4957e0c42572f0366dbb4477/coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8", size = 219340, upload-time = "2025-12-08T13:13:31.524Z" }, { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" },
{ url = "https://files.pythonhosted.org/packages/fb/e3/fe3fd4702a3832a255f4d43013eacb0ef5fc155a5960ea9269d8696db28b/coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053", size = 260638, upload-time = "2025-12-08T13:13:32.965Z" }, { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" },
{ url = "https://files.pythonhosted.org/packages/ad/01/63186cb000307f2b4da463f72af9b85d380236965574c78e7e27680a2593/coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071", size = 262705, upload-time = "2025-12-08T13:13:34.378Z" }, { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" },
{ url = "https://files.pythonhosted.org/packages/7c/a1/c0dacef0cc865f2455d59eed3548573ce47ed603205ffd0735d1d78b5906/coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e", size = 265125, upload-time = "2025-12-08T13:13:35.73Z" }, { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" },
{ url = "https://files.pythonhosted.org/packages/ef/92/82b99223628b61300bd382c205795533bed021505eab6dd86e11fb5d7925/coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493", size = 259844, upload-time = "2025-12-08T13:13:37.69Z" }, { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" },
{ url = "https://files.pythonhosted.org/packages/cf/2c/89b0291ae4e6cd59ef042708e1c438e2290f8c31959a20055d8768349ee2/coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0", size = 262700, upload-time = "2025-12-08T13:13:39.525Z" }, { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" },
{ url = "https://files.pythonhosted.org/packages/bf/f9/a5f992efae1996245e796bae34ceb942b05db275e4b34222a9a40b9fbd3b/coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e", size = 260321, upload-time = "2025-12-08T13:13:41.172Z" }, { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" },
{ url = "https://files.pythonhosted.org/packages/4c/89/a29f5d98c64fedbe32e2ac3c227fbf78edc01cc7572eee17d61024d89889/coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c", size = 259222, upload-time = "2025-12-08T13:13:43.282Z" }, { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" },
{ url = "https://files.pythonhosted.org/packages/b3/c3/940fe447aae302a6701ee51e53af7e08b86ff6eed7631e5740c157ee22b9/coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e", size = 261411, upload-time = "2025-12-08T13:13:44.72Z" }, { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" },
{ url = "https://files.pythonhosted.org/packages/eb/31/12a4aec689cb942a89129587860ed4d0fd522d5fda81237147fde554b8ae/coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46", size = 221505, upload-time = "2025-12-08T13:13:46.332Z" }, { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" },
{ url = "https://files.pythonhosted.org/packages/65/8c/3b5fe3259d863572d2b0827642c50c3855d26b3aefe80bdc9eba1f0af3b0/coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39", size = 222569, upload-time = "2025-12-08T13:13:47.79Z" }, { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" },
{ url = "https://files.pythonhosted.org/packages/b0/39/f71fa8316a96ac72fc3908839df651e8eccee650001a17f2c78cdb355624/coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e", size = 220841, upload-time = "2025-12-08T13:13:49.243Z" }, { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" },
{ url = "https://files.pythonhosted.org/packages/f8/4b/9b54bedda55421449811dcd5263a2798a63f48896c24dfb92b0f1b0845bd/coverage-7.13.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:453b7ec753cf5e4356e14fe858064e5520c460d3bbbcb9c35e55c0d21155c256", size = 218343, upload-time = "2025-12-08T13:13:50.811Z" }, { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" },
{ url = "https://files.pythonhosted.org/packages/59/df/c3a1f34d4bba2e592c8979f924da4d3d4598b0df2392fbddb7761258e3dc/coverage-7.13.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:af827b7cbb303e1befa6c4f94fd2bf72f108089cfa0f8abab8f4ca553cf5ca5a", size = 218672, upload-time = "2025-12-08T13:13:52.284Z" }, { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" },
{ url = "https://files.pythonhosted.org/packages/07/62/eec0659e47857698645ff4e6ad02e30186eb8afd65214fd43f02a76537cb/coverage-7.13.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9987a9e4f8197a1000280f7cc089e3ea2c8b3c0a64d750537809879a7b4ceaf9", size = 249715, upload-time = "2025-12-08T13:13:53.791Z" }, { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" },
{ url = "https://files.pythonhosted.org/packages/23/2d/3c7ff8b2e0e634c1f58d095f071f52ed3c23ff25be524b0ccae8b71f99f8/coverage-7.13.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3188936845cd0cb114fa6a51842a304cdbac2958145d03be2377ec41eb285d19", size = 252225, upload-time = "2025-12-08T13:13:55.274Z" }, { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" },
{ url = "https://files.pythonhosted.org/packages/aa/ac/fb03b469d20e9c9a81093575003f959cf91a4a517b783aab090e4538764b/coverage-7.13.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2bdb3babb74079f021696cb46b8bb5f5661165c385d3a238712b031a12355be", size = 253559, upload-time = "2025-12-08T13:13:57.161Z" }, { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" },
{ url = "https://files.pythonhosted.org/packages/29/62/14afa9e792383c66cc0a3b872a06ded6e4ed1079c7d35de274f11d27064e/coverage-7.13.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7464663eaca6adba4175f6c19354feea61ebbdd735563a03d1e472c7072d27bb", size = 249724, upload-time = "2025-12-08T13:13:58.692Z" }, { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" },
{ url = "https://files.pythonhosted.org/packages/31/b7/333f3dab2939070613696ab3ee91738950f0467778c6e5a5052e840646b7/coverage-7.13.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8069e831f205d2ff1f3d355e82f511eb7c5522d7d413f5db5756b772ec8697f8", size = 251582, upload-time = "2025-12-08T13:14:00.642Z" }, { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" },
{ url = "https://files.pythonhosted.org/packages/81/cb/69162bda9381f39b2287265d7e29ee770f7c27c19f470164350a38318764/coverage-7.13.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6fb2d5d272341565f08e962cce14cdf843a08ac43bd621783527adb06b089c4b", size = 249538, upload-time = "2025-12-08T13:14:02.556Z" }, { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" },
{ url = "https://files.pythonhosted.org/packages/e0/76/350387b56a30f4970abe32b90b2a434f87d29f8b7d4ae40d2e8a85aacfb3/coverage-7.13.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:5e70f92ef89bac1ac8a99b3324923b4749f008fdbd7aa9cb35e01d7a284a04f9", size = 249349, upload-time = "2025-12-08T13:14:04.015Z" }, { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" },
{ url = "https://files.pythonhosted.org/packages/86/0d/7f6c42b8d59f4c7e43ea3059f573c0dcfed98ba46eb43c68c69e52ae095c/coverage-7.13.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4b5de7d4583e60d5fd246dd57fcd3a8aa23c6e118a8c72b38adf666ba8e7e927", size = 251011, upload-time = "2025-12-08T13:14:05.505Z" }, { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" },
{ url = "https://files.pythonhosted.org/packages/d7/f1/4bb2dff379721bb0b5c649d5c5eaf438462cad824acf32eb1b7ca0c7078e/coverage-7.13.0-cp314-cp314-win32.whl", hash = "sha256:a6c6e16b663be828a8f0b6c5027d36471d4a9f90d28444aa4ced4d48d7d6ae8f", size = 221091, upload-time = "2025-12-08T13:14:07.127Z" }, { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" },
{ url = "https://files.pythonhosted.org/packages/ba/44/c239da52f373ce379c194b0ee3bcc121020e397242b85f99e0afc8615066/coverage-7.13.0-cp314-cp314-win_amd64.whl", hash = "sha256:0900872f2fdb3ee5646b557918d02279dc3af3dfb39029ac4e945458b13f73bc", size = 221904, upload-time = "2025-12-08T13:14:08.542Z" }, { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" },
{ url = "https://files.pythonhosted.org/packages/89/1f/b9f04016d2a29c2e4a0307baefefad1a4ec5724946a2b3e482690486cade/coverage-7.13.0-cp314-cp314-win_arm64.whl", hash = "sha256:3a10260e6a152e5f03f26db4a407c4c62d3830b9af9b7c0450b183615f05d43b", size = 220480, upload-time = "2025-12-08T13:14:10.958Z" }, { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" },
{ url = "https://files.pythonhosted.org/packages/16/d4/364a1439766c8e8647860584171c36010ca3226e6e45b1753b1b249c5161/coverage-7.13.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:9097818b6cc1cfb5f174e3263eba4a62a17683bcfe5c4b5d07f4c97fa51fbf28", size = 219074, upload-time = "2025-12-08T13:14:13.345Z" }, { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" },
{ url = "https://files.pythonhosted.org/packages/ce/f4/71ba8be63351e099911051b2089662c03d5671437a0ec2171823c8e03bec/coverage-7.13.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0018f73dfb4301a89292c73be6ba5f58722ff79f51593352759c1790ded1cabe", size = 219342, upload-time = "2025-12-08T13:14:15.02Z" }, { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" },
{ url = "https://files.pythonhosted.org/packages/5e/25/127d8ed03d7711a387d96f132589057213e3aef7475afdaa303412463f22/coverage-7.13.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:166ad2a22ee770f5656e1257703139d3533b4a0b6909af67c6b4a3adc1c98657", size = 260713, upload-time = "2025-12-08T13:14:16.907Z" }, { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" },
{ url = "https://files.pythonhosted.org/packages/fd/db/559fbb6def07d25b2243663b46ba9eb5a3c6586c0c6f4e62980a68f0ee1c/coverage-7.13.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f6aaef16d65d1787280943f1c8718dc32e9cf141014e4634d64446702d26e0ff", size = 262825, upload-time = "2025-12-08T13:14:18.68Z" }, { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" },
{ url = "https://files.pythonhosted.org/packages/37/99/6ee5bf7eff884766edb43bd8736b5e1c5144d0fe47498c3779326fe75a35/coverage-7.13.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e999e2dcc094002d6e2c7bbc1fb85b58ba4f465a760a8014d97619330cdbbbf3", size = 265233, upload-time = "2025-12-08T13:14:20.55Z" }, { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" },
{ url = "https://files.pythonhosted.org/packages/d8/90/92f18fe0356ea69e1f98f688ed80cec39f44e9f09a1f26a1bbf017cc67f2/coverage-7.13.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:00c3d22cf6fb1cf3bf662aaaa4e563be8243a5ed2630339069799835a9cc7f9b", size = 259779, upload-time = "2025-12-08T13:14:22.367Z" }, { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" },
{ url = "https://files.pythonhosted.org/packages/90/5d/b312a8b45b37a42ea7d27d7d3ff98ade3a6c892dd48d1d503e773503373f/coverage-7.13.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22ccfe8d9bb0d6134892cbe1262493a8c70d736b9df930f3f3afae0fe3ac924d", size = 262700, upload-time = "2025-12-08T13:14:24.309Z" }, { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" },
{ url = "https://files.pythonhosted.org/packages/63/f8/b1d0de5c39351eb71c366f872376d09386640840a2e09b0d03973d791e20/coverage-7.13.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:9372dff5ea15930fea0445eaf37bbbafbc771a49e70c0aeed8b4e2c2614cc00e", size = 260302, upload-time = "2025-12-08T13:14:26.068Z" }, { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" },
{ url = "https://files.pythonhosted.org/packages/aa/7c/d42f4435bc40c55558b3109a39e2d456cddcec37434f62a1f1230991667a/coverage-7.13.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:69ac2c492918c2461bc6ace42d0479638e60719f2a4ef3f0815fa2df88e9f940", size = 259136, upload-time = "2025-12-08T13:14:27.604Z" }, { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" },
{ url = "https://files.pythonhosted.org/packages/b8/d3/23413241dc04d47cfe19b9a65b32a2edd67ecd0b817400c2843ebc58c847/coverage-7.13.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:739c6c051a7540608d097b8e13c76cfa85263ced467168dc6b477bae3df7d0e2", size = 261467, upload-time = "2025-12-08T13:14:29.09Z" }, { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" },
{ url = "https://files.pythonhosted.org/packages/13/e6/6e063174500eee216b96272c0d1847bf215926786f85c2bd024cf4d02d2f/coverage-7.13.0-cp314-cp314t-win32.whl", hash = "sha256:fe81055d8c6c9de76d60c94ddea73c290b416e061d40d542b24a5871bad498b7", size = 221875, upload-time = "2025-12-08T13:14:31.106Z" }, { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" },
{ url = "https://files.pythonhosted.org/packages/3b/46/f4fb293e4cbe3620e3ac2a3e8fd566ed33affb5861a9b20e3dd6c1896cbc/coverage-7.13.0-cp314-cp314t-win_amd64.whl", hash = "sha256:445badb539005283825959ac9fa4a28f712c214b65af3a2c464f1adc90f5fcbc", size = 222982, upload-time = "2025-12-08T13:14:33.1Z" }, { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" },
{ url = "https://files.pythonhosted.org/packages/68/62/5b3b9018215ed9733fbd1ae3b2ed75c5de62c3b55377a52cae732e1b7805/coverage-7.13.0-cp314-cp314t-win_arm64.whl", hash = "sha256:de7f6748b890708578fc4b7bb967d810aeb6fcc9bff4bb77dbca77dab2f9df6a", size = 221016, upload-time = "2025-12-08T13:14:34.601Z" }, { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" },
{ url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" }, { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" },
] ]
[[package]] [[package]]
@@ -425,26 +436,16 @@ wheels = [
[[package]] [[package]]
name = "dj-database-url" name = "dj-database-url"
version = "3.0.1" version = "3.1.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "django" }, { name = "django" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/75/05/2ec51009f4ce424877dbd8ad95868faec0c3494ed0ff1635f9ab53d9e0ee/dj_database_url-3.0.1.tar.gz", hash = "sha256:8994961efb888fc6bf8c41550870c91f6f7691ca751888ebaa71442b7f84eff8", size = 12556, upload-time = "2025-07-02T09:40:11.424Z" } sdist = { url = "https://files.pythonhosted.org/packages/95/c6/88676a7333fb7c668e626b55f8bfc8527dd863973eb1c40412b95d27747d/dj_database_url-3.1.0.tar.gz", hash = "sha256:d80218426b83f9302c8d27d4fccf52de5cf0cab179f0645fb2839f37605d1353", size = 7924, upload-time = "2026-01-04T09:18:32.693Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/aa/5e/86a43c6fdaa41c12d58e4ff3ebbfd6b71a7cb0360a08614e3754ef2e9afb/dj_database_url-3.0.1-py3-none-any.whl", hash = "sha256:43950018e1eeea486bf11136384aec0fe55b29fe6fd8a44553231b85661d9383", size = 8808, upload-time = "2025-07-02T09:40:26.326Z" }, { url = "https://files.pythonhosted.org/packages/68/1b/e84f7472ab0bdacc3fd09556eb4dd40d88246941d465cc103b36a8dabcd8/dj_database_url-3.1.0-py3-none-any.whl", hash = "sha256:155a56fbbecbaaf1348ccd73bf29138b4c9988363ba08261a0f0145e392e638c", size = 8849, upload-time = "2026-01-04T09:18:43.77Z" },
] ]
[[package]]
name = "dj-rest-auth"
version = "7.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
{ name = "djangorestframework" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b7/19/00150c8bedf7b6d4c44ecf7c2be9e58ae2203b42741ca734152d34f549f1/dj-rest-auth-7.0.1.tar.gz", hash = "sha256:3f8c744cbcf05355ff4bcbef0c8a63645da38e29a0fdef3c3332d4aced52fb90", size = 220541, upload-time = "2025-01-04T23:37:38.688Z" }
[[package]] [[package]]
name = "django" name = "django"
version = "5.2.9" version = "5.2.9"
@@ -619,6 +620,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/87/ad5a38d1a8241b485835c6e6158634b29e885be78424ca42fb63df15b965/django_fsm-3.0.1-py2.py3-none-any.whl", hash = "sha256:ea07be2da221efa5cb8743cc94e0bb64fd962adff594f82269040eb4708c30c6", size = 12454, upload-time = "2025-10-07T16:33:26.218Z" }, { url = "https://files.pythonhosted.org/packages/fc/87/ad5a38d1a8241b485835c6e6158634b29e885be78424ca42fb63df15b965/django_fsm-3.0.1-py2.py3-none-any.whl", hash = "sha256:ea07be2da221efa5cb8743cc94e0bb64fd962adff594f82269040eb4708c30c6", size = 12454, upload-time = "2025-10-07T16:33:26.218Z" },
] ]
[[package]]
name = "django-fsm-2"
version = "4.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7e/8f/d1ec9bafdfd7830a40ab1f72887cd931e07f43552b03869495598cb1170c/django_fsm_2-4.1.0.tar.gz", hash = "sha256:5fbe34839f315a06e29052ded8868292fc469f8f37c8d4d88427ad15a92680ae", size = 17695, upload-time = "2025-11-03T15:03:43.477Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/97/f4ce5f7b3f389e03c259b0501fc28a9d1db359b09776251130ae9c5e9590/django_fsm_2-4.1.0-py3-none-any.whl", hash = "sha256:58e20abe633c1375d80aca55fd66ca2431794d32f44751f333f386de869f0e6f", size = 14976, upload-time = "2025-11-03T15:03:41.938Z" },
]
[[package]] [[package]]
name = "django-fsm-log" name = "django-fsm-log"
version = "3.1.0" version = "3.1.0"
@@ -670,6 +683,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/be/c00a3c861e5356105176c9f120fad3ff95698c1d61e172dd0a0a64acdb9b/django_htmx_autocomplete-1.0.15-py3-none-any.whl", hash = "sha256:c895ea457c0b2a79d14a0b6ead4fba8270fd910ad0d7a0fcbd3ae0b2cb8b6a1e", size = 54059, upload-time = "2025-12-19T17:46:49.595Z" }, { url = "https://files.pythonhosted.org/packages/bf/be/c00a3c861e5356105176c9f120fad3ff95698c1d61e172dd0a0a64acdb9b/django_htmx_autocomplete-1.0.15-py3-none-any.whl", hash = "sha256:c895ea457c0b2a79d14a0b6ead4fba8270fd910ad0d7a0fcbd3ae0b2cb8b6a1e", size = 54059, upload-time = "2025-12-19T17:46:49.595Z" },
] ]
[[package]]
name = "django-model-utils"
version = "5.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/81/60/5e232c32a2c977cc1af8c70a38ef436598bc649ad89c2c4568454edde2c9/django_model_utils-5.0.0.tar.gz", hash = "sha256:041cdd6230d2fbf6cd943e1969318bce762272077f4ecd333ab2263924b4e5eb", size = 80559, upload-time = "2024-09-04T11:35:22.858Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/13/87a42048700c54bfce35900a34e2031245132775fb24363fc0e33664aa9c/django_model_utils-5.0.0-py3-none-any.whl", hash = "sha256:fec78e6c323d565a221f7c4edc703f4567d7bb1caeafe1acd16a80c5ff82056b", size = 42630, upload-time = "2024-09-04T11:36:23.166Z" },
]
[[package]]
name = "django-notifications-hq"
version = "1.8.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
{ name = "django-model-utils" },
{ name = "jsonfield" },
{ name = "pytz" },
{ name = "swapper" },
]
sdist = { url = "https://files.pythonhosted.org/packages/36/18/1b42038963d8b0aeeb380a24ff86a650067833cf6d2d87b678be2d27c609/django-notifications-hq-1.8.3.tar.gz", hash = "sha256:0f4b216bb382b7c7c4eef273eb211e59c1c6a0ea38cba6077415ac031d330725", size = 32238, upload-time = "2023-10-19T00:03:42.703Z" }
[[package]] [[package]]
name = "django-pghistory" name = "django-pghistory"
version = "3.9.1" version = "3.9.1"
@@ -752,16 +790,16 @@ wheels = [
[[package]] [[package]]
name = "django-tailwind-cli" name = "django-tailwind-cli"
version = "4.4.2" version = "4.5.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "django" }, { name = "django" },
{ name = "django-typer" }, { name = "django-typer" },
{ name = "semver" }, { name = "semver" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/86/09/8359181201a03871e34d8d47685b15244e778c8ece9f209a86d543cb7767/django_tailwind_cli-4.4.2.tar.gz", hash = "sha256:c3ad962710fc95acf1bb45b1b7747fe549d50ff99228cadc4cf2f28fd8d4e8ce", size = 97420, upload-time = "2025-09-23T15:07:23.876Z" } sdist = { url = "https://files.pythonhosted.org/packages/f0/6d/ad632a539d7cc74a07e43f6292dae66ea2c8944c637da22945768cc9b846/django_tailwind_cli-4.5.1.tar.gz", hash = "sha256:e3cdacab1d7e81f08c3ec44a4e8217c7b3a1e986825c1cd4c2acca58fbc584ac", size = 99414, upload-time = "2025-12-29T17:11:55.275Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/cf/08/8b8c7c4a4f9f4ad3c4815f53c4f98de19b5c37803a9af767d0cebd779af4/django_tailwind_cli-4.4.2-py3-none-any.whl", hash = "sha256:8d1d69ae19209b5d6fd66150d916edbced1d154eee55895d807441dbfe282cae", size = 31688, upload-time = "2025-09-23T15:07:22.16Z" }, { url = "https://files.pythonhosted.org/packages/28/3f/e1fa6aa71e15b2c7f1d4807657be2dfb3b17c7ed9b9595fea30e8a8f36bc/django_tailwind_cli-4.5.1-py3-none-any.whl", hash = "sha256:72991e93c070da864c63d2af96cf601b70af51fadfae786b9326b2ff0e124a72", size = 34275, upload-time = "2025-12-29T17:11:53.599Z" },
] ]
[[package]] [[package]]
@@ -802,11 +840,14 @@ wheels = [
[[package]] [[package]]
name = "django-widget-tweaks" name = "django-widget-tweaks"
version = "1.5.0" version = "1.5.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a5/fe/26eb92fba83844e71bbec0ced7fc2e843e5990020e3cc676925204031654/django-widget-tweaks-1.5.0.tar.gz", hash = "sha256:1c2180681ebb994e922c754804c7ffebbe1245014777ac47897a81f57cc629c7", size = 14767, upload-time = "2023-08-25T15:29:12.778Z" } dependencies = [
{ name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/01/6d/d1b5a3ae3bccfee96e10315373298cea51e5e0d6853d022181b7b0861a4d/django_widget_tweaks-1.5.1.tar.gz", hash = "sha256:084acc9eeb5a3208f2670522de6284287973e54d54488ce6d402f4b99bc5f452", size = 16233, upload-time = "2026-01-02T12:46:28.907Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/46/6a/6cb6deb5c38b785c77c3ba66f53051eada49205979c407323eb666930915/django_widget_tweaks-1.5.0-py3-none-any.whl", hash = "sha256:a41b7b2f05bd44d673d11ebd6c09a96f1d013ee98121cb98c384fe84e33b881e", size = 8960, upload-time = "2023-08-25T15:29:05.644Z" }, { url = "https://files.pythonhosted.org/packages/64/6a/ad176284371005426b9a1c424e6cd77a9018ab1b17dc23948bfbeb2f6a21/django_widget_tweaks-1.5.1-py3-none-any.whl", hash = "sha256:3f5080f8365740fc1c14607498c975cbfed896dd0c40e1b563095716ee31e3b5", size = 9634, upload-time = "2026-01-02T12:46:02.18Z" },
] ]
[[package]] [[package]]
@@ -852,15 +893,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/32/d9/502c56fc3ca960075d00956283f1c44e8cafe433dada03f9ed2821f3073b/drf_spectacular-0.29.0-py3-none-any.whl", hash = "sha256:d1ee7c9535d89848affb4427347f7c4a22c5d22530b8842ef133d7b72e19b41a", size = 105433, upload-time = "2025-11-02T03:40:24.823Z" }, { url = "https://files.pythonhosted.org/packages/32/d9/502c56fc3ca960075d00956283f1c44e8cafe433dada03f9ed2821f3073b/drf_spectacular-0.29.0-py3-none-any.whl", hash = "sha256:d1ee7c9535d89848affb4427347f7c4a22c5d22530b8842ef133d7b72e19b41a", size = 105433, upload-time = "2025-11-02T03:40:24.823Z" },
] ]
[[package]]
name = "exceptiongroup"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
]
[[package]] [[package]]
name = "factory-boy" name = "factory-boy"
version = "3.3.3" version = "3.3.3"
@@ -875,14 +907,14 @@ wheels = [
[[package]] [[package]]
name = "faker" name = "faker"
version = "39.0.0" version = "40.1.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "tzdata" }, { name = "tzdata" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/30/b9/0897fb5888ddda099dc0f314a8a9afb5faa7e52eaf6865c00686dfb394db/faker-39.0.0.tar.gz", hash = "sha256:ddae46d3b27e01cea7894651d687b33bcbe19a45ef044042c721ceac6d3da0ff", size = 1941757, upload-time = "2025-12-17T19:19:04.762Z" } sdist = { url = "https://files.pythonhosted.org/packages/d7/1d/aa43ef59589ddf3647df918143f1bac9eb004cce1c43124ee3347061797d/faker-40.1.0.tar.gz", hash = "sha256:c402212a981a8a28615fea9120d789e3f6062c0c259a82bfb8dff5d273e539d2", size = 1948784, upload-time = "2025-12-29T18:06:00.659Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/eb/5a/26cdb1b10a55ac6eb11a738cea14865fa753606c4897d7be0f5dc230df00/faker-39.0.0-py3-none-any.whl", hash = "sha256:c72f1fca8f1a24b8da10fcaa45739135a19772218ddd61b86b7ea1b8c790dce7", size = 1980775, upload-time = "2025-12-17T19:19:02.926Z" }, { url = "https://files.pythonhosted.org/packages/fc/23/e22da510e1ec1488966330bf76d8ff4bd535cbfc93660eeb7657761a1bb2/faker-40.1.0-py3-none-any.whl", hash = "sha256:a616d35818e2a2387c297de80e2288083bc915e24b7e39d2fb5bc66cce3a929f", size = 1985317, upload-time = "2025-12-29T18:05:58.831Z" },
] ]
[[package]] [[package]]
@@ -1007,6 +1039,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" }, { url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" },
] ]
[[package]]
name = "httpcore"
version = "1.0.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
]
[[package]]
name = "httpx"
version = "0.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "certifi" },
{ name = "httpcore" },
{ name = "idna" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
]
[[package]] [[package]]
name = "idna" name = "idna"
version = "3.11" version = "3.11"
@@ -1034,6 +1094,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
] ]
[[package]]
name = "jsonfield"
version = "3.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fa/e9/537e105246dba81d898853dbbe17eb3edd23d47a35074b99fd4add6f1662/jsonfield-3.2.0.tar.gz", hash = "sha256:ca53871bc3308ae4f4cddc3b4f99ed5c6fc6abb1832fbfb499bc6da566c70e4a", size = 17156, upload-time = "2025-07-04T23:06:24.883Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/22/2e08e7b957f50e5eceefde018ce9ee88aceb5126231128d9c1cb8167c1c8/jsonfield-3.2.0-py3-none-any.whl", hash = "sha256:ca4f6bf89c819f293e77074d613c0021e3c4e8521be95c73d03caecb4372e1ee", size = 8316, upload-time = "2025-07-04T23:06:23.588Z" },
]
[[package]] [[package]]
name = "jsonschema" name = "jsonschema"
version = "4.25.1" version = "4.25.1"
@@ -1063,7 +1135,7 @@ wheels = [
[[package]] [[package]]
name = "kombu" name = "kombu"
version = "5.6.1" version = "5.6.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "amqp" }, { name = "amqp" },
@@ -1071,9 +1143,9 @@ dependencies = [
{ name = "tzdata" }, { name = "tzdata" },
{ name = "vine" }, { name = "vine" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/ac/05/749ada8e51718445d915af13f1d18bc4333848e8faa0cb234028a3328ec8/kombu-5.6.1.tar.gz", hash = "sha256:90f1febb57ad4f53ca327a87598191b2520e0c793c75ea3b88d98e3b111282e4", size = 471548, upload-time = "2025-11-25T11:07:33.504Z" } sdist = { url = "https://files.pythonhosted.org/packages/b6/a5/607e533ed6c83ae1a696969b8e1c137dfebd5759a2e9682e26ff1b97740b/kombu-5.6.2.tar.gz", hash = "sha256:8060497058066c6f5aed7c26d7cd0d3b574990b09de842a8c5aaed0b92cc5a55", size = 472594, upload-time = "2025-12-29T20:30:07.779Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/14/d6/943cf84117cd9ddecf6e1707a3f712a49fc64abdb8ac31b19132871af1dd/kombu-5.6.1-py3-none-any.whl", hash = "sha256:b69e3f5527ec32fc5196028a36376501682973e9620d6175d1c3d4eaf7e95409", size = 214141, upload-time = "2025-11-25T11:07:31.54Z" }, { url = "https://files.pythonhosted.org/packages/fb/0f/834427d8c03ff1d7e867d3db3d176470c64871753252b21b4f4897d1fa45/kombu-5.6.2-py3-none-any.whl", hash = "sha256:efcfc559da324d41d61ca311b0c64965ea35b4c55cc04ee36e55386145dace93", size = 214219, upload-time = "2025-12-29T20:30:05.74Z" },
] ]
[[package]] [[package]]
@@ -1285,30 +1357,30 @@ wheels = [
[[package]] [[package]]
name = "psutil" name = "psutil"
version = "7.2.0" version = "7.2.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/be/7c/31d1c3ceb1260301f87565f50689dc6da3db427ece1e1e012af22abca54e/psutil-7.2.0.tar.gz", hash = "sha256:2e4f8e1552f77d14dc96fb0f6240c5b34a37081c0889f0853b3b29a496e5ef64", size = 489863, upload-time = "2025-12-23T20:26:24.616Z" } sdist = { url = "https://files.pythonhosted.org/packages/73/cb/09e5184fb5fc0358d110fc3ca7f6b1d033800734d34cac10f4136cfac10e/psutil-7.2.1.tar.gz", hash = "sha256:f7583aec590485b43ca601dd9cea0dcd65bd7bb21d30ef4ddbf4ea6b5ed1bdd3", size = 490253, upload-time = "2025-12-29T08:26:00.169Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/8e/b35aae6ed19bc4e2286cac4832e4d522fcf00571867b0a85a3f77ef96a80/psutil-7.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c31e927555539132a00380c971816ea43d089bf4bd5f3e918ed8c16776d68474", size = 129593, upload-time = "2025-12-23T20:26:28.019Z" }, { url = "https://files.pythonhosted.org/packages/77/8e/f0c242053a368c2aa89584ecd1b054a18683f13d6e5a318fc9ec36582c94/psutil-7.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9f33bb525b14c3ea563b2fd521a84d2fa214ec59e3e6a2858f78d0844dd60d", size = 129624, upload-time = "2025-12-29T08:26:04.255Z" },
{ url = "https://files.pythonhosted.org/packages/61/a2/773d17d74e122bbffe08b97f73f2d4a01ef53fb03b98e61b8e4f64a9c6b9/psutil-7.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:db8e44e766cef86dea47d9a1fa535d38dc76449e5878a92f33683b7dba5bfcb2", size = 130104, upload-time = "2025-12-23T20:26:30.27Z" }, { url = "https://files.pythonhosted.org/packages/26/97/a58a4968f8990617decee234258a2b4fc7cd9e35668387646c1963e69f26/psutil-7.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81442dac7abfc2f4f4385ea9e12ddf5a796721c0f6133260687fec5c3780fa49", size = 130132, upload-time = "2025-12-29T08:26:06.228Z" },
{ url = "https://files.pythonhosted.org/packages/0d/e3/d3a9b3f4bd231abbd70a988beb2e3edd15306051bccbfc4472bd34a56e01/psutil-7.2.0-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85ef849ac92169dedc59a7ac2fb565f47b3468fbe1524bf748746bc21afb94c7", size = 180579, upload-time = "2025-12-23T20:26:32.628Z" }, { url = "https://files.pythonhosted.org/packages/db/6d/ed44901e830739af5f72a85fa7ec5ff1edea7f81bfbf4875e409007149bd/psutil-7.2.1-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ea46c0d060491051d39f0d2cff4f98d5c72b288289f57a21556cc7d504db37fc", size = 180612, upload-time = "2025-12-29T08:26:08.276Z" },
{ url = "https://files.pythonhosted.org/packages/66/f8/6c73044424aabe1b7824d4d4504029d406648286d8fe7ba8c4682e0d3042/psutil-7.2.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:26782bdbae2f5c14ce9ebe8ad2411dc2ca870495e0cd90f8910ede7fa5e27117", size = 183171, upload-time = "2025-12-23T20:26:34.972Z" }, { url = "https://files.pythonhosted.org/packages/c7/65/b628f8459bca4efbfae50d4bf3feaab803de9a160b9d5f3bd9295a33f0c2/psutil-7.2.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35630d5af80d5d0d49cfc4d64c1c13838baf6717a13effb35869a5919b854cdf", size = 183201, upload-time = "2025-12-29T08:26:10.622Z" },
{ url = "https://files.pythonhosted.org/packages/48/7d/76d7a863340885d41826562225a566683e653ee6c9ba03c9f3856afa7d80/psutil-7.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b7665f612d3b38a583391b95969667a53aaf6c5706dc27a602c9a4874fbf09e4", size = 139055, upload-time = "2025-12-23T20:26:36.848Z" }, { url = "https://files.pythonhosted.org/packages/fb/23/851cadc9764edcc18f0effe7d0bf69f727d4cf2442deb4a9f78d4e4f30f2/psutil-7.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:923f8653416604e356073e6e0bccbe7c09990acef442def2f5640dd0faa9689f", size = 139081, upload-time = "2025-12-29T08:26:12.483Z" },
{ url = "https://files.pythonhosted.org/packages/a0/48/200054ada0ae4872c8a71db54f3eb6a9af4101680ee6830d373b7fda526b/psutil-7.2.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4413373c174520ae28a24a8974ad8ce6b21f060d27dde94e25f8c73a7effe57a", size = 134737, upload-time = "2025-12-23T20:26:38.784Z" }, { url = "https://files.pythonhosted.org/packages/59/82/d63e8494ec5758029f31c6cb06d7d161175d8281e91d011a4a441c8a43b5/psutil-7.2.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cfbe6b40ca48019a51827f20d830887b3107a74a79b01ceb8cc8de4ccb17b672", size = 134767, upload-time = "2025-12-29T08:26:14.528Z" },
{ url = "https://files.pythonhosted.org/packages/44/86/98da45dff471b93ef5ce5bcaefa00e3038295a7880a77cf74018243d37fb/psutil-7.2.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2f2f53fd114e7946dfba3afb98c9b7c7f376009447360ca15bfb73f2066f84c7", size = 129692, upload-time = "2025-12-23T20:26:40.623Z" }, { url = "https://files.pythonhosted.org/packages/05/c2/5fb764bd61e40e1fe756a44bd4c21827228394c17414ade348e28f83cd79/psutil-7.2.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:494c513ccc53225ae23eec7fe6e1482f1b8a44674241b54561f755a898650679", size = 129716, upload-time = "2025-12-29T08:26:16.017Z" },
{ url = "https://files.pythonhosted.org/packages/50/ee/10eae91ba4ad071c92db3c178ba861f30406342de9f0ddbe6d51fd741236/psutil-7.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e65c41d7e60068f60ce43b31a3a7fc90deb0dfd34ffc824a2574c2e5279b377e", size = 130110, upload-time = "2025-12-23T20:26:42.569Z" }, { url = "https://files.pythonhosted.org/packages/c9/d2/935039c20e06f615d9ca6ca0ab756cf8408a19d298ffaa08666bc18dc805/psutil-7.2.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3fce5f92c22b00cdefd1645aa58ab4877a01679e901555067b1bd77039aa589f", size = 130133, upload-time = "2025-12-29T08:26:18.009Z" },
{ url = "https://files.pythonhosted.org/packages/87/3a/2b2897443d56fedbbc34ac68a0dc7d55faa05d555372a2f989109052f86d/psutil-7.2.0-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc66d21366850a4261412ce994ae9976bba9852dafb4f2fa60db68ed17ff5281", size = 181487, upload-time = "2025-12-23T20:26:44.633Z" }, { url = "https://files.pythonhosted.org/packages/77/69/19f1eb0e01d24c2b3eacbc2f78d3b5add8a89bf0bb69465bc8d563cc33de/psutil-7.2.1-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93f3f7b0bb07711b49626e7940d6fe52aa9940ad86e8f7e74842e73189712129", size = 181518, upload-time = "2025-12-29T08:26:20.241Z" },
{ url = "https://files.pythonhosted.org/packages/11/66/44308428f7333db42c5ea7390c52af1b38f59b80b80c437291f58b5dfdad/psutil-7.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e025d67b42b8f22b096d5d20f5171de0e0fefb2f0ce983a13c5a1b5ed9872706", size = 184320, upload-time = "2025-12-23T20:26:46.83Z" }, { url = "https://files.pythonhosted.org/packages/e1/6d/7e18b1b4fa13ad370787626c95887b027656ad4829c156bb6569d02f3262/psutil-7.2.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d34d2ca888208eea2b5c68186841336a7f5e0b990edec929be909353a202768a", size = 184348, upload-time = "2025-12-29T08:26:22.215Z" },
{ url = "https://files.pythonhosted.org/packages/18/28/d2feadc7f18e501c5ce687c377db7dca924585418fd694272b8e488ea99f/psutil-7.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:45f6b91f7ad63414d6454fd609e5e3556d0e1038d5d9c75a1368513bdf763f57", size = 140372, upload-time = "2025-12-23T20:26:49.334Z" }, { url = "https://files.pythonhosted.org/packages/98/60/1672114392dd879586d60dd97896325df47d9a130ac7401318005aab28ec/psutil-7.2.1-cp314-cp314t-win_amd64.whl", hash = "sha256:2ceae842a78d1603753561132d5ad1b2f8a7979cb0c283f5b52fb4e6e14b1a79", size = 140400, upload-time = "2025-12-29T08:26:23.993Z" },
{ url = "https://files.pythonhosted.org/packages/b2/1d/48381f5fd0425aa054c4ee3de24f50de3d6c347019f3aec75f357377d447/psutil-7.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87b18a19574139d60a546e88b5f5b9cbad598e26cdc790d204ab95d7024f03ee", size = 135400, upload-time = "2025-12-23T20:26:51.585Z" }, { url = "https://files.pythonhosted.org/packages/fb/7b/d0e9d4513c46e46897b46bcfc410d51fc65735837ea57a25170f298326e6/psutil-7.2.1-cp314-cp314t-win_arm64.whl", hash = "sha256:08a2f175e48a898c8eb8eace45ce01777f4785bc744c90aa2cc7f2fa5462a266", size = 135430, upload-time = "2025-12-29T08:26:25.999Z" },
{ url = "https://files.pythonhosted.org/packages/40/c5/a49160bf3e165b7b93a60579a353cf5d939d7f878fe5fd369110f1d18043/psutil-7.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:977a2fcd132d15cb05b32b2d85b98d087cad039b0ce435731670ba74da9e6133", size = 128116, upload-time = "2025-12-23T20:26:53.516Z" }, { url = "https://files.pythonhosted.org/packages/c5/cf/5180eb8c8bdf6a503c6919f1da28328bd1e6b3b1b5b9d5b01ae64f019616/psutil-7.2.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2e953fcfaedcfbc952b44744f22d16575d3aa78eb4f51ae74165b4e96e55f42", size = 128137, upload-time = "2025-12-29T08:26:27.759Z" },
{ url = "https://files.pythonhosted.org/packages/10/a1/c75feb480f60cd768fb6ed00ac362a16a33e5076ec8475a22d8162fb2659/psutil-7.2.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:24151011c21fadd94214d7139d7c6c54569290d7e553989bdf0eab73b13beb8c", size = 128925, upload-time = "2025-12-23T20:26:55.573Z" }, { url = "https://files.pythonhosted.org/packages/c5/2c/78e4a789306a92ade5000da4f5de3255202c534acdadc3aac7b5458fadef/psutil-7.2.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:05cc68dbb8c174828624062e73078e7e35406f4ca2d0866c272c2410d8ef06d1", size = 128947, upload-time = "2025-12-29T08:26:29.548Z" },
{ url = "https://files.pythonhosted.org/packages/12/ff/e93136587c00a543f4bc768b157fac2c47cd77b180d4f4e5c6efb6ea53a2/psutil-7.2.0-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91f211ba9279e7c61d9d8f84b713cfc38fa161cb0597d5cb3f1ca742f6848254", size = 154666, upload-time = "2025-12-23T20:26:57.312Z" }, { url = "https://files.pythonhosted.org/packages/29/f8/40e01c350ad9a2b3cb4e6adbcc8a83b17ee50dd5792102b6142385937db5/psutil-7.2.1-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e38404ca2bb30ed7267a46c02f06ff842e92da3bb8c5bfdadbd35a5722314d8", size = 154694, upload-time = "2025-12-29T08:26:32.147Z" },
{ url = "https://files.pythonhosted.org/packages/b8/dd/4c2de9c3827c892599d277a69d2224136800870a8a88a80981de905de28d/psutil-7.2.0-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f37415188b7ea98faf90fed51131181646c59098b077550246e2e092e127418b", size = 156109, upload-time = "2025-12-23T20:26:58.851Z" }, { url = "https://files.pythonhosted.org/packages/06/e4/b751cdf839c011a9714a783f120e6a86b7494eb70044d7d81a25a5cd295f/psutil-7.2.1-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab2b98c9fc19f13f59628d94df5cc4cc4844bc572467d113a8b517d634e362c6", size = 156136, upload-time = "2025-12-29T08:26:34.079Z" },
{ url = "https://files.pythonhosted.org/packages/81/3f/090943c682d3629968dd0b04826ddcbc760ee1379021dbe316e2ddfcd01b/psutil-7.2.0-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d12c7ce6ed1128cd81fd54606afa054ac7dbb9773469ebb58cf2f171c49f2ac", size = 148081, upload-time = "2025-12-23T20:27:01.318Z" }, { url = "https://files.pythonhosted.org/packages/44/ad/bbf6595a8134ee1e94a4487af3f132cef7fce43aef4a93b49912a48c3af7/psutil-7.2.1-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f78baafb38436d5a128f837fab2d92c276dfb48af01a240b861ae02b2413ada8", size = 148108, upload-time = "2025-12-29T08:26:36.225Z" },
{ url = "https://files.pythonhosted.org/packages/c4/88/c39648ebb8ec182d0364af53cdefe6eddb5f3872ba718b5855a8ff65d6d4/psutil-7.2.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ca0faef7976530940dcd39bc5382d0d0d5eb023b186a4901ca341bd8d8684151", size = 147376, upload-time = "2025-12-23T20:27:03.347Z" }, { url = "https://files.pythonhosted.org/packages/1c/15/dd6fd869753ce82ff64dcbc18356093471a5a5adf4f77ed1f805d473d859/psutil-7.2.1-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:99a4cd17a5fdd1f3d014396502daa70b5ec21bf4ffe38393e152f8e449757d67", size = 147402, upload-time = "2025-12-29T08:26:39.21Z" },
{ url = "https://files.pythonhosted.org/packages/01/a2/5b39e08bd9b27476bc7cce7e21c71a481ad60b81ffac49baf02687a50d7f/psutil-7.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:abdb74137ca232d20250e9ad471f58d500e7743bc8253ba0bfbf26e570c0e437", size = 136910, upload-time = "2025-12-23T20:27:05.289Z" }, { url = "https://files.pythonhosted.org/packages/34/68/d9317542e3f2b180c4306e3f45d3c922d7e86d8ce39f941bb9e2e9d8599e/psutil-7.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:b1b0671619343aa71c20ff9767eced0483e4fc9e1f489d50923738caf6a03c17", size = 136938, upload-time = "2025-12-29T08:26:41.036Z" },
{ url = "https://files.pythonhosted.org/packages/59/54/53839db1258c1eaeb4ded57ff202144ebc75b23facc05a74fd98d338b0c6/psutil-7.2.0-cp37-abi3-win_arm64.whl", hash = "sha256:284e71038b3139e7ab3834b63b3eb5aa5565fcd61a681ec746ef9a0a8c457fd2", size = 133807, upload-time = "2025-12-23T20:27:06.825Z" }, { url = "https://files.pythonhosted.org/packages/3e/73/2ce007f4198c80fcf2cb24c169884f833fe93fbc03d55d302627b094ee91/psutil-7.2.1-cp37-abi3-win_arm64.whl", hash = "sha256:0d67c1822c355aa6f7314d92018fb4268a76668a536f133599b91edd48759442", size = 133836, upload-time = "2025-12-29T08:26:43.086Z" },
] ]
[[package]] [[package]]
@@ -1580,6 +1652,15 @@ global = [
{ name = "platformdirs" }, { name = "platformdirs" },
] ]
[[package]]
name = "pytz"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
]
[[package]] [[package]]
name = "pyyaml" name = "pyyaml"
version = "6.0.3" version = "6.0.3"
@@ -1922,6 +2003,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/49/4b/359f28a903c13438ef59ebeee215fb25da53066db67b305c125f1c6d2a25/sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba", size = 46138, upload-time = "2025-12-19T07:17:46.573Z" }, { url = "https://files.pythonhosted.org/packages/49/4b/359f28a903c13438ef59ebeee215fb25da53066db67b305c125f1c6d2a25/sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba", size = 46138, upload-time = "2025-12-19T07:17:46.573Z" },
] ]
[[package]]
name = "swapper"
version = "1.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9b/3b/98ea1cfc04dc9805d58c5a96dd006f5d88a5a32b7b05e1f5a1c00363bb9a/swapper-1.4.0.tar.gz", hash = "sha256:9e083af114ee0593241a7b877e3e0e7d3a580454f5d59016c667a5563306f8fe", size = 12668, upload-time = "2024-08-14T19:36:07.539Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/53/c59363308ef97507a680372471e25e1ebab2e706a45a7c416eea6474c928/swapper-1.4.0-py2.py3-none-any.whl", hash = "sha256:57b8378aad234242542fe32dc6e8cff0ed24b63493d20b3c88ee01f894b9345e", size = 7106, upload-time = "2024-08-14T19:36:06.247Z" },
]
[[package]] [[package]]
name = "text-unidecode" name = "text-unidecode"
version = "1.3" version = "1.3"
@@ -1939,7 +2029,6 @@ dependencies = [
{ name = "celery" }, { name = "celery" },
{ name = "cryptography" }, { name = "cryptography" },
{ name = "dj-database-url" }, { name = "dj-database-url" },
{ name = "dj-rest-auth" },
{ name = "django" }, { name = "django" },
{ name = "django-allauth" }, { name = "django-allauth" },
{ name = "django-celery-beat" }, { name = "django-celery-beat" },
@@ -1951,11 +2040,12 @@ dependencies = [
{ name = "django-extensions" }, { name = "django-extensions" },
{ name = "django-filter" }, { name = "django-filter" },
{ name = "django-forwardemail" }, { name = "django-forwardemail" },
{ name = "django-fsm" }, { name = "django-fsm-2" },
{ name = "django-fsm-log" }, { name = "django-fsm-log" },
{ name = "django-health-check" }, { name = "django-health-check" },
{ name = "django-htmx" }, { name = "django-htmx" },
{ name = "django-htmx-autocomplete" }, { name = "django-htmx-autocomplete" },
{ name = "django-notifications-hq" },
{ name = "django-pghistory" }, { name = "django-pghistory" },
{ name = "django-redis" }, { name = "django-redis" },
{ name = "django-tailwind-cli" }, { name = "django-tailwind-cli" },
@@ -1966,6 +2056,7 @@ dependencies = [
{ name = "drf-spectacular" }, { name = "drf-spectacular" },
{ name = "fido2" }, { name = "fido2" },
{ name = "hiredis" }, { name = "hiredis" },
{ name = "httpx" },
{ name = "nplusone" }, { name = "nplusone" },
{ name = "piexif" }, { name = "piexif" },
{ name = "pillow" }, { name = "pillow" },
@@ -1992,6 +2083,7 @@ dev = [
{ name = "autopep8" }, { name = "autopep8" },
{ name = "black" }, { name = "black" },
{ name = "django-stubs" }, { name = "django-stubs" },
{ name = "factory-boy" },
{ name = "pyright" }, { name = "pyright" },
{ name = "rope" }, { name = "rope" },
{ name = "ruff" }, { name = "ruff" },
@@ -2021,7 +2113,6 @@ requires-dist = [
{ name = "celery", specifier = ">=5.5.3,<6" }, { name = "celery", specifier = ">=5.5.3,<6" },
{ name = "cryptography", specifier = ">=44.0.0" }, { name = "cryptography", specifier = ">=44.0.0" },
{ name = "dj-database-url", specifier = ">=2.3.0" }, { name = "dj-database-url", specifier = ">=2.3.0" },
{ name = "dj-rest-auth", specifier = ">=7.0.0" },
{ name = "django", specifier = ">=5.2.8" }, { name = "django", specifier = ">=5.2.8" },
{ name = "django-allauth", specifier = ">=65.3.0" }, { name = "django-allauth", specifier = ">=65.3.0" },
{ name = "django-celery-beat", specifier = ">=2.8.1" }, { name = "django-celery-beat", specifier = ">=2.8.1" },
@@ -2033,11 +2124,12 @@ requires-dist = [
{ name = "django-extensions", specifier = ">=4.1" }, { name = "django-extensions", specifier = ">=4.1" },
{ name = "django-filter", specifier = ">=24.3" }, { name = "django-filter", specifier = ">=24.3" },
{ name = "django-forwardemail", specifier = ">=1.0.0" }, { name = "django-forwardemail", specifier = ">=1.0.0" },
{ name = "django-fsm", specifier = ">=2.8.1" }, { name = "django-fsm-2", specifier = ">=4.1.0" },
{ name = "django-fsm-log", specifier = ">=3.1.0" }, { name = "django-fsm-log", specifier = ">=3.1.0" },
{ name = "django-health-check", specifier = ">=3.17.0" }, { name = "django-health-check", specifier = ">=3.17.0" },
{ name = "django-htmx", specifier = ">=1.20.0" }, { name = "django-htmx", specifier = ">=1.20.0" },
{ name = "django-htmx-autocomplete", specifier = ">=1.0.5" }, { name = "django-htmx-autocomplete", specifier = ">=1.0.5" },
{ name = "django-notifications-hq", specifier = ">=1.8.3" },
{ name = "django-pghistory", specifier = ">=3.5.2" }, { name = "django-pghistory", specifier = ">=3.5.2" },
{ name = "django-redis", specifier = ">=5.4.0" }, { name = "django-redis", specifier = ">=5.4.0" },
{ name = "django-tailwind-cli", specifier = ">=2.21.1" }, { name = "django-tailwind-cli", specifier = ">=2.21.1" },
@@ -2048,6 +2140,7 @@ requires-dist = [
{ name = "drf-spectacular", specifier = ">=0.28.0" }, { name = "drf-spectacular", specifier = ">=0.28.0" },
{ name = "fido2", specifier = ">=2.0.0" }, { name = "fido2", specifier = ">=2.0.0" },
{ name = "hiredis", specifier = ">=3.1.0" }, { name = "hiredis", specifier = ">=3.1.0" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "nplusone", specifier = ">=1.0.0" }, { name = "nplusone", specifier = ">=1.0.0" },
{ name = "piexif", specifier = ">=1.1.3" }, { name = "piexif", specifier = ">=1.1.3" },
{ name = "pillow", specifier = ">=10.4.0,<11.2" }, { name = "pillow", specifier = ">=10.4.0,<11.2" },
@@ -2074,6 +2167,7 @@ dev = [
{ name = "autopep8", specifier = ">=2.3.2" }, { name = "autopep8", specifier = ">=2.3.2" },
{ name = "black", specifier = ">=25.1.0" }, { name = "black", specifier = ">=25.1.0" },
{ name = "django-stubs", specifier = ">=5.2.2" }, { name = "django-stubs", specifier = ">=5.2.2" },
{ name = "factory-boy", specifier = ">=3.3.3" },
{ name = "pyright", specifier = ">=1.1.405" }, { name = "pyright", specifier = ">=1.1.405" },
{ name = "rope", specifier = ">=1.14.0" }, { name = "rope", specifier = ">=1.14.0" },
{ name = "ruff", specifier = ">=0.9.2" }, { name = "ruff", specifier = ">=0.9.2" },