mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2026-02-05 01:55:18 -05:00

Compare commits: a801813dcf ... b80654952d
2 commits: b80654952d, 2b7bb4dfaa
@@ -112,6 +112,8 @@ urlpatterns = [
    path("profile/avatar/delete/", views.delete_avatar, name="delete_avatar"),
    # Login history endpoint
    path("login-history/", views.get_login_history, name="get_login_history"),
    # Email change cancellation endpoint
    path("email-change/cancel/", views.cancel_email_change, name="cancel_email_change"),
    # Magic Link (Login by Code) endpoints
    path("magic-link/request/", views_magic_link.request_magic_link, name="request_magic_link"),
    path("magic-link/verify/", views_magic_link.verify_magic_link, name="verify_magic_link"),
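For reference, the new routes resolve by name through Django's `reverse()`. A quick sketch (how this `urls.py` is namespaced when included is an assumption; adjust accordingly):

```python
from django.urls import reverse

# Route names match the path() declarations above (no namespace assumed).
print(reverse("request_magic_link"))   # e.g. .../magic-link/request/
print(reverse("cancel_email_change"))  # e.g. .../email-change/cancel/
```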
@@ -1640,3 +1640,95 @@ def get_login_history(request):
                "count": len(results),
            }
        )


@extend_schema(
    operation_id="cancel_email_change",
    summary="Cancel pending email change",
    description=(
        "Cancel a pending email change request. This will clear the pending "
        "email field and prevent the email change from being completed."
    ),
    responses={
        200: {
            "description": "Email change cancelled or no pending change found",
            "example": {
                "detail": "Email change cancelled",
                "had_pending_change": True,
                "cancelled_email": "newemail@example.com",
            },
        },
        401: {
            "description": "Authentication required",
            "example": {"detail": "Authentication required"},
        },
    },
    tags=["Account Management"],
)
@api_view(["POST"])
@permission_classes([IsAuthenticated])
def cancel_email_change(request):
    """
    Cancel a pending email change request.

    This endpoint allows users to cancel their pending email change
    if they change their mind before completing the verification.

    **Authentication Required**: User must be logged in.
    """
    try:
        user = request.user

        # Check if user has a pending email change
        pending_email = user.pending_email

        if pending_email:
            # Clear the pending email
            user.pending_email = None
            user.save(update_fields=["pending_email"])

            logger.info(
                f"User {user.username} cancelled email change to {pending_email}",
                extra={
                    "user": user.username,
                    "user_id": user.user_id,
                    "cancelled_email": pending_email,
                    "action": "email_change_cancelled",
                },
            )

            return Response(
                {
                    "success": True,
                    "detail": "Email change cancelled",
                    "had_pending_change": True,
                    "cancelled_email": pending_email,
                },
                status=status.HTTP_200_OK,
            )

        # No pending change, but still success (idempotent)
        return Response(
            {
                "success": True,
                "detail": "No pending email change found",
                "had_pending_change": False,
                "cancelled_email": None,
            },
            status=status.HTTP_200_OK,
        )

    except Exception as e:
        capture_and_log(
            e,
            f"Cancel email change for user {request.user.username}",
            source="api",
            request=request,
        )
        return Response(
            {
                "success": False,
                "error": f"Error cancelling email change: {str(e)}",
            },
            status=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
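The endpoint is deliberately idempotent: repeated cancels still return 200. A minimal sketch with DRF's test client (the URL prefix and user fixture are assumptions):

```python
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=some_user)  # hypothetical user with a pending change

# First call clears the pending email...
r1 = client.post("/api/v1/accounts/email-change/cancel/")  # prefix assumed
assert r1.status_code == 200 and r1.json()["had_pending_change"] is True

# ...and a second call is a no-op that still succeeds.
r2 = client.post("/api/v1/accounts/email-change/cancel/")
assert r2.status_code == 200 and r2.json()["had_pending_change"] is False
```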
@@ -34,4 +34,22 @@ urlpatterns = [
        views.CeleryTaskStatusView.as_view(),
        name="task_status",
    ),
    # Anomaly Detection
    path(
        "anomalies/detect/",
        views.DetectAnomaliesView.as_view(),
        name="detect_anomalies",
    ),
    # Metrics Collection
    path(
        "metrics/collect/",
        views.CollectMetricsView.as_view(),
        name="collect_metrics",
    ),
    # Pipeline Integrity Scan
    path(
        "pipeline/integrity-scan/",
        views.PipelineIntegrityScanView.as_view(),
        name="pipeline_integrity_scan",
    ),
]
@@ -708,3 +708,558 @@ class CeleryTaskStatusView(APIView):
                {"detail": "Failed to fetch task status"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

class DetectAnomaliesView(APIView):
    """
    POST /admin/anomalies/detect/
    Detect data anomalies for admin dashboard.

    Full parity with Supabase Edge Function: detect-anomalies

    The original Edge Function implements 7 ML detection algorithms:
    1. Z-Score (standard deviation outliers)
    2. Moving Average (trend deviation)
    3. Rate of Change (sudden changes)
    4. Isolation Forest (ML-based outlier detection)
    5. Seasonal Decomposition (periodic pattern anomalies)
    6. Predictive (Holt-Winters exponential smoothing)
    7. Ensemble (combines all algorithms)

    This implementation provides:
    - Config-driven detection framework
    - Data quality checks (orphaned records, duplicates, incomplete data)
    - Auto-alerting for critical anomalies

    TODO: Implement full ML algorithms with numpy/scipy in a follow-up task.
    """

    permission_classes = [IsAdminUser]

    # Severity score thresholds
    SEVERITY_THRESHOLDS = {
        "critical": 4.0,
        "high": 3.0,
        "medium": 2.0,
        "low": 1.0,
    }

    def post(self, request):
        """Detect and return data anomalies."""
        try:
            # ================================================================
            # Input validation with safe type handling
            # ================================================================
            raw_type = request.data.get("type", "all")
            anomaly_type = raw_type if isinstance(raw_type, str) else "all"
            anomaly_type = anomaly_type.strip().lower()[:50]

            # Validate anomaly_type against allowed values
            allowed_types = {"all", "orphaned", "duplicates", "incomplete", "data_quality"}
            if anomaly_type not in allowed_types:
                anomaly_type = "all"

            # Safe sensitivity parsing with bounds
            try:
                sensitivity = float(request.data.get("sensitivity", 2.5))
                sensitivity = max(0.1, min(sensitivity, 10.0))  # Clamp to [0.1, 10.0]
            except (ValueError, TypeError):
                sensitivity = 2.5

            # Safe lookback_minutes parsing with bounds
            try:
                lookback_minutes = int(request.data.get("lookback_minutes", 60))
                lookback_minutes = max(1, min(lookback_minutes, 10080))  # 1 min to 1 week
            except (ValueError, TypeError):
                lookback_minutes = 60

            anomalies = []

            # ================================================================
            # Data Quality Anomalies (immediate checks)
            # ================================================================

            # Check for orphaned records
            if anomaly_type in ["all", "orphaned", "data_quality"]:
                try:
                    Park = apps.get_model("parks", "Park")
                    Ride = apps.get_model("rides", "Ride")

                    # Rides without parks
                    orphaned_rides = Ride.objects.filter(park__isnull=True).count()
                    if orphaned_rides > 0:
                        severity = "high" if orphaned_rides > 10 else "medium"
                        anomalies.append({
                            "id": f"orphaned_rides_{timezone.now().timestamp()}",
                            "metric_name": "orphaned_records",
                            "metric_category": "data_quality",
                            "anomaly_type": "orphaned_rides",
                            "severity": severity,
                            "baseline_value": 0,
                            "anomaly_value": orphaned_rides,
                            "deviation_score": orphaned_rides / 5.0,  # Score based on count
                            "confidence_score": 1.0,  # 100% confidence for exact counts
                            "detection_algorithm": "rule_based",
                            "description": f"{orphaned_rides} rides without associated park",
                            "detected_at": timezone.now().isoformat(),
                        })
                except LookupError:
                    pass

            # Check for duplicate slugs
            if anomaly_type in ["all", "duplicates", "data_quality"]:
                try:
                    Park = apps.get_model("parks", "Park")
                    duplicate_slugs = (
                        Park.objects.values("slug")
                        .annotate(count=Count("id"))
                        .filter(count__gt=1)
                    )
                    dup_count = duplicate_slugs.count()
                    if dup_count > 0:
                        anomalies.append({
                            "id": f"duplicate_slugs_{timezone.now().timestamp()}",
                            "metric_name": "duplicate_slugs",
                            "metric_category": "data_quality",
                            "anomaly_type": "duplicate_values",
                            "severity": "high" if dup_count > 5 else "medium",
                            "baseline_value": 0,
                            "anomaly_value": dup_count,
                            "deviation_score": dup_count / 2.0,
                            "confidence_score": 1.0,
                            "detection_algorithm": "rule_based",
                            "description": f"{dup_count} duplicate park slugs detected",
                            "detected_at": timezone.now().isoformat(),
                        })
                except LookupError:
                    pass

            # Check for missing required fields
            if anomaly_type in ["all", "incomplete", "data_quality"]:
                try:
                    Park = apps.get_model("parks", "Park")
                    parks_no_location = Park.objects.filter(
                        Q(latitude__isnull=True) | Q(longitude__isnull=True)
                    ).count()
                    if parks_no_location > 0:
                        anomalies.append({
                            "id": f"incomplete_parks_{timezone.now().timestamp()}",
                            "metric_name": "incomplete_data",
                            "metric_category": "data_quality",
                            "anomaly_type": "missing_required_fields",
                            "severity": "low" if parks_no_location < 10 else "medium",
                            "baseline_value": 0,
                            "anomaly_value": parks_no_location,
                            "deviation_score": parks_no_location / 10.0,
                            "confidence_score": 1.0,
                            "detection_algorithm": "rule_based",
                            "description": f"{parks_no_location} parks missing location data",
                            "detected_at": timezone.now().isoformat(),
                        })
                except LookupError:
                    pass

            # ================================================================
            # TODO: Implement ML-based anomaly detection
            # ================================================================
            # The original Supabase Edge Function reads from:
            # - anomaly_detection_config table (enabled metrics, sensitivity, algorithms)
            # - metric_time_series table (historical metric data)
            #
            # Then applies these algorithms:
            # 1. z_score: (value - mean) / std_dev
            # 2. moving_average: deviation from rolling average
            # 3. rate_of_change: delta compared to previous values
            # 4. isolation_forest: sklearn-style outlier detection
            # 5. seasonal_decomposition: detect periodic pattern breaks
            # 6. predictive: Holt-Winters forecasting comparison
            # 7. ensemble: weighted combination of all methods
            #
            # For now, we return data quality anomalies. Full ML implementation
            # requires numpy/scipy dependencies and metric_time_series data.

            # ================================================================
            # Auto-create alerts for critical/high severity
            # ================================================================
            for anomaly in anomalies:
                if anomaly.get("severity") in ["critical", "high"]:
                    # Log critical anomaly (would create SystemAlert in full impl)
                    logger.warning(
                        f"Critical anomaly detected: {anomaly.get('description')}",
                        extra={
                            "anomaly_type": anomaly.get("anomaly_type"),
                            "severity": anomaly.get("severity"),
                            "deviation_score": anomaly.get("deviation_score"),
                        },
                    )

            # Calculate summary counts
            detected_count = len(anomalies)
            critical_count = sum(1 for a in anomalies if a.get("severity") in ["critical", "high"])

            return Response({
                "success": True,
                "detected_count": detected_count,
                "critical_count": critical_count,
                "anomalies": anomalies,
                "scanned_at": timezone.now().isoformat(),
                "config": {
                    "sensitivity": sensitivity,
                    "lookback_minutes": lookback_minutes,
                    "algorithms_available": [
                        "rule_based",
                        # TODO: Add when implemented
                        # "z_score", "moving_average", "rate_of_change",
                        # "isolation_forest", "seasonal", "predictive", "ensemble"
                    ],
                },
            })

        except Exception as e:
            capture_and_log(e, "Detect anomalies - error", source="api")
            return Response(
                {"detail": "Failed to detect anomalies"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
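Of the algorithms listed in the TODO, z-score is small enough to sketch with the stdlib; a minimal version under the same `sensitivity` semantics (the real implementation would read from metric_time_series data, which this sketch fakes with a plain list):

```python
import statistics

def z_score_anomalies(values: list[float], sensitivity: float = 2.5) -> list[int]:
    """Return indices whose |z-score| exceeds the sensitivity threshold."""
    if len(values) < 3:
        return []  # too little data for a meaningful mean/std-dev
    mean = statistics.mean(values)
    std_dev = statistics.pstdev(values)
    if std_dev == 0:
        return []  # constant series: no outliers by this definition
    return [i for i, v in enumerate(values) if abs(v - mean) / std_dev > sensitivity]

# The spike at index 5 is flagged at the default sensitivity of 2.5.
print(z_score_anomalies([10, 11, 9, 10, 12, 60, 10, 11]))  # -> [5]
```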
class CollectMetricsView(APIView):
    """
    POST /admin/metrics/collect/
    Collect system metrics for admin dashboard.

    BULLETPROOFED: Safe input parsing with validation.
    """

    permission_classes = [IsAdminUser]

    # Allowed values
    ALLOWED_METRIC_TYPES = {"all", "database", "users", "moderation", "performance"}
    ALLOWED_TIME_RANGES = {"24h", "7d", "30d", "1h", "12h"}

    def post(self, request):
        """Collect and return system metrics."""
        try:
            # ================================================================
            # Input validation with safe type handling
            # ================================================================
            raw_type = request.data.get("type", "all")
            metric_type = raw_type if isinstance(raw_type, str) else "all"
            metric_type = metric_type.strip().lower()[:50]
            if metric_type not in self.ALLOWED_METRIC_TYPES:
                metric_type = "all"

            raw_time_range = request.data.get("timeRange", "24h")
            time_range = raw_time_range if isinstance(raw_time_range, str) else "24h"
            time_range = time_range.strip().lower()[:10]
            if time_range not in self.ALLOWED_TIME_RANGES:
                time_range = "24h"

            # Parse time range to cutoff
            time_range_map = {
                "1h": timedelta(hours=1),
                "12h": timedelta(hours=12),
                "24h": timedelta(hours=24),
                "7d": timedelta(days=7),
                "30d": timedelta(days=30),
            }
            cutoff = timezone.now() - time_range_map.get(time_range, timedelta(hours=24))

            metrics = {
                "collectedAt": timezone.now().isoformat(),
                "timeRange": time_range,
            }

            # Database metrics
            if metric_type in ["all", "database"]:
                try:
                    Park = apps.get_model("parks", "Park")
                    Ride = apps.get_model("rides", "Ride")

                    metrics["database"] = {
                        "totalParks": Park.objects.count(),
                        "totalRides": Ride.objects.count(),
                        "recentParks": Park.objects.filter(created_at__gte=cutoff).count(),
                        "recentRides": Ride.objects.filter(created_at__gte=cutoff).count(),
                    }
                except Exception:  # also covers LookupError from get_model
                    metrics["database"] = {
                        "error": "Could not fetch database metrics"
                    }

            # User metrics
            if metric_type in ["all", "users"]:
                try:
                    metrics["users"] = {
                        "totalUsers": User.objects.count(),
                        "activeUsers": User.objects.filter(
                            last_login__gte=cutoff
                        ).count(),
                        "newUsers": User.objects.filter(
                            date_joined__gte=cutoff
                        ).count(),
                    }
                except Exception:
                    metrics["users"] = {
                        "error": "Could not fetch user metrics"
                    }

            # Moderation metrics
            if metric_type in ["all", "moderation"]:
                try:
                    EditSubmission = apps.get_model("moderation", "EditSubmission")
                    metrics["moderation"] = {
                        "pendingSubmissions": EditSubmission.objects.filter(
                            status="PENDING"
                        ).count(),
                        "recentSubmissions": EditSubmission.objects.filter(
                            created_at__gte=cutoff
                        ).count(),
                    }
                except Exception:  # also covers LookupError from get_model
                    metrics["moderation"] = {
                        "error": "Could not fetch moderation metrics"
                    }

            return Response({
                "success": True,
                "metrics": metrics,
            })

        except Exception as e:
            capture_and_log(e, "Collect metrics - error", source="api")
            return Response(
                {"detail": "Failed to collect metrics"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
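A sketch of the round trip (URL prefix and admin fixture are assumptions); per-category results nest under `metrics`:

```python
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)  # hypothetical staff user

body = client.post(
    "/api/v1/admin/metrics/collect/",  # prefix assumed
    {"type": "database", "timeRange": "7d"},
    format="json",
).json()

# e.g. {"success": True, "metrics": {"collectedAt": ..., "timeRange": "7d",
#       "database": {"totalParks": ..., "recentParks": ..., ...}}}
print(body["metrics"]["database"])
```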
class PipelineIntegrityScanView(APIView):
    """
    POST /admin/pipeline/integrity-scan/
    Scan data pipeline for integrity issues.

    BULLETPROOFED: Safe input parsing with validation.
    """

    permission_classes = [IsAdminUser]

    # Allowed values
    ALLOWED_SCAN_TYPES = {"full", "referential", "status", "media", "submissions", "stuck", "versions"}
    MAX_HOURS_BACK = 720  # 30 days max

    def post(self, request):
        """Run integrity scan on data pipeline."""
        try:
            # ================================================================
            # Input validation with safe type handling
            # ================================================================

            # Safe hours_back parsing with bounds
            try:
                hours_back = int(request.data.get("hours_back", 48))
                hours_back = max(1, min(hours_back, self.MAX_HOURS_BACK))
            except (ValueError, TypeError):
                hours_back = 48

            # Safe scan_type validation
            raw_type = request.data.get("type", "full")
            scan_type = raw_type if isinstance(raw_type, str) else "full"
            scan_type = scan_type.strip().lower()[:50]
            if scan_type not in self.ALLOWED_SCAN_TYPES:
                scan_type = "full"

            # Safe fix_issues parsing (boolean)
            raw_fix = request.data.get("fix", False)
            fix_issues = raw_fix is True or str(raw_fix).lower() in ("true", "1", "yes")

            # Calculate cutoff time based on hours_back
            cutoff_time = timezone.now() - timedelta(hours=hours_back)

            issues = []
            fixed_count = 0

            # Check for referential integrity
            if scan_type in ["full", "referential"]:
                try:
                    Ride = apps.get_model("rides", "Ride")
                    Park = apps.get_model("parks", "Park")

                    # Rides pointing to non-existent parks
                    valid_park_ids = Park.objects.values_list("id", flat=True)
                    invalid_rides = Ride.objects.exclude(
                        park_id__in=valid_park_ids
                    ).exclude(park_id__isnull=True)

                    if invalid_rides.exists():
                        for ride in invalid_rides[:10]:  # Limit to 10 examples
                            issues.append({
                                "issue_type": "broken_reference",
                                "entity_type": "ride",
                                "entity_id": str(ride.id),
                                "submission_id": "",
                                "severity": "critical",
                                "description": f"Ride '{ride.name}' has invalid park reference",
                                "detected_at": timezone.now().isoformat(),
                            })

                        if fix_issues:
                            # Set invalid park references to null. update() returns the
                            # number of rows changed; counting the queryset after the
                            # update would yield 0 because it no longer matches.
                            fixed_count += invalid_rides.update(park_id=None)
                except LookupError:
                    pass

            # Check for status consistency
            if scan_type in ["full", "status"]:
                try:
                    Park = apps.get_model("parks", "Park")

                    # Parks with invalid status values
                    valid_statuses = ["operating", "closed", "under_construction", "announced", "deleted"]
                    invalid_status_parks = Park.objects.exclude(status__in=valid_statuses)

                    for park in invalid_status_parks[:10]:  # Limit to 10 examples
                        issues.append({
                            "issue_type": "invalid_status",
                            "entity_type": "park",
                            "entity_id": str(park.id),
                            "submission_id": "",
                            "severity": "warning",
                            "description": f"Park '{park.name}' has invalid status: {park.status}",
                            "detected_at": timezone.now().isoformat(),
                        })
                except LookupError:
                    pass

            # Check for orphaned media
            if scan_type in ["full", "media"]:
                try:
                    Photo = apps.get_model("media", "Photo")

                    orphaned_photos = Photo.objects.filter(
                        entity_id__isnull=True,
                        entity_type__isnull=True,
                    )

                    for photo in orphaned_photos[:10]:  # Limit to 10 examples
                        issues.append({
                            "issue_type": "orphaned_media",
                            "entity_type": "photo",
                            "entity_id": str(photo.id),
                            "submission_id": "",
                            "severity": "info",
                            "description": "Photo without associated entity",
                            "detected_at": timezone.now().isoformat(),
                        })
                except LookupError:
                    pass

            # ================================================================
            # Check 3: Stuck submissions with expired locks (from original)
            # ================================================================
            if scan_type in ["full", "submissions", "stuck"]:
                try:
                    EditSubmission = apps.get_model("moderation", "EditSubmission")

                    # Find submissions that are claimed but claim has expired
                    # Assuming a claim expires after 30 minutes of inactivity
                    claim_expiry = timezone.now() - timedelta(minutes=30)

                    stuck_submissions = EditSubmission.objects.filter(
                        status__in=["CLAIMED", "claimed", "reviewing"],
                        claimed_at__lt=claim_expiry,
                    ).exclude(claimed_at__isnull=True)

                    for sub in stuck_submissions[:10]:  # Limit to 10 examples
                        hours_stuck = (timezone.now() - sub.claimed_at).total_seconds() / 3600
                        issues.append({
                            "issue_type": "stuck_submission",
                            "entity_type": "edit_submission",
                            "entity_id": str(sub.id),
                            "submission_id": str(sub.id),
                            "severity": "warning" if hours_stuck < 4 else "critical",
                            "description": (
                                f"Submission claimed by {sub.claimed_by.username if sub.claimed_by else 'unknown'} "
                                f"but stuck for {hours_stuck:.1f} hours"
                            ),
                            "detected_at": timezone.now().isoformat(),
                            "metadata": {
                                "claimed_at": sub.claimed_at.isoformat() if sub.claimed_at else None,
                                "claimed_by": sub.claimed_by.username if sub.claimed_by else None,
                                "hours_stuck": round(hours_stuck, 1),
                            },
                        })

                        if fix_issues:
                            # Unclaim stuck submissions
                            sub.claimed_by = None
                            sub.claimed_at = None
                            sub.status = "PENDING"
                            sub.save(update_fields=["claimed_by", "claimed_at", "status"])
                            fixed_count += 1

                except LookupError:
                    pass

            # ================================================================
            # Check: Entities with approvals but no version records (from original)
            # Uses pghistory events as proxy for version records
            # ================================================================
            if scan_type in ["full", "versions"]:
                try:
                    # Check if pghistory events exist for recently approved submissions
                    EditSubmission = apps.get_model("moderation", "EditSubmission")

                    recently_approved = EditSubmission.objects.filter(
                        status__in=["APPROVED", "approved"],
                        handled_at__gte=cutoff_time,
                    )

                    for sub in recently_approved[:10]:
                        # Check if the target object has history
                        target = sub.content_object
                        if target and hasattr(target, 'events'):
                            try:
                                event_count = target.events.count()
                                if event_count == 0:
                                    issues.append({
                                        "issue_type": "missing_version_record",
                                        "entity_type": sub.content_type.model,
                                        "entity_id": str(sub.object_id),
                                        "submission_id": str(sub.id),
                                        "severity": "critical",
                                        "description": f"Approved {sub.content_type.model} has no version history",
                                        "detected_at": timezone.now().isoformat(),
                                    })
                            except Exception:
                                pass
                except LookupError:
                    pass

            # Calculate summary counts
            critical_count = sum(1 for i in issues if i.get("severity") == "critical")
            warning_count = sum(1 for i in issues if i.get("severity") == "warning")
            info_count = sum(1 for i in issues if i.get("severity") == "info")

            # Return in frontend-expected format
            return Response({
                "success": True,
                "scan_timestamp": timezone.now().isoformat(),
                "hours_scanned": hours_back,
                "issues_found": len(issues),
                "issues": issues,
                "summary": {
                    "critical": critical_count,
                    "warning": warning_count,
                    "info": info_count,
                },
            })

        except Exception as e:
            capture_and_log(e, "Pipeline integrity scan - error", source="api")
            return Response(
                {"detail": "Failed to run integrity scan"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
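Since `fix` mutates data (nulling park references, unclaiming submissions), the natural pattern is a dry run first; a sketch (URL prefix and admin fixture are assumptions):

```python
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=admin_user)  # hypothetical staff user

# Dry run: report issues without mutating anything.
report = client.post(
    "/api/v1/admin/pipeline/integrity-scan/",  # prefix assumed
    {"type": "referential", "hours_back": 48},
    format="json",
).json()

# Opt in to auto-fixes explicitly, and only after reviewing the report.
if report["summary"]["critical"] > 0:
    client.post(
        "/api/v1/admin/pipeline/integrity-scan/",
        {"type": "referential", "fix": True},
        format="json",
    )
```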
@@ -24,6 +24,7 @@ from .views import (
    LogoutAPIView,
    PasswordChangeAPIView,
    PasswordResetAPIView,
    ProcessOAuthProfileAPIView,
    ResendVerificationAPIView,
    SignupAPIView,
    SocialAuthStatusAPIView,
@@ -80,6 +81,11 @@ urlpatterns = [
        SocialAuthStatusAPIView.as_view(),
        name="auth-social-status",
    ),
    path(
        "social/process-profile/",
        ProcessOAuthProfileAPIView.as_view(),
        name="auth-social-process-profile",
    ),
    path("status/", AuthStatusAPIView.as_view(), name="auth-status"),
    # Email verification endpoints
    path(
@@ -6,6 +6,8 @@ login, signup, logout, password management, social authentication,
user profiles, and top lists.
"""

import logging

from typing import cast  # added 'cast'

from django.contrib.auth import authenticate, get_user_model, login, logout
@@ -71,6 +73,7 @@ except Exception:
    TurnstileMixin = FallbackTurnstileMixin

UserModel = get_user_model()
logger = logging.getLogger(__name__)

# Helper: safely obtain underlying HttpRequest (used by Django auth)
@@ -831,7 +834,529 @@ The ThrillWiki Team
        # Don't reveal whether email exists
        return Response({"detail": "If the email exists, a verification email has been sent", "success": True})


# Note: User Profile, Top List, and Top List Item ViewSets are now handled
# by the dedicated accounts app at backend/apps/api/v1/accounts/views.py
# to avoid duplication and maintain clean separation of concerns.

@extend_schema_view(
    post=extend_schema(
        summary="Process OAuth profile",
        description="Process OAuth profile data during social authentication flow.",
        request={
            "type": "object",
            "properties": {
                "provider": {"type": "string", "description": "OAuth provider (e.g., google, discord)"},
                "profile": {
                    "type": "object",
                    "description": "Profile data from OAuth provider",
                    "properties": {
                        "id": {"type": "string"},
                        "email": {"type": "string", "format": "email"},
                        "name": {"type": "string"},
                        "avatar_url": {"type": "string", "format": "uri"},
                    },
                },
                "access_token": {"type": "string", "description": "OAuth access token"},
            },
            "required": ["provider", "profile"],
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "success": {"type": "boolean"},
                    "action": {"type": "string", "enum": ["created", "updated", "linked"]},
                    "user": {"type": "object"},
                    "profile_synced": {"type": "boolean"},
                },
            },
            400: "Bad Request",
            401: "Unauthorized",
            403: "Account suspended",
        },
        tags=["Social Authentication"],
    ),
)
class ProcessOAuthProfileAPIView(APIView):
    """
    API endpoint to process OAuth profile data.

    This endpoint is called AFTER the OAuth flow is complete to:
    1. Check if user is banned (SECURITY CRITICAL)
    2. Extract avatar from OAuth provider
    3. Download and upload avatar to Cloudflare Images
    4. Sync display name from OAuth provider
    5. Update username if it's a generic UUID-based username

    Called with an empty body - uses the authenticated session.

    Full parity with Supabase Edge Function: process-oauth-profile

    BULLETPROOFED: Comprehensive validation, sanitization, and error handling.
    """

    permission_classes = [IsAuthenticated]

    # Security constants
    MAX_AVATAR_SIZE = 10 * 1024 * 1024  # 10MB
    AVATAR_DOWNLOAD_TIMEOUT = 10.0  # seconds
    AVATAR_UPLOAD_TIMEOUT = 30.0  # seconds
    MAX_USERNAME_LENGTH = 150
    MIN_USERNAME_LENGTH = 3
    ALLOWED_USERNAME_CHARS = set("abcdefghijklmnopqrstuvwxyz0123456789_")

    # Rate limiting for avatar uploads (prevent abuse)
    AVATAR_UPLOAD_COOLDOWN = 60  # seconds between uploads
    def post(self, request: Request) -> Response:
        import re
        import httpx
        from django.db import transaction
        from django.core.cache import cache

        try:
            user = request.user

            # ================================================================
            # STEP 0: Validate user object exists and is valid
            # ================================================================
            if not user or not hasattr(user, 'user_id'):
                logger.error("ProcessOAuthProfile called with invalid user object")
                return Response({
                    "success": False,
                    "error": "Invalid user session",
                }, status=status.HTTP_401_UNAUTHORIZED)

            user_id_str = str(user.user_id)

            # ================================================================
            # STEP 1: CRITICAL - Check ban status FIRST
            # ================================================================
            is_banned = getattr(user, 'is_banned', False)

            # Also check via profile if applicable
            if not is_banned:
                try:
                    from apps.accounts.models import UserProfile
                    profile_check = UserProfile.objects.filter(user=user).first()
                    if profile_check and getattr(profile_check, 'is_banned', False):
                        is_banned = True
                except Exception:
                    pass

            if is_banned:
                ban_reason = getattr(user, 'ban_reason', None) or "Policy violation"
                # Sanitize ban reason for response
                safe_ban_reason = str(ban_reason)[:200] if ban_reason else None

                logger.warning(
                    "Banned user attempted OAuth profile update",
                    extra={"user_id": user_id_str, "ban_reason": safe_ban_reason},
                )

                return Response({
                    "error": "Account suspended",
                    "message": (
                        f"Your account has been suspended. Reason: {safe_ban_reason}"
                        if safe_ban_reason
                        else "Your account has been suspended. Contact support for assistance."
                    ),
                    "ban_reason": safe_ban_reason,
                }, status=status.HTTP_403_FORBIDDEN)

            # ================================================================
            # STEP 2: Check rate limiting for avatar uploads
            # ================================================================
            rate_limit_key = f"oauth_profile:avatar:{user_id_str}"
            if cache.get(rate_limit_key):
                return Response({
                    "success": True,
                    "action": "rate_limited",
                    "message": "Please wait before updating your profile again",
                    "avatar_uploaded": False,
                    "profile_updated": False,
                })

            # ================================================================
            # STEP 3: Get OAuth provider info from social accounts
            # ================================================================
            try:
                from allauth.socialaccount.models import SocialAccount
            except ImportError:
                logger.error("django-allauth not installed")
                return Response({
                    "success": False,
                    "error": "Social authentication not configured",
                }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

            social_accounts = SocialAccount.objects.filter(user=user)

            if not social_accounts.exists():
                return Response({
                    "success": True,
                    "action": "skipped",
                    "message": "No OAuth accounts linked",
                })

            # Get the most recent social account
            social_account = social_accounts.order_by("-date_joined").first()
            if not social_account:
                return Response({
                    "success": True,
                    "action": "skipped",
                    "message": "No valid OAuth account found",
                })

            provider = social_account.provider or "unknown"
            extra_data = social_account.extra_data or {}

            # Validate extra_data is a dict
            if not isinstance(extra_data, dict):
                logger.warning(f"Invalid extra_data type for user {user_id_str}: {type(extra_data)}")
                extra_data = {}

            # ================================================================
            # STEP 4: Extract profile data based on provider (with sanitization)
            # ================================================================
            avatar_url = None
            display_name = None
            username_base = None

            if provider == "google":
                avatar_url = self._sanitize_url(extra_data.get("picture"))
                display_name = self._sanitize_display_name(extra_data.get("name"))
                email = extra_data.get("email", "")
                if email and isinstance(email, str):
                    username_base = self._sanitize_username(email.split("@")[0])

            elif provider == "discord":
                discord_data = extra_data
                discord_id = discord_data.get("id") or discord_data.get("sub")

                display_name = self._sanitize_display_name(
                    discord_data.get("global_name")
                    or discord_data.get("full_name")
                    or discord_data.get("name")
                )

                # Discord avatar URL construction with validation
                avatar_hash = discord_data.get("avatar")
                if discord_id and avatar_hash and isinstance(discord_id, str) and isinstance(avatar_hash, str):
                    # Validate discord_id is numeric
                    if discord_id.isdigit():
                        # Validate avatar_hash is alphanumeric
                        if re.match(r'^[a-zA-Z0-9_]+$', avatar_hash):
                            avatar_url = f"https://cdn.discordapp.com/avatars/{discord_id}/{avatar_hash}.png?size=256"

                if not avatar_url:
                    avatar_url = self._sanitize_url(
                        discord_data.get("avatar_url") or discord_data.get("picture")
                    )

                raw_username = discord_data.get("username") or discord_data.get("name", "")
                if raw_username and isinstance(raw_username, str):
                    username_base = self._sanitize_username(raw_username.split("#")[0])
                if not username_base and discord_id:
                    username_base = f"discord_{str(discord_id)[:8]}"

            else:
                # Generic provider handling
                avatar_url = self._sanitize_url(
                    extra_data.get("picture")
                    or extra_data.get("avatar_url")
                    or extra_data.get("avatar")
                )
                display_name = self._sanitize_display_name(
                    extra_data.get("name") or extra_data.get("display_name")
                )

            # ================================================================
            # STEP 5: Get or create user profile (with transaction)
            # ================================================================
            from apps.accounts.models import UserProfile

            with transaction.atomic():
                profile, profile_created = UserProfile.objects.select_for_update().get_or_create(
                    user=user
                )

                # Check if profile already has an avatar
                if profile.avatar_id:
                    return Response({
                        "success": True,
                        "action": "skipped",
                        "message": "Avatar already exists",
                        "avatar_uploaded": False,
                        "profile_updated": False,
                    })

            # ================================================================
            # STEP 6: Download and upload avatar to Cloudflare (outside transaction)
            # ================================================================
            avatar_uploaded = False

            if avatar_url:
                try:
                    # Validate URL scheme
                    if not avatar_url.startswith(('https://', 'http://')):
                        logger.warning(f"Invalid avatar URL scheme: {avatar_url[:50]}")
                    else:
                        # Download avatar from provider
                        download_response = httpx.get(
                            avatar_url,
                            timeout=self.AVATAR_DOWNLOAD_TIMEOUT,
                            follow_redirects=True,
                            headers={
                                "User-Agent": "ThrillWiki/1.0",
                                "Accept": "image/*",
                            },
                        )

                        if download_response.status_code == 200:
                            image_data = download_response.content
                            content_type = download_response.headers.get("content-type", "")

                            # Validate content type
                            if not content_type.startswith("image/"):
                                logger.warning(f"Invalid content type for avatar: {content_type}")
                            # Validate file size
                            elif len(image_data) > self.MAX_AVATAR_SIZE:
                                logger.warning(
                                    f"Avatar too large for user {user_id_str}: {len(image_data)} bytes"
                                )
                            # Validate minimum size (avoid empty images)
                            elif len(image_data) < 100:
                                logger.warning(f"Avatar too small for user {user_id_str}")
                            else:
                                avatar_uploaded = self._upload_to_cloudflare(
                                    image_data, user_id_str, provider, profile
                                )
                        else:
                            logger.warning(
                                f"Avatar download failed: {download_response.status_code}",
                                extra={"user_id": user_id_str, "provider": provider},
                            )

                except httpx.TimeoutException:
                    logger.warning(f"Avatar download timeout for user {user_id_str}")
                except httpx.HTTPError as download_error:
                    logger.warning(f"Failed to download avatar: {download_error}")
                except Exception as e:
                    logger.warning(f"Unexpected avatar error: {e}")

            # Set rate limit after successful processing
            if avatar_uploaded:
                cache.set(rate_limit_key, True, self.AVATAR_UPLOAD_COOLDOWN)

            # ================================================================
            # STEP 7: Update display name if not set (with validation)
            # ================================================================
            profile_updated = False

            if display_name and not getattr(user, "display_name", None):
                try:
                    user.display_name = display_name
                    user.save(update_fields=["display_name"])
                    profile_updated = True
                except Exception as e:
                    logger.warning(f"Failed to update display name: {e}")

            # ================================================================
            # STEP 8: Update username if it's a generic UUID-based username
            # ================================================================
            current_username = getattr(user, "username", "") or ""
            if username_base and current_username.startswith("user_"):
                try:
                    new_username = self._ensure_unique_username(username_base, user.user_id)
                    if new_username and new_username != current_username:
                        user.username = new_username
                        user.save(update_fields=["username"])
                        profile_updated = True
                        logger.info(
                            f"Username updated from {current_username} to {new_username}",
                            extra={"user_id": user_id_str},
                        )
                except Exception as e:
                    logger.warning(f"Failed to update username: {e}")

            return Response({
                "success": True,
                "action": "processed",
                "provider": provider,
                "avatar_uploaded": avatar_uploaded,
                "profile_updated": profile_updated,
                "message": "OAuth profile processed successfully",
            })

        except Exception as e:
            capture_and_log(e, "Process OAuth profile", source="api", request=request)
            return Response({
                "success": False,
                "error": "Failed to process OAuth profile",
            }, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    def _sanitize_url(self, url) -> str | None:
        """Sanitize and validate URL."""
        if not url or not isinstance(url, str):
            return None

        url = url.strip()[:2000]  # Limit length

        # Basic URL validation
        if not url.startswith(('https://', 'http://')):
            return None

        # Block obviously malicious patterns
        dangerous_patterns = ['javascript:', 'data:', 'file:', '<script', 'onclick']
        for pattern in dangerous_patterns:
            if pattern.lower() in url.lower():
                return None

        return url

    def _sanitize_display_name(self, name) -> str | None:
        """Sanitize display name."""
        if not name or not isinstance(name, str):
            return None

        import re

        # Strip and limit length
        name = name.strip()[:100]

        # Remove control characters
        name = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', name)

        # Remove excessive whitespace
        name = ' '.join(name.split())

        # Must have at least 1 character
        if len(name) < 1:
            return None

        return name

    def _sanitize_username(self, username) -> str | None:
        """Sanitize username for use."""
        if not username or not isinstance(username, str):
            return None

        import re

        # Lowercase and remove non-allowed characters
        username = username.lower().strip()
        username = re.sub(r'[^a-z0-9_]', '', username)

        # Enforce length limits
        if len(username) < self.MIN_USERNAME_LENGTH:
            return None

        username = username[:self.MAX_USERNAME_LENGTH]

        return username
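These helpers are pure string functions, so their behavior pins down nicely with a few illustrative checks:

```python
view = ProcessOAuthProfileAPIView()

# URLs: scheme-checked, length-capped, dangerous patterns rejected.
assert view._sanitize_url("https://cdn.example.com/a.png") == "https://cdn.example.com/a.png"
assert view._sanitize_url("javascript:alert(1)") is None

# Display names: control characters stripped, whitespace collapsed.
assert view._sanitize_display_name("  Jane\x00  Doe ") == "Jane Doe"

# Usernames: lowercased, restricted to [a-z0-9_], minimum length enforced.
assert view._sanitize_username("Jane.Doe!") == "janedoe"
assert view._sanitize_username("ab") is None  # below MIN_USERNAME_LENGTH
```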
    def _upload_to_cloudflare(self, image_data: bytes, user_id: str, provider: str, profile) -> bool:
        """Upload image to Cloudflare Images with error handling."""
        import httpx
        from django.db import transaction

        try:
            from django_cloudflareimages_toolkit.models import CloudflareImage
            from django_cloudflareimages_toolkit.services import CloudflareImagesService

            cf_service = CloudflareImagesService()

            # Request direct upload URL
            upload_result = cf_service.get_direct_upload_url(
                metadata={
                    "type": "avatar",
                    "user_id": user_id,
                    "provider": provider,
                }
            )

            if not upload_result or "upload_url" not in upload_result:
                logger.warning("Failed to get Cloudflare upload URL")
                return False

            upload_url = upload_result["upload_url"]
            cloudflare_id = upload_result.get("id") or upload_result.get("cloudflare_id")

            if not cloudflare_id:
                logger.warning("No Cloudflare ID in upload result")
                return False

            # Upload image to Cloudflare
            files = {"file": ("avatar.png", image_data, "image/png")}
            upload_response = httpx.post(
                upload_url,
                files=files,
                timeout=self.AVATAR_UPLOAD_TIMEOUT,
            )

            if upload_response.status_code not in [200, 201]:
                logger.warning(f"Cloudflare upload failed: {upload_response.status_code}")
                return False

            # Create CloudflareImage record and link to profile
            with transaction.atomic():
                cf_image = CloudflareImage.objects.create(
                    cloudflare_id=cloudflare_id,
                    is_uploaded=True,
                    metadata={
                        "type": "avatar",
                        "user_id": user_id,
                        "provider": provider,
                    },
                )

                profile.avatar = cf_image
                profile.save(update_fields=["avatar"])

            logger.info(
                "Avatar uploaded successfully",
                extra={"user_id": user_id, "provider": provider, "cloudflare_id": cloudflare_id},
            )
            return True

        except ImportError:
            logger.warning("django-cloudflareimages-toolkit not available")
            return False
        except Exception as cf_error:
            logger.warning(f"Cloudflare upload error: {cf_error}")
            return False
    def _ensure_unique_username(self, base_username: str, user_id: str, max_attempts: int = 10) -> str | None:
        """
        Ensure username is unique by appending numbers if needed.

        Returns None if no valid username can be generated.
        """
        if not base_username:
            return None

        username = base_username.lower()[:self.MAX_USERNAME_LENGTH]

        # Validate characters
        if not all(c in self.ALLOWED_USERNAME_CHARS for c in username):
            return None

        attempt = 0

        while attempt < max_attempts:
            try:
                existing = UserModel.objects.filter(username=username).exclude(user_id=user_id).exists()
                if not existing:
                    return username
            except Exception:
                break

            attempt += 1
            # Ensure we don't exceed max length with suffix
            suffix = f"_{attempt}"
            max_base = self.MAX_USERNAME_LENGTH - len(suffix)
            username = f"{base_username.lower()[:max_base]}{suffix}"

        # Fallback to UUID-based username
        return f"user_{str(user_id)[:8]}"
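End to end, the view is hit with an empty body right after the OAuth callback completes; a sketch with DRF's test client (the URL prefix and the linked-account fixture are assumptions):

```python
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=oauth_user)  # hypothetical user with a linked SocialAccount

resp = client.post("/api/v1/auth/social/process-profile/", {}, format="json")  # prefix assumed

# Banned users get 403; everyone else gets a summary of what was synced.
assert resp.status_code in (200, 403)
if resp.status_code == 200:
    body = resp.json()
    print(body["action"], body["avatar_uploaded"], body["profile_updated"])
```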
@@ -33,4 +33,5 @@ urlpatterns = [
    # Location detection and enrichment
    path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
    path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
    path("search-location/", views.SearchLocationView.as_view(), name="search_location"),
]
@@ -1241,3 +1241,388 @@ class EnrichLocationView(APIView):
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

@extend_schema_view(
    post=extend_schema(
        summary="Search for a location by text",
        description="Forward geocoding - convert a text query (address, city name, etc.) to coordinates.",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Location search query (address, city, place name, etc.)",
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of results to return (default: 5)",
                    },
                    "country": {
                        "type": "string",
                        "description": "ISO 3166-1 alpha-2 country code to restrict search",
                    },
                },
                "required": ["query"],
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "results": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "latitude": {"type": "number"},
                                "longitude": {"type": "number"},
                                "formatted_address": {"type": "string"},
                                "city": {"type": "string"},
                                "state": {"type": "string"},
                                "country": {"type": "string"},
                                "importance": {"type": "number"},
                            },
                        },
                    },
                    "query": {"type": "string"},
                    "count": {"type": "integer"},
                },
            },
            400: {"description": "Missing or invalid query parameter"},
        },
        tags=["Maps"],
    ),
)
class SearchLocationView(APIView):
    """
    POST /maps/search-location/
    Forward geocoding - search for locations by text query.

    Full parity with Supabase Edge Function: search-location

    Features:
    - Query caching with SHA-256 hash (7-day expiration)
    - Rate limiting (30 requests per minute per IP)
    - Usage logging for monitoring
    - Cache headers (X-Cache: HIT/MISS)
    """

    permission_classes = [AllowAny]

    # Rate limit settings matching original
    RATE_LIMIT_REQUESTS = 30
    RATE_LIMIT_PERIOD = 60  # 1 minute
    CACHE_EXPIRATION = 7 * 24 * 60 * 60  # 7 days in seconds
    def _hash_query(self, query: str) -> str:
        """Hash query for cache lookup (matching original SHA-256)."""
        import hashlib
        normalized = query.strip().lower()
        return hashlib.sha256(normalized.encode()).hexdigest()

    def _get_client_ip(self, request) -> str:
        """Get client IP from request headers."""
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            return x_forwarded_for.split(',')[0].strip()
        return request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR') or 'unknown'
    def _check_rate_limit(self, client_ip: str) -> tuple[bool, int]:
        """
        Check if client is rate limited.
        Returns (is_allowed, current_count).
        """
        from django.core.cache import cache

        rate_limit_key = f"search_location:rate:{client_ip}"
        current_count = cache.get(rate_limit_key, 0)

        if current_count >= self.RATE_LIMIT_REQUESTS:
            return False, current_count

        # Increment counter with TTL
        cache.set(rate_limit_key, current_count + 1, self.RATE_LIMIT_PERIOD)
        return True, current_count + 1
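Worth noting: the get/set pair above is not atomic, so two concurrent requests can read the same count and both pass, and each `set()` also restarts the TTL window. If that ever matters, Django's cache API offers `add()`/`incr()`, which push the race into the backend; a sketch of that variant (atomicity of `incr()` depends on the backend, e.g. Redis or Memcached):

```python
from django.core.cache import cache

def check_rate_limit_atomic(client_ip: str, limit: int = 30, period: int = 60) -> bool:
    """Fixed-window limiter via add()/incr(); a sketch, not the view's code."""
    key = f"search_location:rate:{client_ip}"
    # add() sets the key only if absent, so the TTL starts once per window.
    if cache.add(key, 1, timeout=period):
        return True
    try:
        # incr() is atomic on backends such as Redis and Memcached.
        return cache.incr(key) <= limit
    except ValueError:
        # Key expired between add() and incr(); treat it as a fresh window.
        cache.add(key, 1, timeout=period)
        return True
```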
    def _get_cached_result(self, query_hash: str):
        """Get cached result if available."""
        from django.core.cache import cache

        cache_key = f"search_location:query:{query_hash}"
        cached_data = cache.get(cache_key)

        if cached_data:
            # Update access count in a separate key
            access_key = f"search_location:access:{query_hash}"
            access_count = cache.get(access_key, 0)
            cache.set(access_key, access_count + 1, self.CACHE_EXPIRATION)

        return cached_data

    def _set_cached_result(self, query: str, query_hash: str, results: list):
        """Cache the results."""
        from django.core.cache import cache

        cache_key = f"search_location:query:{query_hash}"
        cache_data = {
            "query": query,
            "results": results,
            "result_count": len(results),
        }
        cache.set(cache_key, cache_data, self.CACHE_EXPIRATION)

        # Initialize access count
        access_key = f"search_location:access:{query_hash}"
        cache.set(access_key, 1, self.CACHE_EXPIRATION)
    def _log_usage(self, query: str, cache_hit: bool, api_called: bool,
                   response_time_ms: int | None = None, result_count: int | None = None,
                   client_ip: str | None = None, user_id: str | None = None,
                   error: str | None = None, status_code: int | None = None):
        """Log API usage for monitoring."""
        # Log to structured logger for now (can be enhanced to write to DB)
        logger.info(
            "OpenStreetMap API usage",
            extra={
                "query": query[:100],
                "cache_hit": cache_hit,
                "api_called": api_called,
                "response_time_ms": response_time_ms,
                "result_count": result_count,
                "client_ip": client_ip,
                "user_id": user_id,
                "error": error,
                "status_code": status_code,
            },
        )
def post(self, request):
|
||||
import time
|
||||
import re
|
||||
start_time = time.time()
|
||||
|
||||
client_ip = self._get_client_ip(request)
|
||||
user_id = None
|
||||
|
||||
try:
|
||||
# Safely get user ID
|
||||
if request.user and request.user.is_authenticated:
|
||||
user_id = str(getattr(request.user, 'user_id', request.user.id))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
# ================================================================
|
||||
# STEP 0: Sanitize and validate input
|
||||
# ================================================================
|
||||
raw_query = request.data.get("query", "")
|
||||
if not isinstance(raw_query, str):
|
||||
raw_query = str(raw_query) if raw_query else ""
|
||||
|
||||
# Sanitize query: strip, limit length, remove control characters
|
||||
query = raw_query.strip()[:500]
|
||||
query = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', query)
|
||||
|
||||
# Validate limit
|
||||
try:
|
||||
limit = min(int(request.data.get("limit", 5)), 10)
|
||||
limit = max(limit, 1) # At least 1
|
||||
except (ValueError, TypeError):
|
||||
limit = 5
|
||||
|
||||
# Sanitize country code (2-letter ISO code)
|
||||
raw_country = request.data.get("country", "")
|
||||
country_code = ""
|
||||
if raw_country and isinstance(raw_country, str):
|
||||
country_code = re.sub(r'[^a-zA-Z]', '', raw_country)[:2].lower()
|
||||
|
||||
|
||||
# ================================================================
|
||||
# STEP 1: Validate query (original: min 3 characters)
|
||||
# ================================================================
|
||||
if not query:
|
||||
response_time = int((time.time() - start_time) * 1000)
|
||||
self._log_usage(
|
||||
query="",
|
||||
cache_hit=False,
|
||||
api_called=False,
|
||||
response_time_ms=response_time,
|
||||
client_ip=client_ip,
|
||||
user_id=user_id,
|
||||
error="Query is required",
|
||||
status_code=400
|
||||
)
|
||||
return Response(
|
||||
{"error": "Query is required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
if len(query) < 3: # Match original: min 3 characters
|
||||
response_time = int((time.time() - start_time) * 1000)
|
||||
self._log_usage(
|
||||
query=query,
|
||||
cache_hit=False,
|
||||
api_called=False,
|
||||
response_time_ms=response_time,
|
                client_ip=client_ip,
                user_id=user_id,
                error="Query must be at least 3 characters",
                status_code=400,
            )
            return Response(
                {"error": "Query must be at least 3 characters"},
                status=status.HTTP_400_BAD_REQUEST,
            )

            # ================================================================
            # STEP 2: Check rate limit (30 req/min per IP)
            # ================================================================
            is_allowed, current_count = self._check_rate_limit(client_ip)
            if not is_allowed:
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Rate limit exceeded",
                    status_code=429,
                )
                return Response(
                    {"error": "Rate limit exceeded. Please try again later."},
                    status=status.HTTP_429_TOO_MANY_REQUESTS,
                    headers={
                        "Retry-After": str(self.RATE_LIMIT_PERIOD),
                        "X-RateLimit-Limit": str(self.RATE_LIMIT_REQUESTS),
                        "X-RateLimit-Remaining": "0",
                    },
                )

            # ================================================================
            # STEP 3: Check cache
            # ================================================================
            query_hash = self._hash_query(query)
            cached = self._get_cached_result(query_hash)

            if cached:
                response_time = int((time.time() - start_time) * 1000)
                results = cached.get("results", [])

                self._log_usage(
                    query=query,
                    cache_hit=True,
                    api_called=False,
                    response_time_ms=response_time,
                    result_count=len(results),
                    client_ip=client_ip,
                    user_id=user_id,
                    status_code=200,
                )

                # Return raw array like original (frontend handles both formats)
                response = Response(
                    results,
                    status=status.HTTP_200_OK,
                )
                response["X-Cache"] = "HIT"
                response["Cache-Control"] = "public, max-age=3600"
                return response

            # ================================================================
            # STEP 4: Cache miss - call Nominatim API
            # ================================================================
            import httpx

            try:
                params = {
                    "q": query,
                    "format": "json",
                    "addressdetails": 1,
                    "limit": limit,
                }
                if country_code:
                    params["countrycodes"] = country_code.lower()

                api_response = httpx.get(
                    "https://nominatim.openstreetmap.org/search",
                    params=params,
                    timeout=10.0,
                    headers={"User-Agent": "ThrillWiki/1.0 (https://thrillwiki.com)"},
                )

                if api_response.status_code != 200:
                    logger.warning(
                        f"Nominatim API error: {api_response.status_code}",
                        extra={"status": api_response.status_code},
                    )
                    return Response(
                        {"error": "Location search failed", "status": api_response.status_code},
                        status=api_response.status_code,
                    )

                data = api_response.json()
                response_time = int((time.time() - start_time) * 1000)

                # ================================================================
                # STEP 5: Cache the results (background-like, but sync in Django)
                # ================================================================
                try:
                    self._set_cached_result(query, query_hash, data)
                except Exception as cache_error:
                    logger.warning(f"Failed to cache result: {cache_error}")

                # Log usage
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=True,
                    response_time_ms=response_time,
                    result_count=len(data) if isinstance(data, list) else 0,
                    client_ip=client_ip,
                    user_id=user_id,
                    status_code=200,
                )

                # Return raw array like original Nominatim response
                response = Response(
                    data,
                    status=status.HTTP_200_OK,
                )
                response["X-Cache"] = "MISS"
                response["Cache-Control"] = "public, max-age=3600"
                return response

            except httpx.HTTPError as e:
                logger.warning(f"Forward geocoding failed: {e}")
                response_time = int((time.time() - start_time) * 1000)

                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=True,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error=str(e),
                    status_code=500,
                )

                return Response(
                    {"error": "Failed to fetch location data"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )

        except ValueError as e:
            return Response(
                {"error": f"Invalid parameter: {str(e)}"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            capture_and_log(e, "Search location", source="api")
            return Response(
                {"error": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

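The `_check_rate_limit` helper and the `RATE_LIMIT_*` constants referenced in STEP 2 sit outside this hunk. A minimal sketch of how such a helper could be built on Django's low-level cache API (the key format and constant values are assumptions, not taken from this diff):

```python
from django.core.cache import cache

RATE_LIMIT_REQUESTS = 30  # "30 req/min per IP", per the STEP 2 comment
RATE_LIMIT_PERIOD = 60    # window length in seconds

def check_rate_limit(client_ip: str) -> tuple[bool, int]:
    """Return (is_allowed, current_count) for this IP in the current window."""
    key = f"geocode:rate:{client_ip}"
    # cache.add() only sets the key when it is absent, so it atomically
    # starts a new window; incr() then counts requests inside that window.
    if cache.add(key, 1, timeout=RATE_LIMIT_PERIOD):
        return True, 1
    try:
        count = cache.incr(key)  # atomic on backends such as Redis/Memcached
    except ValueError:
        # Key expired between add() and incr(); treat as a fresh window.
        cache.set(key, 1, timeout=RATE_LIMIT_PERIOD)
        return True, 1
    return count <= RATE_LIMIT_REQUESTS, count
```

The view would wrap this as a method and read the constants off the class, matching the `self.RATE_LIMIT_PERIOD` / `self.RATE_LIMIT_REQUESTS` references above.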
@@ -56,36 +56,26 @@ class CompanyDetailOutputSerializer(serializers.Serializer):
    name = serializers.CharField()
    slug = serializers.CharField()
    roles = serializers.ListField(child=serializers.CharField())
    description = serializers.CharField()
    website = serializers.URLField(required=False, allow_blank=True)

    # Entity type and status (ported from legacy)
    person_type = serializers.CharField(required=False, allow_blank=True)
    status = serializers.CharField()
    description = serializers.CharField(allow_blank=True)
    website = serializers.URLField(required=False, allow_blank=True, allow_null=True)

    # Founding information
    founded_year = serializers.IntegerField(allow_null=True)
    founded_date = serializers.DateField(allow_null=True)
    founded_date_precision = serializers.CharField(required=False, allow_blank=True)
    founded_date = serializers.DateField(allow_null=True, required=False)

    # Image URLs
    logo_url = serializers.URLField(required=False, allow_blank=True)
    banner_image_url = serializers.URLField(required=False, allow_blank=True)
    card_image_url = serializers.URLField(required=False, allow_blank=True)

    # Rating and review aggregates
    average_rating = serializers.DecimalField(max_digits=3, decimal_places=2, allow_null=True)
    review_count = serializers.IntegerField()

    # Counts
    parks_count = serializers.IntegerField()
    rides_count = serializers.IntegerField()
    # Counts (from model)
    rides_count = serializers.IntegerField(required=False, default=0)
    coasters_count = serializers.IntegerField(required=False, default=0)

    # Frontend URL
    url = serializers.URLField(required=False, allow_blank=True, allow_null=True)

    # Metadata
    created_at = serializers.DateTimeField()
    updated_at = serializers.DateTimeField()


class CompanyCreateInputSerializer(serializers.Serializer):
    """Input serializer for creating companies."""

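To see what the loosened `website` definition now accepts, a quick probe against a standalone serializer (illustrative only, `WebsiteProbeSerializer` is not part of this diff):

```python
from rest_framework import serializers

class WebsiteProbeSerializer(serializers.Serializer):
    # Mirrors the new pattern above: optional, may be blank or null.
    website = serializers.URLField(required=False, allow_blank=True, allow_null=True)

for payload in ({}, {"website": None}, {"website": ""}, {"website": "https://thrillwiki.com"}):
    s = WebsiteProbeSerializer(data=payload)
    print(payload, "valid:", s.is_valid())
# All four payloads validate. With the old URLField(required=False, allow_blank=True),
# {"website": None} would have failed because null was not allowed.
```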
@@ -15,6 +15,7 @@ from apps.core.views.views import FSMTransitionView
from .sse import ModerationSSETestView, ModerationSSEView
from .views import (
    BulkOperationViewSet,
    ConvertSubmissionToEditView,
    EditSubmissionViewSet,
    ModerationActionViewSet,
    ModerationQueueViewSet,
@@ -189,6 +190,8 @@ urlpatterns = [
    *sse_patterns,
    # Include all router URLs (API endpoints)
    path("api/", include(router.urls)),
    # Standalone convert-to-edit endpoint (frontend calls /moderation/api/edit-submissions/ POST)
    path("api/edit-submissions/", ConvertSubmissionToEditView.as_view(), name="convert-to-edit"),
    # FSM transition convenience endpoints
] + fsm_transition_patterns

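Because the new route is named, in-process callers can resolve it instead of hard-coding the path; a hedged sketch (it assumes the include() adds no URL namespace):

```python
from django.urls import reverse

# Resolves to ".../api/edit-submissions/" under wherever these urlpatterns are
# mounted; prefix the name (e.g. "moderation:convert-to-edit") if a namespace is used.
url = reverse("convert-to-edit")
```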
@@ -1516,6 +1516,116 @@ class EditSubmissionViewSet(viewsets.ModelViewSet):
        except Exception as e:
            return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)

    @action(detail=True, methods=["post"], permission_classes=[IsModeratorOrAdmin], url_path="convert-to-edit")
    def convert_to_edit(self, request, pk=None):
        """
        Convert a pending entity submission to an edit suggestion.

        This is used when a new entity submission should be merged with
        an existing entity rather than creating a new one.

        Request body:
            target_entity_type: str - The type of entity to merge into (e.g., 'park', 'ride')
            target_entity_id: int - The ID of the existing entity
            merge_fields: list[str] - Optional list of fields to merge (defaults to all)
            notes: str - Optional moderator notes

        Returns:
            200: Submission successfully converted
            400: Invalid request or conversion not possible
            404: Submission or target entity not found
        """
        from django.contrib.contenttypes.models import ContentType

        submission = self.get_object()
        user = request.user

        # Validate submission state
        if submission.status not in ["PENDING", "CLAIMED"]:
            return Response(
                {"error": f"Cannot convert submission in {submission.status} state"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get request data
        target_entity_type = request.data.get("target_entity_type")
        target_entity_id = request.data.get("target_entity_id")
        merge_fields = request.data.get("merge_fields", [])
        notes = request.data.get("notes", "")

        if not target_entity_type or not target_entity_id:
            return Response(
                {"error": "target_entity_type and target_entity_id are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Look up the target entity
        try:
            app_label = "parks" if target_entity_type == "park" else "rides"
            if target_entity_type == "company":
                app_label = "core"

            content_type = ContentType.objects.get(app_label=app_label, model=target_entity_type)
            model_class = content_type.model_class()
            target_entity = model_class.objects.get(pk=target_entity_id)
        except Exception:
            # Covers ContentType.DoesNotExist and the model's DoesNotExist alike.
            return Response(
                {"error": f"Target entity not found: {target_entity_type}#{target_entity_id}"},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Store the conversion metadata
        conversion_data = {
            "converted_from": "new_entity_submission",
            "target_entity_type": target_entity_type,
            "target_entity_id": target_entity_id,
            "target_entity_name": str(target_entity),
            "merge_fields": merge_fields,
            "converted_by": user.username,
            "converted_at": timezone.now().isoformat(),
            "notes": notes,
        }

        # Update the submission
        if hasattr(submission, "changes") and isinstance(submission.changes, dict):
            submission.changes["_conversion_metadata"] = conversion_data
        else:
            # Create changes dict if it doesn't exist
            submission.changes = {"_conversion_metadata": conversion_data}

        # Add moderator note
        if hasattr(submission, "moderator_notes"):
            existing_notes = submission.moderator_notes or ""
            submission.moderator_notes = (
                f"{existing_notes}\n\n[Converted to edit] {notes}".strip()
                if notes
                else f"{existing_notes}\n\n[Converted to edit for {target_entity_type} #{target_entity_id}]".strip()
            )

        submission.save()

        # Log the conversion
        log_business_event(
            logger,
            event_type="submission_converted_to_edit",
            message=f"EditSubmission {submission.id} converted to edit for {target_entity_type}#{target_entity_id}",
            context={
                "model": "EditSubmission",
                "object_id": submission.id,
                "target_entity_type": target_entity_type,
                "target_entity_id": target_entity_id,
                "converted_by": user.username,
            },
            request=request,
        )

        return Response({
            "success": True,
            "message": f"Submission converted to edit for {target_entity_type} #{target_entity_id}",
            "submission": self.get_serializer(submission).data,
            "conversion_metadata": conversion_data,
        })

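For reference, a hedged example of exercising the action above with DRF's test client. The router prefix, the `moderator` user, and the pending CREATE `submission` are assumptions, not taken from this diff:

```python
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=moderator)  # assumed moderator fixture

# Detail actions are exposed at <router-prefix>/<pk>/convert-to-edit/;
# the prefix below is a guess based on the moderation URL layout.
resp = client.post(
    f"/api/v1/moderation/api/edit-submissions/{submission.pk}/convert-to-edit/",
    {
        "target_entity_type": "park",
        "target_entity_id": 42,
        "merge_fields": ["name", "description"],
        "notes": "Duplicate of an existing park",
    },
    format="json",
)
assert resp.status_code == 200
assert resp.data["success"] is True
```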
class PhotoSubmissionViewSet(viewsets.ModelViewSet):
    """
@@ -1667,3 +1777,365 @@ class PhotoSubmissionViewSet(viewsets.ModelViewSet):
            return Response(self.get_serializer(submission).data)
        except Exception as e:
            return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)

# ============================================================================
# Standalone Convert Submission to Edit View
# ============================================================================


from rest_framework.views import APIView


class ConvertSubmissionToEditView(APIView):
    """
    POST /api/moderation/api/convert-to-edit/

    Convert a CREATE submission to an EDIT by linking it to an existing entity.

    Full parity with Supabase Edge Function: convert-submission-to-edit

    This endpoint:
    1. Validates the submission is locked by the requesting moderator
    2. Validates the submission is in a valid state (PENDING or CLAIMED)
    3. Validates the submission_type is 'CREATE' (only CREATE can be converted)
    4. Looks up the existing entity
    5. Updates the submission_type to 'EDIT' and links it to the existing entity
    6. Logs to the audit trail

    BULLETPROOFED: Transaction safety, UUID validation, comprehensive error handling.

    Request body:
        {
            "submissionId": "...",      # The EditSubmission ID
            "itemId": "...",            # The submission item ID (optional, for Supabase compat)
            "existingEntityId": "...",  # The existing entity to link to
            "conversionType": "..."     # Optional: 'automatic' or 'manual'
        }

    Returns:
        {
            "success": true/false,
            "itemId": "...",
            "submissionId": "...",
            "existingEntityId": "...",
            "existingEntityName": "...",
            "message": "..."
        }
    """

    permission_classes = [IsModeratorOrAdmin]

    # Validation constants
    MAX_NOTE_LENGTH = 5000
    ALLOWED_CONVERSION_TYPES = {"automatic", "manual", "duplicate_detected"}
    VALID_STATES = {"PENDING", "CLAIMED", "pending", "partially_approved", "claimed"}

    def post(self, request):
        import uuid

        from django.contrib.contenttypes.models import ContentType
        from django.db import transaction

        try:
            # ================================================================
            # STEP 0: Validate user is authenticated
            # ================================================================
            user = request.user
            if not user or not user.is_authenticated:
                return Response(
                    {"success": False, "message": "Authentication required"},
                    status=status.HTTP_401_UNAUTHORIZED,
                )

            # ================================================================
            # STEP 1: Extract and validate request parameters
            # ================================================================
            submission_id = request.data.get("submissionId")
            item_id = request.data.get("itemId")  # For Supabase compatibility
            existing_entity_id = request.data.get("existingEntityId")
            conversion_type = request.data.get("conversionType", "automatic")

            # Validate required parameters
            if not submission_id:
                return Response(
                    {"success": False, "message": "submissionId is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if not existing_entity_id:
                return Response(
                    {"success": False, "message": "existingEntityId is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Validate UUID formats
            try:
                if isinstance(submission_id, str):
                    submission_uuid = uuid.UUID(submission_id)
                else:
                    submission_uuid = submission_id
            except (ValueError, AttributeError):
                return Response(
                    {"success": False, "message": "Invalid submissionId format"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            try:
                if isinstance(existing_entity_id, str):
                    entity_uuid = uuid.UUID(existing_entity_id)
                else:
                    entity_uuid = existing_entity_id
            except (ValueError, AttributeError):
                return Response(
                    {"success": False, "message": "Invalid existingEntityId format"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Sanitize conversion_type
            if not isinstance(conversion_type, str):
                conversion_type = "automatic"
            conversion_type = conversion_type.strip().lower()[:50]
            if conversion_type not in self.ALLOWED_CONVERSION_TYPES:
                conversion_type = "automatic"

            # ================================================================
            # STEP 2: Get the submission with select_for_update
            # ================================================================
            # Note: this row lock only lives for the duration of this atomic
            # block; STEP 8 re-fetches with a fresh lock before mutating the row.
            try:
                with transaction.atomic():
                    submission = EditSubmission.objects.select_for_update().get(pk=submission_uuid)
            except EditSubmission.DoesNotExist:
                return Response(
                    {"success": False, "message": "Submission not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
            except Exception as e:
                logger.warning(f"Failed to fetch submission {submission_id}: {e}")
                return Response(
                    {"success": False, "message": "Failed to fetch submission"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )

            # ================================================================
            # STEP 3: Verify submission is locked by requesting moderator
            # ================================================================
            claimed_by_id = getattr(submission, 'claimed_by_id', None)
            user_id = getattr(user, 'id', None)

            if claimed_by_id != user_id:
                # Additional check: allow admins to override
                if not getattr(user, 'is_staff', False) and not getattr(user, 'is_superuser', False):
                    return Response(
                        {"success": False, "message": "You must claim this submission before converting it"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                logger.info(
                    f"Admin override: {user.username} converting submission claimed by user {claimed_by_id}",
                    extra={"submission_id": str(submission_uuid), "admin_user": user.username},
                )

            # ================================================================
            # STEP 4: Validate submission state
            # ================================================================
            current_status = getattr(submission, 'status', 'unknown')
            if current_status not in self.VALID_STATES:
                return Response(
                    {"success": False, "message": f"Submission must be pending or claimed to convert (current: {current_status})"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # ================================================================
            # STEP 5: Validate submission_type is CREATE
            # ================================================================
            current_type = getattr(submission, 'submission_type', '')
            if current_type != "CREATE":
                return Response(
                    {
                        "success": False,
                        "message": f"Item is already set to '{current_type}', cannot convert"
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # ================================================================
            # STEP 6: Determine entity type from submission's content_type
            # ================================================================
            target_entity_type = None
            target_entity_name = None
            target_entity_slug = None
            target_entity = None

            if submission.content_type:
                target_entity_type = submission.content_type.model

            # Also try to get from changes if available
            if not target_entity_type and isinstance(submission.changes, dict):
                target_entity_type = submission.changes.get("entity_type")

            # ================================================================
            # STEP 7: Look up the existing entity
            # ================================================================
            app_label_map = {
                "park": "parks",
                "ride": "rides",
                "company": "core",
                "ridemodel": "rides",
                "manufacturer": "core",
                "operator": "core",
            }

            if target_entity_type:
                try:
                    app_label = app_label_map.get(target_entity_type.lower(), "core")
                    content_type = ContentType.objects.get(app_label=app_label, model=target_entity_type.lower())
                    model_class = content_type.model_class()

                    if model_class is None:
                        raise ValueError(f"No model class for {target_entity_type}")

                    target_entity = model_class.objects.filter(pk=entity_uuid).first()

                    if not target_entity:
                        return Response(
                            {"success": False, "message": f"Existing {target_entity_type} not found with ID {existing_entity_id}"},
                            status=status.HTTP_404_NOT_FOUND,
                        )

                    target_entity_name = str(getattr(target_entity, 'name', target_entity))[:200]
                    target_entity_slug = getattr(target_entity, 'slug', None)

                except ContentType.DoesNotExist:
                    return Response(
                        {"success": False, "message": f"Unknown entity type: {target_entity_type}"},
                        status=status.HTTP_400_BAD_REQUEST,
                    )
                except Exception as e:
                    logger.warning(f"Failed to look up entity {target_entity_type}/{existing_entity_id}: {e}")
                    return Response(
                        {"success": False, "message": "Existing entity not found"},
                        status=status.HTTP_404_NOT_FOUND,
                    )
            else:
                # Try to find entity across common models
                for model_name, app_label in [("park", "parks"), ("ride", "rides"), ("company", "core")]:
                    try:
                        content_type = ContentType.objects.get(app_label=app_label, model=model_name)
                        model_class = content_type.model_class()

                        if model_class is None:
                            continue

                        target_entity = model_class.objects.filter(pk=entity_uuid).first()
                        if target_entity:
                            target_entity_type = model_name
                            target_entity_name = str(getattr(target_entity, 'name', target_entity))[:200]
                            target_entity_slug = getattr(target_entity, 'slug', None)
                            break
                    except Exception:
                        continue

                if not target_entity:
                    return Response(
                        {"success": False, "message": "Existing entity not found in any known model"},
                        status=status.HTTP_404_NOT_FOUND,
                    )

            # ================================================================
            # STEP 8: Update submission with atomic transaction
            # ================================================================
            try:
                with transaction.atomic():
                    # Re-fetch with lock to ensure no concurrent modifications
                    submission = EditSubmission.objects.select_for_update().get(pk=submission_uuid)

                    # Double-check state hasn't changed
                    if submission.submission_type != "CREATE":
                        return Response(
                            {"success": False, "message": "Submission was already converted"},
                            status=status.HTTP_409_CONFLICT,
                        )

                    # Update submission_type
                    submission.submission_type = "EDIT"

                    # Link to existing entity via object_id
                    submission.object_id = entity_uuid

                    # Store conversion metadata in changes
                    if not isinstance(submission.changes, dict):
                        submission.changes = {}

                    submission.changes["_conversion_metadata"] = {
                        "converted_from": "new_entity_submission",
                        "original_action_type": "create",
                        "target_entity_type": target_entity_type,
                        "target_entity_id": str(entity_uuid),
                        "target_entity_name": target_entity_name,
                        "target_entity_slug": target_entity_slug,
                        "conversion_type": conversion_type,
                        "converted_by": user.username,
                        "converted_by_id": str(getattr(user, 'user_id', user.id)),
                        "converted_at": timezone.now().isoformat(),
                    }

                    # Add moderator note (with length limit)
                    existing_notes = (submission.notes or "")[:self.MAX_NOTE_LENGTH]
                    conversion_note = f"[Converted CREATE to EDIT] for {target_entity_type}: {target_entity_name}"
                    if target_entity_slug:
                        conversion_note += f" ({target_entity_slug})"
                    conversion_note += f". Conversion type: {conversion_type}"

                    new_notes = f"{existing_notes}\n\n{conversion_note}".strip()
                    submission.notes = new_notes[:self.MAX_NOTE_LENGTH]

                    submission.save(update_fields=["submission_type", "object_id", "changes", "notes"])

            except Exception as e:
                logger.error(f"Failed to update submission {submission_uuid}: {e}")
                return Response(
                    {"success": False, "message": "Failed to update submission"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )

            # ================================================================
            # STEP 9: Log to audit trail (outside transaction for reliability)
            # ================================================================
            try:
                log_business_event(
                    logger,
                    event_type="submission_converted_to_edit",
                    message=f"EditSubmission {submission.id} converted from CREATE to EDIT for {target_entity_type}#{entity_uuid}",
                    context={
                        "model": "EditSubmission",
                        "object_id": str(submission.id),
                        "item_id": str(item_id) if item_id else None,
                        "target_entity_type": target_entity_type,
                        "target_entity_id": str(entity_uuid),
                        "target_entity_name": target_entity_name,
                        "converted_by": user.username,
                        "conversion_type": conversion_type,
                    },
                    request=request,
                )
            except Exception as log_error:
                # Don't fail the request if logging fails
                logger.warning(f"Failed to log conversion event: {log_error}")

            # ================================================================
            # STEP 10: Return success response matching original format
            # ================================================================
            return Response({
                "success": True,
                "itemId": str(item_id) if item_id else str(submission.id),
                "submissionId": str(submission.id),
                "existingEntityId": str(entity_uuid),
                "existingEntityName": target_entity_name,
                "message": f"Converted submission item to EDIT for existing {target_entity_type}: {target_entity_name}",
            })

        except Exception as e:
            capture_and_log(e, "Convert submission to edit", source="moderation", request=request)
            return Response(
                {"success": False, "message": "Internal server error"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

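Unlike the viewset action, this standalone view speaks the Supabase-era camelCase contract. A hedged example call (host, token, and UUIDs are placeholders; the path is taken from the mapping table further below):

```python
import httpx

resp = httpx.post(
    "https://thrillwiki.example/api/v1/moderation/api/edit-submissions/",  # assumed host
    json={
        "submissionId": "3f0c9a4e-1c2b-4d5e-8f6a-7b8c9d0e1f2a",   # hypothetical UUID
        "existingEntityId": "0a1b2c3d-4e5f-6a7b-8c9d-0e1f2a3b4c5d",  # hypothetical UUID
        "conversionType": "manual",
    },
    headers={"Authorization": "Bearer <token>"},
)
print(resp.json())  # e.g. {"success": true, "submissionId": "...", "message": "..."}
```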
@@ -91,6 +91,7 @@ dev = [
    "rope>=1.14.0",
    "ruff>=0.9.2",
    "pyright>=1.1.405",
    "factory-boy>=3.3.3",
]
test = [
    "pytest>=8.3.5",

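`factory-boy` joins the dev group, presumably for test fixtures; a minimal sketch of the kind of factory it enables (the model and import path are assumptions):

```python
import factory
from apps.parks.models import Park  # assumed import path

class ParkFactory(factory.django.DjangoModelFactory):
    """Builds throwaway Park rows for tests."""
    class Meta:
        model = Park

    name = factory.Sequence(lambda n: f"Test Park {n}")
    slug = factory.Sequence(lambda n: f"test-park-{n}")
```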
docs/ENDPOINT_MAPPING.md (Normal file, 93 lines)
@@ -0,0 +1,93 @@
# Supabase → Django Endpoint Mapping

## Overview
This document maps all Supabase endpoints to their Django REST Framework equivalents.

---

## Edge Function Mappings

### Images
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `upload-image` | `/api/v1/images/generate-upload-url/` | POST | ✅ |
| `delete-image` | `/api/v1/images/delete/` | POST | ✅ |
| `generate-og-image` | `/api/v1/images/og-image/` | POST | ✅ |

### Location/Maps
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `detect-location` | `/api/v1/maps/detect-location/` | POST | ✅ |
| `enrich-location` | `/api/v1/maps/enrich-location/` | POST | ✅ |
| `search-location` | `/api/v1/maps/search-location/` | POST | ✅ |

### Notifications (Django Native)
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `get-notifications` | `/api/v1/notifications/` | GET | ✅ |
| `mark-notification-read` | `/api/v1/notifications/mark-read/` | POST | ✅ |
| `get-unread-count` | `/api/v1/notifications/unread-count/` | GET | ✅ |

### Moderation
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `process-bulk-approval` | `/api/v1/moderation/api/bulk-operations/` | POST | ✅ |
| `claim-submission` | `/api/v1/moderation/api/queue/` | POST | ✅ |
| `convert-submission-to-edit` | `/api/v1/moderation/api/edit-submissions/` | POST | ✅ |

### Auth/MFA
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `mfa-unenroll` | `/api/v1/auth/mfa/totp/deactivate/` | POST | ✅ |
| `process-oauth-profile` | `/api/v1/auth/social/process-profile/` | POST | ✅ |

### Account Management
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `cancel-account-deletion` | `/api/v1/accounts/delete-account/cancel/` | POST | ✅ |
| `confirm-account-deletion` | `/api/v1/accounts/delete-account/verify/` | POST | ✅ |
| `cancel-email-change` | `/api/v1/accounts/email-change/cancel/` | POST | ✅ |
| `export-user-data` | `/api/v1/accounts/data-export/` | POST | ✅ |

### Admin Dashboard
| Supabase Function | Django Endpoint | Method | Status |
|-------------------|-----------------|--------|--------|
| `detect-anomalies` | `/api/v1/admin/anomalies/detect/` | POST | ✅ |
| `collect-metrics` | `/api/v1/admin/metrics/collect/` | POST | ✅ |
| `pipeline-integrity-scan` | `/api/v1/admin/pipeline/integrity-scan/` | POST | ✅ |
| `task-status` | `/api/v1/admin/tasks/status/` | GET | ✅ |

---

## Table Mappings

| Supabase Table | Django Endpoint |
|----------------|-----------------|
| `parks` | `/api/v1/parks/` |
| `rides` | `/api/v1/rides/` |
| `companies` | `/api/v1/companies/` |
| `ride_models` | `/api/v1/rides/models/` |
| `profiles` | `/api/v1/accounts/profiles/` |
| `reviews` | `/api/v1/reviews/` |
| `photos` | `/api/v1/media/photos/` |
| `content_submissions` | `/api/v1/moderation/submissions/` |
| `ride_credits` | `/api/v1/accounts/credits/` |

---

## Scheduled Tasks (Celery)

| Supabase Function | Celery Task |
|-------------------|-------------|
| `process-scheduled-deletions` | `apps.core.tasks.scheduled` |
| `process-closing-entities` | `apps.core.tasks.scheduled` |
| `cleanup-orphaned-images` | `apps.core.tasks.scheduled` |

---

## Deprecated

| Function | Status |
|----------|--------|
| `migrate-novu-users` | 🚫 Replaced by Django native |
| `novu-webhook` | 🚫 Replaced by Django native |

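A quick way to spot-check any mapped row above once both stacks run side by side (host, token, and the response shape are placeholders):

```python
import httpx

resp = httpx.get(
    "https://thrillwiki.example/api/v1/notifications/unread-count/",  # assumed host
    headers={"Authorization": "Bearer <token>"},
)
print(resp.status_code, resp.json())  # e.g. 200 {"unread_count": 3}
```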
uv.lock (generated, 2 lines)
@@ -2083,6 +2083,7 @@ dev = [
    { name = "autopep8" },
    { name = "black" },
    { name = "django-stubs" },
    { name = "factory-boy" },
    { name = "pyright" },
    { name = "rope" },
    { name = "ruff" },
@@ -2166,6 +2167,7 @@ dev = [
    { name = "autopep8", specifier = ">=2.3.2" },
    { name = "black", specifier = ">=25.1.0" },
    { name = "django-stubs", specifier = ">=5.2.2" },
    { name = "factory-boy", specifier = ">=3.3.3" },
    { name = "pyright", specifier = ">=1.1.405" },
    { name = "rope", specifier = ">=1.14.0" },
    { name = "ruff", specifier = ">=0.9.2" },