feat: add a notifications app, admin API views for dashboard metrics, and scheduled tasks; update API routing and project configuration

pacnpal
2026-01-05 09:50:00 -05:00
parent 1c6e219662
commit a801813dcf
27 changed files with 3829 additions and 131 deletions

View File

@@ -0,0 +1 @@
# Admin API module

View File

@@ -0,0 +1,37 @@
"""
Admin API URL configuration.
Provides endpoints for admin dashboard functionality.
"""
from django.urls import path
from . import views
app_name = "admin_api"
urlpatterns = [
# OSM Cache Stats
path(
"osm-usage-stats/",
views.OSMUsageStatsView.as_view(),
name="osm_usage_stats",
),
# Rate Limit Metrics
path(
"rate-limit-metrics/",
views.RateLimitMetricsView.as_view(),
name="rate_limit_metrics",
),
# Database Manager (admin CRUD operations)
path(
"database-manager/",
views.DatabaseManagerView.as_view(),
name="database_manager",
),
# Celery Task Status (read-only)
path(
"tasks/status/",
views.CeleryTaskStatusView.as_view(),
name="task_status",
),
]
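
For reference, a minimal sketch of how an admin client might exercise these routes, assuming the module is mounted under /api/v1/admin/ as in the project urls.py hunk further down; the host and bearer token are placeholders:

import requests

BASE = "https://thrillwiki.example/api/v1/admin"  # assumed mount point
HEADERS = {"Authorization": "Bearer <admin-jwt>"}  # placeholder admin credential

# Read-only dashboard endpoints are plain GETs.
print(requests.get(f"{BASE}/osm-usage-stats/", headers=HEADERS, timeout=10).json())
print(requests.get(f"{BASE}/tasks/status/", headers=HEADERS, timeout=10).json())

# Metrics and CRUD endpoints dispatch on a JSON "action"/"operation" key.
print(
    requests.post(
        f"{BASE}/rate-limit-metrics/",
        json={"action": "stats", "timeWindow": 60000},
        headers=HEADERS,
        timeout=10,
    ).json()
)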

View File

@@ -0,0 +1,710 @@
"""
Admin API views for dashboard functionality.
These views provide endpoints for:
- OSM cache statistics
- Rate limiting metrics
- Database manager operations
- Celery task status
"""
import logging
from datetime import timedelta
from typing import Any
from django.apps import apps
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.db import transaction
from django.db.models import Count, Q
from django.utils import timezone
from rest_framework import status
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.core.utils import capture_and_log
logger = logging.getLogger(__name__)
User = get_user_model()
class OSMUsageStatsView(APIView):
"""
GET /admin/osm-usage-stats/
Return OSM cache statistics for admin dashboard.
"""
permission_classes = [IsAdminUser]
def get(self, request):
"""Return OSM/location cache usage statistics."""
try:
# Try to get stats from cache first
cached_stats = cache.get("osm_usage_stats")
if cached_stats:
return Response(cached_stats)
# Calculate fresh stats
now = timezone.now()
last_24h = now - timedelta(hours=24)
# Get location query cache model if it exists
try:
LocationQueryCache = apps.get_model("maps", "LocationQueryCache")
has_cache_model = True
except LookupError:
has_cache_model = False
if has_cache_model:
total_queries = LocationQueryCache.objects.count()
recent_queries = LocationQueryCache.objects.filter(
created_at__gte=last_24h
).count()
cache_hits = LocationQueryCache.objects.filter(
access_count__gt=1
).count()
stats = {
"timeWindow": "24h",
"totalSearches": recent_queries,
"cacheHits": cache_hits,
"cacheMisses": max(0, recent_queries - cache_hits),
"apiCalls": max(0, recent_queries - cache_hits),
"errors": 0,
"cacheHitRate": (
round(cache_hits / total_queries * 100, 2)
if total_queries > 0
else 0
),
"avgResponseTime": 0, # Would need request logging
"totalCachedQueries": total_queries,
"totalCacheAccesses": cache_hits,
"hourlyData": [],
"apiCallsSaved": cache_hits,
"estimatedCost": {
"callsMade": max(0, recent_queries - cache_hits),
"callsSaved": cache_hits,
"savings": f"${cache_hits * 0.001:.2f}", # Estimated
},
}
else:
# Return empty stats if no cache model
stats = {
"timeWindow": "24h",
"totalSearches": 0,
"cacheHits": 0,
"cacheMisses": 0,
"apiCalls": 0,
"errors": 0,
"cacheHitRate": 0,
"avgResponseTime": 0,
"totalCachedQueries": 0,
"totalCacheAccesses": 0,
"hourlyData": [],
"apiCallsSaved": 0,
"estimatedCost": {
"callsMade": 0,
"callsSaved": 0,
"savings": "$0.00",
},
}
# Cache for 5 minutes
cache.set("osm_usage_stats", stats, 300)
return Response(stats)
except Exception as e:
capture_and_log(e, "OSM usage stats - error", source="api")
return Response(
{"detail": "Failed to fetch OSM usage stats"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
class RateLimitMetricsView(APIView):
"""
POST /admin/rate-limit-metrics/
Return rate limiting metrics for admin dashboard.
"""
permission_classes = [IsAdminUser]
def post(self, request):
"""Return rate limit metrics based on action."""
try:
action = request.data.get("action", "stats")
time_window = request.data.get("timeWindow", 60000) # ms
limit = request.data.get("limit", 100)
# Convert time_window from ms to seconds
time_window_seconds = time_window / 1000 if time_window else 60
cutoff = timezone.now() - timedelta(seconds=time_window_seconds)
if action == "stats":
# Return aggregate statistics
# In a real implementation, you'd query a rate limit log table
stats = {
"totalRequests": 0,
"allowedRequests": 0,
"blockedRequests": 0,
"blockRate": 0,
"uniqueIPs": 0,
"uniqueUsers": 0,
"topBlockedIPs": [],
"topBlockedUsers": [],
"tierDistribution": {
"anonymous": 0,
"authenticated": 0,
"premium": 0,
"admin": 0,
},
}
return Response(stats)
elif action == "recent":
# Return recent rate limit events
return Response([])
elif action == "function":
# Return metrics for a specific function
function_name = request.data.get("functionName", "")
return Response([])
elif action == "user":
# Return metrics for a specific user
user_id = request.data.get("userId", "")
return Response([])
elif action == "ip":
# Return metrics for a specific IP
client_ip = request.data.get("clientIP", "")
return Response([])
return Response(
{"detail": f"Unknown action: {action}"},
status=status.HTTP_400_BAD_REQUEST,
)
except Exception as e:
capture_and_log(e, "Rate limit metrics - error", source="api")
return Response(
{"detail": "Failed to fetch rate limit metrics"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
class DatabaseManagerView(APIView):
"""
POST /admin/database-manager/
Handle admin CRUD operations for entities.
"""
permission_classes = [IsAdminUser]
# Map entity types to Django models
ENTITY_MODEL_MAP = {
"parks": ("parks", "Park"),
"rides": ("rides", "Ride"),
"companies": ("companies", "Company"),
"reviews": ("reviews", "Review"),
"blog_posts": ("blog", "BlogPost"),
"photos": ("media", "Photo"),
"lists": ("lists", "List"),
"profiles": ("accounts", "UserProfile"),
}
def post(self, request):
"""Dispatch to appropriate handler based on operation."""
try:
operation = request.data.get("operation")
entity_type = request.data.get("entityType")
entity_id = request.data.get("entityId")
data = request.data.get("data", {})
change_reason = request.data.get("changeReason", "Admin operation")
if not operation:
return Response(
{"detail": "operation is required"},
status=status.HTTP_400_BAD_REQUEST,
)
if not entity_type:
return Response(
{"detail": "entityType is required"},
status=status.HTTP_400_BAD_REQUEST,
)
# Get the model class
model_info = self.ENTITY_MODEL_MAP.get(entity_type)
if not model_info:
return Response(
{"detail": f"Unknown entity type: {entity_type}"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
Model = apps.get_model(model_info[0], model_info[1])
except LookupError:
return Response(
{"detail": f"Model not found for {entity_type}"},
status=status.HTTP_400_BAD_REQUEST,
)
# Dispatch to handler
handlers = {
"create": self._handle_create,
"update": self._handle_update,
"delete": self._handle_delete,
"restore": self._handle_restore,
"permanent-delete": self._handle_permanent_delete,
"bulk-update-status": self._handle_bulk_update_status,
"bulk-delete": self._handle_bulk_delete,
"bulk-restore": self._handle_bulk_restore,
"bulk-permanent-delete": self._handle_bulk_permanent_delete,
"get-dependencies": self._handle_get_dependencies,
}
handler = handlers.get(operation)
if not handler:
return Response(
{"detail": f"Unknown operation: {operation}"},
status=status.HTTP_400_BAD_REQUEST,
)
return handler(Model, entity_type, entity_id, data, change_reason, request)
        except Exception as e:
            # Look the operation up again so logging works even if the
            # exception happened before the local variable was assigned.
            capture_and_log(
                e,
                f"Database manager - {request.data.get('operation', 'unknown')} error",
                source="api",
            )
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
def _handle_create(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Create a new entity."""
with transaction.atomic():
instance = Model.objects.create(**data)
return Response(
{
"success": True,
"data": {"id": str(instance.pk)},
"message": f"{entity_type} created successfully",
}
)
def _handle_update(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Update an existing entity."""
if not entity_id:
return Response(
{"detail": "entityId is required for update"},
status=status.HTTP_400_BAD_REQUEST,
)
with transaction.atomic():
try:
instance = Model.objects.get(pk=entity_id)
except Model.DoesNotExist:
return Response(
{"detail": f"{entity_type} not found"},
status=status.HTTP_404_NOT_FOUND,
)
for key, value in data.items():
if hasattr(instance, key):
setattr(instance, key, value)
instance.save()
return Response(
{
"success": True,
"data": {"id": str(instance.pk)},
"message": f"{entity_type} updated successfully",
}
)
def _handle_delete(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Soft delete an entity (set status to deleted)."""
if not entity_id:
return Response(
{"detail": "entityId is required for delete"},
status=status.HTTP_400_BAD_REQUEST,
)
with transaction.atomic():
try:
instance = Model.objects.get(pk=entity_id)
except Model.DoesNotExist:
return Response(
{"detail": f"{entity_type} not found"},
status=status.HTTP_404_NOT_FOUND,
)
# Try soft delete first (set status)
if hasattr(instance, "status"):
instance.status = "deleted"
instance.save()
elif hasattr(instance, "is_deleted"):
instance.is_deleted = True
instance.save()
elif hasattr(instance, "deleted_at"):
instance.deleted_at = timezone.now()
instance.save()
else:
# Hard delete if no soft delete field
instance.delete()
return Response(
{
"success": True,
"data": {"id": str(entity_id)},
"message": f"{entity_type} deleted successfully",
}
)
def _handle_restore(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Restore a soft-deleted entity."""
if not entity_id:
return Response(
{"detail": "entityId is required for restore"},
status=status.HTTP_400_BAD_REQUEST,
)
new_status = data.get("status", "draft")
        with transaction.atomic():
            try:
                # Try to get even deleted entities; fall back when the model
                # has no all_objects manager.
                try:
                    instance = Model.all_objects.get(pk=entity_id)
                except AttributeError:
                    instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )
if hasattr(instance, "status"):
instance.status = new_status
instance.save()
elif hasattr(instance, "is_deleted"):
instance.is_deleted = False
instance.save()
elif hasattr(instance, "deleted_at"):
instance.deleted_at = None
instance.save()
return Response(
{
"success": True,
"data": {"id": str(entity_id)},
"message": f"{entity_type} restored successfully",
}
)
def _handle_permanent_delete(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Permanently delete an entity."""
if not entity_id:
return Response(
{"detail": "entityId is required for permanent-delete"},
status=status.HTTP_400_BAD_REQUEST,
)
with transaction.atomic():
try:
# Try to get even deleted entities
try:
instance = Model.all_objects.get(pk=entity_id)
except AttributeError:
instance = Model.objects.get(pk=entity_id)
except Model.DoesNotExist:
return Response(
{"detail": f"{entity_type} not found"},
status=status.HTTP_404_NOT_FOUND,
)
instance.delete()
return Response(
{
"success": True,
"data": {"id": str(entity_id)},
"message": f"{entity_type} permanently deleted",
}
)
def _handle_bulk_update_status(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Bulk update status of multiple entities."""
entity_ids = data.get("entityIds", [])
new_status = data.get("status")
if not entity_ids:
return Response(
{"detail": "entityIds is required"},
status=status.HTTP_400_BAD_REQUEST,
)
if not new_status:
return Response(
{"detail": "status is required"},
status=status.HTTP_400_BAD_REQUEST,
)
with transaction.atomic():
updated = Model.objects.filter(pk__in=entity_ids).update(status=new_status)
return Response(
{
"success": True,
"bulk": {
"successCount": updated,
"failedCount": len(entity_ids) - updated,
},
"message": f"Updated {updated} {entity_type}",
}
)
def _handle_bulk_delete(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Bulk soft delete multiple entities."""
entity_ids = data.get("entityIds", [])
if not entity_ids:
return Response(
{"detail": "entityIds is required"},
status=status.HTTP_400_BAD_REQUEST,
)
with transaction.atomic():
if hasattr(Model, "status"):
updated = Model.objects.filter(pk__in=entity_ids).update(
status="deleted"
)
else:
updated = Model.objects.filter(pk__in=entity_ids).update(
is_deleted=True
)
return Response(
{
"success": True,
"bulk": {
"successCount": updated,
"failedCount": len(entity_ids) - updated,
},
"message": f"Deleted {updated} {entity_type}",
}
)
def _handle_bulk_restore(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Bulk restore soft-deleted entities."""
entity_ids = data.get("entityIds", [])
new_status = data.get("status", "draft")
if not entity_ids:
return Response(
{"detail": "entityIds is required"},
status=status.HTTP_400_BAD_REQUEST,
)
with transaction.atomic():
try:
updated = Model.all_objects.filter(pk__in=entity_ids).update(
status=new_status
)
except AttributeError:
updated = Model.objects.filter(pk__in=entity_ids).update(
status=new_status
)
return Response(
{
"success": True,
"bulk": {
"successCount": updated,
"failedCount": len(entity_ids) - updated,
},
"message": f"Restored {updated} {entity_type}",
}
)
def _handle_bulk_permanent_delete(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Bulk permanently delete entities."""
entity_ids = data.get("entityIds", [])
if not entity_ids:
return Response(
{"detail": "entityIds is required"},
status=status.HTTP_400_BAD_REQUEST,
)
        with transaction.atomic():
            try:
                queryset = Model.all_objects.filter(pk__in=entity_ids)
            except AttributeError:
                queryset = Model.objects.filter(pk__in=entity_ids)
            # delete() returns the total row count including cascaded related
            # objects, so count the matched entities before deleting.
            deleted = queryset.count()
            queryset.delete()
        return Response(
            {
                "success": True,
                "bulk": {
                    "successCount": deleted,
                    "failedCount": len(entity_ids) - deleted,
                },
                "message": f"Permanently deleted {deleted} {entity_type}",
            }
        )
def _handle_get_dependencies(
self, Model, entity_type, entity_id, data, change_reason, request
):
"""Get dependencies for an entity before deletion."""
if not entity_id:
return Response(
{"detail": "entityId is required"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
instance = Model.objects.get(pk=entity_id)
except Model.DoesNotExist:
return Response(
{"detail": f"{entity_type} not found"},
status=status.HTTP_404_NOT_FOUND,
)
# Get related objects count
dependencies = []
for rel in instance._meta.get_fields():
if rel.one_to_many or rel.one_to_one or rel.many_to_many:
try:
related_name = rel.get_accessor_name()
related_manager = getattr(instance, related_name, None)
if related_manager and hasattr(related_manager, "count"):
count = related_manager.count()
if count > 0:
dependencies.append(
{
"type": rel.related_model._meta.verbose_name_plural,
"count": count,
}
)
except Exception:
pass
return Response(
{
"success": True,
"dependencies": dependencies,
"hasDependencies": len(dependencies) > 0,
}
)
class CeleryTaskStatusView(APIView):
"""
GET /admin/tasks/status/
Return Celery task status (read-only).
"""
permission_classes = [IsAdminUser]
# List of known scheduled tasks
SCHEDULED_TASKS = [
{
"name": "process_scheduled_deletions",
"display_name": "Process Scheduled Deletions",
"schedule": "daily at midnight",
},
{
"name": "process_closing_entities",
"display_name": "Process Closing Entities",
"schedule": "daily at midnight",
},
{
"name": "process_expired_bans",
"display_name": "Process Expired Bans",
"schedule": "every 15 minutes",
},
{
"name": "cleanup_orphaned_images",
"display_name": "Cleanup Orphaned Images",
"schedule": "weekly on Sunday",
},
{
"name": "cleanup_old_versions",
"display_name": "Cleanup Old Versions",
"schedule": "weekly on Sunday",
},
{
"name": "data_retention_cleanup",
"display_name": "Data Retention Cleanup",
"schedule": "daily at 3 AM",
},
]
def get(self, request):
"""Return status of all scheduled tasks."""
try:
task_name = request.query_params.get("task")
tasks_status = []
for task_info in self.SCHEDULED_TASKS:
# Get last run info from cache
cache_key = f"task_last_run_{task_info['name']}"
last_run_info = cache.get(cache_key, {})
task_status = {
"name": task_info["name"],
"displayName": task_info["display_name"],
"schedule": task_info["schedule"],
"lastRun": last_run_info.get("timestamp"),
"lastResult": last_run_info.get("result", "unknown"),
"lastDuration": last_run_info.get("duration"),
"status": "scheduled",
}
if task_name and task_name == task_info["name"]:
return Response(task_status)
tasks_status.append(task_status)
if task_name:
return Response(
{"detail": f"Unknown task: {task_name}"},
status=status.HTTP_404_NOT_FOUND,
)
return Response(
{
"tasks": tasks_status,
"totalTasks": len(tasks_status),
}
)
except Exception as e:
capture_and_log(e, "Celery task status - error", source="api")
return Response(
{"detail": "Failed to fetch task status"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
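
To make the dispatch contract above concrete, a hedged example of request bodies for the database-manager endpoint, plus a sketch of how a Celery task could populate the task_last_run_<name> cache keys the status view reads; the entity ids and the record_task_run helper are illustrative assumptions, not part of this module:

from django.core.cache import cache
from django.utils import timezone

# Example POST bodies for /admin/database-manager/ (ids are hypothetical).
update_payload = {
    "operation": "update",
    "entityType": "parks",
    "entityId": "1234",
    "data": {"name": "Corrected Park Name", "status": "published"},
    "changeReason": "Fix typo in park name",
}
bulk_delete_payload = {
    "operation": "bulk-delete",
    "entityType": "reviews",
    "data": {"entityIds": ["10", "11", "12"]},
    "changeReason": "Spam cleanup",
}

# Sketch of the cache convention CeleryTaskStatusView reads: a task records
# its own outcome under task_last_run_<task name> after each run.
def record_task_run(task_name: str, result: str, duration_seconds: float) -> None:
    cache.set(
        f"task_last_run_{task_name}",
        {
            "timestamp": timezone.now().isoformat(),
            "result": result,
            "duration": duration_seconds,
        },
        60 * 60 * 24 * 7,  # keep the latest run visible for a week
    )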

View File

@@ -38,8 +38,7 @@ urlpatterns = [
path("user/", CurrentUserAPIView.as_view(), name="auth-current-user"),
# JWT token management
path("token/refresh/", TokenRefreshView.as_view(), name="auth-token-refresh"),
# Social authentication endpoints (dj-rest-auth)
path("social/", include("dj_rest_auth.registration.urls")),
# Note: dj_rest_auth removed - using custom social auth views below
path(
"password/reset/",
PasswordResetAPIView.as_view(),

View File

@@ -1,7 +1,11 @@
from django.urls import path
from .views import GenerateUploadURLView
from . import views
app_name = "images"
urlpatterns = [
path("generate-upload-url/", GenerateUploadURLView.as_view(), name="generate-upload-url"),
path("generate-upload-url/", views.GenerateUploadURLView.as_view(), name="generate_upload_url"),
path("delete/", views.DeleteImageView.as_view(), name="delete_image"),
path("og-image/", views.GenerateOGImageView.as_view(), name="og_image"),
]
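
Note that the upload route's name changes from generate-upload-url to generate_upload_url, so any reverse() lookups must switch to the new names; a minimal sketch, assuming the include uses the "images" namespace implied by app_name above:

from django.urls import reverse

# The old name ("images:generate-upload-url") no longer resolves after this change.
upload_url = reverse("images:generate_upload_url")
delete_url = reverse("images:delete_image")
og_url = reverse("images:og_image")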

View File

@@ -1,6 +1,7 @@
import logging
import requests
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
@@ -30,3 +31,109 @@ class GenerateUploadURLView(APIView):
except Exception as e:
capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
class DeleteImageView(APIView):
"""
POST /images/delete/
Delete an image from Cloudflare Images.
"""
permission_classes = [IsAuthenticated]
def post(self, request):
image_id = request.data.get("image_id")
if not image_id:
return Response(
{"detail": "image_id is required"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
# Get Cloudflare credentials
account_id = getattr(settings, "CLOUDFLARE_IMAGES_ACCOUNT_ID", None)
api_token = getattr(settings, "CLOUDFLARE_IMAGES_API_TOKEN", None)
if not account_id or not api_token:
logger.warning("Cloudflare Images not configured, mock deleting image")
return Response({"success": True, "mock": True})
# Delete from Cloudflare
url = f"https://api.cloudflare.com/client/v4/accounts/{account_id}/images/v1/{image_id}"
response = requests.delete(
url,
headers={"Authorization": f"Bearer {api_token}"},
timeout=10,
)
if response.status_code in (200, 404): # 404 = already deleted
return Response({"success": True})
else:
logger.error(f"Cloudflare delete failed: {response.text}")
return Response(
{"detail": "Failed to delete image"},
status=status.HTTP_502_BAD_GATEWAY,
)
except requests.RequestException as e:
capture_and_log(e, "Delete image - Cloudflare API error", source="api")
return Response(
{"detail": "Failed to delete image"},
status=status.HTTP_502_BAD_GATEWAY,
)
except Exception as e:
capture_and_log(e, "Delete image - unexpected error", source="api")
return Response(
{"detail": "An unexpected error occurred"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
class GenerateOGImageView(APIView):
"""
POST /images/og-image/
Generate an Open Graph image for social sharing.
"""
permission_classes = [] # Public endpoint
def post(self, request):
title = request.data.get("title", "")
description = request.data.get("description", "")
entity_type = request.data.get("entity_type", "")
image_url = request.data.get("image_url", "")
if not title:
return Response(
{"detail": "title is required"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
# This is a placeholder for OG image generation
# In production, you would:
# 1. Use an image generation service (Cloudinary, imgix, etc.)
# 2. Or use a headless browser service (Puppeteer, Playwright)
# 3. Or use a dedicated OG image service
            # For now, return a template URL or placeholder; URL-encode the
            # title so spaces and special characters survive the query string.
            from urllib.parse import quote

            base_url = getattr(settings, "SITE_URL", "https://thrillwiki.com")
            og_image_url = f"{base_url}/api/v1/images/og-preview/?title={quote(title[:100])}"
return Response({
"success": True,
"og_image_url": og_image_url,
"title": title,
"description": description[:200] if description else "",
"entity_type": entity_type,
"note": "Placeholder - configure OG image service for production",
})
except Exception as e:
capture_and_log(e, "Generate OG image", source="api")
return Response(
{"detail": str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
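
A short client-side sketch of the two new image endpoints, assuming the app is mounted under /api/v1/images/ as in the project urls.py hunk below; host, token, and image id are placeholders:

import requests

API = "https://thrillwiki.example/api/v1/images"  # assumed mount point

# Authenticated delete of a previously uploaded Cloudflare image.
requests.post(
    f"{API}/delete/",
    json={"image_id": "00000000000000000000000000000000"},  # placeholder id
    headers={"Authorization": "Bearer <jwt>"},
    timeout=10,
)

# Public OG-image placeholder; returns a templated og_image_url for now.
resp = requests.post(
    f"{API}/og-image/",
    json={"title": "Steel Vengeance", "entity_type": "rides"},
    timeout=10,
)
print(resp.json().get("og_image_url"))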

View File

@@ -30,4 +30,7 @@ urlpatterns = [
views.MapCacheAPIView.as_view(),
name="map_cache_invalidate",
),
# Location detection and enrichment
path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
]

View File

@@ -999,3 +999,245 @@ MapSearchView = MapSearchAPIView
MapBoundsView = MapBoundsAPIView
MapStatsView = MapStatsAPIView
MapCacheView = MapCacheAPIView
# =============================================================================
# Location Detection / Enrichment Endpoints
# =============================================================================
@extend_schema_view(
post=extend_schema(
summary="Detect user location from IP",
description="Detect the user's approximate location based on their IP address.",
request={
"application/json": {
"type": "object",
"properties": {
"ip_address": {
"type": "string",
"description": "IP address to geolocate. If not provided, uses request IP.",
}
},
}
},
responses={
200: {
"type": "object",
"properties": {
"latitude": {"type": "number"},
"longitude": {"type": "number"},
"city": {"type": "string"},
"region": {"type": "string"},
"country": {"type": "string"},
"timezone": {"type": "string"},
},
}
},
tags=["Maps"],
),
)
class DetectLocationView(APIView):
"""
POST /maps/detect-location/
Detect user's location based on IP address using a geolocation service.
"""
permission_classes = [AllowAny]
def post(self, request):
try:
# Get IP address from request or payload
ip_address = request.data.get("ip_address")
if not ip_address:
# Get client IP from request
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip_address = x_forwarded_for.split(",")[0].strip()
else:
ip_address = request.META.get("REMOTE_ADDR", "")
# For localhost/development, return a default location
if ip_address in ("127.0.0.1", "::1", "localhost") or ip_address.startswith("192.168."):
return Response(
{
"latitude": 40.7128,
"longitude": -74.006,
"city": "New York",
"region": "New York",
"country": "US",
"country_name": "United States",
"timezone": "America/New_York",
"detected": False,
"reason": "localhost_fallback",
}
)
# Use IP geolocation service (ipapi.co, ipinfo.io, etc.)
import httpx
try:
response = httpx.get(
f"https://ipapi.co/{ip_address}/json/",
timeout=5.0,
headers={"User-Agent": "ThrillWiki/1.0"},
)
if response.status_code == 200:
data = response.json()
return Response(
{
"latitude": data.get("latitude"),
"longitude": data.get("longitude"),
"city": data.get("city", ""),
"region": data.get("region", ""),
"country": data.get("country_code", ""),
"country_name": data.get("country_name", ""),
"timezone": data.get("timezone", ""),
"detected": True,
}
)
except httpx.HTTPError as e:
logger.warning(f"IP geolocation failed: {e}")
# Fallback response
return Response(
{
"latitude": None,
"longitude": None,
"city": "",
"region": "",
"country": "",
"country_name": "",
"timezone": "",
"detected": False,
"reason": "geolocation_failed",
}
)
except Exception as e:
capture_and_log(e, "Detect location from IP", source="api")
return Response(
{"detail": str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@extend_schema_view(
post=extend_schema(
summary="Enrich location with geocoding",
description="Enrich location data with reverse geocoding (coordinates to address).",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "latitude": {"type": "number"},
                    "longitude": {"type": "number"},
                },
                "required": ["latitude", "longitude"],
            }
        },
responses={
200: {
"type": "object",
"properties": {
"formatted_address": {"type": "string"},
"street_address": {"type": "string"},
"city": {"type": "string"},
"state": {"type": "string"},
"postal_code": {"type": "string"},
"country": {"type": "string"},
},
}
},
tags=["Maps"],
),
)
class EnrichLocationView(APIView):
"""
POST /maps/enrich-location/
Enrich location with reverse geocoding (coordinates to address).
"""
permission_classes = [AllowAny]
def post(self, request):
try:
latitude = request.data.get("latitude")
longitude = request.data.get("longitude")
if latitude is None or longitude is None:
return Response(
{"detail": "latitude and longitude are required"},
status=status.HTTP_400_BAD_REQUEST,
)
try:
lat = float(latitude)
lng = float(longitude)
except (TypeError, ValueError):
return Response(
{"detail": "Invalid latitude or longitude"},
status=status.HTTP_400_BAD_REQUEST,
)
# Use reverse geocoding service
import httpx
try:
# Using Nominatim (OpenStreetMap) - free, no API key required
response = httpx.get(
"https://nominatim.openstreetmap.org/reverse",
params={
"lat": lat,
"lon": lng,
"format": "json",
"addressdetails": 1,
},
timeout=5.0,
headers={"User-Agent": "ThrillWiki/1.0"},
)
if response.status_code == 200:
data = response.json()
address = data.get("address", {})
return Response(
{
"formatted_address": data.get("display_name", ""),
"street_address": address.get("road", ""),
"house_number": address.get("house_number", ""),
"city": (
address.get("city")
or address.get("town")
or address.get("village")
or ""
),
"state": address.get("state", ""),
"postal_code": address.get("postcode", ""),
"country": address.get("country", ""),
"country_code": address.get("country_code", "").upper(),
"enriched": True,
}
)
except httpx.HTTPError as e:
logger.warning(f"Reverse geocoding failed: {e}")
# Fallback response
return Response(
{
"formatted_address": "",
"street_address": "",
"city": "",
"state": "",
"postal_code": "",
"country": "",
"country_code": "",
"enriched": False,
"reason": "geocoding_failed",
}
)
except Exception as e:
capture_and_log(e, "Enrich location", source="api")
return Response(
{"detail": str(e)},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
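
For completeness, a usage sketch of the two location endpoints, assuming the maps app is served under /api/v1/maps/; the host is a placeholder and chaining the two calls is illustrative:

import requests

MAPS = "https://thrillwiki.example/api/v1/maps"  # assumed mount point

# IP detection: omit ip_address and the server falls back to the request IP.
loc = requests.post(f"{MAPS}/detect-location/", json={}, timeout=10).json()

# Reverse-geocode the detected coordinates; the endpoint degrades gracefully
# when Nominatim is unreachable.
if loc.get("latitude") is not None:
    addr = requests.post(
        f"{MAPS}/enrich-location/",
        json={"latitude": loc["latitude"], "longitude": loc["longitude"]},
        timeout=10,
    ).json()
    print(addr.get("formatted_address", ""))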

View File

@@ -106,8 +106,11 @@ urlpatterns = [
path("media/", include("apps.media.urls")),
path("blog/", include("apps.blog.urls")),
path("support/", include("apps.support.urls")),
path("notifications/", include("apps.notifications.urls")),
path("errors/", include("apps.core.urls.errors")),
path("images/", include("apps.api.v1.images.urls")),
# Admin dashboard API endpoints
path("admin/", include("apps.api.v1.admin.urls")),
# Cloudflare Images Toolkit API endpoints
path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
# Include router URLs (for rankings and any other router-registered endpoints)