Removed VueJS frontend and dramatically enhanced API

Author: pacnpal
Date: 2025-08-28 14:01:28 -04:00
parent 08a4a2d034
commit 715e284b3e
123 changed files with 4056 additions and 26546 deletions

View File

@@ -4,8 +4,14 @@ Migrated from apps.core.views.map_views
"""
import logging
from typing import Dict, List, Any, Optional
from django.http import HttpRequest
from django.db.models import Q
from django.core.cache import cache
from django.contrib.gis.geos import Polygon
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.geos import Point
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
@@ -13,6 +19,16 @@ from rest_framework.permissions import AllowAny
from drf_spectacular.utils import extend_schema, extend_schema_view, OpenApiParameter
from drf_spectacular.types import OpenApiTypes
from apps.parks.models import Park, ParkLocation
from apps.rides.models import Ride
from ..serializers.maps import (
MapLocationSerializer,
MapLocationsResponseSerializer,
MapSearchResultSerializer,
MapSearchResponseSerializer,
MapLocationDetailSerializer,
)
logger = logging.getLogger(__name__)
@@ -26,59 +42,71 @@ logger = logging.getLogger(__name__)
type=OpenApiTypes.NUMBER,
location=OpenApiParameter.QUERY,
required=False,
description="Northern latitude bound",
description="Northern latitude bound (-90 to 90). Used with south, east, west to define geographic bounds.",
examples=[41.5],
),
OpenApiParameter(
"south",
type=OpenApiTypes.NUMBER,
location=OpenApiParameter.QUERY,
required=False,
description="Southern latitude bound",
description="Southern latitude bound (-90 to 90). Must be less than north bound.",
examples=[41.4],
),
OpenApiParameter(
"east",
type=OpenApiTypes.NUMBER,
location=OpenApiParameter.QUERY,
required=False,
description="Eastern longitude bound",
description="Eastern longitude bound (-180 to 180). Must be greater than west bound.",
examples=[-82.6],
),
OpenApiParameter(
"west",
type=OpenApiTypes.NUMBER,
location=OpenApiParameter.QUERY,
required=False,
description="Western longitude bound",
description="Western longitude bound (-180 to 180). Used with other bounds for geographic filtering.",
examples=[-82.8],
),
OpenApiParameter(
"zoom",
type=OpenApiTypes.INT,
location=OpenApiParameter.QUERY,
required=False,
description="Map zoom level",
description="Map zoom level (1-20). Higher values show more detail. Used for clustering decisions.",
examples=[10],
),
OpenApiParameter(
"types",
type=OpenApiTypes.STR,
location=OpenApiParameter.QUERY,
required=False,
description="Comma-separated location types",
description="Comma-separated location types to include. Valid values: 'park', 'ride'. Default: 'park,ride'",
examples=["park,ride", "park", "ride"],
),
OpenApiParameter(
"cluster",
type=OpenApiTypes.BOOL,
location=OpenApiParameter.QUERY,
required=False,
description="Enable clustering",
description="Enable location clustering for high-density areas. Default: false",
examples=[True, False],
),
OpenApiParameter(
"q",
type=OpenApiTypes.STR,
location=OpenApiParameter.QUERY,
required=False,
description="Text query",
description="Text search query. Searches park/ride names, cities, and states.",
examples=["Cedar Point", "roller coaster", "Ohio"],
),
],
responses={200: OpenApiTypes.OBJECT},
responses={
200: MapLocationsResponseSerializer,
400: OpenApiTypes.OBJECT,
500: OpenApiTypes.OBJECT,
},
tags=["Maps"],
),
)
@@ -90,15 +118,151 @@ class MapLocationsAPIView(APIView):
def get(self, request: HttpRequest) -> Response:
"""Get map locations with optional clustering and filtering."""
try:
# Simple implementation to fix import error
# TODO: Implement full functionality
return Response(
{
"status": "success",
"message": "Map locations endpoint - implementation needed",
"data": [],
}
)
# Parse query parameters
north = request.GET.get("north")
south = request.GET.get("south")
east = request.GET.get("east")
west = request.GET.get("west")
zoom = request.GET.get("zoom", 10)
types = request.GET.get("types", "park,ride").split(",")
cluster = request.GET.get("cluster", "false").lower() == "true"
query = request.GET.get("q", "").strip()
# Build cache key
cache_key = f"map_locations_{north}_{south}_{east}_{west}_{zoom}_{','.join(types)}_{cluster}_{query}"
cached_result = cache.get(cache_key)
if cached_result:
return Response(cached_result)
locations = []
total_count = 0
# Get parks if requested
if "park" in types:
parks_query = Park.objects.select_related("location", "operator").filter(
location__point__isnull=False
)
# Apply bounds filtering
if all([north, south, east, west]):
try:
bounds_polygon = Polygon.from_bbox((
float(west), float(south), float(east), float(north)
))
parks_query = parks_query.filter(
location__point__within=bounds_polygon)
except (ValueError, TypeError):
pass
# Apply text search
if query:
parks_query = parks_query.filter(
Q(name__icontains=query) |
Q(location__city__icontains=query) |
Q(location__state__icontains=query)
)
# Serialize parks
for park in parks_query[:100]: # Limit results
park_data = {
"id": park.id,
"type": "park",
"name": park.name,
"slug": park.slug,
"latitude": park.location.latitude if hasattr(park, 'location') and park.location else None,
"longitude": park.location.longitude if hasattr(park, 'location') and park.location else None,
"status": park.status,
"location": {
"city": park.location.city if hasattr(park, 'location') and park.location else "",
"state": park.location.state if hasattr(park, 'location') and park.location else "",
"country": park.location.country if hasattr(park, 'location') and park.location else "",
"formatted_address": park.location.formatted_address if hasattr(park, 'location') and park.location else "",
},
"stats": {
"coaster_count": park.coaster_count or 0,
"ride_count": park.ride_count or 0,
"average_rating": float(park.average_rating) if park.average_rating else None,
},
}
locations.append(park_data)
# Get rides if requested
if "ride" in types:
rides_query = Ride.objects.select_related("park__location", "manufacturer").filter(
park__location__point__isnull=False
)
# Apply bounds filtering
if all([north, south, east, west]):
try:
bounds_polygon = Polygon.from_bbox((
float(west), float(south), float(east), float(north)
))
rides_query = rides_query.filter(
park__location__point__within=bounds_polygon)
except (ValueError, TypeError):
pass
# Apply text search
if query:
rides_query = rides_query.filter(
Q(name__icontains=query) |
Q(park__name__icontains=query) |
Q(park__location__city__icontains=query)
)
# Serialize rides
for ride in rides_query[:100]: # Limit results
ride_data = {
"id": ride.id,
"type": "ride",
"name": ride.name,
"slug": ride.slug,
"latitude": ride.park.location.latitude if hasattr(ride.park, 'location') and ride.park.location else None,
"longitude": ride.park.location.longitude if hasattr(ride.park, 'location') and ride.park.location else None,
"status": ride.status,
"location": {
"city": ride.park.location.city if hasattr(ride.park, 'location') and ride.park.location else "",
"state": ride.park.location.state if hasattr(ride.park, 'location') and ride.park.location else "",
"country": ride.park.location.country if hasattr(ride.park, 'location') and ride.park.location else "",
"formatted_address": ride.park.location.formatted_address if hasattr(ride.park, 'location') and ride.park.location else "",
},
"stats": {
"category": ride.get_category_display() if ride.category else None,
"average_rating": float(ride.average_rating) if ride.average_rating else None,
"park_name": ride.park.name,
},
}
locations.append(ride_data)
total_count = len(locations)
# Calculate bounds from results
bounds = {}
if locations:
lats = [loc["latitude"] for loc in locations if loc["latitude"]]
lngs = [loc["longitude"] for loc in locations if loc["longitude"]]
if lats and lngs:
bounds = {
"north": max(lats),
"south": min(lats),
"east": max(lngs),
"west": min(lngs),
}
result = {
"status": "success",
"locations": locations,
"clusters": [], # TODO: Implement clustering
"bounds": bounds,
"total_count": total_count,
"clustered": cluster,
}
# Cache result for 5 minutes
cache.set(cache_key, result, 300)
return Response(result)
except Exception as e:
logger.error(f"Error in MapLocationsAPIView: {str(e)}", exc_info=True)
@@ -128,7 +292,12 @@ class MapLocationsAPIView(APIView):
description="ID of the location",
),
],
responses={200: OpenApiTypes.OBJECT, 404: OpenApiTypes.OBJECT},
responses={
200: MapLocationDetailSerializer,
400: OpenApiTypes.OBJECT,
404: OpenApiTypes.OBJECT,
500: OpenApiTypes.OBJECT,
},
tags=["Maps"],
),
)
@@ -142,17 +311,90 @@ class MapLocationDetailAPIView(APIView):
) -> Response:
"""Get detailed information for a specific location."""
try:
# Simple implementation to fix import error
return Response(
{
"status": "success",
"message": f"Location detail for {location_type}/{location_id} - implementation needed",
"data": {
"location_type": location_type,
"location_id": location_id,
if location_type == "park":
try:
obj = Park.objects.select_related(
"location", "operator").get(id=location_id)
except Park.DoesNotExist:
return Response(
{"status": "error", "message": "Park not found"},
status=status.HTTP_404_NOT_FOUND,
)
elif location_type == "ride":
try:
obj = Ride.objects.select_related(
"park__location", "manufacturer").get(id=location_id)
except Ride.DoesNotExist:
return Response(
{"status": "error", "message": "Ride not found"},
status=status.HTTP_404_NOT_FOUND,
)
else:
return Response(
{"status": "error", "message": "Invalid location type"},
status=status.HTTP_400_BAD_REQUEST,
)
# Serialize the object
if location_type == "park":
data = {
"id": obj.id,
"type": "park",
"name": obj.name,
"slug": obj.slug,
"description": obj.description,
"latitude": obj.location.latitude if hasattr(obj, 'location') and obj.location else None,
"longitude": obj.location.longitude if hasattr(obj, 'location') and obj.location else None,
"status": obj.status,
"location": {
"street_address": obj.location.street_address if hasattr(obj, 'location') and obj.location else "",
"city": obj.location.city if hasattr(obj, 'location') and obj.location else "",
"state": obj.location.state if hasattr(obj, 'location') and obj.location else "",
"country": obj.location.country if hasattr(obj, 'location') and obj.location else "",
"postal_code": obj.location.postal_code if hasattr(obj, 'location') and obj.location else "",
"formatted_address": obj.location.formatted_address if hasattr(obj, 'location') and obj.location else "",
},
"stats": {
"coaster_count": obj.coaster_count or 0,
"ride_count": obj.ride_count or 0,
"average_rating": float(obj.average_rating) if obj.average_rating else None,
"size_acres": float(obj.size_acres) if obj.size_acres else None,
"opening_date": obj.opening_date.isoformat() if obj.opening_date else None,
},
"nearby_locations": [], # TODO: Implement nearby locations
}
)
else: # ride
data = {
"id": obj.id,
"type": "ride",
"name": obj.name,
"slug": obj.slug,
"description": obj.description,
"latitude": obj.park.location.latitude if hasattr(obj.park, 'location') and obj.park.location else None,
"longitude": obj.park.location.longitude if hasattr(obj.park, 'location') and obj.park.location else None,
"status": obj.status,
"location": {
"street_address": obj.park.location.street_address if hasattr(obj.park, 'location') and obj.park.location else "",
"city": obj.park.location.city if hasattr(obj.park, 'location') and obj.park.location else "",
"state": obj.park.location.state if hasattr(obj.park, 'location') and obj.park.location else "",
"country": obj.park.location.country if hasattr(obj.park, 'location') and obj.park.location else "",
"postal_code": obj.park.location.postal_code if hasattr(obj.park, 'location') and obj.park.location else "",
"formatted_address": obj.park.location.formatted_address if hasattr(obj.park, 'location') and obj.park.location else "",
},
"stats": {
"category": obj.get_category_display() if obj.category else None,
"average_rating": float(obj.average_rating) if obj.average_rating else None,
"park_name": obj.park.name,
"opening_date": obj.opening_date.isoformat() if obj.opening_date else None,
"manufacturer": obj.manufacturer.name if obj.manufacturer else None,
},
"nearby_locations": [], # TODO: Implement nearby locations
}
return Response({
"status": "success",
"data": data,
})
except Exception as e:
logger.error(f"Error in MapLocationDetailAPIView: {str(e)}", exc_info=True)
@@ -174,8 +416,33 @@ class MapLocationDetailAPIView(APIView):
required=True,
description="Search query",
),
OpenApiParameter(
"types",
type=OpenApiTypes.STR,
location=OpenApiParameter.QUERY,
required=False,
description="Comma-separated location types (park,ride)",
),
OpenApiParameter(
"page",
type=OpenApiTypes.INT,
location=OpenApiParameter.QUERY,
required=False,
description="Page number",
),
OpenApiParameter(
"page_size",
type=OpenApiTypes.INT,
location=OpenApiParameter.QUERY,
required=False,
description="Results per page",
),
],
responses={200: OpenApiTypes.OBJECT, 400: OpenApiTypes.OBJECT},
responses={
200: MapSearchResponseSerializer,
400: OpenApiTypes.OBJECT,
500: OpenApiTypes.OBJECT,
},
tags=["Maps"],
),
)
@@ -197,14 +464,76 @@ class MapSearchAPIView(APIView):
status=status.HTTP_400_BAD_REQUEST,
)
# Simple implementation to fix import error
return Response(
{
"status": "success",
"message": f"Search for '{query}' - implementation needed",
"data": [],
}
)
types = request.GET.get("types", "park,ride").split(",")
page = int(request.GET.get("page", 1))
page_size = min(int(request.GET.get("page_size", 20)), 100)
results = []
total_count = 0
# Search parks
if "park" in types:
parks_query = Park.objects.select_related("location").filter(
Q(name__icontains=query) |
Q(location__city__icontains=query) |
Q(location__state__icontains=query)
).filter(location__point__isnull=False)
for park in parks_query[:50]: # Limit results
results.append({
"id": park.id,
"type": "park",
"name": park.name,
"slug": park.slug,
"latitude": park.location.latitude if hasattr(park, 'location') and park.location else None,
"longitude": park.location.longitude if hasattr(park, 'location') and park.location else None,
"location": {
"city": park.location.city if hasattr(park, 'location') and park.location else "",
"state": park.location.state if hasattr(park, 'location') and park.location else "",
"country": park.location.country if hasattr(park, 'location') and park.location else "",
},
"relevance_score": 1.0, # TODO: Implement relevance scoring
})
# Search rides
if "ride" in types:
rides_query = Ride.objects.select_related("park__location").filter(
Q(name__icontains=query) |
Q(park__name__icontains=query) |
Q(park__location__city__icontains=query)
).filter(park__location__point__isnull=False)
for ride in rides_query[:50]: # Limit results
results.append({
"id": ride.id,
"type": "ride",
"name": ride.name,
"slug": ride.slug,
"latitude": ride.park.location.latitude if hasattr(ride.park, 'location') and ride.park.location else None,
"longitude": ride.park.location.longitude if hasattr(ride.park, 'location') and ride.park.location else None,
"location": {
"city": ride.park.location.city if hasattr(ride.park, 'location') and ride.park.location else "",
"state": ride.park.location.state if hasattr(ride.park, 'location') and ride.park.location else "",
"country": ride.park.location.country if hasattr(ride.park, 'location') and ride.park.location else "",
},
"relevance_score": 1.0, # TODO: Implement relevance scoring
})
total_count = len(results)
# Apply pagination
start_idx = (page - 1) * page_size
end_idx = start_idx + page_size
paginated_results = results[start_idx:end_idx]
return Response({
"status": "success",
"results": paginated_results,
"query": query,
"total_count": total_count,
"page": page,
"page_size": page_size,
})
except Exception as e:
logger.error(f"Error in MapSearchAPIView: {str(e)}", exc_info=True)
@@ -247,6 +576,13 @@ class MapSearchAPIView(APIView):
required=True,
description="Western longitude bound",
),
OpenApiParameter(
"types",
type=OpenApiTypes.STR,
location=OpenApiParameter.QUERY,
required=False,
description="Comma-separated location types (park,ride)",
),
],
responses={200: OpenApiTypes.OBJECT, 400: OpenApiTypes.OBJECT},
tags=["Maps"],
@@ -260,22 +596,87 @@ class MapBoundsAPIView(APIView):
def get(self, request: HttpRequest) -> Response:
"""Get locations within specific geographic bounds."""
try:
# Simple implementation to fix import error
return Response(
{
"status": "success",
"message": "Bounds query - implementation needed",
"data": [],
}
)
# Parse required bounds parameters
try:
north = float(request.GET.get("north"))
south = float(request.GET.get("south"))
east = float(request.GET.get("east"))
west = float(request.GET.get("west"))
except (TypeError, ValueError):
return Response(
{"status": "error", "message": "Invalid bounds parameters"},
status=status.HTTP_400_BAD_REQUEST,
)
# Validate bounds
if north <= south:
return Response(
{"status": "error", "message": "North bound must be greater than south bound"},
status=status.HTTP_400_BAD_REQUEST,
)
if west >= east:
return Response(
{"status": "error", "message": "West bound must be less than east bound"},
status=status.HTTP_400_BAD_REQUEST,
)
types = request.GET.get("types", "park,ride").split(",")
locations = []
# Create bounds polygon
bounds_polygon = Polygon.from_bbox((west, south, east, north))
# Get parks within bounds
if "park" in types:
parks_query = Park.objects.select_related("location").filter(
location__point__within=bounds_polygon
)
for park in parks_query[:100]: # Limit results
locations.append({
"id": park.id,
"type": "park",
"name": park.name,
"slug": park.slug,
"latitude": park.location.latitude if hasattr(park, 'location') and park.location else None,
"longitude": park.location.longitude if hasattr(park, 'location') and park.location else None,
"status": park.status,
})
# Get rides within bounds
if "ride" in types:
rides_query = Ride.objects.select_related("park__location").filter(
park__location__point__within=bounds_polygon
)
for ride in rides_query[:100]: # Limit results
locations.append({
"id": ride.id,
"type": "ride",
"name": ride.name,
"slug": ride.slug,
"latitude": ride.park.location.latitude if hasattr(ride.park, 'location') and ride.park.location else None,
"longitude": ride.park.location.longitude if hasattr(ride.park, 'location') and ride.park.location else None,
"status": ride.status,
})
return Response({
"status": "success",
"locations": locations,
"bounds": {
"north": north,
"south": south,
"east": east,
"west": west,
},
"total_count": len(locations),
})
except Exception as e:
logger.error(f"Error in MapBoundsAPIView: {str(e)}", exc_info=True)
return Response(
{
"status": "error",
"message": "Failed to retrieve locations within bounds",
},
{"status": "error", "message": "Failed to retrieve locations within bounds"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@@ -296,15 +697,26 @@ class MapStatsAPIView(APIView):
def get(self, request: HttpRequest) -> Response:
"""Get map service statistics and performance metrics."""
try:
# Simple implementation to fix import error
return Response(
{
"status": "success",
"data": {"total_locations": 0, "cache_hits": 0, "cache_misses": 0},
}
)
# Count locations with coordinates
parks_with_location = Park.objects.filter(
location__point__isnull=False).count()
rides_with_location = Ride.objects.filter(
park__location__point__isnull=False).count()
total_locations = parks_with_location + rides_with_location
return Response({
"status": "success",
"data": {
"total_locations": total_locations,
"parks_with_location": parks_with_location,
"rides_with_location": rides_with_location,
"cache_hits": 0, # TODO: Implement cache statistics
"cache_misses": 0, # TODO: Implement cache statistics
},
})
except Exception as e:
logger.error(f"Error in MapStatsAPIView: {str(e)}", exc_info=True)
return Response(
{"error": f"Internal server error: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -333,12 +745,21 @@ class MapCacheAPIView(APIView):
def delete(self, request: HttpRequest) -> Response:
"""Clear all map cache (admin only)."""
try:
# Simple implementation to fix import error
return Response(
{"status": "success", "message": "Map cache cleared successfully"}
)
# Clear all map-related cache keys
cache_keys = cache.keys("map_*")
if cache_keys:
cache.delete_many(cache_keys)
cleared_count = len(cache_keys)
else:
cleared_count = 0
return Response({
"status": "success",
"message": f"Map cache cleared successfully. Cleared {cleared_count} entries.",
})
except Exception as e:
logger.error(f"Error in MapCacheAPIView.delete: {str(e)}", exc_info=True)
return Response(
{"error": f"Internal server error: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -347,12 +768,21 @@ class MapCacheAPIView(APIView):
def post(self, request: HttpRequest) -> Response:
"""Invalidate specific cache entries."""
try:
# Simple implementation to fix import error
return Response(
{"status": "success", "message": "Cache invalidated successfully"}
)
# Get cache keys to invalidate from request data
cache_keys = request.data.get("cache_keys", [])
if cache_keys:
cache.delete_many(cache_keys)
invalidated_count = len(cache_keys)
else:
invalidated_count = 0
return Response({
"status": "success",
"message": f"Cache invalidated successfully. Invalidated {invalidated_count} entries.",
})
except Exception as e:
logger.error(f"Error in MapCacheAPIView.post: {str(e)}", exc_info=True)
return Response(
{"error": f"Internal server error: {str(e)}"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,

View File

@@ -146,9 +146,10 @@ def _import_accounts_symbols() -> Dict[str, Any]:
_accounts = _import_accounts_symbols()
# Bind account symbols into the module namespace (either actual objects or None)
# Bind account symbols into the module namespace (only if they exist)
for _name in _ACCOUNTS_SYMBOLS:
globals()[_name] = _accounts.get(_name)
if _accounts.get(_name) is not None:
globals()[_name] = _accounts[_name]
# --- Services domain ---
@@ -255,22 +256,79 @@ _SERVICES_EXPORTS = [
"DistanceCalculationOutputSerializer",
]
# Build __all__ from known exports plus any serializer-like names discovered above
__all__ = (
_SHARED_EXPORTS
+ _PARKS_EXPORTS
+ _COMPANIES_EXPORTS
+ _RIDES_EXPORTS
+ _SERVICES_EXPORTS
+ _ACCOUNTS_SYMBOLS
)
# Build a static __all__ list with only the serializers we know exist
__all__ = [
# Shared exports
"CATEGORY_CHOICES",
"ModelChoices",
"LocationOutputSerializer",
"CompanyOutputSerializer",
"UserModel",
# Parks exports
"ParkListOutputSerializer",
"ParkDetailOutputSerializer",
"ParkCreateInputSerializer",
"ParkUpdateInputSerializer",
"ParkFilterInputSerializer",
"ParkAreaDetailOutputSerializer",
"ParkAreaCreateInputSerializer",
"ParkAreaUpdateInputSerializer",
"ParkLocationOutputSerializer",
"ParkLocationCreateInputSerializer",
"ParkLocationUpdateInputSerializer",
"ParkSuggestionSerializer",
"ParkSuggestionOutputSerializer",
# Companies exports
"CompanyDetailOutputSerializer",
"CompanyCreateInputSerializer",
"CompanyUpdateInputSerializer",
"RideModelDetailOutputSerializer",
"RideModelCreateInputSerializer",
"RideModelUpdateInputSerializer",
# Rides exports
"RideParkOutputSerializer",
"RideModelOutputSerializer",
"RideListOutputSerializer",
"RideDetailOutputSerializer",
"RideCreateInputSerializer",
"RideUpdateInputSerializer",
"RideFilterInputSerializer",
"RollerCoasterStatsOutputSerializer",
"RollerCoasterStatsCreateInputSerializer",
"RollerCoasterStatsUpdateInputSerializer",
"RideLocationOutputSerializer",
"RideLocationCreateInputSerializer",
"RideLocationUpdateInputSerializer",
"RideReviewOutputSerializer",
"RideReviewCreateInputSerializer",
"RideReviewUpdateInputSerializer",
# Services exports
"HealthCheckOutputSerializer",
"PerformanceMetricsOutputSerializer",
"SimpleHealthOutputSerializer",
"EmailSendInputSerializer",
"EmailTemplateOutputSerializer",
"MapDataOutputSerializer",
"CoordinateInputSerializer",
"HistoryEventSerializer",
"HistoryEntryOutputSerializer",
"HistoryCreateInputSerializer",
"ModerationSubmissionSerializer",
"ModerationSubmissionOutputSerializer",
"RoadtripParkSerializer",
"RoadtripCreateInputSerializer",
"RoadtripOutputSerializer",
"GeocodeInputSerializer",
"GeocodeOutputSerializer",
"DistanceCalculationInputSerializer",
"DistanceCalculationOutputSerializer",
]
# Add any discovered globals that look like serializers (avoid duplicates)
for name in list(globals().keys()):
if name in __all__:
continue
if name.endswith(("Serializer", "OutputSerializer", "InputSerializer")):
# Add any accounts serializers that actually exist
for name in _ACCOUNTS_SYMBOLS:
if name in globals():
__all__.append(name)
# Ensure __all__ is a flat list of unique strings (preserve order)
__all__ = list(dict.fromkeys(__all__))
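
The closing dict.fromkeys call is the standard order-preserving de-duplication idiom; a tiny self-contained illustration:

# Order-preserving de-duplication, as used for __all__ above.
names = ["ParkListOutputSerializer", "RideListOutputSerializer", "ParkListOutputSerializer"]
unique_names = list(dict.fromkeys(names))
print(unique_names)  # ['ParkListOutputSerializer', 'RideListOutputSerializer']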

View File

@@ -0,0 +1,408 @@
"""
Maps domain serializers for ThrillWiki API v1.
This module contains all serializers related to map functionality,
including location data, search results, and clustering.
"""
from rest_framework import serializers
from drf_spectacular.utils import (
extend_schema_serializer,
extend_schema_field,
OpenApiExample,
)
# === MAP LOCATION SERIALIZERS ===
@extend_schema_serializer(
examples=[
OpenApiExample(
"Map Location Example",
summary="Example map location response",
description="A location point on the map",
value={
"id": 1,
"type": "park",
"name": "Cedar Point",
"slug": "cedar-point",
"latitude": 41.4793,
"longitude": -82.6833,
"status": "OPERATING",
"location": {
"city": "Sandusky",
"state": "Ohio",
"country": "United States",
},
"stats": {
"coaster_count": 17,
"ride_count": 70,
"average_rating": 4.5,
},
},
)
]
)
class MapLocationSerializer(serializers.Serializer):
"""Serializer for individual map locations (parks and rides)."""
id = serializers.IntegerField()
type = serializers.CharField() # 'park' or 'ride'
name = serializers.CharField()
slug = serializers.CharField()
latitude = serializers.FloatField(allow_null=True)
longitude = serializers.FloatField(allow_null=True)
status = serializers.CharField()
# Location details
location = serializers.SerializerMethodField()
# Statistics
stats = serializers.SerializerMethodField()
@extend_schema_field(serializers.DictField())
def get_location(self, obj) -> dict:
"""Get location information."""
if hasattr(obj, 'location') and obj.location:
return {
"city": obj.location.city,
"state": obj.location.state,
"country": obj.location.country,
"formatted_address": obj.location.formatted_address,
}
return {}
@extend_schema_field(serializers.DictField())
def get_stats(self, obj) -> dict:
"""Get relevant statistics based on object type."""
if obj._meta.model_name == 'park':
return {
"coaster_count": obj.coaster_count or 0,
"ride_count": obj.ride_count or 0,
"average_rating": float(obj.average_rating) if obj.average_rating else None,
}
elif obj._meta.model_name == 'ride':
return {
"category": obj.get_category_display() if obj.category else None,
"average_rating": float(obj.average_rating) if obj.average_rating else None,
"park_name": obj.park.name if obj.park else None,
}
return {}
@extend_schema_serializer(
examples=[
OpenApiExample(
"Map Cluster Example",
summary="Example map cluster response",
description="A cluster of locations on the map",
value={
"id": "cluster_1",
"type": "cluster",
"latitude": 41.5,
"longitude": -82.7,
"count": 5,
"bounds": {
"north": 41.6,
"south": 41.4,
"east": -82.6,
"west": -82.8,
},
},
)
]
)
class MapClusterSerializer(serializers.Serializer):
"""Serializer for map clusters."""
id = serializers.CharField()
type = serializers.CharField(default="cluster")
latitude = serializers.FloatField()
longitude = serializers.FloatField()
count = serializers.IntegerField()
bounds = serializers.DictField()
@extend_schema_serializer(
examples=[
OpenApiExample(
"Map Locations Response Example",
summary="Example map locations response",
description="Response containing locations and optional clusters",
value={
"status": "success",
"data": {
"locations": [
{
"id": 1,
"type": "park",
"name": "Cedar Point",
"slug": "cedar-point",
"latitude": 41.4793,
"longitude": -82.6833,
"status": "OPERATING",
}
],
"clusters": [],
"bounds": {
"north": 41.5,
"south": 41.4,
"east": -82.6,
"west": -82.8,
},
"total_count": 1,
"clustered": False,
},
},
)
]
)
class MapLocationsResponseSerializer(serializers.Serializer):
"""Response serializer for map locations endpoint."""
status = serializers.CharField(default="success")
locations = serializers.ListField(child=serializers.DictField())
clusters = serializers.ListField(child=serializers.DictField(), default=list)
bounds = serializers.DictField(default=dict)
total_count = serializers.IntegerField(default=0)
clustered = serializers.BooleanField(default=False)
# === MAP SEARCH SERIALIZERS ===
@extend_schema_serializer(
examples=[
OpenApiExample(
"Map Search Result Example",
summary="Example map search result",
description="A search result for map locations",
value={
"id": 1,
"type": "park",
"name": "Cedar Point",
"slug": "cedar-point",
"latitude": 41.4793,
"longitude": -82.6833,
"location": {
"city": "Sandusky",
"state": "Ohio",
"country": "United States",
},
"relevance_score": 0.95,
},
)
]
)
class MapSearchResultSerializer(serializers.Serializer):
"""Serializer for map search results."""
id = serializers.IntegerField()
type = serializers.CharField()
name = serializers.CharField()
slug = serializers.CharField()
latitude = serializers.FloatField(allow_null=True)
longitude = serializers.FloatField(allow_null=True)
location = serializers.SerializerMethodField()
relevance_score = serializers.FloatField(required=False)
@extend_schema_field(serializers.DictField())
def get_location(self, obj) -> dict:
"""Get location information."""
if hasattr(obj, 'location') and obj.location:
return {
"city": obj.location.city,
"state": obj.location.state,
"country": obj.location.country,
}
return {}
@extend_schema_serializer(
examples=[
OpenApiExample(
"Map Search Response Example",
summary="Example map search response",
description="Response containing search results",
value={
"status": "success",
"data": {
"results": [
{
"id": 1,
"type": "park",
"name": "Cedar Point",
"slug": "cedar-point",
"latitude": 41.4793,
"longitude": -82.6833,
}
],
"query": "cedar point",
"total_count": 1,
"page": 1,
"page_size": 20,
},
},
)
]
)
class MapSearchResponseSerializer(serializers.Serializer):
"""Response serializer for map search endpoint."""
status = serializers.CharField(default="success")
results = serializers.ListField(child=serializers.DictField())
query = serializers.CharField()
total_count = serializers.IntegerField(default=0)
page = serializers.IntegerField(default=1)
page_size = serializers.IntegerField(default=20)
# === MAP DETAIL SERIALIZERS ===
@extend_schema_serializer(
examples=[
OpenApiExample(
"Map Location Detail Example",
summary="Example map location detail response",
description="Detailed information about a specific location",
value={
"id": 1,
"type": "park",
"name": "Cedar Point",
"slug": "cedar-point",
"description": "America's Roller Coast",
"latitude": 41.4793,
"longitude": -82.6833,
"status": "OPERATING",
"location": {
"street_address": "1 Cedar Point Dr",
"city": "Sandusky",
"state": "Ohio",
"country": "United States",
"postal_code": "44870",
"formatted_address": "1 Cedar Point Dr, Sandusky, Ohio, 44870, United States",
},
"stats": {
"coaster_count": 17,
"ride_count": 70,
"average_rating": 4.5,
},
"nearby_locations": [],
},
)
]
)
class MapLocationDetailSerializer(serializers.Serializer):
"""Serializer for detailed map location information."""
id = serializers.IntegerField()
type = serializers.CharField()
name = serializers.CharField()
slug = serializers.CharField()
description = serializers.CharField()
latitude = serializers.FloatField(allow_null=True)
longitude = serializers.FloatField(allow_null=True)
status = serializers.CharField()
# Detailed location information
location = serializers.SerializerMethodField()
# Statistics
stats = serializers.SerializerMethodField()
# Nearby locations
nearby_locations = serializers.SerializerMethodField()
@extend_schema_field(serializers.DictField())
def get_location(self, obj) -> dict:
"""Get detailed location information."""
if hasattr(obj, 'location') and obj.location:
return {
"street_address": obj.location.street_address,
"city": obj.location.city,
"state": obj.location.state,
"country": obj.location.country,
"postal_code": obj.location.postal_code,
"formatted_address": obj.location.formatted_address,
}
return {}
@extend_schema_field(serializers.DictField())
def get_stats(self, obj) -> dict:
"""Get detailed statistics based on object type."""
if obj._meta.model_name == 'park':
return {
"coaster_count": obj.coaster_count or 0,
"ride_count": obj.ride_count or 0,
"average_rating": float(obj.average_rating) if obj.average_rating else None,
"size_acres": float(obj.size_acres) if obj.size_acres else None,
"opening_date": obj.opening_date.isoformat() if obj.opening_date else None,
}
elif obj._meta.model_name == 'ride':
return {
"category": obj.get_category_display() if obj.category else None,
"average_rating": float(obj.average_rating) if obj.average_rating else None,
"park_name": obj.park.name if obj.park else None,
"opening_date": obj.opening_date.isoformat() if obj.opening_date else None,
"manufacturer": obj.manufacturer.name if obj.manufacturer else None,
}
return {}
@extend_schema_field(serializers.ListField(child=serializers.DictField()))
def get_nearby_locations(self, obj) -> list:
"""Get nearby locations (placeholder for now)."""
# TODO: Implement nearby location logic
return []
# === INPUT SERIALIZERS ===
class MapBoundsInputSerializer(serializers.Serializer):
"""Input serializer for map bounds queries."""
north = serializers.FloatField(min_value=-90, max_value=90)
south = serializers.FloatField(min_value=-90, max_value=90)
east = serializers.FloatField(min_value=-180, max_value=180)
west = serializers.FloatField(min_value=-180, max_value=180)
def validate(self, attrs):
"""Validate that bounds make geographic sense."""
if attrs['north'] <= attrs['south']:
raise serializers.ValidationError(
"North bound must be greater than south bound")
# Handle longitude wraparound (e.g., crossing the international date line)
# For now, we'll require west < east for simplicity
if attrs['west'] >= attrs['east']:
raise serializers.ValidationError("West bound must be less than east bound")
return attrs
class MapSearchInputSerializer(serializers.Serializer):
"""Input serializer for map search queries."""
q = serializers.CharField(min_length=1, max_length=255)
types = serializers.CharField(required=False, allow_blank=True)
bounds = MapBoundsInputSerializer(required=False)
page = serializers.IntegerField(min_value=1, default=1)
page_size = serializers.IntegerField(min_value=1, max_value=100, default=20)
def validate_types(self, value):
"""Validate location types."""
if not value:
return []
valid_types = ['park', 'ride']
types = [t.strip().lower() for t in value.split(',')]
for location_type in types:
if location_type not in valid_types:
raise serializers.ValidationError(
f"Invalid location type: {location_type}. Valid types: {', '.join(valid_types)}"
)
return types
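
A brief sketch of validating query input with the serializers above, meant to run inside the project (for example via manage.py shell); the import path is an assumption based on the module docstring and is not shown in this diff:

# Hypothetical import path -- adjust to wherever this module actually lives.
from apps.api.v1.serializers.maps import MapBoundsInputSerializer, MapSearchInputSerializer

bounds = MapBoundsInputSerializer(
    data={"north": 41.5, "south": 41.4, "east": -82.6, "west": -82.8}
)
print(bounds.is_valid())        # True: north > south and west < east
print(dict(bounds.validated_data))

search = MapSearchInputSerializer(data={"q": "cedar", "types": "park,boat"})
print(search.is_valid())        # False: "boat" is not a valid location type
print(search.errors)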

View File

@@ -0,0 +1,155 @@
"""
Statistics serializers for ThrillWiki API.
Provides serialization for platform statistics data.
"""
from rest_framework import serializers
class StatsSerializer(serializers.Serializer):
"""
Serializer for platform statistics response.
This serializer defines the structure of the statistics API response,
including all the various counts and breakdowns available.
"""
# Core entity counts
total_parks = serializers.IntegerField(
help_text="Total number of parks in the database"
)
total_rides = serializers.IntegerField(
help_text="Total number of rides in the database"
)
total_manufacturers = serializers.IntegerField(
help_text="Total number of ride manufacturers"
)
total_operators = serializers.IntegerField(
help_text="Total number of park operators"
)
total_designers = serializers.IntegerField(
help_text="Total number of ride designers"
)
total_property_owners = serializers.IntegerField(
help_text="Total number of property owners"
)
total_roller_coasters = serializers.IntegerField(
help_text="Total number of roller coasters with detailed stats"
)
# Photo counts
total_photos = serializers.IntegerField(
help_text="Total number of photos (parks + rides combined)"
)
total_park_photos = serializers.IntegerField(
help_text="Total number of park photos"
)
total_ride_photos = serializers.IntegerField(
help_text="Total number of ride photos"
)
# Review counts
total_reviews = serializers.IntegerField(
help_text="Total number of reviews (parks + rides)"
)
total_park_reviews = serializers.IntegerField(
help_text="Total number of park reviews"
)
total_ride_reviews = serializers.IntegerField(
help_text="Total number of ride reviews"
)
# Ride category counts (optional fields since they depend on data)
roller_coasters = serializers.IntegerField(
required=False,
help_text="Number of rides categorized as roller coasters"
)
dark_rides = serializers.IntegerField(
required=False,
help_text="Number of rides categorized as dark rides"
)
flat_rides = serializers.IntegerField(
required=False,
help_text="Number of rides categorized as flat rides"
)
water_rides = serializers.IntegerField(
required=False,
help_text="Number of rides categorized as water rides"
)
transport_rides = serializers.IntegerField(
required=False,
help_text="Number of rides categorized as transport rides"
)
other_rides = serializers.IntegerField(
required=False,
help_text="Number of rides categorized as other"
)
# Park status counts (optional fields since they depend on data)
operating_parks = serializers.IntegerField(
required=False,
help_text="Number of currently operating parks"
)
temporarily_closed_parks = serializers.IntegerField(
required=False,
help_text="Number of temporarily closed parks"
)
permanently_closed_parks = serializers.IntegerField(
required=False,
help_text="Number of permanently closed parks"
)
under_construction_parks = serializers.IntegerField(
required=False,
help_text="Number of parks under construction"
)
demolished_parks = serializers.IntegerField(
required=False,
help_text="Number of demolished parks"
)
relocated_parks = serializers.IntegerField(
required=False,
help_text="Number of relocated parks"
)
# Ride status counts (optional fields since they depend on data)
operating_rides = serializers.IntegerField(
required=False,
help_text="Number of currently operating rides"
)
temporarily_closed_rides = serializers.IntegerField(
required=False,
help_text="Number of temporarily closed rides"
)
sbno_rides = serializers.IntegerField(
required=False,
help_text="Number of rides standing but not operating"
)
closing_rides = serializers.IntegerField(
required=False,
help_text="Number of rides in the process of closing"
)
permanently_closed_rides = serializers.IntegerField(
required=False,
help_text="Number of permanently closed rides"
)
under_construction_rides = serializers.IntegerField(
required=False,
help_text="Number of rides under construction"
)
demolished_rides = serializers.IntegerField(
required=False,
help_text="Number of demolished rides"
)
relocated_rides = serializers.IntegerField(
required=False,
help_text="Number of relocated rides"
)
# Metadata
last_updated = serializers.CharField(
help_text="ISO timestamp when these statistics were last calculated"
)
relative_last_updated = serializers.CharField(
help_text="Human-readable relative time since last update (e.g., '2 minutes ago')"
)
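
Because every field is declared explicitly, the serializer can both document the response shape and validate a stats payload; a short sketch (import path assumed, run inside the project):

# Hypothetical import path for illustration.
from apps.api.v1.serializers.stats import StatsSerializer

sample = {
    "total_parks": 7, "total_rides": 10, "total_manufacturers": 6,
    "total_operators": 7, "total_designers": 4, "total_property_owners": 0,
    "total_roller_coasters": 8, "total_photos": 0, "total_park_photos": 0,
    "total_ride_photos": 0, "total_reviews": 8, "total_park_reviews": 4,
    "total_ride_reviews": 4, "last_updated": "2025-08-28T17:34:59+00:00",
    "relative_last_updated": "just now",
}
serializer = StatsSerializer(data=sample)
print(serializer.is_valid())    # True: category/status breakdown fields are optional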

View File

@@ -0,0 +1,95 @@
"""
Django signals for automatically updating statistics cache.
This module contains signal handlers that invalidate the stats cache
whenever relevant entities are created, updated, or deleted.
"""
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.core.cache import cache
from apps.parks.models import Park, ParkReview, ParkPhoto, Company as ParkCompany
from apps.rides.models import Ride, RollerCoasterStats, RideReview, RidePhoto, Company as RideCompany
def invalidate_stats_cache():
"""
Invalidate the platform stats cache.
This function is called whenever any entity that affects statistics
is created, updated, or deleted.
"""
cache.delete("platform_stats")
# Also update the timestamp for when stats were last invalidated
from datetime import datetime
cache.set("platform_stats_timestamp", datetime.now().isoformat(), 300)
# Park signals
@receiver(post_save, sender=Park)
@receiver(post_delete, sender=Park)
def park_changed(sender, **kwargs):
"""Handle Park creation/deletion."""
invalidate_stats_cache()
# Ride signals
@receiver(post_save, sender=Ride)
@receiver(post_delete, sender=Ride)
def ride_changed(sender, **kwargs):
"""Handle Ride creation/deletion."""
invalidate_stats_cache()
# Roller coaster stats signals
@receiver(post_save, sender=RollerCoasterStats)
@receiver(post_delete, sender=RollerCoasterStats)
def roller_coaster_stats_changed(sender, **kwargs):
"""Handle RollerCoasterStats creation/deletion."""
invalidate_stats_cache()
# Company signals (both park and ride companies)
@receiver(post_save, sender=ParkCompany)
@receiver(post_delete, sender=ParkCompany)
def park_company_changed(sender, **kwargs):
"""Handle ParkCompany creation/deletion."""
invalidate_stats_cache()
@receiver(post_save, sender=RideCompany)
@receiver(post_delete, sender=RideCompany)
def ride_company_changed(sender, **kwargs):
"""Handle RideCompany creation/deletion."""
invalidate_stats_cache()
# Photo signals
@receiver(post_save, sender=ParkPhoto)
@receiver(post_delete, sender=ParkPhoto)
def park_photo_changed(sender, **kwargs):
"""Handle ParkPhoto creation/deletion."""
invalidate_stats_cache()
@receiver(post_save, sender=RidePhoto)
@receiver(post_delete, sender=RidePhoto)
def ride_photo_changed(sender, **kwargs):
"""Handle RidePhoto creation/deletion."""
invalidate_stats_cache()
# Review signals
@receiver(post_save, sender=ParkReview)
@receiver(post_delete, sender=ParkReview)
def park_review_changed(sender, **kwargs):
"""Handle ParkReview creation/deletion."""
invalidate_stats_cache()
@receiver(post_save, sender=RideReview)
@receiver(post_delete, sender=RideReview)
def ride_review_changed(sender, **kwargs):
"""Handle RideReview creation/deletion."""
invalidate_stats_cache()
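
Signal modules only take effect once they are imported; the usual place to do that is the app config's ready() hook. A sketch of the standard Django pattern, assuming an app named apps.api (the actual app layout is not shown in this diff):

# apps/api/apps.py -- hypothetical location, shown only to illustrate the pattern.
from django.apps import AppConfig


class ApiConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.api"

    def ready(self):
        # Importing the module registers the post_save/post_delete receivers above.
        from . import signals  # noqa: F401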

View File

@@ -22,6 +22,7 @@ from .views import (
TrendingAPIView,
NewContentAPIView,
)
from .views.stats import StatsAPIView, StatsRecalculateAPIView
from django.urls import path, include
from rest_framework.routers import DefaultRouter
@@ -58,6 +59,9 @@ urlpatterns = [
# Trending system endpoints
path("trending/content/", TrendingAPIView.as_view(), name="trending"),
path("trending/new/", NewContentAPIView.as_view(), name="new-content"),
# Statistics endpoints
path("stats/", StatsAPIView.as_view(), name="stats"),
path("stats/recalculate/", StatsRecalculateAPIView.as_view(), name="stats-recalculate"),
# Ranking system endpoints
path(
"rankings/calculate/",

View File

@@ -301,56 +301,77 @@ class SocialProvidersAPIView(APIView):
def get(self, request: Request) -> Response:
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
site = get_current_site(request._request) # type: ignore[attr-defined]
try:
site = get_current_site(request._request) # type: ignore[attr-defined]
# Cache key based on site and request host
# Use pk for Site objects, domain for RequestSite objects
site_identifier = getattr(site, "pk", site.domain)
cache_key = f"social_providers:{site_identifier}:{request.get_host()}"
# Cache key based on site and request host
# Use pk for Site objects, domain for RequestSite objects
site_identifier = getattr(site, "pk", site.domain)
cache_key = f"social_providers:{site_identifier}:{request.get_host()}"
# Try to get from cache first (cache for 15 minutes)
cached_providers = cache.get(cache_key)
if cached_providers is not None:
return Response(cached_providers)
# Try to get from cache first (cache for 15 minutes)
cached_providers = cache.get(cache_key)
if cached_providers is not None:
return Response(cached_providers)
providers_list = []
providers_list = []
# Optimized query: filter by site and order by provider name
from allauth.socialaccount.models import SocialApp
# Optimized query: filter by site and order by provider name
from allauth.socialaccount.models import SocialApp
social_apps = SocialApp.objects.filter(sites=site).order_by("provider")
for social_app in social_apps:
try:
# Simplified provider name resolution - avoid expensive provider class loading
provider_name = social_app.name or social_app.provider.title()
social_apps = SocialApp.objects.filter(sites=site).order_by("provider")
except ObjectDoesNotExist:
# If no social apps exist, return empty list
social_apps = []
# Build auth URL efficiently
auth_url = request.build_absolute_uri(
f"/accounts/{social_app.provider}/login/"
)
for social_app in social_apps:
try:
# Simplified provider name resolution - avoid expensive provider class loading
provider_name = social_app.name or social_app.provider.title()
providers_list.append(
{
"id": social_app.provider,
"name": provider_name,
"authUrl": auth_url,
}
)
# Build auth URL efficiently
auth_url = request.build_absolute_uri(
f"/accounts/{social_app.provider}/login/"
)
except Exception:
# Skip if provider can't be loaded
continue
providers_list.append(
{
"id": social_app.provider,
"name": provider_name,
"authUrl": auth_url,
}
)
# Serialize and cache the result
serializer = SocialProviderOutputSerializer(providers_list, many=True)
response_data = serializer.data
except Exception:
# Skip if provider can't be loaded
continue
# Cache for 15 minutes (900 seconds)
cache.set(cache_key, response_data, 900)
# Serialize and cache the result
serializer = SocialProviderOutputSerializer(providers_list, many=True)
response_data = serializer.data
return Response(response_data)
# Cache for 15 minutes (900 seconds)
cache.set(cache_key, response_data, 900)
return Response(response_data)
except Exception as e:
# Return a proper JSON error response instead of letting it bubble up
return Response(
{
"status": "error",
"error": {
"code": "SOCIAL_PROVIDERS_ERROR",
"message": "Unable to retrieve social providers",
"details": str(e) if str(e) else None,
},
"data": None,
},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@extend_schema_view(

View File

@@ -55,7 +55,9 @@ except ImportError:
@extend_schema_view(
get=extend_schema(
summary="Health check",
description="Get comprehensive health check information including system metrics.",
description=(
"Get comprehensive health check information including system metrics."
),
responses={
200: HealthCheckOutputSerializer,
503: HealthCheckOutputSerializer,
@@ -104,18 +106,30 @@ class HealthCheckAPIView(APIView):
# Process individual health checks
for plugin in plugins:
plugin_name = plugin.identifier()
# Handle both plugin objects and strings
if hasattr(plugin, 'identifier'):
plugin_name = plugin.identifier()
plugin_class_name = plugin.__class__.__name__
critical_service = getattr(plugin, "critical_service", False)
response_time = getattr(plugin, "_response_time", None)
else:
# If plugin is a string, use it directly
plugin_name = str(plugin)
plugin_class_name = plugin_name
critical_service = False
response_time = None
plugin_errors = (
errors.get(plugin.__class__.__name__, [])
errors.get(plugin_class_name, [])
if isinstance(errors, dict)
else []
)
health_data["checks"][plugin_name] = {
"status": "healthy" if not plugin_errors else "unhealthy",
"critical": getattr(plugin, "critical_service", False),
"critical": critical_service,
"errors": [str(error) for error in plugin_errors],
"response_time_ms": getattr(plugin, "_response_time", None),
"response_time_ms": response_time,
}
# Calculate total response time
@@ -320,6 +334,16 @@ class PerformanceMetricsAPIView(APIView):
},
tags=["Health"],
),
options=extend_schema(
summary="CORS preflight for simple health check",
description=(
"Handle CORS preflight requests for the simple health check endpoint."
),
responses={
200: SimpleHealthOutputSerializer,
},
tags=["Health"],
),
)
class SimpleHealthAPIView(APIView):
"""Simple health check endpoint for load balancers."""
@@ -342,7 +366,7 @@ class SimpleHealthAPIView(APIView):
"timestamp": timezone.now(),
}
serializer = SimpleHealthOutputSerializer(response_data)
return Response(serializer.data)
return Response(serializer.data, status=200)
except Exception as e:
response_data = {
"status": "error",
@@ -351,3 +375,12 @@ class SimpleHealthAPIView(APIView):
}
serializer = SimpleHealthOutputSerializer(response_data)
return Response(serializer.data, status=503)
def options(self, request: Request) -> Response:
"""Handle OPTIONS requests for CORS preflight."""
response_data = {
"status": "ok",
"timestamp": timezone.now(),
}
serializer = SimpleHealthOutputSerializer(response_data)
return Response(serializer.data)
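
A minimal sketch of how a load-balancer-style probe (or a test) could hit the simple health endpoint, assuming a route such as /api/v1/health/simple/ (the path is not confirmed by this diff):

from django.test import Client

client = Client()
# Path is an assumption; substitute the project's actual route.
response = client.get("/api/v1/health/simple/")
print(response.status_code)   # 200 when healthy, 503 on error per the view above

# CORS preflight now has an explicit handler as well.
print(client.options("/api/v1/health/simple/").status_code)  # 200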

View File

@@ -0,0 +1,358 @@
"""
Statistics API views for ThrillWiki.
Provides aggregate statistics about the platform's content including
counts of parks, rides, manufacturers, and other entities.
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny, IsAdminUser
from django.db.models import Count, Q
from django.core.cache import cache
from django.utils import timezone
from drf_spectacular.utils import extend_schema, OpenApiExample
from datetime import datetime, timedelta
from apps.parks.models import Park, ParkReview, ParkPhoto, Company as ParkCompany
from apps.rides.models import Ride, RollerCoasterStats, RideReview, RidePhoto, Company as RideCompany
from ..serializers.stats import StatsSerializer
class StatsAPIView(APIView):
"""
API endpoint that returns aggregate statistics about the platform.
Returns counts of various entities like parks, rides, manufacturers, etc.
Results are cached for performance.
"""
permission_classes = [AllowAny]
def _get_relative_time(self, timestamp_str):
"""
Convert an ISO timestamp to a human-readable relative time.
Args:
timestamp_str: ISO format timestamp string
Returns:
str: Human-readable relative time (e.g., "2 days, 3 hours, and 15 minutes ago", "just now")
"""
if not timestamp_str or timestamp_str == 'just_now':
return 'just now'
try:
# Parse the ISO timestamp
if isinstance(timestamp_str, str):
timestamp = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
else:
timestamp = timestamp_str
# Make timezone-aware if needed
if timestamp.tzinfo is None:
timestamp = timezone.make_aware(timestamp)
now = timezone.now()
diff = now - timestamp
total_seconds = int(diff.total_seconds())
# If less than a minute, return "just now"
if total_seconds < 60:
return 'just now'
# Calculate time components
days = diff.days
hours = (total_seconds % 86400) // 3600
minutes = (total_seconds % 3600) // 60
# Build the relative time string
parts = []
if days > 0:
parts.append(f'{days} day{"s" if days != 1 else ""}')
if hours > 0:
parts.append(f'{hours} hour{"s" if hours != 1 else ""}')
if minutes > 0:
parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}')
# Join parts with commas and add "ago"
if len(parts) == 0:
return 'just now'
elif len(parts) == 1:
return f'{parts[0]} ago'
elif len(parts) == 2:
return f'{parts[0]} and {parts[1]} ago'
else:
return f'{", ".join(parts[:-1])}, and {parts[-1]} ago'
except (ValueError, TypeError):
return 'unknown'
@extend_schema(
operation_id="get_platform_stats",
summary="Get platform statistics",
description="""
Returns comprehensive aggregate statistics about the ThrillWiki platform.
This endpoint provides detailed counts and breakdowns of all major entities including:
- Parks, rides, and roller coasters
- Companies (manufacturers, operators, designers, property owners)
- Photos and reviews
- Ride categories (roller coasters, dark rides, flat rides, etc.)
- Status breakdowns (operating, closed, under construction, etc.)
Results are cached for 5 minutes for optimal performance and automatically
invalidated when relevant data changes.
**No authentication required** - this is a public endpoint.
""".strip(),
responses={
200: StatsSerializer,
500: {
"type": "object",
"properties": {
"error": {"type": "string", "description": "Error message if statistics calculation fails"}
}
}
},
tags=["Statistics"],
examples=[
OpenApiExample(
name="Sample Response",
description="Example of platform statistics response",
value={
"total_parks": 7,
"total_rides": 10,
"total_manufacturers": 6,
"total_operators": 7,
"total_designers": 4,
"total_property_owners": 0,
"total_roller_coasters": 8,
"total_photos": 0,
"total_park_photos": 0,
"total_ride_photos": 0,
"total_reviews": 8,
"total_park_reviews": 4,
"total_ride_reviews": 4,
"roller_coasters": 10,
"operating_parks": 7,
"operating_rides": 10,
"last_updated": "2025-08-28T17:34:59.677143+00:00",
"relative_last_updated": "just now"
}
)
]
)
def get(self, request):
"""Get platform statistics."""
# Try to get cached stats first
cache_key = "platform_stats"
cached_stats = cache.get(cache_key)
if cached_stats:
return Response(cached_stats, status=status.HTTP_200_OK)
# Calculate fresh stats
stats = self._calculate_stats()
# Cache for 5 minutes
cache.set(cache_key, stats, 300)
return Response(stats, status=status.HTTP_200_OK)
def _calculate_stats(self):
"""Calculate all platform statistics."""
# Basic entity counts
total_parks = Park.objects.count()
total_rides = Ride.objects.count()
# Company counts by role
total_manufacturers = RideCompany.objects.filter(
roles__contains=["MANUFACTURER"]
).count()
total_operators = ParkCompany.objects.filter(
roles__contains=["OPERATOR"]
).count()
total_designers = RideCompany.objects.filter(
roles__contains=["DESIGNER"]
).count()
total_property_owners = ParkCompany.objects.filter(
roles__contains=["PROPERTY_OWNER"]
).count()
# Photo counts (combined)
total_park_photos = ParkPhoto.objects.count()
total_ride_photos = RidePhoto.objects.count()
total_photos = total_park_photos + total_ride_photos
# Ride type counts
total_roller_coasters = RollerCoasterStats.objects.count()
# Ride category counts
ride_categories = Ride.objects.values('category').annotate(
count=Count('id')
).exclude(category='')
category_stats = {}
for category in ride_categories:
category_code = category['category']
category_count = category['count']
# Convert category codes to readable names
category_names = {
'RC': 'roller_coasters',
'DR': 'dark_rides',
'FR': 'flat_rides',
'WR': 'water_rides',
'TR': 'transport_rides',
'OT': 'other_rides'
}
category_name = category_names.get(
category_code, f'category_{category_code.lower()}')
category_stats[category_name] = category_count
# Park status counts
park_statuses = Park.objects.values('status').annotate(
count=Count('id')
)
park_status_stats = {}
for status_item in park_statuses:
status_code = status_item['status']
status_count = status_item['count']
# Convert status codes to readable names
status_names = {
'OPERATING': 'operating_parks',
'CLOSED_TEMP': 'temporarily_closed_parks',
'CLOSED_PERM': 'permanently_closed_parks',
'UNDER_CONSTRUCTION': 'under_construction_parks',
'DEMOLISHED': 'demolished_parks',
'RELOCATED': 'relocated_parks'
}
status_name = status_names.get(status_code, f'status_{status_code.lower()}')
park_status_stats[status_name] = status_count
# Ride status counts
ride_statuses = Ride.objects.values('status').annotate(
count=Count('id')
)
ride_status_stats = {}
for status_item in ride_statuses:
status_code = status_item['status']
status_count = status_item['count']
# Convert status codes to readable names
status_names = {
'OPERATING': 'operating_rides',
'CLOSED_TEMP': 'temporarily_closed_rides',
'SBNO': 'sbno_rides',
'CLOSING': 'closing_rides',
'CLOSED_PERM': 'permanently_closed_rides',
'UNDER_CONSTRUCTION': 'under_construction_rides',
'DEMOLISHED': 'demolished_rides',
'RELOCATED': 'relocated_rides'
}
status_name = status_names.get(
status_code, f'ride_status_{status_code.lower()}')
ride_status_stats[status_name] = status_count
# Review counts
total_park_reviews = ParkReview.objects.count()
total_ride_reviews = RideReview.objects.count()
total_reviews = total_park_reviews + total_ride_reviews
# Timestamp handling
now = timezone.now()
last_updated_iso = now.isoformat()
# Get cached timestamp or use current time
cached_timestamp = cache.get('platform_stats_timestamp')
if cached_timestamp and cached_timestamp != 'just_now':
# Use cached timestamp for consistency
last_updated_iso = cached_timestamp
else:
# Set new timestamp in cache
cache.set('platform_stats_timestamp', last_updated_iso, 300)
# Calculate relative time
relative_last_updated = self._get_relative_time(last_updated_iso)
# Combine all stats
stats = {
# Core entity counts
'total_parks': total_parks,
'total_rides': total_rides,
'total_manufacturers': total_manufacturers,
'total_operators': total_operators,
'total_designers': total_designers,
'total_property_owners': total_property_owners,
'total_roller_coasters': total_roller_coasters,
# Photo counts
'total_photos': total_photos,
'total_park_photos': total_park_photos,
'total_ride_photos': total_ride_photos,
# Review counts
'total_reviews': total_reviews,
'total_park_reviews': total_park_reviews,
'total_ride_reviews': total_ride_reviews,
# Category breakdowns
**category_stats,
# Status breakdowns
**park_status_stats,
**ride_status_stats,
# Metadata
'last_updated': last_updated_iso,
'relative_last_updated': relative_last_updated
}
return stats
class StatsRecalculateAPIView(APIView):
"""
Admin-only API endpoint to force recalculation of platform statistics.
This endpoint clears the cache and forces a fresh calculation of all statistics.
Only accessible to admin users.
"""
permission_classes = [IsAdminUser]
@extend_schema(exclude=True)
def post(self, request):
"""Force recalculation of platform statistics."""
# Clear the cache
cache.delete("platform_stats")
cache.delete("platform_stats_timestamp")
# Create a new StatsAPIView instance to reuse the calculation logic
stats_view = StatsAPIView()
fresh_stats = stats_view._calculate_stats()
# Cache the fresh stats
cache.set("platform_stats", fresh_stats, 300)
# Return success response with the fresh stats
return Response({
"message": "Platform statistics have been successfully recalculated",
"stats": fresh_stats,
"recalculated_at": timezone.now().isoformat()
}, status=status.HTTP_200_OK)
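
The relative-time formatting in _get_relative_time follows a simple days/hours/minutes breakdown; a standalone sketch of the same logic, useful for sanity-checking expected output without a Django environment:

from datetime import timedelta

def relative(diff: timedelta) -> str:
    # Mirrors the breakdown used by StatsAPIView._get_relative_time above.
    total_seconds = int(diff.total_seconds())
    if total_seconds < 60:
        return "just now"
    days = diff.days
    hours = (total_seconds % 86400) // 3600
    minutes = (total_seconds % 3600) // 60
    parts = []
    if days > 0:
        parts.append(f'{days} day{"s" if days != 1 else ""}')
    if hours > 0:
        parts.append(f'{hours} hour{"s" if hours != 1 else ""}')
    if minutes > 0:
        parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}')
    if not parts:
        return "just now"
    if len(parts) == 1:
        return f"{parts[0]} ago"
    if len(parts) == 2:
        return f"{parts[0]} and {parts[1]} ago"
    return f'{", ".join(parts[:-1])}, and {parts[-1]} ago'

print(relative(timedelta(seconds=30)))                   # just now
print(relative(timedelta(minutes=5)))                    # 5 minutes ago
print(relative(timedelta(days=2, hours=3, minutes=15)))  # 2 days, 3 hours, and 15 minutes ago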