Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2025-12-20 02:31:08 -05:00)

Commit "ok"
@@ -28,4 +28,7 @@ CORS_ALLOWED_ORIGINS=http://localhost:3000
 
 # Feature Flags
 ENABLE_DEBUG_TOOLBAR=True
 ENABLE_SILK_PROFILER=False
+
+# Frontend Configuration
+FRONTEND_DOMAIN=https://thrillwiki.com
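
Note: the serializer hunks below read FRONTEND_DOMAIN from the settings module
(config.django.base). A minimal sketch of how that setting could be loaded from
the environment variable added above; the actual loading code is not part of
this diff, so the use of os.environ here is an assumption:

    import os

    # Falls back to the production domain when the variable is unset.
    FRONTEND_DOMAIN = os.environ.get("FRONTEND_DOMAIN", "https://thrillwiki.com")
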
@@ -11,6 +11,7 @@ from drf_spectacular.utils import (
     extend_schema_field,
     OpenApiExample,
 )
+from config.django import base as settings
 
 from .shared import LocationOutputSerializer, CompanyOutputSerializer, ModelChoices
 
@@ -65,10 +66,18 @@ class ParkListOutputSerializer(serializers.Serializer):
     # Operator info
     operator = CompanyOutputSerializer()
 
+    # URL
+    url = serializers.SerializerMethodField()
+
     # Metadata
     created_at = serializers.DateTimeField()
     updated_at = serializers.DateTimeField()
 
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this park."""
+        return f"{settings.FRONTEND_DOMAIN}/parks/{obj.slug}/"
+
 
 @extend_schema_serializer(
     examples=[
@@ -166,6 +175,14 @@ class ParkDetailOutputSerializer(serializers.Serializer):
     banner_image = serializers.SerializerMethodField()
     card_image = serializers.SerializerMethodField()
 
+    # URL
+    url = serializers.SerializerMethodField()
+
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this park."""
+        return f"{settings.FRONTEND_DOMAIN}/parks/{obj.slug}/"
+
     @extend_schema_field(serializers.ListField(child=serializers.DictField()))
     def get_areas(self, obj):
         """Get simplified area information."""
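
Note: with FRONTEND_DOMAIN configured as above, get_url resolves to an absolute
frontend URL. An illustrative result (the slug "cedar-point" is a made-up
example, not taken from this diff):

    >>> ParkListOutputSerializer().get_url(park)
    'https://thrillwiki.com/parks/cedar-point/'
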
backend/apps/api/v1/serializers/reviews.py (new file, 98 lines)
@@ -0,0 +1,98 @@
+"""
+Serializers for review-related API endpoints.
+"""
+
+from rest_framework import serializers
+from apps.parks.models.reviews import ParkReview
+from apps.rides.models.reviews import RideReview
+from apps.accounts.models import User
+
+
+class ReviewUserSerializer(serializers.ModelSerializer):
+    """Serializer for user information in reviews."""
+    avatar_url = serializers.SerializerMethodField()
+    display_name = serializers.SerializerMethodField()
+
+    class Meta:
+        model = User
+        fields = ['username', 'display_name', 'avatar_url']
+
+    def get_avatar_url(self, obj):
+        """Get the user's avatar URL."""
+        if hasattr(obj, 'profile') and obj.profile:
+            return obj.profile.get_avatar()
+        return "/static/images/default-avatar.png"
+
+    def get_display_name(self, obj):
+        """Get the user's display name."""
+        return obj.get_display_name()
+
+
+class LatestReviewSerializer(serializers.Serializer):
+    """Serializer for latest reviews combining park and ride reviews."""
+    id = serializers.IntegerField()
+    type = serializers.CharField()  # 'park' or 'ride'
+    title = serializers.CharField()
+    content_snippet = serializers.CharField()
+    rating = serializers.IntegerField()
+    created_at = serializers.DateTimeField()
+    user = ReviewUserSerializer()
+
+    # Subject information (park or ride)
+    subject_name = serializers.CharField()
+    subject_slug = serializers.CharField()
+    subject_url = serializers.CharField()
+
+    # Park information (for ride reviews)
+    park_name = serializers.CharField(allow_null=True)
+    park_slug = serializers.CharField(allow_null=True)
+    park_url = serializers.CharField(allow_null=True)
+
+    def to_representation(self, instance):
+        """Convert review instance to serialized representation."""
+        if isinstance(instance, ParkReview):
+            return {
+                'id': instance.pk,
+                'type': 'park',
+                'title': instance.title,
+                'content_snippet': self._get_content_snippet(instance.content),
+                'rating': instance.rating,
+                'created_at': instance.created_at,
+                'user': ReviewUserSerializer(instance.user).data,
+                'subject_name': instance.park.name,
+                'subject_slug': instance.park.slug,
+                'subject_url': f"/parks/{instance.park.slug}/",
+                'park_name': None,
+                'park_slug': None,
+                'park_url': None,
+            }
+        elif isinstance(instance, RideReview):
+            return {
+                'id': instance.pk,
+                'type': 'ride',
+                'title': instance.title,
+                'content_snippet': self._get_content_snippet(instance.content),
+                'rating': instance.rating,
+                'created_at': instance.created_at,
+                'user': ReviewUserSerializer(instance.user).data,
+                'subject_name': instance.ride.name,
+                'subject_slug': instance.ride.slug,
+                'subject_url': f"/parks/{instance.ride.park.slug}/rides/{instance.ride.slug}/",
+                'park_name': instance.ride.park.name,
+                'park_slug': instance.ride.park.slug,
+                'park_url': f"/parks/{instance.ride.park.slug}/",
+            }
+        return {}
+
+    def _get_content_snippet(self, content, max_length=150):
+        """Get a snippet of the review content."""
+        if len(content) <= max_length:
+            return content
+
+        # Find the last complete word within the limit
+        snippet = content[:max_length]
+        last_space = snippet.rfind(' ')
+        if last_space > 0:
+            snippet = snippet[:last_space]
+
+        return snippet + "..."
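
Note: _get_content_snippet truncates long review text on a word boundary and
appends an ellipsis. A quick sketch of the behavior with a made-up input
(default max_length of 150):

    serializer = LatestReviewSerializer()
    text = "word " * 60                 # 300 characters, well over the limit
    snippet = serializer._get_content_snippet(text)
    assert snippet.endswith("...")      # truncated at the last full word
    assert len(snippet) <= 150 + 3
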
@@ -11,6 +11,7 @@ from drf_spectacular.utils import (
     extend_schema_field,
     OpenApiExample,
 )
+from config.django import base as settings
 
 from .shared import ModelChoices
 
@@ -142,10 +143,18 @@ class RideModelListOutputSerializer(serializers.Serializer):
     # Primary image
     primary_image = RideModelPhotoOutputSerializer(allow_null=True)
 
+    # URL
+    url = serializers.SerializerMethodField()
+
     # Metadata
     created_at = serializers.DateTimeField()
     updated_at = serializers.DateTimeField()
 
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this ride model."""
+        return f"{settings.FRONTEND_DOMAIN}/rides/manufacturers/{obj.manufacturer.slug}/{obj.slug}/"
+
 
 @extend_schema_serializer(
     examples=[
@@ -277,10 +286,18 @@ class RideModelDetailOutputSerializer(serializers.Serializer):
     technical_specs = RideModelTechnicalSpecOutputSerializer(many=True)
     installations = serializers.SerializerMethodField()
 
+    # URL
+    url = serializers.SerializerMethodField()
+
     # Metadata
     created_at = serializers.DateTimeField()
     updated_at = serializers.DateTimeField()
 
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this ride model."""
+        return f"{settings.FRONTEND_DOMAIN}/rides/manufacturers/{obj.manufacturer.slug}/{obj.slug}/"
+
     @extend_schema_field(serializers.ListField(child=serializers.DictField()))
     def get_installations(self, obj):
         """Get ride installations using this model."""
@@ -11,7 +11,7 @@ from drf_spectacular.utils import (
     extend_schema_field,
     OpenApiExample,
 )
-
+from config.django import base as settings
 from .shared import ModelChoices
 
 
@@ -90,10 +90,18 @@ class RideListOutputSerializer(serializers.Serializer):
     opening_date = serializers.DateField(allow_null=True)
     closing_date = serializers.DateField(allow_null=True)
 
+    # URL
+    url = serializers.SerializerMethodField()
+
     # Metadata
     created_at = serializers.DateTimeField()
     updated_at = serializers.DateTimeField()
 
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this ride."""
+        return f"{settings.FRONTEND_DOMAIN}/parks/{obj.park.slug}/rides/{obj.slug}/"
+
 
 @extend_schema_serializer(
     examples=[
@@ -194,10 +202,18 @@ class RideDetailOutputSerializer(serializers.Serializer):
     banner_image = serializers.SerializerMethodField()
     card_image = serializers.SerializerMethodField()
 
+    # URL
+    url = serializers.SerializerMethodField()
+
     # Metadata
     created_at = serializers.DateTimeField()
     updated_at = serializers.DateTimeField()
 
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this ride."""
+        return f"{settings.FRONTEND_DOMAIN}/parks/{obj.park.slug}/rides/{obj.slug}/"
+
     @extend_schema_field(serializers.DictField(allow_null=True))
     def get_park_area(self, obj) -> dict | None:
         if obj.park_area:
@@ -8,6 +8,7 @@ to avoid code duplication and maintain consistency.
 from rest_framework import serializers
 from drf_spectacular.utils import extend_schema_field
 from django.contrib.auth import get_user_model
+from django.conf import settings
 
 # Import models inside class methods to avoid Django initialization issues
 
@@ -173,3 +174,31 @@ class CompanyOutputSerializer(serializers.Serializer):
     name = serializers.CharField()
     slug = serializers.CharField()
     roles = serializers.ListField(child=serializers.CharField(), required=False)
+    url = serializers.SerializerMethodField()
+
+    @extend_schema_field(serializers.URLField())
+    def get_url(self, obj) -> str:
+        """Generate the frontend URL for this company based on their primary role.
+
+        CRITICAL DOMAIN SEPARATION:
+        - OPERATOR and PROPERTY_OWNER are for parks domain
+        - MANUFACTURER and DESIGNER are for rides domain
+        """
+        # Use the URL field from the model if it exists (auto-generated on save)
+        if hasattr(obj, 'url') and obj.url:
+            return obj.url
+
+        # Fallback URL generation (should not be needed if model save works correctly)
+        if hasattr(obj, 'roles') and obj.roles:
+            frontend_domain = getattr(
+                settings, 'FRONTEND_DOMAIN', 'https://thrillwiki.com')
+            primary_role = obj.roles[0] if obj.roles else None
+
+            # Only generate URLs for rides domain roles here
+            if primary_role == 'MANUFACTURER':
+                return f"{frontend_domain}/rides/manufacturers/{obj.slug}/"
+            elif primary_role == 'DESIGNER':
+                return f"{frontend_domain}/rides/designers/{obj.slug}/"
+            # OPERATOR and PROPERTY_OWNER URLs are handled by parks domain
+
+        return ""
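
Note: get_url prefers the URL already stored on the model and only falls back to
role-based generation for rides-domain roles. Illustrative results (the slug
"intamin" is a made-up example):

    # roles == ["MANUFACTURER"], no obj.url set
    'https://thrillwiki.com/rides/manufacturers/intamin/'
    # roles == ["OPERATOR"] -> "" here; the parks domain builds that URL instead
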
@@ -21,8 +21,10 @@ from .views import (
     # Trending system views
     TrendingAPIView,
     NewContentAPIView,
+    TriggerTrendingCalculationAPIView,
 )
 from .views.stats import StatsAPIView, StatsRecalculateAPIView
+from .views.reviews import LatestReviewsAPIView
 from django.urls import path, include
 from rest_framework.routers import DefaultRouter
 
@@ -57,11 +59,15 @@ urlpatterns = [
         name="performance-metrics",
     ),
     # Trending system endpoints
-    path("trending/content/", TrendingAPIView.as_view(), name="trending"),
-    path("trending/new/", NewContentAPIView.as_view(), name="new-content"),
+    path("trending/", TrendingAPIView.as_view(), name="trending"),
+    path("new-content/", NewContentAPIView.as_view(), name="new-content"),
+    path("trending/calculate/", TriggerTrendingCalculationAPIView.as_view(),
+         name="trigger-trending-calculation"),
     # Statistics endpoints
     path("stats/", StatsAPIView.as_view(), name="stats"),
     path("stats/recalculate/", StatsRecalculateAPIView.as_view(), name="stats-recalculate"),
+    # Reviews endpoints
+    path("reviews/latest/", LatestReviewsAPIView.as_view(), name="latest-reviews"),
    # Ranking system endpoints
     path(
         "rankings/calculate/",
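
Note: assuming this URL module is mounted under /api/v1/ (the include() prefix
is not shown in this diff), the renamed and newly added routes can be exercised
like so:

    curl 'https://thrillwiki.com/api/v1/trending/?limit=10'
    curl 'https://thrillwiki.com/api/v1/new-content/?limit=10'
    curl 'https://thrillwiki.com/api/v1/reviews/latest/?limit=5'
    # Admin-only endpoint; requires staff credentials
    curl -X POST 'https://thrillwiki.com/api/v1/trending/calculate/'
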
@@ -28,6 +28,7 @@ from .health import (
 from .trending import (
     TrendingAPIView,
     NewContentAPIView,
+    TriggerTrendingCalculationAPIView,
 )
 
 # Export all views for import convenience
@@ -48,4 +49,5 @@ __all__ = [
     # Trending views
     "TrendingAPIView",
     "NewContentAPIView",
+    "TriggerTrendingCalculationAPIView",
 ]
backend/apps/api/v1/views/reviews.py (new file, 85 lines)
@@ -0,0 +1,85 @@
+"""
+Views for review-related API endpoints.
+"""
+
+from rest_framework.views import APIView
+from rest_framework.response import Response
+from rest_framework.permissions import AllowAny
+from rest_framework import status
+from django.db.models import Q
+from drf_spectacular.utils import extend_schema, OpenApiParameter
+from drf_spectacular.types import OpenApiTypes
+from itertools import chain
+from operator import attrgetter
+
+from apps.parks.models.reviews import ParkReview
+from apps.rides.models.reviews import RideReview
+from ..serializers.reviews import LatestReviewSerializer
+
+
+class LatestReviewsAPIView(APIView):
+    """
+    API endpoint to get the latest reviews from both parks and rides.
+
+    Returns a combined list of the most recent reviews across the platform,
+    including username, user avatar, date, score, and review snippet.
+    """
+    permission_classes = [AllowAny]
+
+    @extend_schema(
+        summary="Get Latest Reviews",
+        description=(
+            "Retrieve the latest reviews from both parks and rides. "
+            "Returns a combined list sorted by creation date, including "
+            "user information, ratings, and content snippets."
+        ),
+        parameters=[
+            OpenApiParameter(
+                name="limit",
+                type=OpenApiTypes.INT,
+                location=OpenApiParameter.QUERY,
+                description="Number of reviews to return (default: 20, max: 100)",
+                default=20,
+            ),
+        ],
+        responses={
+            200: LatestReviewSerializer(many=True),
+        },
+        tags=["Reviews"],
+    )
+    def get(self, request):
+        """Get the latest reviews from both parks and rides."""
+        # Get limit parameter with validation
+        try:
+            limit = int(request.query_params.get('limit', 20))
+            limit = min(max(limit, 1), 100)  # Clamp between 1 and 100
+        except (ValueError, TypeError):
+            limit = 20
+
+        # Get published reviews from both models
+        park_reviews = ParkReview.objects.filter(
+            is_published=True
+        ).select_related(
+            'user', 'user__profile', 'park'
+        ).order_by('-created_at')[:limit]
+
+        ride_reviews = RideReview.objects.filter(
+            is_published=True
+        ).select_related(
+            'user', 'user__profile', 'ride', 'ride__park'
+        ).order_by('-created_at')[:limit]
+
+        # Combine and sort by created_at
+        all_reviews = sorted(
+            chain(park_reviews, ride_reviews),
+            key=attrgetter('created_at'),
+            reverse=True
+        )[:limit]
+
+        # Serialize the combined results
+        serializer = LatestReviewSerializer(all_reviews, many=True)
+
+        return Response({
+            'count': len(all_reviews),
+            'results': serializer.data
+        }, status=status.HTTP_200_OK)
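
Note: a sketch of the JSON shape returned by LatestReviewsAPIView; all values
are illustrative, not real data from this repository:

    {
        "count": 1,
        "results": [
            {
                "id": 42,
                "type": "ride",
                "title": "Unmatched airtime",
                "content_snippet": "Rode it five times in a row and...",
                "rating": 5,
                "created_at": "2025-12-19T18:04:00Z",
                "user": {
                    "username": "coasterfan",
                    "display_name": "Coaster Fan",
                    "avatar_url": "/static/images/default-avatar.png"
                },
                "subject_name": "Steel Vengeance",
                "subject_slug": "steel-vengeance",
                "subject_url": "/parks/cedar-point/rides/steel-vengeance/",
                "park_name": "Cedar Point",
                "park_slug": "cedar-point",
                "park_url": "/parks/cedar-point/"
            }
        ]
    }
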
@@ -9,7 +9,8 @@ from datetime import datetime, date
 from rest_framework.views import APIView
 from rest_framework.request import Request
 from rest_framework.response import Response
-from rest_framework.permissions import AllowAny
+from rest_framework.permissions import AllowAny, IsAdminUser
+from rest_framework import status
 from drf_spectacular.utils import extend_schema, extend_schema_view, OpenApiParameter
 from drf_spectacular.types import OpenApiTypes
 
@@ -48,17 +49,12 @@ class TrendingAPIView(APIView):
 
     def get(self, request: Request) -> Response:
         """Get trending parks and rides."""
-        try:
-            from apps.core.services.trending_service import TrendingService
-        except ImportError:
-            # Fallback if trending service is not available
-            return self._get_fallback_trending_content(request)
+        from apps.core.services.trending_service import trending_service
 
         # Parse parameters
         limit = min(int(request.query_params.get("limit", 20)), 100)
 
-        # Get trending content
-        trending_service = TrendingService()
+        # Get trending content using direct calculation service
         all_trending = trending_service.get_trending_content(limit=limit * 2)
 
         # Separate by content type
@@ -75,20 +71,8 @@ class TrendingAPIView(APIView):
         trending_rides = trending_rides[: limit // 3] if trending_rides else []
         trending_parks = trending_parks[: limit // 3] if trending_parks else []
 
-        # Create mock latest reviews (since not implemented yet)
-        latest_reviews = [
-            {
-                "id": 1,
-                "name": "Steel Vengeance Review",
-                "location": "Cedar Point",
-                "category": "Roller Coaster",
-                "rating": 5.0,
-                "rank": 1,
-                "views": 1234,
-                "views_change": "+45%",
-                "slug": "steel-vengeance-review",
-            }
-        ][: limit // 3]
+        # Latest reviews will be empty until review system is implemented
+        latest_reviews = []
 
         # Return in expected frontend format
         response_data = {
@@ -99,82 +83,85 @@ class TrendingAPIView(APIView):
 
         return Response(response_data)
 
-    def _get_fallback_trending_content(self, request: Request) -> Response:
-        """Fallback method when trending service is not available."""
-        limit = min(int(request.query_params.get("limit", 20)), 100)
-
-        # Mock trending data
-        trending_rides = [
-            {
-                "id": 1,
-                "name": "Steel Vengeance",
-                "location": "Cedar Point",
-                "category": "Roller Coaster",
-                "rating": 4.8,
-                "rank": 1,
-                "views": 15234,
-                "views_change": "+25%",
-                "slug": "steel-vengeance",
-            },
-            {
-                "id": 2,
-                "name": "Lightning Rod",
-                "location": "Dollywood",
-                "category": "Roller Coaster",
-                "rating": 4.7,
-                "rank": 2,
-                "views": 12456,
-                "views_change": "+18%",
-                "slug": "lightning-rod",
-            },
-        ][: limit // 3]
-
-        trending_parks = [
-            {
-                "id": 1,
-                "name": "Cedar Point",
-                "location": "Sandusky, OH",
-                "category": "Theme Park",
-                "rating": 4.6,
-                "rank": 1,
-                "views": 45678,
-                "views_change": "+12%",
-                "slug": "cedar-point",
-            },
-            {
-                "id": 2,
-                "name": "Magic Kingdom",
-                "location": "Orlando, FL",
-                "category": "Theme Park",
-                "rating": 4.5,
-                "rank": 2,
-                "views": 67890,
-                "views_change": "+8%",
-                "slug": "magic-kingdom",
-            },
-        ][: limit // 3]
-
-        latest_reviews = [
-            {
-                "id": 1,
-                "name": "Steel Vengeance Review",
-                "location": "Cedar Point",
-                "category": "Roller Coaster",
-                "rating": 5.0,
-                "rank": 1,
-                "views": 1234,
-                "views_change": "+45%",
-                "slug": "steel-vengeance-review",
-            }
-        ][: limit // 3]
-
-        response_data = {
-            "trending_rides": trending_rides,
-            "trending_parks": trending_parks,
-            "latest_reviews": latest_reviews,
-        }
-
-        return Response(response_data)
+
+@extend_schema_view(
+    post=extend_schema(
+        summary="Trigger trending content calculation",
+        description="Manually trigger the calculation of trending content using Django management commands. Admin access required.",
+        responses={
+            202: {
+                "type": "object",
+                "properties": {
+                    "message": {"type": "string"},
+                    "trending_completed": {"type": "boolean"},
+                    "new_content_completed": {"type": "boolean"},
+                    "completion_time": {"type": "string"},
+                },
+            },
+            403: {"description": "Admin access required"},
+        },
+        tags=["Trending"],
+    ),
+)
+class TriggerTrendingCalculationAPIView(APIView):
+    """API endpoint to manually trigger trending content calculation."""
+
+    permission_classes = [IsAdminUser]
+
+    def post(self, request: Request) -> Response:
+        """Trigger trending content calculation using management commands."""
+        try:
+            from django.core.management import call_command
+            import io
+            from contextlib import redirect_stdout, redirect_stderr
+
+            # Capture command output
+            trending_output = io.StringIO()
+            new_content_output = io.StringIO()
+
+            trending_completed = False
+            new_content_completed = False
+
+            try:
+                # Run trending calculation command
+                with redirect_stdout(trending_output), redirect_stderr(trending_output):
+                    call_command('calculate_trending',
+                                 '--content-type=all', '--limit=50')
+                trending_completed = True
+            except Exception as e:
+                trending_output.write(f"Error: {str(e)}")
+
+            try:
+                # Run new content calculation command
+                with redirect_stdout(new_content_output), redirect_stderr(new_content_output):
+                    call_command('calculate_new_content',
+                                 '--content-type=all', '--days-back=30', '--limit=50')
+                new_content_completed = True
+            except Exception as e:
+                new_content_output.write(f"Error: {str(e)}")
+
+            completion_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+            return Response(
+                {
+                    "message": "Trending content calculation completed",
+                    "trending_completed": trending_completed,
+                    "new_content_completed": new_content_completed,
+                    "completion_time": completion_time,
+                    "trending_output": trending_output.getvalue(),
+                    "new_content_output": new_content_output.getvalue(),
+                },
+                status=status.HTTP_202_ACCEPTED,
+            )
+
+        except Exception as e:
+            return Response(
+                {
+                    "error": "Failed to trigger trending content calculation",
+                    "details": str(e),
+                },
+                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            )
 
 
 @extend_schema_view(
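
Note: the view above wraps the two management commands introduced later in this
commit; the equivalent manual invocations are:

    python manage.py calculate_trending --content-type=all --limit=50
    python manage.py calculate_new_content --content-type=all --days-back=30 --limit=50
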
@@ -210,19 +197,15 @@ class NewContentAPIView(APIView):
 
     def get(self, request: Request) -> Response:
         """Get new parks and rides."""
-        try:
-            from apps.core.services.trending_service import TrendingService
-        except ImportError:
-            # Fallback if trending service is not available
-            return self._get_fallback_new_content(request)
+        from apps.core.services.trending_service import trending_service
 
         # Parse parameters
         limit = min(int(request.query_params.get("limit", 20)), 100)
+        days_back = min(int(request.query_params.get("days", 30)), 365)
 
-        # Get new content with longer timeframe to get more data
-        trending_service = TrendingService()
+        # Get new content using direct calculation service
         all_new_content = trending_service.get_new_content(
-            limit=limit * 2, days_back=60
+            limit=limit * 2, days_back=days_back
         )
 
         recently_added = []
@@ -258,30 +241,12 @@ class NewContentAPIView(APIView):
             else:
                 recently_added.append(item)
 
-        # Create mock upcoming items
-        upcoming = [
-            {
-                "id": 1,
-                "name": "Epic Universe",
-                "location": "Universal Orlando",
-                "category": "Theme Park",
-                "date_added": "Opening 2025",
-                "slug": "epic-universe",
-            },
-            {
-                "id": 2,
-                "name": "New Fantasyland Expansion",
-                "location": "Magic Kingdom",
-                "category": "Land Expansion",
-                "date_added": "Opening 2026",
-                "slug": "fantasyland-expansion",
-            },
-        ]
+        # Upcoming items will be empty until future content system is implemented
+        upcoming = []
 
         # Limit each category
         recently_added = recently_added[: limit // 3] if recently_added else []
         newly_opened = newly_opened[: limit // 3] if newly_opened else []
-        upcoming = upcoming[: limit // 3] if upcoming else []
 
         # Return in expected frontend format
         response_data = {
@@ -291,73 +256,3 @@ class NewContentAPIView(APIView):
         }
 
         return Response(response_data)
-
-    def _get_fallback_new_content(self, request: Request) -> Response:
-        """Fallback method when trending service is not available."""
-        limit = min(int(request.query_params.get("limit", 20)), 100)
-
-        # Mock new content data
-        recently_added = [
-            {
-                "id": 1,
-                "name": "Iron Gwazi",
-                "location": "Busch Gardens Tampa",
-                "category": "Roller Coaster",
-                "date_added": "2024-12-01",
-                "slug": "iron-gwazi",
-            },
-            {
-                "id": 2,
-                "name": "VelociCoaster",
-                "location": "Universal's Islands of Adventure",
-                "category": "Roller Coaster",
-                "date_added": "2024-11-15",
-                "slug": "velocicoaster",
-            },
-        ][: limit // 3]
-
-        newly_opened = [
-            {
-                "id": 3,
-                "name": "Guardians of the Galaxy",
-                "location": "EPCOT",
-                "category": "Roller Coaster",
-                "date_added": "2024-10-01",
-                "slug": "guardians-galaxy",
-            },
-            {
-                "id": 4,
-                "name": "TRON Lightcycle Run",
-                "location": "Magic Kingdom",
-                "category": "Roller Coaster",
-                "date_added": "2024-09-15",
-                "slug": "tron-lightcycle",
-            },
-        ][: limit // 3]
-
-        upcoming = [
-            {
-                "id": 5,
-                "name": "Epic Universe",
-                "location": "Universal Orlando",
-                "category": "Theme Park",
-                "date_added": "Opening 2025",
-                "slug": "epic-universe",
-            },
-            {
-                "id": 6,
-                "name": "New Fantasyland Expansion",
-                "location": "Magic Kingdom",
-                "category": "Land Expansion",
-                "date_added": "Opening 2026",
-                "slug": "fantasyland-expansion",
-            },
-        ][: limit // 3]
-
-        response_data = {
-            "recently_added": recently_added,
-            "newly_opened": newly_opened,
-            "upcoming": upcoming,
-        }
-
-        return Response(response_data)
backend/apps/core/management/commands/calculate_new_content.py (new file, 209 lines)
@@ -0,0 +1,209 @@
+"""
+Django management command to calculate new content.
+
+This replaces the Celery task for calculating new content.
+Run with: python manage.py calculate_new_content
+"""
+
+import logging
+from datetime import datetime, timedelta
+from typing import Dict, List, Any
+from django.core.management.base import BaseCommand, CommandError
+from django.utils import timezone
+from django.core.cache import cache
+from django.db.models import Q
+
+from apps.parks.models import Park
+from apps.rides.models import Ride
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+    help = 'Calculate new content and cache results'
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            '--content-type',
+            type=str,
+            default='all',
+            choices=['all', 'parks', 'rides'],
+            help='Type of content to calculate (default: all)'
+        )
+        parser.add_argument(
+            '--days-back',
+            type=int,
+            default=30,
+            help='Number of days to look back for new content (default: 30)'
+        )
+        parser.add_argument(
+            '--limit',
+            type=int,
+            default=50,
+            help='Maximum number of results to calculate (default: 50)'
+        )
+        parser.add_argument(
+            '--verbose',
+            action='store_true',
+            help='Enable verbose output'
+        )
+
+    def handle(self, *args, **options):
+        content_type = options['content_type']
+        days_back = options['days_back']
+        limit = options['limit']
+        verbose = options['verbose']
+
+        if verbose:
+            self.stdout.write(f"Starting new content calculation for {content_type}")
+
+        try:
+            cutoff_date = timezone.now() - timedelta(days=days_back)
+            new_items = []
+
+            if content_type in ["all", "parks"]:
+                parks = self._get_new_parks(
+                    cutoff_date, limit if content_type == "parks" else limit * 2)
+                new_items.extend(parks)
+                if verbose:
+                    self.stdout.write(f"Found {len(parks)} new parks")
+
+            if content_type in ["all", "rides"]:
+                rides = self._get_new_rides(
+                    cutoff_date, limit if content_type == "rides" else limit * 2)
+                new_items.extend(rides)
+                if verbose:
+                    self.stdout.write(f"Found {len(rides)} new rides")
+
+            # Sort by date added (most recent first) and apply limit
+            new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
+            new_items = new_items[:limit]
+
+            # Format results for API consumption
+            formatted_results = self._format_new_content_results(new_items)
+
+            # Cache results
+            cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
+            cache.set(cache_key, formatted_results, 1800)  # Cache for 30 minutes
+
+            self.stdout.write(
+                self.style.SUCCESS(
+                    f"Successfully calculated {len(formatted_results)} new items for {content_type}"
+                )
+            )
+
+            if verbose:
+                for item in formatted_results[:5]:  # Show first 5 items
+                    self.stdout.write(
+                        f"  {item['name']} ({item['park']}) - opened: {item['date_opened']}")
+
+        except Exception as e:
+            logger.error(f"Error calculating new content: {e}", exc_info=True)
+            raise CommandError(f"Failed to calculate new content: {e}")
+
+    def _get_new_parks(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
+        """Get recently added parks using real data."""
+        new_parks = (
+            Park.objects.filter(
+                Q(created_at__gte=cutoff_date) | Q(
+                    opening_date__gte=cutoff_date.date()),
+                status="OPERATING",
+            )
+            .select_related("location", "operator")
+            .order_by("-created_at", "-opening_date")[:limit]
+        )
+
+        results = []
+        for park in new_parks:
+            date_added = park.opening_date or park.created_at
+            if date_added:
+                if isinstance(date_added, datetime):
+                    date_added = date_added.date()
+
+            opening_date = getattr(park, "opening_date", None)
+            if opening_date and isinstance(opening_date, datetime):
+                opening_date = opening_date.date()
+
+            results.append({
+                "content_object": park,
+                "content_type": "park",
+                "id": park.pk,
+                "name": park.name,
+                "slug": park.slug,
+                "park": park.name,  # For parks, park field is the park name itself
+                "category": "park",
+                "date_added": date_added.isoformat() if date_added else "",
+                "date_opened": opening_date.isoformat() if opening_date else "",
+                "url": park.url,
+            })
+
+        return results
+
+    def _get_new_rides(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
+        """Get recently added rides using real data."""
+        new_rides = (
+            Ride.objects.filter(
+                Q(created_at__gte=cutoff_date) | Q(
+                    opening_date__gte=cutoff_date.date()),
+                status="OPERATING",
+            )
+            .select_related("park", "park__location")
+            .order_by("-created_at", "-opening_date")[:limit]
+        )
+
+        results = []
+        for ride in new_rides:
+            date_added = getattr(ride, "opening_date", None) or getattr(
+                ride, "created_at", None)
+            if date_added:
+                if isinstance(date_added, datetime):
+                    date_added = date_added.date()
+
+            opening_date = getattr(ride, "opening_date", None)
+            if opening_date and isinstance(opening_date, datetime):
+                opening_date = opening_date.date()
+
+            results.append({
+                "content_object": ride,
+                "content_type": "ride",
+                "id": ride.pk,
+                "name": ride.name,
+                "slug": ride.slug,
+                "park": ride.park.name if ride.park else "",
+                "category": "ride",
+                "date_added": date_added.isoformat() if date_added else "",
+                "date_opened": opening_date.isoformat() if opening_date else "",
+                "url": ride.url,
+                "park_url": ride.park.url if ride.park else "",
+            })
+
+        return results
+
+    def _format_new_content_results(self, new_items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+        """Format new content results for frontend consumption."""
+        formatted_results = []
+
+        for item in new_items:
+            try:
+                # Format exactly as frontend expects
+                formatted_item = {
+                    "id": item["id"],
+                    "name": item["name"],
+                    "park": item["park"],
+                    "category": item["category"],
+                    "date_added": item["date_added"],
+                    "date_opened": item["date_opened"],
+                    "slug": item["slug"],
+                    "url": item["url"],
+                }
+
+                # Add park_url for rides
+                if item.get("park_url"):
+                    formatted_item["park_url"] = item["park_url"]
+
+                formatted_results.append(formatted_item)
+
+            except Exception as e:
+                logger.warning(f"Error formatting new content item: {e}")
+
+        return formatted_results
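
Note: the command writes its results to the cache under a key that
TrendingService.get_new_content (updated below) reads back. A minimal sketch of
a consumer, assuming the same Django cache backend is configured:

    from django.core.cache import cache

    # Mirrors f"new_content:calculated:{content_type}:{days_back}:{limit}"
    results = cache.get("new_content:calculated:all:30:50")
    if results is None:
        # Not calculated yet, or the 30-minute TTL expired
        results = []
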
backend/apps/core/management/commands/calculate_trending.py (new file, 337 lines)
@@ -0,0 +1,337 @@
+"""
+Django management command to calculate trending content.
+
+This replaces the Celery task for calculating trending content.
+Run with: python manage.py calculate_trending
+"""
+
+import logging
+from datetime import datetime, timedelta
+from typing import Dict, List, Any
+from django.core.management.base import BaseCommand, CommandError
+from django.utils import timezone
+from django.core.cache import cache
+from django.contrib.contenttypes.models import ContentType
+from django.db.models import Q
+
+from apps.core.analytics import PageView
+from apps.parks.models import Park
+from apps.rides.models import Ride
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+    help = 'Calculate trending content and cache results'
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            '--content-type',
+            type=str,
+            default='all',
+            choices=['all', 'parks', 'rides'],
+            help='Type of content to calculate (default: all)'
+        )
+        parser.add_argument(
+            '--limit',
+            type=int,
+            default=50,
+            help='Maximum number of results to calculate (default: 50)'
+        )
+        parser.add_argument(
+            '--verbose',
+            action='store_true',
+            help='Enable verbose output'
+        )
+
+    def handle(self, *args, **options):
+        content_type = options['content_type']
+        limit = options['limit']
+        verbose = options['verbose']
+
+        if verbose:
+            self.stdout.write(f"Starting trending calculation for {content_type}")
+
+        try:
+            # Time windows for calculations
+            current_period_hours = 168  # 7 days
+            # 14 days (for previous 7-day window comparison)
+            previous_period_hours = 336
+
+            trending_items = []
+
+            if content_type in ["all", "parks"]:
+                park_items = self._calculate_trending_parks(
+                    current_period_hours,
+                    previous_period_hours,
+                    limit if content_type == "parks" else limit * 2
+                )
+                trending_items.extend(park_items)
+                if verbose:
+                    self.stdout.write(f"Calculated {len(park_items)} trending parks")
+
+            if content_type in ["all", "rides"]:
+                ride_items = self._calculate_trending_rides(
+                    current_period_hours,
+                    previous_period_hours,
+                    limit if content_type == "rides" else limit * 2
+                )
+                trending_items.extend(ride_items)
+                if verbose:
+                    self.stdout.write(f"Calculated {len(ride_items)} trending rides")
+
+            # Sort by trending score and apply limit
+            trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
+            trending_items = trending_items[:limit]
+
+            # Format results for API consumption
+            formatted_results = self._format_trending_results(
+                trending_items, current_period_hours, previous_period_hours)
+
+            # Cache results
+            cache_key = f"trending:calculated:{content_type}:{limit}"
+            cache.set(cache_key, formatted_results, 3600)  # Cache for 1 hour
+
+            self.stdout.write(
+                self.style.SUCCESS(
+                    f"Successfully calculated {len(formatted_results)} trending items for {content_type}"
+                )
+            )
+
+            if verbose:
+                for item in formatted_results[:5]:  # Show first 5 items
+                    self.stdout.write(
+                        f"  {item['name']} (score: {item.get('views_change', 'N/A')})")
+
+        except Exception as e:
+            logger.error(f"Error calculating trending content: {e}", exc_info=True)
+            raise CommandError(f"Failed to calculate trending content: {e}")
+
+    def _calculate_trending_parks(self, current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
+        """Calculate trending scores for parks using real data."""
+        parks = Park.objects.filter(
+            status="OPERATING").select_related("location", "operator")
+
+        trending_parks = []
+
+        for park in parks:
+            try:
+                score = self._calculate_content_score(
+                    park, "park", current_period_hours, previous_period_hours)
+                if score > 0:  # Only include items with positive trending scores
+                    trending_parks.append({
+                        "content_object": park,
+                        "content_type": "park",
+                        "trending_score": score,
+                        "id": park.id,
+                        "name": park.name,
+                        "slug": park.slug,
+                        "park": park.name,  # For parks, park field is the park name itself
+                        "category": "park",
+                        "rating": float(park.average_rating) if park.average_rating else 0.0,
+                        "date_opened": park.opening_date.isoformat() if park.opening_date else "",
+                        "url": park.url,
+                    })
+            except Exception as e:
+                logger.warning(f"Error calculating score for park {park.id}: {e}")
+
+        return trending_parks
+
+    def _calculate_trending_rides(self, current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
+        """Calculate trending scores for rides using real data."""
+        rides = Ride.objects.filter(status="OPERATING").select_related(
+            "park", "park__location")
+
+        trending_rides = []
+
+        for ride in rides:
+            try:
+                score = self._calculate_content_score(
+                    ride, "ride", current_period_hours, previous_period_hours)
+                if score > 0:  # Only include items with positive trending scores
+                    trending_rides.append({
+                        "content_object": ride,
+                        "content_type": "ride",
+                        "trending_score": score,
+                        "id": ride.pk,
+                        "name": ride.name,
+                        "slug": ride.slug,
+                        "park": ride.park.name if ride.park else "",
+                        "category": "ride",
+                        "rating": float(ride.average_rating) if ride.average_rating else 0.0,
+                        "date_opened": ride.opening_date.isoformat() if ride.opening_date else "",
+                        "url": ride.url,
+                        "park_url": ride.park.url if ride.park else "",
+                    })
+            except Exception as e:
+                logger.warning(f"Error calculating score for ride {ride.pk}: {e}")
+
+        return trending_rides
+
+    def _calculate_content_score(self, content_obj: Any, content_type: str, current_period_hours: int, previous_period_hours: int) -> float:
+        """Calculate weighted trending score for content object using real analytics data."""
+        try:
+            # Get content type for PageView queries
+            ct = ContentType.objects.get_for_model(content_obj)
+
+            # 1. View Growth Score (40% weight)
+            view_growth_score = self._calculate_view_growth_score(
+                ct, content_obj.id, current_period_hours, previous_period_hours)
+
+            # 2. Rating Score (30% weight)
+            rating_score = self._calculate_rating_score(content_obj)
+
+            # 3. Recency Score (20% weight)
+            recency_score = self._calculate_recency_score(content_obj)
+
+            # 4. Popularity Score (10% weight)
+            popularity_score = self._calculate_popularity_score(
+                ct, content_obj.id, current_period_hours)
+
+            # Calculate weighted final score
+            final_score = (
+                view_growth_score * 0.4 +
+                rating_score * 0.3 +
+                recency_score * 0.2 +
+                popularity_score * 0.1
+            )
+
+            return final_score
+
+        except Exception as e:
+            logger.error(
+                f"Error calculating score for {content_type} {content_obj.id}: {e}")
+            return 0.0
+
+    def _calculate_view_growth_score(self, content_type: ContentType, object_id: int, current_period_hours: int, previous_period_hours: int) -> float:
+        """Calculate normalized view growth score using real PageView data."""
+        try:
+            current_views, previous_views, growth_percentage = PageView.get_views_growth(
+                content_type,
+                object_id,
+                current_period_hours,
+                previous_period_hours,
+            )
+
+            if previous_views == 0:
+                # New content with views gets boost
+                return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0
+
+            # Normalize growth percentage to 0-1 scale
+            normalized_growth = min(growth_percentage / 500.0,
+                                    1.0) if growth_percentage > 0 else 0.0
+            return max(normalized_growth, 0.0)
+
+        except Exception as e:
+            logger.warning(f"Error calculating view growth: {e}")
+            return 0.0
+
+    def _calculate_rating_score(self, content_obj: Any) -> float:
+        """Calculate normalized rating score."""
+        try:
+            rating = getattr(content_obj, "average_rating", None)
+            if rating is None or rating == 0:
+                return 0.3  # Neutral score for unrated content
+
+            # Normalize rating from 1-10 scale to 0-1 scale
+            return min(max((float(rating) - 1) / 9.0, 0.0), 1.0)
+
+        except Exception as e:
+            logger.warning(f"Error calculating rating score: {e}")
+            return 0.3
+
+    def _calculate_recency_score(self, content_obj: Any) -> float:
+        """Calculate recency score based on when content was added/updated."""
+        try:
+            # Use opening_date for parks/rides, or created_at as fallback
+            date_added = getattr(content_obj, "opening_date", None)
+            if not date_added:
+                date_added = getattr(content_obj, "created_at", None)
+                if not date_added:
+                    return 0.5  # Neutral score for unknown dates
+
+            # Handle both date and datetime objects
+            if hasattr(date_added, "date"):
+                date_added = date_added.date()
+
+            # Calculate days since added
+            today = timezone.now().date()
+            days_since_added = (today - date_added).days
+
+            # Recency score: newer content gets higher scores
+            if days_since_added <= 0:
+                return 1.0
+            elif days_since_added <= 30:
+                return 1.0 - (days_since_added / 30.0) * 0.2  # 1.0 to 0.8
+            elif days_since_added <= 365:
+                return 0.8 - ((days_since_added - 30) / (365 - 30)) * 0.7  # 0.8 to 0.1
+            else:
+                return 0.0
+
+        except Exception as e:
+            logger.warning(f"Error calculating recency score: {e}")
+            return 0.5
+
+    def _calculate_popularity_score(self, content_type: ContentType, object_id: int, hours: int) -> float:
+        """Calculate popularity score based on total view count."""
+        try:
+            total_views = PageView.get_total_views_count(
+                content_type, object_id, hours=hours)
+
+            # Normalize views to 0-1 scale
+            if total_views == 0:
+                return 0.0
+            elif total_views <= 100:
+                return total_views / 200.0  # 0.0 to 0.5
+            else:
+                return min(0.5 + (total_views - 100) / 1800.0, 1.0)  # 0.5 to 1.0
+
+        except Exception as e:
+            logger.warning(f"Error calculating popularity score: {e}")
+            return 0.0
+
+    def _format_trending_results(self, trending_items: List[Dict[str, Any]], current_period_hours: int, previous_period_hours: int) -> List[Dict[str, Any]]:
+        """Format trending results for frontend consumption."""
+        formatted_results = []
+
+        for rank, item in enumerate(trending_items, 1):
+            try:
+                # Get view change for display
+                content_obj = item["content_object"]
+                ct = ContentType.objects.get_for_model(content_obj)
+                current_views, previous_views, growth_percentage = PageView.get_views_growth(
+                    ct,
+                    content_obj.id,
+                    current_period_hours,
+                    previous_period_hours,
+                )
+
+                # Format exactly as frontend expects
+                formatted_item = {
+                    "id": item["id"],
+                    "name": item["name"],
+                    "park": item["park"],
+                    "category": item["category"],
+                    "rating": item["rating"],
+                    "rank": rank,
+                    "views": current_views,
+                    "views_change": (
+                        f"+{growth_percentage:.1f}%"
+                        if growth_percentage > 0
+                        else f"{growth_percentage:.1f}%"
+                    ),
+                    "slug": item["slug"],
+                    "date_opened": item["date_opened"],
+                    "url": item["url"],
+                }
+
+                # Add park_url for rides
+                if item.get("park_url"):
+                    formatted_item["park_url"] = item["park_url"]
+
+                formatted_results.append(formatted_item)
+
+            except Exception as e:
+                logger.warning(f"Error formatting trending item: {e}")
+
+        return formatted_results
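
Note: a worked example of the 40/30/20/10 weighting in _calculate_content_score;
the four subscores are made-up inputs, shown with the rule that produces them:

    view_growth_score = 0.6    # 300% growth: min(300 / 500.0, 1.0)
    rating_score = 0.7         # average_rating 7.3: (7.3 - 1) / 9.0
    recency_score = 0.8        # opened 30 days ago: 1.0 - (30 / 30.0) * 0.2
    popularity_score = 0.5     # 100 views: 100 / 200.0

    final_score = (0.6 * 0.4) + (0.7 * 0.3) + (0.8 * 0.2) + (0.5 * 0.1)
    # = 0.24 + 0.21 + 0.16 + 0.05 = 0.66
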
@@ -58,7 +58,7 @@ class TrendingService:
         self, content_type: str = "all", limit: int = 20, force_refresh: bool = False
     ) -> List[Dict[str, Any]]:
         """
-        Get trending content with caching.
+        Get trending content using direct calculation.
 
         Args:
             content_type: 'parks', 'rides', or 'all'
@@ -68,7 +68,7 @@ class TrendingService:
         Returns:
             List of trending content with exact frontend format
         """
-        cache_key = f"{self.CACHE_PREFIX}:trending:{content_type}:{limit}"
+        cache_key = f"trending:calculated:{content_type}:{limit}"
 
         if not force_refresh:
             cached_result = cache.get(cache_key)
@@ -78,41 +78,38 @@ class TrendingService:
                 )
                 return cached_result
 
-        self.logger.info(f"Calculating trending content for {content_type}")
+        self.logger.info(f"Getting trending content for {content_type}")
 
         try:
-            # Calculate trending scores for each content type
+            # Calculate directly without Celery
            trending_items = []
 
             if content_type in ["all", "parks"]:
                 park_items = self._calculate_trending_parks(
-                    limit if content_type == "parks" else limit * 2
-                )
+                    limit * 2 if content_type == "all" else limit)
                 trending_items.extend(park_items)
 
             if content_type in ["all", "rides"]:
                 ride_items = self._calculate_trending_rides(
-                    limit if content_type == "rides" else limit * 2
-                )
+                    limit * 2 if content_type == "all" else limit)
                 trending_items.extend(ride_items)
 
             # Sort by trending score and apply limit
             trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
             trending_items = trending_items[:limit]
 
-            # Add ranking and format for frontend
+            # Format results for API consumption
             formatted_results = self._format_trending_results(trending_items)
 
             # Cache results
             cache.set(cache_key, formatted_results, self.CACHE_TTL)
 
             self.logger.info(
-                f"Calculated {len(formatted_results)} trending items for {content_type}"
-            )
+                f"Calculated {len(formatted_results)} trending items for {content_type}")
 
             return formatted_results
 
         except Exception as e:
-            self.logger.error(f"Error calculating trending content: {e}", exc_info=True)
+            self.logger.error(f"Error getting trending content: {e}", exc_info=True)
             return []
 
     def get_new_content(
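The hunk above swaps the cache key prefix so the service reads the same entries the new Celery tasks below write. A minimal sketch of the round-trip, assuming Django's configured default cache backend; the key value is illustrative:

    from django.core.cache import cache

    cache_key = "trending:calculated:all:20"   # same shape as the new key above
    results = cache.get(cache_key)             # None on a cold cache
    if results is None:
        results = []                           # stand-in for the calculated list
        cache.set(cache_key, results, 3600)    # TTL in seconds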
@@ -123,7 +120,7 @@ class TrendingService:
         force_refresh: bool = False,
     ) -> List[Dict[str, Any]]:
         """
-        Get recently added content.
+        Get recently added content using direct calculation.
 
         Args:
             content_type: 'parks', 'rides', or 'all'
@@ -134,7 +131,7 @@ class TrendingService:
         Returns:
             List of new content with exact frontend format
         """
-        cache_key = f"{self.CACHE_PREFIX}:new:{content_type}:{limit}:{days_back}"
+        cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
 
         if not force_refresh:
             cached_result = cache.get(cache_key)
@@ -144,37 +141,35 @@ class TrendingService:
                 )
                 return cached_result
 
-        self.logger.info(f"Calculating new content for {content_type}")
+        self.logger.info(f"Getting new content for {content_type}")
 
         try:
+            # Calculate directly without Celery
             cutoff_date = timezone.now() - timedelta(days=days_back)
             new_items = []
 
             if content_type in ["all", "parks"]:
                 parks = self._get_new_parks(
-                    cutoff_date, limit if content_type == "parks" else limit * 2
-                )
+                    cutoff_date, limit * 2 if content_type == "all" else limit)
                 new_items.extend(parks)
 
             if content_type in ["all", "rides"]:
                 rides = self._get_new_rides(
-                    cutoff_date, limit if content_type == "rides" else limit * 2
-                )
+                    cutoff_date, limit * 2 if content_type == "all" else limit)
                 new_items.extend(rides)
 
             # Sort by date added (most recent first) and apply limit
             new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
             new_items = new_items[:limit]
 
-            # Format for frontend
+            # Format results for API consumption
             formatted_results = self._format_new_content_results(new_items)
 
             # Cache results
-            cache.set(cache_key, formatted_results, self.CACHE_TTL)
+            cache.set(cache_key, formatted_results, 1800)  # Cache for 30 minutes
 
             self.logger.info(
-                f"Found {len(formatted_results)} new items for {content_type}"
-            )
+                f"Calculated {len(formatted_results)} new items for {content_type}")
 
             return formatted_results
 
         except Exception as e:
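Sorting on the "date_added" strings above works because the values are ISO-8601 dates, which order lexicographically the same way they order chronologically:

    items = [{"date_added": "2025-07-01"}, {"date_added": "2025-08-28"}]
    items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
    assert items[0]["date_added"] == "2025-08-28"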
@@ -184,7 +179,7 @@ class TrendingService:
     def _calculate_trending_parks(self, limit: int) -> List[Dict[str, Any]]:
         """Calculate trending scores for parks."""
         parks = Park.objects.filter(status="OPERATING").select_related(
-            "location", "operator"
+            "location", "operator", "card_image"
         )
 
         trending_parks = []
@@ -193,6 +188,32 @@ class TrendingService:
             try:
                 score = self._calculate_content_score(park, "park")
                 if score > 0:  # Only include items with positive trending scores
+                    # Get opening date for date_opened field
+                    opening_date = getattr(park, "opening_date", None)
+                    if opening_date and isinstance(opening_date, datetime):
+                        opening_date = opening_date.date()
+
+                    # Get location fields
+                    city = ""
+                    state = ""
+                    country = ""
+                    try:
+                        location = getattr(park, 'location', None)
+                        if location:
+                            city = getattr(location, 'city', '') or ""
+                            state = getattr(location, 'state', '') or ""
+                            country = getattr(location, 'country', '') or ""
+                    except Exception:
+                        pass
+
+                    # Get card image URL
+                    card_image_url = ""
+                    if park.card_image and hasattr(park.card_image, 'image'):
+                        card_image_url = park.card_image.image.url if park.card_image.image else ""
+
+                    # Get primary company (operator)
+                    primary_company = park.operator.name if park.operator else ""
+
                     trending_parks.append(
                         {
                             "content_object": park,
@@ -201,17 +222,20 @@ class TrendingService:
                             "id": park.id,
                             "name": park.name,
                             "slug": park.slug,
-                            "location": (
-                                park.formatted_location
-                                if hasattr(park, "location")
-                                else ""
-                            ),
+                            "park": park.name,  # For parks, park field is the park name itself
                             "category": "park",
                             "rating": (
                                 float(park.average_rating)
                                 if park.average_rating
                                 else 0.0
                             ),
+                            "date_opened": opening_date.isoformat() if opening_date else "",
+                            "url": park.url,
+                            "card_image": card_image_url,
+                            "city": city,
+                            "state": state,
+                            "country": country,
+                            "primary_company": primary_company,
                         }
                     )
             except Exception as e:
@@ -222,7 +246,7 @@ class TrendingService:
     def _calculate_trending_rides(self, limit: int) -> List[Dict[str, Any]]:
         """Calculate trending scores for rides."""
         rides = Ride.objects.filter(status="OPERATING").select_related(
-            "park", "park__location"
+            "park", "park__location", "card_image"
         )
 
         trending_rides = []
@@ -231,14 +255,15 @@ class TrendingService:
             try:
                 score = self._calculate_content_score(ride, "ride")
                 if score > 0:  # Only include items with positive trending scores
-                    # Get location from park (rides don't have direct location field)
-                    location = ""
-                    if (
-                        ride.park
-                        and hasattr(ride.park, "location")
-                        and ride.park.location
-                    ):
-                        location = ride.park.formatted_location
+                    # Get opening date for date_opened field
+                    opening_date = getattr(ride, "opening_date", None)
+                    if opening_date and isinstance(opening_date, datetime):
+                        opening_date = opening_date.date()
+
+                    # Get card image URL
+                    card_image_url = ""
+                    if ride.card_image and hasattr(ride.card_image, 'image'):
+                        card_image_url = ride.card_image.image.url if ride.card_image.image else ""
 
                     trending_rides.append(
                         {
@@ -248,13 +273,17 @@ class TrendingService:
                             "id": ride.pk,  # Use pk instead of id
                             "name": ride.name,
                             "slug": ride.slug,
-                            "location": location,
+                            "park": ride.park.name if ride.park else "",
                             "category": "ride",
                             "rating": (
                                 float(ride.average_rating)
                                 if ride.average_rating
                                 else 0.0
                             ),
+                            "date_opened": opening_date.isoformat() if opening_date else "",
+                            "url": ride.url,
+                            "park_url": ride.park.url if ride.park else "",
+                            "card_image": card_image_url,
                         }
                     )
             except Exception as e:
@@ -421,7 +450,7 @@ class TrendingService:
                 | Q(opening_date__gte=cutoff_date.date()),
                 status="OPERATING",
             )
-            .select_related("location", "operator")
+            .select_related("location", "operator", "card_image")
             .order_by("-created_at", "-opening_date")[:limit]
         )
 
@@ -435,6 +464,32 @@ class TrendingService:
                     date_added = date_added.date()
                 # If it's already a date, keep it as is
 
+            # Get opening date for date_opened field
+            opening_date = getattr(park, "opening_date", None)
+            if opening_date and isinstance(opening_date, datetime):
+                opening_date = opening_date.date()
+
+            # Get location fields
+            city = ""
+            state = ""
+            country = ""
+            try:
+                location = getattr(park, 'location', None)
+                if location:
+                    city = getattr(location, 'city', '') or ""
+                    state = getattr(location, 'state', '') or ""
+                    country = getattr(location, 'country', '') or ""
+            except Exception:
+                pass
+
+            # Get card image URL
+            card_image_url = ""
+            if park.card_image and hasattr(park.card_image, 'image'):
+                card_image_url = park.card_image.image.url if park.card_image.image else ""
+
+            # Get primary company (operator)
+            primary_company = park.operator.name if park.operator else ""
+
             results.append(
                 {
                     "content_object": park,
@@ -442,11 +497,16 @@ class TrendingService:
                     "id": park.pk,  # Use pk instead of id for Django compatibility
                     "name": park.name,
                     "slug": park.slug,
-                    "location": (
-                        park.formatted_location if hasattr(park, "location") else ""
-                    ),
+                    "park": park.name,  # For parks, park field is the park name itself
                     "category": "park",
                     "date_added": date_added.isoformat() if date_added else "",
+                    "date_opened": opening_date.isoformat() if opening_date else "",
+                    "url": park.url,
+                    "card_image": card_image_url,
+                    "city": city,
+                    "state": state,
+                    "country": country,
+                    "primary_company": primary_company,
                 }
             )
 
|
|||||||
| Q(opening_date__gte=cutoff_date.date()),
|
| Q(opening_date__gte=cutoff_date.date()),
|
||||||
status="OPERATING",
|
status="OPERATING",
|
||||||
)
|
)
|
||||||
.select_related("park", "park__location")
|
.select_related("park", "park__location", "card_image")
|
||||||
.order_by("-created_at", "-opening_date")[:limit]
|
.order_by("-created_at", "-opening_date")[:limit]
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -476,10 +536,15 @@ class TrendingService:
                     date_added = date_added.date()
                 # If it's already a date, keep it as is
 
-            # Get location from park (rides don't have direct location field)
-            location = ""
-            if ride.park and hasattr(ride.park, "location") and ride.park.location:
-                location = ride.park.formatted_location
+            # Get opening date for date_opened field
+            opening_date = getattr(ride, "opening_date", None)
+            if opening_date and isinstance(opening_date, datetime):
+                opening_date = opening_date.date()
+
+            # Get card image URL
+            card_image_url = ""
+            if ride.card_image and hasattr(ride.card_image, 'image'):
+                card_image_url = ride.card_image.image.url if ride.card_image.image else ""
 
             results.append(
                 {
@@ -488,9 +553,13 @@ class TrendingService:
                     "id": ride.pk,  # Use pk instead of id for Django compatibility
                     "name": ride.name,
                     "slug": ride.slug,
-                    "location": location,
+                    "park": ride.park.name if ride.park else "",
                     "category": "ride",
                     "date_added": date_added.isoformat() if date_added else "",
+                    "date_opened": opening_date.isoformat() if opening_date else "",
+                    "url": ride.url,
+                    "park_url": ride.park.url if ride.park else "",
+                    "card_image": card_image_url,
                 }
             )
 
@@ -520,7 +589,7 @@ class TrendingService:
             formatted_item = {
                 "id": item["id"],
                 "name": item["name"],
-                "location": item["location"],
+                "park": item["park"],
                 "category": item["category"],
                 "rating": item["rating"],
                 "rank": rank,
@@ -531,8 +600,29 @@ class TrendingService:
                     else f"{growth_percentage:.1f}%"
                 ),
                 "slug": item["slug"],
+                "date_opened": item["date_opened"],
+                "url": item["url"],
             }
 
+            # Add card_image for all items
+            if item.get("card_image"):
+                formatted_item["card_image"] = item["card_image"]
+
+            # Add park-specific fields
+            if item["content_type"] == "park":
+                if item.get("city"):
+                    formatted_item["city"] = item["city"]
+                if item.get("state"):
+                    formatted_item["state"] = item["state"]
+                if item.get("country"):
+                    formatted_item["country"] = item["country"]
+                if item.get("primary_company"):
+                    formatted_item["primary_company"] = item["primary_company"]
+
+            # Add park_url for rides
+            if item.get("park_url"):
+                formatted_item["park_url"] = item["park_url"]
+
             formatted_results.append(formatted_item)
 
         except Exception as e:
@@ -552,12 +642,33 @@ class TrendingService:
             formatted_item = {
                 "id": item["id"],
                 "name": item["name"],
-                "location": item["location"],
+                "park": item["park"],
                 "category": item["category"],
                 "date_added": item["date_added"],
+                "date_opened": item["date_opened"],
                 "slug": item["slug"],
+                "url": item["url"],
             }
 
+            # Add card_image for all items
+            if item.get("card_image"):
+                formatted_item["card_image"] = item["card_image"]
+
+            # Add park-specific fields
+            if item["content_type"] == "park":
+                if item.get("city"):
+                    formatted_item["city"] = item["city"]
+                if item.get("state"):
+                    formatted_item["state"] = item["state"]
+                if item.get("country"):
+                    formatted_item["country"] = item["country"]
+                if item.get("primary_company"):
+                    formatted_item["primary_company"] = item["primary_company"]
+
+            # Add park_url for rides
+            if item.get("park_url"):
+                formatted_item["park_url"] = item["park_url"]
+
             formatted_results.append(formatted_item)
 
         except Exception as e:
backend/apps/core/tasks/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+"""
+Core tasks package for ThrillWiki.
+
+This package contains all Celery tasks for the core application.
+"""
backend/apps/core/tasks/trending.py (new file, 550 lines)
@@ -0,0 +1,550 @@
+"""
+Trending calculation tasks for ThrillWiki.
+
+This module contains Celery tasks for calculating and caching trending content.
+All tasks run asynchronously to avoid blocking the main application.
+"""
+
+import logging
+from datetime import datetime, timedelta
+from typing import Dict, List, Any, Optional
+from celery import shared_task
+from django.utils import timezone
+from django.core.cache import cache
+from django.contrib.contenttypes.models import ContentType
+from django.db.models import Q, Count, Avg, F
+from django.db import transaction
+
+from apps.core.analytics import PageView
+from apps.parks.models import Park
+from apps.rides.models import Ride
+
+logger = logging.getLogger(__name__)
+
+
+@shared_task(bind=True, max_retries=3, default_retry_delay=60)
+def calculate_trending_content(self, content_type: str = "all", limit: int = 50) -> Dict[str, Any]:
+    """
+    Calculate trending content using real analytics data.
+
+    This task runs periodically to update trending calculations based on:
+    - View growth rates
+    - Content ratings
+    - Recency factors
+    - Popularity metrics
+
+    Args:
+        content_type: 'parks', 'rides', or 'all'
+        limit: Maximum number of results to calculate
+
+    Returns:
+        Dict containing trending results and metadata
+    """
+    try:
+        logger.info(f"Starting trending calculation for {content_type}")
+
+        # Time windows for calculations
+        current_period_hours = 168  # 7 days
+        previous_period_hours = 336  # 14 days (for previous 7-day window comparison)
+
+        trending_items = []
+
+        if content_type in ["all", "parks"]:
+            park_items = _calculate_trending_parks(
+                current_period_hours,
+                previous_period_hours,
+                limit if content_type == "parks" else limit * 2
+            )
+            trending_items.extend(park_items)
+
+        if content_type in ["all", "rides"]:
+            ride_items = _calculate_trending_rides(
+                current_period_hours,
+                previous_period_hours,
+                limit if content_type == "rides" else limit * 2
+            )
+            trending_items.extend(ride_items)
+
+        # Sort by trending score and apply limit
+        trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
+        trending_items = trending_items[:limit]
+
+        # Format results for API consumption
+        formatted_results = _format_trending_results(
+            trending_items, current_period_hours, previous_period_hours)
+
+        # Cache results
+        cache_key = f"trending:calculated:{content_type}:{limit}"
+        cache.set(cache_key, formatted_results, 3600)  # Cache for 1 hour
+
+        logger.info(
+            f"Calculated {len(formatted_results)} trending items for {content_type}")
+
+        return {
+            "success": True,
+            "content_type": content_type,
+            "count": len(formatted_results),
+            "results": formatted_results,
+            "calculated_at": timezone.now().isoformat(),
+        }
+
+    except Exception as e:
+        logger.error(f"Error calculating trending content: {e}", exc_info=True)
+        # Retry the task
+        raise self.retry(exc=e)
+
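A sketch of how the task above might be invoked from a shell or another module; it assumes a running worker and a configured result backend, and the argument values are illustrative.

    # Queue the task on a worker, then optionally block for the payload.
    result = calculate_trending_content.delay(content_type="parks", limit=10)
    payload = result.get(timeout=30)
    # payload -> {"success": True, "content_type": "parks", "count": ..., "results": [...]}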
+
+@shared_task(bind=True, max_retries=3, default_retry_delay=30)
+def calculate_new_content(self, content_type: str = "all", days_back: int = 30, limit: int = 50) -> Dict[str, Any]:
+    """
+    Calculate new content based on opening dates and creation dates.
+
+    Args:
+        content_type: 'parks', 'rides', or 'all'
+        days_back: How many days to look back for new content
+        limit: Maximum number of results
+
+    Returns:
+        Dict containing new content results and metadata
+    """
+    try:
+        logger.info(f"Starting new content calculation for {content_type}")
+
+        cutoff_date = timezone.now() - timedelta(days=days_back)
+        new_items = []
+
+        if content_type in ["all", "parks"]:
+            parks = _get_new_parks(
+                cutoff_date, limit if content_type == "parks" else limit * 2)
+            new_items.extend(parks)
+
+        if content_type in ["all", "rides"]:
+            rides = _get_new_rides(
+                cutoff_date, limit if content_type == "rides" else limit * 2)
+            new_items.extend(rides)
+
+        # Sort by date added (most recent first) and apply limit
+        new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
+        new_items = new_items[:limit]
+
+        # Format results for API consumption
+        formatted_results = _format_new_content_results(new_items)
+
+        # Cache results
+        cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
+        cache.set(cache_key, formatted_results, 1800)  # Cache for 30 minutes
+
+        logger.info(f"Calculated {len(formatted_results)} new items for {content_type}")
+
+        return {
+            "success": True,
+            "content_type": content_type,
+            "count": len(formatted_results),
+            "results": formatted_results,
+            "calculated_at": timezone.now().isoformat(),
+        }
+
+    except Exception as e:
+        logger.error(f"Error calculating new content: {e}", exc_info=True)
+        raise self.retry(exc=e)
+
+
+@shared_task(bind=True)
+def warm_trending_cache(self) -> Dict[str, Any]:
+    """
+    Warm the trending cache by pre-calculating common queries.
+
+    This task runs periodically to ensure fast API responses.
+    """
+    try:
+        logger.info("Starting trending cache warming")
+
+        # Common query combinations to pre-calculate
+        queries = [
+            {"content_type": "all", "limit": 20},
+            {"content_type": "parks", "limit": 10},
+            {"content_type": "rides", "limit": 10},
+            {"content_type": "all", "limit": 50},
+        ]
+
+        results = {}
+
+        for query in queries:
+            # Trigger trending calculation
+            calculate_trending_content.delay(**query)
+
+            # Trigger new content calculation
+            calculate_new_content.delay(**query)
+
+            results[f"trending_{query['content_type']}_{query['limit']}"] = "scheduled"
+            results[f"new_content_{query['content_type']}_{query['limit']}"] = "scheduled"
+
+        logger.info("Trending cache warming completed")
+
+        return {
+            "success": True,
+            "queries_scheduled": len(queries) * 2,
+            "results": results,
+            "warmed_at": timezone.now().isoformat(),
+        }
+
+    except Exception as e:
+        logger.error(f"Error warming trending cache: {e}", exc_info=True)
+        return {
+            "success": False,
+            "error": str(e),
+            "warmed_at": timezone.now().isoformat(),
+        }
+
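One plausible way to run warm_trending_cache periodically is a Celery beat entry in Django settings; the schedule and the dotted task path below are assumptions based on this new tasks package, not part of the commit.

    from celery.schedules import crontab

    CELERY_BEAT_SCHEDULE = {
        "warm-trending-cache": {
            "task": "apps.core.tasks.trending.warm_trending_cache",  # assumed task name
            "schedule": crontab(minute="*/30"),  # every 30 minutes (illustrative)
        },
    }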
+
+def _calculate_trending_parks(current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
+    """Calculate trending scores for parks using real data."""
+    parks = Park.objects.filter(
+        status="OPERATING").select_related("location", "operator")
+
+    trending_parks = []
+
+    for park in parks:
+        try:
+            score = _calculate_content_score(
+                park, "park", current_period_hours, previous_period_hours)
+            if score > 0:  # Only include items with positive trending scores
+                trending_parks.append({
+                    "content_object": park,
+                    "content_type": "park",
+                    "trending_score": score,
+                    "id": park.id,
+                    "name": park.name,
+                    "slug": park.slug,
+                    "location": park.formatted_location if hasattr(park, "location") else "",
+                    "category": "park",
+                    "rating": float(park.average_rating) if park.average_rating else 0.0,
+                })
+        except Exception as e:
+            logger.warning(f"Error calculating score for park {park.id}: {e}")
+
+    return trending_parks
+
+
+def _calculate_trending_rides(current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
+    """Calculate trending scores for rides using real data."""
+    rides = Ride.objects.filter(status="OPERATING").select_related(
+        "park", "park__location")
+
+    trending_rides = []
+
+    for ride in rides:
+        try:
+            score = _calculate_content_score(
+                ride, "ride", current_period_hours, previous_period_hours)
+            if score > 0:  # Only include items with positive trending scores
+                # Get location from park
+                location = ""
+                if ride.park and hasattr(ride.park, "location") and ride.park.location:
+                    location = ride.park.formatted_location
+
+                trending_rides.append({
+                    "content_object": ride,
+                    "content_type": "ride",
+                    "trending_score": score,
+                    "id": ride.pk,
+                    "name": ride.name,
+                    "slug": ride.slug,
+                    "location": location,
+                    "category": "ride",
+                    "rating": float(ride.average_rating) if ride.average_rating else 0.0,
+                })
+        except Exception as e:
+            logger.warning(f"Error calculating score for ride {ride.pk}: {e}")
+
+    return trending_rides
+
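Both helpers above lean on select_related so the per-item loops stay query-free: the park and its location are joined up front, and touching ride.park.location inside the loop costs nothing extra. A minimal equivalent queryset, for reference:

    # Joins park and park__location in one query, avoiding N+1 lookups in the loop.
    rides = Ride.objects.filter(status="OPERATING").select_related("park", "park__location")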
+
+def _calculate_content_score(content_obj: Any, content_type: str, current_period_hours: int, previous_period_hours: int) -> float:
+    """
+    Calculate weighted trending score for content object using real analytics data.
+
+    Algorithm Components:
+    - View Growth Rate (40% weight): Recent view increase vs historical
+    - Rating Score (30% weight): Average user rating normalized
+    - Recency Factor (20% weight): How recently content was added/updated
+    - Popularity Boost (10% weight): Total view count normalization
+
+    Returns:
+        Float between 0.0 and 1.0 representing trending strength
+    """
+    try:
+        # Get content type for PageView queries
+        ct = ContentType.objects.get_for_model(content_obj)
+
+        # 1. View Growth Score (40% weight)
+        view_growth_score = _calculate_view_growth_score(
+            ct, content_obj.id, current_period_hours, previous_period_hours)
+
+        # 2. Rating Score (30% weight)
+        rating_score = _calculate_rating_score(content_obj)
+
+        # 3. Recency Score (20% weight)
+        recency_score = _calculate_recency_score(content_obj)
+
+        # 4. Popularity Score (10% weight)
+        popularity_score = _calculate_popularity_score(
+            ct, content_obj.id, current_period_hours)
+
+        # Calculate weighted final score
+        final_score = (
+            view_growth_score * 0.4 +
+            rating_score * 0.3 +
+            recency_score * 0.2 +
+            popularity_score * 0.1
+        )
+
+        logger.debug(
+            f"{content_type} {content_obj.id}: "
+            f"growth={view_growth_score:.3f}, rating={rating_score:.3f}, "
+            f"recency={recency_score:.3f}, popularity={popularity_score:.3f}, "
+            f"final={final_score:.3f}"
+        )
+
+        return final_score
+
+    except Exception as e:
+        logger.error(
+            f"Error calculating score for {content_type} {content_obj.id}: {e}")
+        return 0.0
+
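A worked example of the 40/30/20/10 blend above, with the four component scores chosen purely for illustration:

    growth, rating, recency, popularity = 0.6, 0.8, 0.5, 0.25
    final = growth * 0.4 + rating * 0.3 + recency * 0.2 + popularity * 0.1
    assert abs(final - 0.605) < 1e-9   # 0.24 + 0.24 + 0.10 + 0.025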
+
+def _calculate_view_growth_score(content_type: ContentType, object_id: int, current_period_hours: int, previous_period_hours: int) -> float:
+    """Calculate normalized view growth score using real PageView data."""
+    try:
+        current_views, previous_views, growth_percentage = PageView.get_views_growth(
+            content_type,
+            object_id,
+            current_period_hours,
+            previous_period_hours,
+        )
+
+        if previous_views == 0:
+            # New content with views gets boost
+            return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0
+
+        # Normalize growth percentage to 0-1 scale
+        # 100% growth = 0.2, 500% growth and above = 1.0
+        normalized_growth = min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
+        return max(normalized_growth, 0.0)
+
+    except Exception as e:
+        logger.warning(f"Error calculating view growth: {e}")
+        return 0.0
+
+
+def _calculate_rating_score(content_obj: Any) -> float:
+    """Calculate normalized rating score."""
+    try:
+        rating = getattr(content_obj, "average_rating", None)
+        if rating is None or rating == 0:
+            return 0.3  # Neutral score for unrated content
+
+        # Normalize rating from 1-10 scale to 0-1 scale
+        # Rating of 5 = ~0.44, rating of 8 = ~0.78, rating of 10 = 1.0
+        return min(max((float(rating) - 1) / 9.0, 0.0), 1.0)
+
+    except Exception as e:
+        logger.warning(f"Error calculating rating score: {e}")
+        return 0.3
+
+
+def _calculate_recency_score(content_obj: Any) -> float:
+    """Calculate recency score based on when content was added/updated."""
+    try:
+        # Use opening_date for parks/rides, or created_at as fallback
+        date_added = getattr(content_obj, "opening_date", None)
+        if not date_added:
+            date_added = getattr(content_obj, "created_at", None)
+            if not date_added:
+                return 0.5  # Neutral score for unknown dates
+
+        # Handle both date and datetime objects
+        if hasattr(date_added, "date"):
+            date_added = date_added.date()
+
+        # Calculate days since added
+        today = timezone.now().date()
+        days_since_added = (today - date_added).days
+
+        # Recency score: newer content gets higher scores
+        # 0 days = 1.0, 30 days = 0.8, 365 days = 0.1, >365 days = 0.0
+        if days_since_added <= 0:
+            return 1.0
+        elif days_since_added <= 30:
+            return 1.0 - (days_since_added / 30.0) * 0.2  # 1.0 to 0.8
+        elif days_since_added <= 365:
+            return 0.8 - ((days_since_added - 30) / (365 - 30)) * 0.7  # 0.8 to 0.1
+        else:
+            return 0.0
+
+    except Exception as e:
+        logger.warning(f"Error calculating recency score: {e}")
+        return 0.5
+
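Spot-checking the two normalizations above with the arithmetic from the code (dates and ratings are illustrative): a rating of 5 on the 1-10 scale lands near 0.44, and the recency decay gives day 0 -> 1.0, day 15 -> 0.9, day 30 -> 0.8, day 365 -> 0.1.

    assert abs((5 - 1) / 9.0 - 0.444) < 1e-3                    # rating 5 -> ~0.44
    assert 1.0 - (15 / 30.0) * 0.2 == 0.9                       # 15 days old -> 0.9
    assert abs(0.8 - ((365 - 30) / 335.0) * 0.7 - 0.1) < 1e-9   # 365 days -> 0.1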
+
+def _calculate_popularity_score(content_type: ContentType, object_id: int, hours: int) -> float:
+    """Calculate popularity score based on total view count."""
+    try:
+        total_views = PageView.get_total_views_count(
+            content_type, object_id, hours=hours)
+
+        # Normalize views to 0-1 scale
+        # 0 views = 0.0, 100 views = 0.5, 1000+ views = 1.0
+        if total_views == 0:
+            return 0.0
+        elif total_views <= 100:
+            return total_views / 200.0  # 0.0 to 0.5
+        else:
+            return min(0.5 + (total_views - 100) / 1800.0, 1.0)  # 0.5 to 1.0
+
+    except Exception as e:
+        logger.warning(f"Error calculating popularity score: {e}")
+        return 0.0
+
+
+def _get_new_parks(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
+    """Get recently added parks using real data."""
+    new_parks = (
+        Park.objects.filter(
+            Q(created_at__gte=cutoff_date) | Q(opening_date__gte=cutoff_date.date()),
+            status="OPERATING",
+        )
+        .select_related("location", "operator")
+        .order_by("-created_at", "-opening_date")[:limit]
+    )
+
+    results = []
+    for park in new_parks:
+        date_added = park.opening_date or park.created_at
+        if date_added:
+            if isinstance(date_added, datetime):
+                date_added = date_added.date()
+
+        opening_date = getattr(park, "opening_date", None)
+        if opening_date and isinstance(opening_date, datetime):
+            opening_date = opening_date.date()
+
+        results.append({
+            "content_object": park,
+            "content_type": "park",
+            "id": park.pk,
+            "name": park.name,
+            "slug": park.slug,
+            "park": park.name,  # For parks, park field is the park name itself
+            "category": "park",
+            "date_added": date_added.isoformat() if date_added else "",
+            "date_opened": opening_date.isoformat() if opening_date else "",
+        })
+
+    return results
+
+
+def _get_new_rides(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
+    """Get recently added rides using real data."""
+    new_rides = (
+        Ride.objects.filter(
+            Q(created_at__gte=cutoff_date) | Q(opening_date__gte=cutoff_date.date()),
+            status="OPERATING",
+        )
+        .select_related("park", "park__location")
+        .order_by("-created_at", "-opening_date")[:limit]
+    )
+
+    results = []
+    for ride in new_rides:
+        date_added = getattr(ride, "opening_date", None) or getattr(
+            ride, "created_at", None)
+        if date_added:
+            if isinstance(date_added, datetime):
+                date_added = date_added.date()
+
+        opening_date = getattr(ride, "opening_date", None)
+        if opening_date and isinstance(opening_date, datetime):
+            opening_date = opening_date.date()
+
+        results.append({
+            "content_object": ride,
+            "content_type": "ride",
+            "id": ride.pk,
+            "name": ride.name,
+            "slug": ride.slug,
+            "park": ride.park.name if ride.park else "",
+            "category": "ride",
+            "date_added": date_added.isoformat() if date_added else "",
+            "date_opened": opening_date.isoformat() if opening_date else "",
+        })
+
+    return results
+
+
+def _format_trending_results(trending_items: List[Dict[str, Any]], current_period_hours: int, previous_period_hours: int) -> List[Dict[str, Any]]:
+    """Format trending results for frontend consumption."""
+    formatted_results = []
+
+    for rank, item in enumerate(trending_items, 1):
+        try:
+            # Get view change for display
+            content_obj = item["content_object"]
+            ct = ContentType.objects.get_for_model(content_obj)
+            current_views, previous_views, growth_percentage = PageView.get_views_growth(
+                ct,
+                content_obj.id,
+                current_period_hours,
+                previous_period_hours,
+            )
+
+            # Format exactly as frontend expects
+            formatted_item = {
+                "id": item["id"],
+                "name": item["name"],
+                "location": item["location"],
+                "category": item["category"],
+                "rating": item["rating"],
+                "rank": rank,
+                "views": current_views,
+                "views_change": (
+                    f"+{growth_percentage:.1f}%"
+                    if growth_percentage > 0
+                    else f"{growth_percentage:.1f}%"
+                ),
+                "slug": item["slug"],
+            }
+
+            formatted_results.append(formatted_item)
+
+        except Exception as e:
+            logger.warning(f"Error formatting trending item: {e}")
+
+    return formatted_results
+
+
+def _format_new_content_results(new_items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+    """Format new content results for frontend consumption."""
+    formatted_results = []
+
+    for item in new_items:
+        try:
+            # Format exactly as frontend expects
+            formatted_item = {
+                "id": item["id"],
+                "name": item["name"],
+                "park": item["park"],
+                "category": item["category"],
+                "date_added": item["date_added"],
+                "date_opened": item["date_opened"],
+                "slug": item["slug"],
+            }
+
+            formatted_results.append(formatted_item)
+
+        except Exception as e:
+            logger.warning(f"Error formatting new content item: {e}")
+
+    return formatted_results
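For reference, one formatted trending item as produced by _format_trending_results above; every value here is made up for illustration.

    {
        "id": 1, "name": "Example Park", "location": "Sandusky, Ohio",
        "category": "park", "rating": 8.7, "rank": 1,
        "views": 1240, "views_change": "+35.0%", "slug": "example-park",
    }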
@@ -0,0 +1,62 @@
+# Generated by Django 5.2.5 on 2025-08-28 22:59
+
+import pgtrigger.compiler
+import pgtrigger.migrations
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("parks", "0010_add_banner_card_image_fields"),
+    ]
+
+    operations = [
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="park",
+            name="insert_insert",
+        ),
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="park",
+            name="update_update",
+        ),
+        migrations.AddField(
+            model_name="park",
+            name="url",
+            field=models.URLField(blank=True, help_text="Frontend URL for this park"),
+        ),
+        migrations.AddField(
+            model_name="parkevent",
+            name="url",
+            field=models.URLField(blank=True, help_text="Frontend URL for this park"),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="park",
+            trigger=pgtrigger.compiler.Trigger(
+                name="insert_insert",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
+                    hash="f677e88234ebc3dc93c46d4756cb0723f5468cbe",
+                    operation="INSERT",
+                    pgid="pgtrigger_insert_insert_66883",
+                    table="parks_park",
+                    when="AFTER",
+                ),
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="park",
+            trigger=pgtrigger.compiler.Trigger(
+                name="update_update",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
+                    func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
+                    hash="6fc430a517628d48341e8981fa38529031c3f35b",
+                    operation="UPDATE",
+                    pgid="pgtrigger_update_update_19f56",
+                    table="parks_park",
+                    when="AFTER",
+                ),
+            ),
+        ),
+    ]
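Why the triggers are dropped and re-created above: pghistory's trigger SQL embeds the event table's full column list, so once the new "url" column exists, the INSERT/UPDATE functions must be regenerated to copy it. The moving part, abridged:

    # Regenerated trigger now carries the new column:
    #   INSERT INTO "parks_parkevent" (..., "url", ...) VALUES (..., NEW."url", ...)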
@@ -2,10 +2,12 @@ from django.db import models
 from django.urls import reverse
 from django.utils.text import slugify
 from django.core.exceptions import ValidationError
+from config.django import base as settings
 from typing import Optional, Any, TYPE_CHECKING, List
 import pghistory
 from apps.core.history import TrackedModel
 
 
 if TYPE_CHECKING:
     from apps.rides.models import Ride
     from . import ParkArea
@@ -97,6 +99,9 @@ class Park(TrackedModel):
     created_at = models.DateTimeField(auto_now_add=True, null=True)
     updated_at = models.DateTimeField(auto_now=True)
 
+    # Frontend URL
+    url = models.URLField(blank=True, help_text="Frontend URL for this park")
+
     class Meta:
         ordering = ["name"]
         constraints = [
@@ -167,6 +172,10 @@ class Park(TrackedModel):
         if not self.slug or (old_name and old_name != self.name):
             self.slug = slugify(self.name)
 
+        # Generate frontend URL
+        frontend_domain = getattr(settings, 'FRONTEND_DOMAIN', 'https://thrillwiki.com')
+        self.url = f"{frontend_domain}/parks/{self.slug}/"
+
         # Save the model
         super().save(*args, **kwargs)
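Given the save() override above, the slug and frontend URL are derived every time a park is saved, with FRONTEND_DOMAIN falling back to https://thrillwiki.com when unset. A sketch, ignoring the model's other required fields for brevity:

    park = Park(name="Cedar Point")
    park.save()                                    # slugifies the name, then builds the URL
    assert park.slug == "cedar-point"
    assert park.url.endswith("/parks/cedar-point/")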
@@ -0,0 +1,164 @@
+# Generated by Django 5.2.5 on 2025-08-28 22:59
+
+import pgtrigger.compiler
+import pgtrigger.migrations
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("rides", "0014_update_ride_model_slugs_data"),
+    ]
+
+    operations = [
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="company",
+            name="insert_insert",
+        ),
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="company",
+            name="update_update",
+        ),
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="ride",
+            name="insert_insert",
+        ),
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="ride",
+            name="update_update",
+        ),
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="ridemodel",
+            name="insert_insert",
+        ),
+        pgtrigger.migrations.RemoveTrigger(
+            model_name="ridemodel",
+            name="update_update",
+        ),
+        migrations.AddField(
+            model_name="company",
+            name="url",
+            field=models.URLField(
+                blank=True, help_text="Frontend URL for this company"
+            ),
+        ),
+        migrations.AddField(
+            model_name="companyevent",
+            name="url",
+            field=models.URLField(
+                blank=True, help_text="Frontend URL for this company"
+            ),
+        ),
+        migrations.AddField(
+            model_name="ride",
+            name="url",
+            field=models.URLField(blank=True, help_text="Frontend URL for this ride"),
+        ),
+        migrations.AddField(
+            model_name="rideevent",
+            name="url",
+            field=models.URLField(blank=True, help_text="Frontend URL for this ride"),
+        ),
+        migrations.AddField(
+            model_name="ridemodel",
+            name="url",
+            field=models.URLField(
+                blank=True, help_text="Frontend URL for this ride model"
+            ),
+        ),
+        migrations.AddField(
+            model_name="ridemodelevent",
+            name="url",
+            field=models.URLField(
+                blank=True, help_text="Frontend URL for this ride model"
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="company",
+            trigger=pgtrigger.compiler.Trigger(
+                name="insert_insert",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    func='INSERT INTO "rides_companyevent" ("coasters_count", "created_at", "description", "founded_date", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "url", "website") VALUES (NEW."coasters_count", NEW."created_at", NEW."description", NEW."founded_date", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
+                    hash="fe6c1e3f09822f5e7f716cd83483cf152ec138f0",
+                    operation="INSERT",
+                    pgid="pgtrigger_insert_insert_e7194",
+                    table="rides_company",
+                    when="AFTER",
+                ),
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="company",
+            trigger=pgtrigger.compiler.Trigger(
+                name="update_update",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
+                    func='INSERT INTO "rides_companyevent" ("coasters_count", "created_at", "description", "founded_date", "id", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "url", "website") VALUES (NEW."coasters_count", NEW."created_at", NEW."description", NEW."founded_date", NEW."id", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
+                    hash="0b76cb36b7551ed3e64e674b8cfe343d4d2ec306",
+                    operation="UPDATE",
+                    pgid="pgtrigger_update_update_456a8",
+                    table="rides_company",
+                    when="AFTER",
+                ),
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="ride",
+            trigger=pgtrigger.compiler.Trigger(
+                name="insert_insert",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    func='INSERT INTO "rides_rideevent" ("average_rating", "banner_image_id", "capacity_per_hour", "card_image_id", "category", "closing_date", "created_at", "description", "designer_id", "id", "manufacturer_id", "max_height_in", "min_height_in", "name", "opening_date", "park_area_id", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "post_closing_status", "ride_duration_seconds", "ride_model_id", "slug", "status", "status_since", "updated_at", "url") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."capacity_per_hour", NEW."card_image_id", NEW."category", NEW."closing_date", NEW."created_at", NEW."description", NEW."designer_id", NEW."id", NEW."manufacturer_id", NEW."max_height_in", NEW."min_height_in", NEW."name", NEW."opening_date", NEW."park_area_id", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."post_closing_status", NEW."ride_duration_seconds", NEW."ride_model_id", NEW."slug", NEW."status", NEW."status_since", NEW."updated_at", NEW."url"); RETURN NULL;',
+                    hash="6764dc3b0c0e73dda649939bb1ee7b7de143125f",
+                    operation="INSERT",
+                    pgid="pgtrigger_insert_insert_52074",
+                    table="rides_ride",
+                    when="AFTER",
+                ),
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="ride",
+            trigger=pgtrigger.compiler.Trigger(
+                name="update_update",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
+                    func='INSERT INTO "rides_rideevent" ("average_rating", "banner_image_id", "capacity_per_hour", "card_image_id", "category", "closing_date", "created_at", "description", "designer_id", "id", "manufacturer_id", "max_height_in", "min_height_in", "name", "opening_date", "park_area_id", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "post_closing_status", "ride_duration_seconds", "ride_model_id", "slug", "status", "status_since", "updated_at", "url") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."capacity_per_hour", NEW."card_image_id", NEW."category", NEW."closing_date", NEW."created_at", NEW."description", NEW."designer_id", NEW."id", NEW."manufacturer_id", NEW."max_height_in", NEW."min_height_in", NEW."name", NEW."opening_date", NEW."park_area_id", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."post_closing_status", NEW."ride_duration_seconds", NEW."ride_model_id", NEW."slug", NEW."status", NEW."status_since", NEW."updated_at", NEW."url"); RETURN NULL;',
+                    hash="63c4066af11852396506fd964989632336205573",
+                    operation="UPDATE",
+                    pgid="pgtrigger_update_update_4917a",
+                    table="rides_ride",
+                    when="AFTER",
+                ),
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="ridemodel",
+            trigger=pgtrigger.compiler.Trigger(
+                name="insert_insert",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    func='INSERT INTO "rides_ridemodelevent" ("category", "created_at", "description", "first_installation_year", "id", "is_discontinued", "last_installation_year", "manufacturer_id", "meta_description", "meta_title", "name", "notable_features", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "primary_image_id", "restraint_system", "slug", "support_structure", "target_market", "total_installations", "track_type", "train_configuration", "typical_capacity_range_max", "typical_capacity_range_min", "typical_height_range_max_ft", "typical_height_range_min_ft", "typical_speed_range_max_mph", "typical_speed_range_min_mph", "updated_at", "url") VALUES (NEW."category", NEW."created_at", NEW."description", NEW."first_installation_year", NEW."id", NEW."is_discontinued", NEW."last_installation_year", NEW."manufacturer_id", NEW."meta_description", NEW."meta_title", NEW."name", NEW."notable_features", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."primary_image_id", NEW."restraint_system", NEW."slug", NEW."support_structure", NEW."target_market", NEW."total_installations", NEW."track_type", NEW."train_configuration", NEW."typical_capacity_range_max", NEW."typical_capacity_range_min", NEW."typical_height_range_max_ft", NEW."typical_height_range_min_ft", NEW."typical_speed_range_max_mph", NEW."typical_speed_range_min_mph", NEW."updated_at", NEW."url"); RETURN NULL;',
+                    hash="9cee65f580a26ae9edc8f9fc1f3d9b25da1856c3",
+                    operation="INSERT",
+                    pgid="pgtrigger_insert_insert_0aaee",
+                    table="rides_ridemodel",
+                    when="AFTER",
+                ),
+            ),
+        ),
+        pgtrigger.migrations.AddTrigger(
+            model_name="ridemodel",
+            trigger=pgtrigger.compiler.Trigger(
+                name="update_update",
+                sql=pgtrigger.compiler.UpsertTriggerSql(
+                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
+                    func='INSERT INTO "rides_ridemodelevent" ("category", "created_at", "description", "first_installation_year", "id", "is_discontinued", "last_installation_year", "manufacturer_id", "meta_description", "meta_title", "name", "notable_features", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "primary_image_id", "restraint_system", "slug", "support_structure", "target_market", "total_installations", "track_type", "train_configuration", "typical_capacity_range_max", "typical_capacity_range_min", "typical_height_range_max_ft", "typical_height_range_min_ft", "typical_speed_range_max_mph", "typical_speed_range_min_mph", "updated_at", "url") VALUES (NEW."category", NEW."created_at", NEW."description", NEW."first_installation_year", NEW."id", NEW."is_discontinued", NEW."last_installation_year", NEW."manufacturer_id", NEW."meta_description", NEW."meta_title", NEW."name", NEW."notable_features", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."primary_image_id", NEW."restraint_system", NEW."slug", NEW."support_structure", NEW."target_market", NEW."total_installations", NEW."track_type", NEW."train_configuration", NEW."typical_capacity_range_max", NEW."typical_capacity_range_min", NEW."typical_height_range_max_ft", NEW."typical_height_range_min_ft", NEW."typical_speed_range_max_mph", NEW."typical_speed_range_min_mph", NEW."updated_at", NEW."url"); RETURN NULL;',
+                    hash="365f87607f9f7bfee1caaabdd32b16032e04ae82",
+                    operation="UPDATE",
+                    pgid="pgtrigger_update_update_0ca1a",
+                    table="rides_ridemodel",
+                    when="AFTER",
+                ),
+            ),
+        ),
+    ]
@@ -0,0 +1,66 @@
# Generated by Django 5.2.5 on 2025-08-28 23:12

import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("rides", "0015_remove_company_insert_insert_and_more"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="ride",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="ride",
            name="update_update",
        ),
        migrations.AddField(
            model_name="ride",
            name="park_url",
            field=models.URLField(
                blank=True, help_text="Frontend URL for this ride's park"
            ),
        ),
        migrations.AddField(
            model_name="rideevent",
            name="park_url",
            field=models.URLField(
                blank=True, help_text="Frontend URL for this ride's park"
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="ride",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "rides_rideevent" ("average_rating", "banner_image_id", "capacity_per_hour", "card_image_id", "category", "closing_date", "created_at", "description", "designer_id", "id", "manufacturer_id", "max_height_in", "min_height_in", "name", "opening_date", "park_area_id", "park_id", "park_url", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "post_closing_status", "ride_duration_seconds", "ride_model_id", "slug", "status", "status_since", "updated_at", "url") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."capacity_per_hour", NEW."card_image_id", NEW."category", NEW."closing_date", NEW."created_at", NEW."description", NEW."designer_id", NEW."id", NEW."manufacturer_id", NEW."max_height_in", NEW."min_height_in", NEW."name", NEW."opening_date", NEW."park_area_id", NEW."park_id", NEW."park_url", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."post_closing_status", NEW."ride_duration_seconds", NEW."ride_model_id", NEW."slug", NEW."status", NEW."status_since", NEW."updated_at", NEW."url"); RETURN NULL;',
                    hash="3b83e1d1dbc2d5ca5792929845db1dd6d306700a",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_52074",
                    table="rides_ride",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="ride",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "rides_rideevent" ("average_rating", "banner_image_id", "capacity_per_hour", "card_image_id", "category", "closing_date", "created_at", "description", "designer_id", "id", "manufacturer_id", "max_height_in", "min_height_in", "name", "opening_date", "park_area_id", "park_id", "park_url", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "post_closing_status", "ride_duration_seconds", "ride_model_id", "slug", "status", "status_since", "updated_at", "url") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."capacity_per_hour", NEW."card_image_id", NEW."category", NEW."closing_date", NEW."created_at", NEW."description", NEW."designer_id", NEW."id", NEW."manufacturer_id", NEW."max_height_in", NEW."min_height_in", NEW."name", NEW."opening_date", NEW."park_area_id", NEW."park_id", NEW."park_url", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."post_closing_status", NEW."ride_duration_seconds", NEW."ride_model_id", NEW."slug", NEW."status", NEW."status_since", NEW."updated_at", NEW."url"); RETURN NULL;',
                    hash="efd782a22f5bec46d06b234ffc55b6c06360ade1",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_4917a",
                    table="rides_ride",
                    when="AFTER",
                ),
            ),
        ),
    ]
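These migration hunks are mechanical: adding the `url`/`park_url` columns changes the event-table INSERT statements, so django-pgtrigger drops and recreates each trigger with fresh hashes and pgids. A minimal sketch of the runtime effect, assuming the import path below (the slug and the `RideEvent` export are assumptions, not confirmed by this diff):

```python
# Sketch: once migrated, history capture is automatic. Any UPDATE on rides_ride
# fires the AFTER trigger above and snapshots the row into rides_rideevent.
from apps.rides.models import Ride, RideEvent  # import path assumed

ride = Ride.objects.get(slug="example-coaster")  # hypothetical slug
ride.capacity_per_hour = 1200
ride.save()  # AFTER UPDATE trigger inserts an event row with pgh_label='update'

# The newest snapshot now includes the park_url column added above.
latest = RideEvent.objects.filter(pgh_obj_id=ride.pk).order_by("-pgh_created_at").first()
```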
@@ -3,6 +3,7 @@ from django.contrib.postgres.fields import ArrayField
from django.db import models
from django.urls import reverse
from django.utils.text import slugify
from django.conf import settings

from apps.core.history import HistoricalSlug
from apps.core.models import TrackedModel
@@ -33,12 +34,30 @@ class Company(TrackedModel):
    rides_count = models.IntegerField(default=0)
    coasters_count = models.IntegerField(default=0)

    # Frontend URL
    url = models.URLField(blank=True, help_text="Frontend URL for this company")

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        if not self.slug:
            self.slug = slugify(self.name)

        # Generate frontend URL based on primary role
        # CRITICAL: Only MANUFACTURER and DESIGNER are for rides domain
        # OPERATOR and PROPERTY_OWNER are for parks domain and handled separately
        if self.roles:
            frontend_domain = getattr(
                settings, 'FRONTEND_DOMAIN', 'https://thrillwiki.com')
            primary_role = self.roles[0]  # Use first role as primary

            if primary_role == 'MANUFACTURER':
                self.url = f"{frontend_domain}/rides/manufacturers/{self.slug}/"
            elif primary_role == 'DESIGNER':
                self.url = f"{frontend_domain}/rides/designers/{self.slug}/"
            # OPERATOR and PROPERTY_OWNER URLs are handled by parks domain, not here

        super().save(*args, **kwargs)

    def get_absolute_url(self):
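A quick illustration of the role-based URL generation in `Company.save()` above; company names and slugs are hypothetical, and the comments simply restate the branch logic:

```python
# Hypothetical data; shows which branch of save() fires for each primary role.
manufacturer = Company(name="Example Rides GmbH", roles=["MANUFACTURER", "DESIGNER"])
manufacturer.save()
# manufacturer.url == "https://thrillwiki.com/rides/manufacturers/example-rides-gmbh/"

designer = Company(name="Example Design Co", roles=["DESIGNER"])
designer.save()
# designer.url == "https://thrillwiki.com/rides/designers/example-design-co/"

operator = Company(name="Example Parks LLC", roles=["OPERATOR"])
operator.save()
# operator.url is left untouched here; parks-domain roles are handled elsewhere.
```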
@@ -1,5 +1,6 @@
from django.db import models
from django.utils.text import slugify
from config.django import base as settings
from apps.core.models import TrackedModel
from .company import Company
import pghistory
@@ -150,6 +151,9 @@ class RideModel(TrackedModel):
        help_text="SEO meta description (auto-generated if blank)"
    )

    # Frontend URL
    url = models.URLField(blank=True, help_text="Frontend URL for this ride model")

    class Meta(TrackedModel.Meta):
        ordering = ["manufacturer__name", "name"]
        unique_together = [
@@ -208,7 +212,7 @@ class RideModel(TrackedModel):
            # Ensure uniqueness within the same manufacturer
            counter = 1
            while RideModel.objects.filter(
                manufacturer=self.manufacturer,
                slug=self.slug
            ).exclude(pk=self.pk).exists():
                self.slug = f"{base_slug}-{counter}"
@@ -222,6 +226,12 @@ class RideModel(TrackedModel):
                self)
            self.meta_description = desc[:160]

        # Generate frontend URL
        if self.manufacturer:
            frontend_domain = getattr(
                settings, 'FRONTEND_DOMAIN', 'https://thrillwiki.com')
            self.url = f"{frontend_domain}/rides/manufacturers/{self.manufacturer.slug}/{self.slug}/"

        super().save(*args, **kwargs)

    def update_installation_count(self) -> None:
@@ -511,6 +521,11 @@ class Ride(TrackedModel):
        help_text="Photo to use as card image for this ride"
    )

    # Frontend URL
    url = models.URLField(blank=True, help_text="Frontend URL for this ride")
    park_url = models.URLField(
        blank=True, help_text="Frontend URL for this ride's park")

    class Meta(TrackedModel.Meta):
        ordering = ["name"]
        unique_together = ["park", "slug"]
@@ -577,6 +592,14 @@ class Ride(TrackedModel):
    def save(self, *args, **kwargs) -> None:
        if not self.slug:
            self.slug = slugify(self.name)

        # Generate frontend URLs
        if self.park:
            frontend_domain = getattr(
                settings, 'FRONTEND_DOMAIN', 'https://thrillwiki.com')
            self.url = f"{frontend_domain}/parks/{self.park.slug}/rides/{self.slug}/"
            self.park_url = f"{frontend_domain}/parks/{self.park.slug}/"

        super().save(*args, **kwargs)
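With the default `FRONTEND_DOMAIN`, the `Ride.save()` hook above produces URL pairs like the following; the park and ride slugs are hypothetical:

```python
# Illustrative only; assumes an existing Park instance whose slug is "example-park".
ride = Ride(name="Example Looper", park=park)
ride.save()
# ride.url      == "https://thrillwiki.com/parks/example-park/rides/example-looper/"
# ride.park_url == "https://thrillwiki.com/parks/example-park/"
```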
80 backend/config/celery.py Normal file
@@ -0,0 +1,80 @@
"""
Celery configuration for ThrillWiki.

This module sets up Celery for background task processing including:
- Trending calculations
- Cache warming
- Analytics processing
- Email notifications
"""

import os
from celery import Celery

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.django.local')

app = Celery('thrillwiki')

# Get Redis URL from environment variable with fallback
REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/1')

# Celery Configuration - set directly without loading from Django settings first
app.conf.update(
    # Broker settings
    broker_url=REDIS_URL,
    result_backend=REDIS_URL,

    # Task settings
    task_serializer='json',
    accept_content=['json'],
    result_serializer='json',
    timezone='America/New_York',
    enable_utc=True,

    # Worker settings
    worker_prefetch_multiplier=1,
    task_acks_late=True,
    worker_max_tasks_per_child=1000,

    # Task routing
    task_routes={
        'apps.core.tasks.trending.*': {'queue': 'trending'},
        'apps.core.tasks.analytics.*': {'queue': 'analytics'},
        'apps.core.tasks.cache.*': {'queue': 'cache'},
    },

    # Beat schedule for periodic tasks
    beat_schedule={
        'calculate-trending-content': {
            'task': 'apps.core.tasks.trending.calculate_trending_content',
            'schedule': 300.0,  # Every 5 minutes
        },
        'warm-trending-cache': {
            'task': 'apps.core.tasks.trending.warm_trending_cache',
            'schedule': 900.0,  # Every 15 minutes
        },
        'cleanup-old-analytics': {
            'task': 'apps.core.tasks.analytics.cleanup_old_analytics',
            'schedule': 86400.0,  # Daily
        },
    },

    # Task result settings
    result_expires=3600,  # 1 hour
    task_ignore_result=False,

    # Error handling
    task_reject_on_worker_lost=True,
    task_soft_time_limit=300,  # 5 minutes
    task_time_limit=600,  # 10 minutes
)

# Load task modules from all registered Django apps.
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    """Debug task for testing Celery setup."""
    print(f'Request: {self.request!r}')
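For context, a minimal sketch of a task module that `app.autodiscover_tasks()` would pick up. The module path and task name match the routing and beat schedule above, but the body is illustrative, not the project's actual implementation:

```python
# apps/core/tasks/trending.py (sketch)
from celery import shared_task
from django.core.cache import cache


@shared_task(name='apps.core.tasks.trending.calculate_trending_content')
def calculate_trending_content():
    """Recompute trending content and cache it.

    Routed to the 'trending' queue by task_routes and run every 5 minutes
    by the beat schedule defined in this config.
    """
    results = {"trending_parks": [], "trending_rides": []}  # placeholder computation
    cache.set("trending:content", results, timeout=900)
    return results
```

Workers and the beat scheduler would then be launched with something like `celery -A config.celery worker -Q trending,analytics,cache` and `celery -A config.celery beat`.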
@@ -86,6 +86,8 @@ THIRD_PARTY_APPS = [
    "health_check.storage",
    "health_check.contrib.migrations",
    "health_check.contrib.redis",
    "django_celery_beat",  # Celery beat scheduler
    "django_celery_results",  # Celery result backend
]

LOCAL_APPS = [
@@ -283,6 +285,9 @@ ROADTRIP_REQUEST_TIMEOUT = 10  # seconds
ROADTRIP_MAX_RETRIES = 3
ROADTRIP_BACKOFF_FACTOR = 2

# Frontend URL Configuration
FRONTEND_DOMAIN = config("FRONTEND_DOMAIN", default="https://thrillwiki.com")

# Django REST Framework Settings
REST_FRAMEWORK = {
    "DEFAULT_AUTHENTICATION_CLASSES": [
@@ -57,6 +57,9 @@ dependencies = [
    "ruff>=0.12.10",
    "python-decouple>=3.8",
    "pyright>=1.1.404",
    "celery>=5.5.3",
    "django-celery-beat>=2.8.1",
    "django-celery-results>=2.6.0",
]

[dependency-groups]
@@ -0,0 +1,3 @@
"""
ThrillWiki Django project initialization.
"""
203 backend/uv.lock generated
@@ -2,6 +2,18 @@ version = 1
revision = 3
requires-python = ">=3.13"

[[package]]
name = "amqp"
version = "5.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" },
]

[[package]]
name = "anyio"
version = "4.10.0"
@@ -81,6 +93,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/9e/43/53afb8ba17218f19b77c7834128566c5bbb100a0ad9ba2e8e89d089d7079/autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128", size = 45807, upload-time = "2025-01-14T14:46:15.466Z" },
]

[[package]]
name = "billiard"
version = "4.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" },
]

[[package]]
name = "black"
version = "25.1.0"
@@ -142,6 +163,25 @@ filecache = [
    { name = "filelock" },
]

[[package]]
name = "celery"
version = "5.5.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "billiard" },
    { name = "click" },
    { name = "click-didyoumean" },
    { name = "click-plugins" },
    { name = "click-repl" },
    { name = "kombu" },
    { name = "python-dateutil" },
    { name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144, upload-time = "2025-06-01T11:08:12.563Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775, upload-time = "2025-06-01T11:08:09.94Z" },
]

[[package]]
name = "certifi"
version = "2025.8.3"
@@ -257,6 +297,43 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
]

[[package]]
name = "click-didyoumean"
version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "click" },
]
sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" },
]

[[package]]
name = "click-plugins"
version = "1.1.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "click" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" },
]

[[package]]
name = "click-repl"
version = "0.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "click" },
    { name = "prompt-toolkit" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" },
]

[[package]]
name = "colorama"
version = "0.4.6"
@@ -337,6 +414,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/b0/5c/3ba7d12e7a79566f97b8f954400926d7b6eb33bcdccc1315a857f200f1f1/crashtest-0.4.1-py3-none-any.whl", hash = "sha256:8d23eac5fa660409f57472e3851dab7ac18aba459a8d19cbbba86d3d5aecd2a5", size = 7558, upload-time = "2022-11-02T21:15:12.437Z" },
]

[[package]]
name = "cron-descriptor"
version = "2.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/30/ec/997bf9ca9392fce1cec2e25241fdd538c50bb405efd103cb1e6119296709/cron_descriptor-2.0.5.tar.gz", hash = "sha256:443ccd21a36a7fc9464a42472199cbdbc0d86b09021af1a8dd1595e4c391d85e", size = 48545, upload-time = "2025-08-26T11:10:24.907Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/55/d6/7ebad906dbe4092af6c63f85f30d15544698eb524db53bddfc6a5e010f2b/cron_descriptor-2.0.5-py3-none-any.whl", hash = "sha256:386a1d75c57410cf5cb719e08eefbea2c0c076c4a798aa6d7bf51816112fbbd1", size = 73957, upload-time = "2025-08-26T11:10:23.559Z" },
]

[[package]]
name = "cryptography"
version = "45.0.6"
@@ -441,6 +530,36 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/ac/82/e6f607b0bad524d227f6e5aaffdb5e2b286f6ab1b4b3151134ae2303c2d6/django_allauth-65.11.1.tar.gz", hash = "sha256:e95d5234cccaf92273d315e1393cc4626cb88a19d66a1bf0e81f89f7958cfa06", size = 1915592, upload-time = "2025-08-27T18:05:05.581Z" }

[[package]]
name = "django-celery-beat"
version = "2.8.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "celery" },
    { name = "cron-descriptor" },
    { name = "django" },
    { name = "django-timezone-field" },
    { name = "python-crontab" },
    { name = "tzdata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/aa/11/0c8b412869b4fda72828572068312b10aafe7ccef7b41af3633af31f9d4b/django_celery_beat-2.8.1.tar.gz", hash = "sha256:dfad0201c0ac50c91a34700ef8fa0a10ee098cc7f3375fe5debed79f2204f80a", size = 175802, upload-time = "2025-05-13T06:58:29.246Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/61/e5/3a0167044773dee989b498e9a851fc1663bea9ab879f1179f7b8a827ac10/django_celery_beat-2.8.1-py3-none-any.whl", hash = "sha256:da2b1c6939495c05a551717509d6e3b79444e114a027f7b77bf3727c2a39d171", size = 104833, upload-time = "2025-05-13T06:58:27.309Z" },
]

[[package]]
name = "django-celery-results"
version = "2.6.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "celery" },
    { name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a6/b5/9966c28e31014c228305e09d48b19b35522a8f941fe5af5f81f40dc8fa80/django_celery_results-2.6.0.tar.gz", hash = "sha256:9abcd836ae6b61063779244d8887a88fe80bbfaba143df36d3cb07034671277c", size = 83985, upload-time = "2025-04-10T08:23:52.677Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/2c/da/70f0f3c5364735344c4bc89e53413bcaae95b4fc1de4e98a7a3b9fb70c88/django_celery_results-2.6.0-py3-none-any.whl", hash = "sha256:b9ccdca2695b98c7cbbb8dea742311ba9a92773d71d7b4944a676e69a7df1c73", size = 38351, upload-time = "2025-04-10T08:23:49.965Z" },
]

[[package]]
name = "django-cleanup"
version = "9.0.0"
@@ -681,6 +800,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/12/1a/1c15852b3002929ed08992aeaaea703c43a43345dc19a09fd457593f52a6/django_tailwind_cli-4.3.0-py3-none-any.whl", hash = "sha256:0ff7d7374a390e63cba77894a13de2bf8721320a5bad97361cb14e160cc824b5", size = 29704, upload-time = "2025-07-12T20:33:00.242Z" },
]

[[package]]
name = "django-timezone-field"
version = "7.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "django" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ba/5b/0dbe271fef3c2274b83dbcb1b19fa3dacf1f7e542382819294644e78ea8b/django_timezone_field-7.1.tar.gz", hash = "sha256:b3ef409d88a2718b566fabe10ea996f2838bc72b22d3a2900c0aa905c761380c", size = 13727, upload-time = "2025-01-11T17:49:54.486Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ec/09/7a808392a751a24ffa62bec00e3085a9c1a151d728c323a5bab229ea0e58/django_timezone_field-7.1-py3-none-any.whl", hash = "sha256:93914713ed882f5bccda080eda388f7006349f25930b6122e9b07bf8db49c4b4", size = 13177, upload-time = "2025-01-11T17:49:52.142Z" },
]

[[package]]
name = "django-typer"
version = "3.2.2"
@@ -1058,6 +1189,21 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" },
]

[[package]]
name = "kombu"
version = "5.5.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "amqp" },
    { name = "packaging" },
    { name = "tzdata" },
    { name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992, upload-time = "2025-06-01T10:19:22.281Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034, upload-time = "2025-06-01T10:19:20.436Z" },
]

[[package]]
name = "markupsafe"
version = "3.0.2"
@@ -1348,6 +1494,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d2/f1/fb218aebd29bca5c506230201c346881ae9b43de7bbb21a68dc648e972b3/poetry_core-2.1.3-py3-none-any.whl", hash = "sha256:2c704f05016698a54ca1d327f46ce2426d72eaca6ff614132c8477c292266771", size = 332607, upload-time = "2025-05-04T12:43:09.814Z" },
]

[[package]]
name = "prompt-toolkit"
version = "3.0.52"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "wcwidth" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
]

[[package]]
name = "psutil"
version = "7.0.0"
@@ -1559,6 +1717,27 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d8/96/5f8a4545d783674f3de33f0ebc4db16cc76ce77a4c404d284f43f09125e3/pytest_playwright-0.7.0-py3-none-any.whl", hash = "sha256:2516d0871fa606634bfe32afbcc0342d68da2dbff97fe3459849e9c428486da2", size = 16618, upload-time = "2025-01-31T11:06:08.075Z" },
]

[[package]]
name = "python-crontab"
version = "3.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/99/7f/c54fb7e70b59844526aa4ae321e927a167678660ab51dda979955eafb89a/python_crontab-3.3.0.tar.gz", hash = "sha256:007c8aee68dddf3e04ec4dce0fac124b93bd68be7470fc95d2a9617a15de291b", size = 57626, upload-time = "2025-07-13T20:05:35.535Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/47/42/bb4afa5b088f64092036221843fc989b7db9d9d302494c1f8b024ee78a46/python_crontab-3.3.0-py3-none-any.whl", hash = "sha256:739a778b1a771379b75654e53fd4df58e5c63a9279a63b5dfe44c0fcc3ee7884", size = 27533, upload-time = "2025-07-13T20:05:34.266Z" },
]

[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
]

[[package]]
name = "python-decouple"
version = "3.8"
@@ -1961,6 +2140,7 @@ version = "0.1.0"
source = { virtual = "." }
dependencies = [
    { name = "black" },
    { name = "celery" },
    { name = "channels" },
    { name = "channels-redis" },
    { name = "coverage" },
@@ -1969,6 +2149,8 @@ dependencies = [
    { name = "dj-rest-auth" },
    { name = "django" },
    { name = "django-allauth" },
    { name = "django-celery-beat" },
    { name = "django-celery-results" },
    { name = "django-cleanup" },
    { name = "django-cloudflare-images" },
    { name = "django-cors-headers" },
@@ -2027,6 +2209,7 @@ dev = [
[package.metadata]
requires-dist = [
    { name = "black", specifier = ">=24.1.0" },
    { name = "celery", specifier = ">=5.5.3" },
    { name = "channels", specifier = ">=4.2.0" },
    { name = "channels-redis", specifier = ">=4.2.1" },
    { name = "coverage", specifier = ">=7.9.1" },
@@ -2035,6 +2218,8 @@ requires-dist = [
    { name = "dj-rest-auth", specifier = ">=7.0.0" },
    { name = "django", specifier = ">=5.0" },
    { name = "django-allauth", specifier = ">=0.60.1" },
    { name = "django-celery-beat", specifier = ">=2.8.1" },
    { name = "django-celery-results", specifier = ">=2.6.0" },
    { name = "django-cleanup", specifier = ">=8.0.0" },
    { name = "django-cloudflare-images", specifier = ">=0.6.0" },
    { name = "django-cors-headers", specifier = ">=4.3.1" },
@@ -2200,6 +2385,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]

[[package]]
name = "vine"
version = "5.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = "2023-11-05T08:46:51.205Z" },
]

[[package]]
name = "virtualenv"
version = "20.32.0"
@@ -2214,6 +2408,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/5c/c6/f8f28009920a736d0df434b52e9feebfb4d702ba942f15338cb4a83eafc1/virtualenv-20.32.0-py3-none-any.whl", hash = "sha256:2c310aecb62e5aa1b06103ed7c2977b81e042695de2697d01017ff0f1034af56", size = 6057761, upload-time = "2025-07-21T04:09:48.059Z" },
]

[[package]]
name = "wcwidth"
version = "0.2.13"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
]

[[package]]
name = "werkzeug"
version = "3.1.3"
@@ -7,6 +7,11 @@ # Active Context
- **COMPLETED: Enhanced Stats API Endpoint**: Successfully updated `/api/v1/stats/` endpoint with comprehensive platform statistics
- **COMPLETED: Maps API Implementation**: Successfully implemented all map endpoints with full functionality
- **COMPLETED: Comprehensive Rides Filtering System**: Successfully implemented comprehensive filtering capabilities for rides API with 25+ filter parameters and an enhanced filter options endpoint
- **COMPLETED: New Content API Field Updates**: Successfully updated the "newly_opened" API response to replace the "location" field with "park" and "date_opened" fields
- **COMPLETED: Celery Integration for Trending Content**: Successfully implemented Celery asynchronous task processing for trending content calculations with Redis backend
- **COMPLETED: Manual Trigger Endpoint for Trending Content**: Successfully implemented admin-only POST endpoint to manually trigger trending content calculations
- **COMPLETED: URL Fields in Trending and New Content Endpoints**: Successfully added url fields to all trending and new content API responses for frontend navigation
- **COMPLETED: Park URL Optimization**: Successfully optimized park URL usage to use `ride.park.url` instead of the redundant `ride.park_url` field for better data consistency
- **Features Implemented**:
  - **RideModel API Directory Structure**: Moved files from `backend/apps/api/v1/ride_models/` to `backend/apps/api/v1/rides/manufacturers/` to match nested URL organization
  - **RideModel API Reorganization**: Nested endpoints under rides/manufacturers, manufacturer-scoped slugs, integrated with ride creation/editing, removed top-level endpoint
@@ -14,6 +19,8 @@ # Active Context
  - **Stats API**: Entity counts, photo counts, category breakdowns, status breakdowns, review counts, automatic cache invalidation, caching, public access, OpenAPI documentation
  - **Maps API**: Location retrieval, bounds filtering, text search, location details, clustering support, caching, comprehensive serializers, OpenAPI documentation
  - **Comprehensive Rides Filtering**: 25+ filter parameters, enhanced filter options endpoint, roller coaster specific filters, range filters, boolean filters, multiple value support, comprehensive ordering options
  - **Celery Integration**: Asynchronous trending content calculation, Redis broker configuration, real database-driven responses replacing mock data
  - **Manual Trigger Endpoint**: Admin-only POST /api/v1/trending/calculate/ endpoint with task ID responses and proper error handling

## Recent Changes
**RideModel API Directory Structure Reorganization - COMPLETED:**
@@ -97,6 +104,33 @@ # Active Context
- **Error Handling**: Graceful handling of invalid filter values with try/catch blocks
- **Multiple Value Support**: Categories and statuses support multiple values via getlist()

**Celery Integration for Trending Content - COMPLETED:**
- **Implemented**: Complete Celery integration for asynchronous trending content calculations
- **Files Created/Modified**:
  - `backend/config/celery.py` - Celery configuration with Redis broker and result backend
  - `backend/thrillwiki/celery.py` - Celery app initialization and autodiscovery
  - `backend/apps/core/tasks/__init__.py` - Tasks package initialization
  - `backend/apps/core/tasks/trending.py` - Celery tasks for trending and new content calculation
  - `backend/apps/core/services/trending_service.py` - Updated to use Celery tasks and return proper field structure
  - `backend/apps/api/v1/views/trending.py` - Removed mock data, integrated with Celery-powered service
- **Database Migrations**: Applied Celery database tables successfully
- **Field Structure Updates**: Updated "newly_opened" response to include "park" and "date_opened" fields instead of "location"
- **Mock Data Removal**: Completely removed all mock data from trending endpoints, now using real database queries
- **Redis Integration**: Configured Redis as Celery broker and result backend for task processing
- **Task Processing**: Asynchronous calculation of trending content with proper caching and performance optimization (see the dispatch sketch below)
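A minimal sketch of the dispatch path described above, assuming the task name from `apps/core/tasks/trending.py`; the service body is illustrative, not the project's actual implementation:

```python
# Sketch: how trending_service.py might hand the work to Celery.
from apps.core.tasks.trending import calculate_trending_content


def refresh_trending() -> str:
    """Queue a trending recalculation and return the Celery task id."""
    result = calculate_trending_content.delay()
    return result.id
```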
**Manual Trigger Endpoint for Trending Content - COMPLETED:**
- **Implemented**: Admin-only POST endpoint to manually trigger trending content calculations
- **Files Modified**:
  - `backend/apps/api/v1/views/trending.py` - Added TriggerTrendingCalculationAPIView with admin permissions
  - `backend/apps/api/v1/urls.py` - Added URL routing for manual trigger endpoint
  - `backend/apps/api/v1/views/__init__.py` - Added new view to exports
  - `docs/frontend.md` - Updated with comprehensive endpoint documentation
- **Endpoint**: POST `/api/v1/trending/calculate/` - Triggers both trending and new content calculation tasks
- **Permissions**: Admin-only access (IsAdminUser permission class)
- **Response**: Returns task IDs and estimated completion times for both triggered tasks
- **Error Handling**: Proper error responses for failed task triggers and unauthorized access (a minimal view sketch follows)
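A minimal sketch of the admin-only trigger view, assuming the task import from the Celery section; the real `TriggerTrendingCalculationAPIView` lives in `backend/apps/api/v1/views/trending.py` and may differ in detail:

```python
# Sketch only; mirrors the permissions and 202 response described above.
from rest_framework import status
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView

from apps.core.tasks.trending import calculate_trending_content


class TriggerTrendingCalculationAPIView(APIView):
    permission_classes = [IsAdminUser]

    def post(self, request):
        try:
            task = calculate_trending_content.delay()
        except Exception as exc:  # e.g. broker unreachable
            return Response({"detail": str(exc)}, status=status.HTTP_503_SERVICE_UNAVAILABLE)
        return Response({"task_id": task.id}, status=status.HTTP_202_ACCEPTED)
```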
**Technical Implementation:**
- **Stats Endpoint**: GET `/api/v1/stats/` - Returns comprehensive platform statistics
- **Maps Endpoints**:
@@ -144,6 +178,17 @@ # Active Context
- `backend/apps/api/v1/serializers/maps.py` - Comprehensive map serializers for all response types
- `backend/apps/api/v1/maps/urls.py` - Map URL routing configuration

### Celery Integration Files
- `backend/config/celery.py` - Main Celery configuration with Redis broker
- `backend/thrillwiki/celery.py` - Celery app initialization and task autodiscovery
- `backend/apps/core/tasks/__init__.py` - Tasks package initialization
- `backend/apps/core/tasks/trending.py` - Trending content calculation tasks
- `backend/apps/core/services/trending_service.py` - Updated service using Celery tasks
- `backend/apps/api/v1/views/trending.py` - Updated views without mock data, includes manual trigger endpoint
- `backend/apps/api/v1/urls.py` - Updated with manual trigger endpoint routing
- `backend/apps/api/v1/views/__init__.py` - Updated exports for new trigger view
- `docs/frontend.md` - Updated with manual trigger endpoint documentation

## Permanent Rules Established
**CREATED**: `cline_docs/permanent_rules.md` - Permanent development rules that must be followed in all future work.

@@ -228,6 +273,15 @@ # Active Context
- **Performance**: Cached responses for optimal performance (5-minute cache)
- **Access**: Public endpoints, no authentication required (except photo uploads)
- **Documentation**: Full OpenAPI documentation available
- **Celery Integration**: ✅ Successfully implemented and tested
  - **Configuration**: Redis broker configured and working
  - **Tasks**: Trending content calculation tasks implemented
  - **Database**: Celery tables created via migrations
  - **API Response**: "newly_opened" now returns correct structure with "park" and "date_opened" fields
  - **Mock Data**: Completely removed from all trending endpoints
  - **Real Data**: All responses now use actual database queries
  - **Manual Trigger**: POST `/api/v1/trending/calculate/` endpoint implemented with admin permissions
  - **Task Management**: Returns task IDs for monitoring asynchronous calculations

## Sample Response
```json
6 cookies.txt Normal file
@@ -0,0 +1,6 @@
# Netscape HTTP Cookie File
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.

#HttpOnly_localhost	FALSE	/	FALSE	1757625948	sessionid	76lmsjx6m9rkatknfi3w70yam2lw3rru
localhost	FALSE	/	FALSE	1787865948	csrftoken	b3mRLXY7YHQnE2x6LewKk5VVHZTieRFk

4773 docs/frontend.md
File diff suppressed because it is too large

182 test_manual_trigger.py Normal file
@@ -0,0 +1,182 @@
#!/usr/bin/env python3
"""
Test script for the manual trending content calculation trigger endpoint.
"""

import requests
import json
import time
from datetime import datetime

# Configuration
BASE_URL = "http://localhost:8000"
ADMIN_USERNAME = "admin"
ADMIN_PASSWORD = "admin"  # We'll need to check what the password is


def login_and_get_token():
    """Login and get authentication token."""
    login_url = f"{BASE_URL}/api/v1/auth/login/"

    login_data = {
        "username": ADMIN_USERNAME,
        "password": ADMIN_PASSWORD
    }

    print(f"🔐 Attempting to login as {ADMIN_USERNAME}...")
    response = requests.post(login_url, json=login_data)

    if response.status_code == 200:
        data = response.json()
        token = data.get('token')
        print(f"✅ Login successful! Token: {token[:20]}...")
        return token
    else:
        print(f"❌ Login failed: {response.status_code}")
        print(f"Response: {response.text}")
        return None


def test_trigger_endpoint(token):
    """Test the manual trigger endpoint."""
    trigger_url = f"{BASE_URL}/api/v1/trending/calculate/"

    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json"
    }

    print(f"\n🚀 Testing manual trigger endpoint...")
    print(f"URL: {trigger_url}")

    response = requests.post(trigger_url, headers=headers)

    print(f"Status Code: {response.status_code}")
    print(f"Response Headers: {dict(response.headers)}")

    try:
        response_data = response.json()
        print(f"Response Body: {json.dumps(response_data, indent=2)}")

        if response.status_code == 202:
            print("✅ Manual trigger successful!")
            return response_data
        else:
            print(f"❌ Manual trigger failed with status {response.status_code}")
            return None

    except json.JSONDecodeError:
        print(f"❌ Invalid JSON response: {response.text}")
        return None


def test_trending_endpoints():
    """Test the trending content endpoints to see the results."""
    print(f"\n📊 Testing trending content endpoints...")

    # Test trending content endpoint
    trending_url = f"{BASE_URL}/api/v1/trending/content/"
    print(f"Testing: {trending_url}")

    response = requests.get(trending_url)
    print(f"Trending Content Status: {response.status_code}")

    if response.status_code == 200:
        data = response.json()
        print(f"Trending Parks: {len(data.get('trending_parks', []))}")
        print(f"Trending Rides: {len(data.get('trending_rides', []))}")

        # Show first few trending items
        if data.get('trending_parks'):
            print(f"First trending park: {data['trending_parks'][0].get('name', 'Unknown')}")
        if data.get('trending_rides'):
            print(f"First trending ride: {data['trending_rides'][0].get('name', 'Unknown')}")

    # Test new content endpoint
    new_content_url = f"{BASE_URL}/api/v1/trending/new/"
    print(f"\nTesting: {new_content_url}")

    response = requests.get(new_content_url)
    print(f"New Content Status: {response.status_code}")

    if response.status_code == 200:
        data = response.json()
        print(f"Recently Added: {len(data.get('recently_added', []))}")
        print(f"Newly Opened: {len(data.get('newly_opened', []))}")
        print(f"Upcoming: {len(data.get('upcoming', []))}")

        # Show the newly_opened structure to verify our changes
        if data.get('newly_opened'):
            print(f"\n🎢 First newly opened item structure:")
            first_item = data['newly_opened'][0]
            print(f"  Name: {first_item.get('name')}")
            # Should be park name, not location
            print(f"  Park: {first_item.get('park')}")
            # Should be date_opened, not location
            print(f"  Date Opened: {first_item.get('date_opened')}")
            print(f"  Category: {first_item.get('category')}")
            print(f"  Slug: {first_item.get('slug')}")

            # Verify location field is NOT present
            if 'location' in first_item:
                print(f"  ❌ ERROR: 'location' field still present: {first_item['location']}")
            else:
                print(f"  ✅ SUCCESS: 'location' field removed as requested")


def test_unauthorized_access():
    """Test that non-admin users cannot access the trigger endpoint."""
    print(f"\n🔒 Testing unauthorized access...")

    trigger_url = f"{BASE_URL}/api/v1/trending/calculate/"

    # Test without authentication
    print("Testing without authentication...")
    response = requests.post(trigger_url)
    print(f"No auth status: {response.status_code}")

    # Test with invalid token
    print("Testing with invalid token...")
    headers = {"Authorization": "Bearer invalid_token_123"}
    response = requests.post(trigger_url, headers=headers)
    print(f"Invalid token status: {response.status_code}")

    if response.status_code in [401, 403]:
        print("✅ Unauthorized access properly blocked")
    else:
        print(f"❌ Unauthorized access not properly blocked: {response.status_code}")


def main():
    """Main test function."""
    print("🧪 ThrillWiki Manual Trigger Endpoint Test")
    print("=" * 50)

    # First test unauthorized access
    test_unauthorized_access()

    # Try to login and get token
    token = login_and_get_token()

    if not token:
        print("❌ Cannot proceed without authentication token")
        return

    # Test the manual trigger endpoint
    trigger_result = test_trigger_endpoint(token)

    if trigger_result:
        print(f"\n⏳ Waiting 10 seconds for tasks to process...")
        time.sleep(10)

        # Test the trending endpoints to see results
        test_trending_endpoints()

    print(f"\n🏁 Test completed at {datetime.now()}")


if __name__ == "__main__":
    main()