Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git, synced 2025-12-20 12:51:09 -05:00
Refactor API structure and add comprehensive user management features
- Restructure API v1 with improved serializers organization
- Add user deletion requests and moderation queue system
- Implement bulk moderation operations and permissions
- Add user profile enhancements with display names and avatars
- Expand ride and park API endpoints with better filtering
- Add manufacturer API with detailed ride relationships
- Improve authentication flows and error handling
- Update frontend documentation and API specifications
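Most of the diff below is a Black-style reformatting pass over the trending and new-content management commands, services, and Celery tasks. For orientation, the weighted trending score those modules compute combines four normalized components; a minimal sketch, using the weights and component names that appear in the diff (the standalone helper itself is illustrative, not part of the codebase):

def weighted_trending_score(
    view_growth_score: float,   # view growth, 40% weight, normalized to 0-1
    rating_score: float,        # average rating, 30% weight, normalized to 0-1
    recency_score: float,       # recency, 20% weight, normalized to 0-1
    popularity_score: float,    # total views, 10% weight, normalized to 0-1
) -> float:
    # Same weighting as _calculate_content_score in the diff below.
    return (
        view_growth_score * 0.4
        + rating_score * 0.3
        + recency_score * 0.2
        + popularity_score * 0.1
    )

The view-growth component in the diff caps growth at 500% (min(growth_percentage / 500.0, 1.0)), so a score of 1.0 means views at least quintupled between the comparison periods.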
@@ -166,7 +166,9 @@ def custom_exception_handler(
request=request,
)

response = Response(custom_response_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
response = Response(
custom_response_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR
)

return response

@@ -20,39 +20,37 @@ logger = logging.getLogger(__name__)


class Command(BaseCommand):
help = 'Calculate new content and cache results'
help = "Calculate new content and cache results"

def add_arguments(self, parser):
parser.add_argument(
'--content-type',
"--content-type",
type=str,
default='all',
choices=['all', 'parks', 'rides'],
help='Type of content to calculate (default: all)'
default="all",
choices=["all", "parks", "rides"],
help="Type of content to calculate (default: all)",
)
parser.add_argument(
'--days-back',
"--days-back",
type=int,
default=30,
help='Number of days to look back for new content (default: 30)'
help="Number of days to look back for new content (default: 30)",
)
parser.add_argument(
'--limit',
"--limit",
type=int,
default=50,
help='Maximum number of results to calculate (default: 50)'
help="Maximum number of results to calculate (default: 50)",
)
parser.add_argument(
'--verbose',
action='store_true',
help='Enable verbose output'
"--verbose", action="store_true", help="Enable verbose output"
)

def handle(self, *args, **options):
content_type = options['content_type']
days_back = options['days_back']
limit = options['limit']
verbose = options['verbose']
content_type = options["content_type"]
days_back = options["days_back"]
limit = options["limit"]
verbose = options["verbose"]

if verbose:
self.stdout.write(f"Starting new content calculation for {content_type}")
@@ -63,14 +61,16 @@ class Command(BaseCommand):

if content_type in ["all", "parks"]:
parks = self._get_new_parks(
cutoff_date, limit if content_type == "parks" else limit * 2)
cutoff_date, limit if content_type == "parks" else limit * 2
)
new_items.extend(parks)
if verbose:
self.stdout.write(f"Found {len(parks)} new parks")

if content_type in ["all", "rides"]:
rides = self._get_new_rides(
cutoff_date, limit if content_type == "rides" else limit * 2)
cutoff_date, limit if content_type == "rides" else limit * 2
)
new_items.extend(rides)
if verbose:
self.stdout.write(f"Found {len(rides)} new rides")
@@ -95,7 +95,8 @@ class Command(BaseCommand):
if verbose:
for item in formatted_results[:5]: # Show first 5 items
self.stdout.write(
f" {item['name']} ({item['park']}) - opened: {item['date_opened']}")
f" {item['name']} ({item['park']}) - opened: {item['date_opened']}"
)

except Exception as e:
logger.error(f"Error calculating new content: {e}", exc_info=True)
@@ -105,8 +106,8 @@ class Command(BaseCommand):
"""Get recently added parks using real data."""
new_parks = (
Park.objects.filter(
Q(created_at__gte=cutoff_date) | Q(
opening_date__gte=cutoff_date.date()),
Q(created_at__gte=cutoff_date)
| Q(opening_date__gte=cutoff_date.date()),
status="OPERATING",
)
.select_related("location", "operator")
@@ -124,18 +125,20 @@ class Command(BaseCommand):
if opening_date and isinstance(opening_date, datetime):
opening_date = opening_date.date()

results.append({
"content_object": park,
"content_type": "park",
"id": park.pk,
"name": park.name,
"slug": park.slug,
"park": park.name, # For parks, park field is the park name itself
"category": "park",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
"url": park.url,
})
results.append(
{
"content_object": park,
"content_type": "park",
"id": park.pk,
"name": park.name,
"slug": park.slug,
"park": park.name, # For parks, park field is the park name itself
"category": "park",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
"url": park.url,
}
)

return results

@@ -143,8 +146,8 @@ class Command(BaseCommand):
"""Get recently added rides using real data."""
new_rides = (
Ride.objects.filter(
Q(created_at__gte=cutoff_date) | Q(
opening_date__gte=cutoff_date.date()),
Q(created_at__gte=cutoff_date)
| Q(opening_date__gte=cutoff_date.date()),
status="OPERATING",
)
.select_related("park", "park__location")
@@ -154,7 +157,8 @@ class Command(BaseCommand):
results = []
for ride in new_rides:
date_added = getattr(ride, "opening_date", None) or getattr(
ride, "created_at", None)
ride, "created_at", None
)
if date_added:
if isinstance(date_added, datetime):
date_added = date_added.date()
@@ -163,23 +167,27 @@ class Command(BaseCommand):
if opening_date and isinstance(opening_date, datetime):
opening_date = opening_date.date()

results.append({
"content_object": ride,
"content_type": "ride",
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"park": ride.park.name if ride.park else "",
"category": "ride",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
"url": ride.url,
"park_url": ride.park.url if ride.park else "",
})
results.append(
{
"content_object": ride,
"content_type": "ride",
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"park": ride.park.name if ride.park else "",
"category": "ride",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
"url": ride.url,
"park_url": ride.park.url if ride.park else "",
}
)

return results

def _format_new_content_results(self, new_items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
def _format_new_content_results(
self, new_items: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
"""Format new content results for frontend consumption."""
formatted_results = []

@@ -6,13 +6,11 @@ Run with: python manage.py calculate_trending
"""

import logging
from datetime import datetime, timedelta
from typing import Dict, List, Any
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from django.core.cache import cache
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q

from apps.core.analytics import PageView
from apps.parks.models import Park
@@ -22,32 +20,30 @@ logger = logging.getLogger(__name__)


class Command(BaseCommand):
help = 'Calculate trending content and cache results'
help = "Calculate trending content and cache results"

def add_arguments(self, parser):
parser.add_argument(
'--content-type',
"--content-type",
type=str,
default='all',
choices=['all', 'parks', 'rides'],
help='Type of content to calculate (default: all)'
default="all",
choices=["all", "parks", "rides"],
help="Type of content to calculate (default: all)",
)
parser.add_argument(
'--limit',
"--limit",
type=int,
default=50,
help='Maximum number of results to calculate (default: 50)'
help="Maximum number of results to calculate (default: 50)",
)
parser.add_argument(
'--verbose',
action='store_true',
help='Enable verbose output'
"--verbose", action="store_true", help="Enable verbose output"
)

def handle(self, *args, **options):
content_type = options['content_type']
limit = options['limit']
verbose = options['verbose']
content_type = options["content_type"]
limit = options["limit"]
verbose = options["verbose"]

if verbose:
self.stdout.write(f"Starting trending calculation for {content_type}")
@@ -64,7 +60,7 @@ class Command(BaseCommand):
park_items = self._calculate_trending_parks(
current_period_hours,
previous_period_hours,
limit if content_type == "parks" else limit * 2
limit if content_type == "parks" else limit * 2,
)
trending_items.extend(park_items)
if verbose:
@@ -74,7 +70,7 @@ class Command(BaseCommand):
ride_items = self._calculate_trending_rides(
current_period_hours,
previous_period_hours,
limit if content_type == "rides" else limit * 2
limit if content_type == "rides" else limit * 2,
)
trending_items.extend(ride_items)
if verbose:
@@ -86,7 +82,8 @@ class Command(BaseCommand):

# Format results for API consumption
formatted_results = self._format_trending_results(
trending_items, current_period_hours, previous_period_hours)
trending_items, current_period_hours, previous_period_hours
)

# Cache results
cache_key = f"trending:calculated:{content_type}:{limit}"
@@ -101,74 +98,109 @@ class Command(BaseCommand):
if verbose:
for item in formatted_results[:5]: # Show first 5 items
self.stdout.write(
f" {item['name']} (score: {item.get('views_change', 'N/A')})")
f" {item['name']} (score: {item.get('views_change', 'N/A')})"
)

except Exception as e:
logger.error(f"Error calculating trending content: {e}", exc_info=True)
raise CommandError(f"Failed to calculate trending content: {e}")

def _calculate_trending_parks(self, current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
def _calculate_trending_parks(
self, current_period_hours: int, previous_period_hours: int, limit: int
) -> List[Dict[str, Any]]:
"""Calculate trending scores for parks using real data."""
parks = Park.objects.filter(
status="OPERATING").select_related("location", "operator")
parks = Park.objects.filter(status="OPERATING").select_related(
"location", "operator"
)

trending_parks = []

for park in parks:
try:
score = self._calculate_content_score(
park, "park", current_period_hours, previous_period_hours)
park, "park", current_period_hours, previous_period_hours
)
if score > 0: # Only include items with positive trending scores
trending_parks.append({
"content_object": park,
"content_type": "park",
"trending_score": score,
"id": park.id,
"name": park.name,
"slug": park.slug,
"park": park.name, # For parks, park field is the park name itself
"category": "park",
"rating": float(park.average_rating) if park.average_rating else 0.0,
"date_opened": park.opening_date.isoformat() if park.opening_date else "",
"url": park.url,
})
trending_parks.append(
{
"content_object": park,
"content_type": "park",
"trending_score": score,
"id": park.id,
"name": park.name,
"slug": park.slug,
"park": park.name, # For parks, park field is the park name itself
"category": "park",
"rating": (
float(park.average_rating)
if park.average_rating
else 0.0
),
"date_opened": (
park.opening_date.isoformat()
if park.opening_date
else ""
),
"url": park.url,
}
)
except Exception as e:
logger.warning(f"Error calculating score for park {park.id}: {e}")

return trending_parks

def _calculate_trending_rides(self, current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
def _calculate_trending_rides(
self, current_period_hours: int, previous_period_hours: int, limit: int
) -> List[Dict[str, Any]]:
"""Calculate trending scores for rides using real data."""
rides = Ride.objects.filter(status="OPERATING").select_related(
"park", "park__location")
"park", "park__location"
)

trending_rides = []

for ride in rides:
try:
score = self._calculate_content_score(
ride, "ride", current_period_hours, previous_period_hours)
ride, "ride", current_period_hours, previous_period_hours
)
if score > 0: # Only include items with positive trending scores
trending_rides.append({
"content_object": ride,
"content_type": "ride",
"trending_score": score,
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"park": ride.park.name if ride.park else "",
"category": "ride",
"rating": float(ride.average_rating) if ride.average_rating else 0.0,
"date_opened": ride.opening_date.isoformat() if ride.opening_date else "",
"url": ride.url,
"park_url": ride.park.url if ride.park else "",
})
trending_rides.append(
{
"content_object": ride,
"content_type": "ride",
"trending_score": score,
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"park": ride.park.name if ride.park else "",
"category": "ride",
"rating": (
float(ride.average_rating)
if ride.average_rating
else 0.0
),
"date_opened": (
ride.opening_date.isoformat()
if ride.opening_date
else ""
),
"url": ride.url,
"park_url": ride.park.url if ride.park else "",
}
)
except Exception as e:
logger.warning(f"Error calculating score for ride {ride.pk}: {e}")

return trending_rides

def _calculate_content_score(self, content_obj: Any, content_type: str, current_period_hours: int, previous_period_hours: int) -> float:
def _calculate_content_score(
self,
content_obj: Any,
content_type: str,
current_period_hours: int,
previous_period_hours: int,
) -> float:
"""Calculate weighted trending score for content object using real analytics data."""
try:
# Get content type for PageView queries
@@ -176,7 +208,8 @@ class Command(BaseCommand):

# 1. View Growth Score (40% weight)
view_growth_score = self._calculate_view_growth_score(
ct, content_obj.id, current_period_hours, previous_period_hours)
ct, content_obj.id, current_period_hours, previous_period_hours
)

# 2. Rating Score (30% weight)
rating_score = self._calculate_rating_score(content_obj)
@@ -186,31 +219,41 @@ class Command(BaseCommand):

# 4. Popularity Score (10% weight)
popularity_score = self._calculate_popularity_score(
ct, content_obj.id, current_period_hours)
ct, content_obj.id, current_period_hours
)

# Calculate weighted final score
final_score = (
view_growth_score * 0.4 +
rating_score * 0.3 +
recency_score * 0.2 +
popularity_score * 0.1
view_growth_score * 0.4
+ rating_score * 0.3
+ recency_score * 0.2
+ popularity_score * 0.1
)

return final_score

except Exception as e:
logger.error(
f"Error calculating score for {content_type} {content_obj.id}: {e}")
f"Error calculating score for {content_type} {content_obj.id}: {e}"
)
return 0.0

def _calculate_view_growth_score(self, content_type: ContentType, object_id: int, current_period_hours: int, previous_period_hours: int) -> float:
def _calculate_view_growth_score(
self,
content_type: ContentType,
object_id: int,
current_period_hours: int,
previous_period_hours: int,
) -> float:
"""Calculate normalized view growth score using real PageView data."""
try:
current_views, previous_views, growth_percentage = PageView.get_views_growth(
content_type,
object_id,
current_period_hours,
previous_period_hours,
current_views, previous_views, growth_percentage = (
PageView.get_views_growth(
content_type,
object_id,
current_period_hours,
previous_period_hours,
)
)

if previous_views == 0:
@@ -218,8 +261,9 @@ class Command(BaseCommand):
return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0

# Normalize growth percentage to 0-1 scale
normalized_growth = min(growth_percentage / 500.0,
1.0) if growth_percentage > 0 else 0.0
normalized_growth = (
min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
)
return max(normalized_growth, 0.0)

except Exception as e:
@@ -272,11 +316,14 @@ class Command(BaseCommand):
logger.warning(f"Error calculating recency score: {e}")
return 0.5

def _calculate_popularity_score(self, content_type: ContentType, object_id: int, hours: int) -> float:
def _calculate_popularity_score(
self, content_type: ContentType, object_id: int, hours: int
) -> float:
"""Calculate popularity score based on total view count."""
try:
total_views = PageView.get_total_views_count(
content_type, object_id, hours=hours)
content_type, object_id, hours=hours
)

# Normalize views to 0-1 scale
if total_views == 0:
@@ -290,7 +337,12 @@ class Command(BaseCommand):
logger.warning(f"Error calculating popularity score: {e}")
return 0.0

def _format_trending_results(self, trending_items: List[Dict[str, Any]], current_period_hours: int, previous_period_hours: int) -> List[Dict[str, Any]]:
def _format_trending_results(
self,
trending_items: List[Dict[str, Any]],
current_period_hours: int,
previous_period_hours: int,
) -> List[Dict[str, Any]]:
"""Format trending results for frontend consumption."""
formatted_results = []

@@ -299,11 +351,13 @@ class Command(BaseCommand):
# Get view change for display
content_obj = item["content_object"]
ct = ContentType.objects.get_for_model(content_obj)
current_views, previous_views, growth_percentage = PageView.get_views_growth(
ct,
content_obj.id,
current_period_hours,
previous_period_hours,
current_views, previous_views, growth_percentage = (
PageView.get_views_growth(
ct,
content_obj.id,
current_period_hours,
previous_period_hours,
)
)

# Format exactly as frontend expects

@@ -305,7 +305,7 @@ class CacheMonitor:
stats["cache_backend"] = cache_backend
stats["message"] = f"Cache statistics not available for {cache_backend}"

except Exception as e:
except Exception:
# Don't log as error since this is expected for non-Redis backends
cache_backend = self.cache_service.default_cache.__class__.__name__
stats["cache_backend"] = cache_backend

@@ -48,7 +48,11 @@ class ParkLocationAdapter(BaseLocationAdapter):
self, location_obj: ParkLocation
) -> Optional[UnifiedLocation]:
"""Convert ParkLocation to UnifiedLocation."""
if not location_obj.point or location_obj.latitude is None or location_obj.longitude is None:
if (
not location_obj.point
or location_obj.latitude is None
or location_obj.longitude is None
):
return None

park = location_obj.park
@@ -175,7 +179,11 @@ class RideLocationAdapter(BaseLocationAdapter):
self, location_obj: RideLocation
) -> Optional[UnifiedLocation]:
"""Convert RideLocation to UnifiedLocation."""
if not location_obj.point or location_obj.latitude is None or location_obj.longitude is None:
if (
not location_obj.point
or location_obj.latitude is None
or location_obj.longitude is None
):
return None

ride = location_obj.ride

@@ -86,12 +86,14 @@ class TrendingService:

if content_type in ["all", "parks"]:
park_items = self._calculate_trending_parks(
limit * 2 if content_type == "all" else limit)
limit * 2 if content_type == "all" else limit
)
trending_items.extend(park_items)

if content_type in ["all", "rides"]:
ride_items = self._calculate_trending_rides(
limit * 2 if content_type == "all" else limit)
limit * 2 if content_type == "all" else limit
)
trending_items.extend(ride_items)

# Sort by trending score and apply limit
@@ -105,7 +107,8 @@ class TrendingService:
cache.set(cache_key, formatted_results, self.CACHE_TTL)

self.logger.info(
f"Calculated {len(formatted_results)} trending items for {content_type}")
f"Calculated {len(formatted_results)} trending items for {content_type}"
)
return formatted_results

except Exception as e:
@@ -150,12 +153,14 @@ class TrendingService:

if content_type in ["all", "parks"]:
parks = self._get_new_parks(
cutoff_date, limit * 2 if content_type == "all" else limit)
cutoff_date, limit * 2 if content_type == "all" else limit
)
new_items.extend(parks)

if content_type in ["all", "rides"]:
rides = self._get_new_rides(
cutoff_date, limit * 2 if content_type == "all" else limit)
cutoff_date, limit * 2 if content_type == "all" else limit
)
new_items.extend(rides)

# Sort by date added (most recent first) and apply limit
@@ -169,7 +174,8 @@ class TrendingService:
cache.set(cache_key, formatted_results, 1800) # Cache for 30 minutes

self.logger.info(
f"Calculated {len(formatted_results)} new items for {content_type}")
f"Calculated {len(formatted_results)} new items for {content_type}"
)
return formatted_results

except Exception as e:
@@ -198,18 +204,20 @@ class TrendingService:
state = ""
country = ""
try:
location = getattr(park, 'location', None)
location = getattr(park, "location", None)
if location:
city = getattr(location, 'city', '') or ""
state = getattr(location, 'state', '') or ""
country = getattr(location, 'country', '') or ""
city = getattr(location, "city", "") or ""
state = getattr(location, "state", "") or ""
country = getattr(location, "country", "") or ""
except Exception:
pass

# Get card image URL
card_image_url = ""
if park.card_image and hasattr(park.card_image, 'image'):
card_image_url = park.card_image.image.url if park.card_image.image else ""
if park.card_image and hasattr(park.card_image, "image"):
card_image_url = (
park.card_image.image.url if park.card_image.image else ""
)

# Get primary company (operator)
primary_company = park.operator.name if park.operator else ""
@@ -229,7 +237,9 @@ class TrendingService:
if park.average_rating
else 0.0
),
"date_opened": opening_date.isoformat() if opening_date else "",
"date_opened": (
opening_date.isoformat() if opening_date else ""
),
"url": park.url,
"card_image": card_image_url,
"city": city,
@@ -262,8 +272,10 @@ class TrendingService:

# Get card image URL
card_image_url = ""
if ride.card_image and hasattr(ride.card_image, 'image'):
card_image_url = ride.card_image.image.url if ride.card_image.image else ""
if ride.card_image and hasattr(ride.card_image, "image"):
card_image_url = (
ride.card_image.image.url if ride.card_image.image else ""
)

trending_rides.append(
{
@@ -280,7 +292,9 @@ class TrendingService:
if ride.average_rating
else 0.0
),
"date_opened": opening_date.isoformat() if opening_date else "",
"date_opened": (
opening_date.isoformat() if opening_date else ""
),
"url": ride.url,
"park_url": ride.park.url if ride.park else "",
"card_image": card_image_url,
@@ -474,18 +488,20 @@ class TrendingService:
state = ""
country = ""
try:
location = getattr(park, 'location', None)
location = getattr(park, "location", None)
if location:
city = getattr(location, 'city', '') or ""
state = getattr(location, 'state', '') or ""
country = getattr(location, 'country', '') or ""
city = getattr(location, "city", "") or ""
state = getattr(location, "state", "") or ""
country = getattr(location, "country", "") or ""
except Exception:
pass

# Get card image URL
card_image_url = ""
if park.card_image and hasattr(park.card_image, 'image'):
card_image_url = park.card_image.image.url if park.card_image.image else ""
if park.card_image and hasattr(park.card_image, "image"):
card_image_url = (
park.card_image.image.url if park.card_image.image else ""
)

# Get primary company (operator)
primary_company = park.operator.name if park.operator else ""
@@ -543,8 +559,10 @@ class TrendingService:

# Get card image URL
card_image_url = ""
if ride.card_image and hasattr(ride.card_image, 'image'):
card_image_url = ride.card_image.image.url if ride.card_image.image else ""
if ride.card_image and hasattr(ride.card_image, "image"):
card_image_url = (
ride.card_image.image.url if ride.card_image.image else ""
)

results.append(
{

@@ -7,13 +7,12 @@ All tasks run asynchronously to avoid blocking the main application.

import logging
from datetime import datetime, timedelta
from typing import Dict, List, Any, Optional
from typing import Dict, List, Any
from celery import shared_task
from django.utils import timezone
from django.core.cache import cache
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q, Count, Avg, F
from django.db import transaction
from django.db.models import Q

from apps.core.analytics import PageView
from apps.parks.models import Park
@@ -23,7 +22,9 @@ logger = logging.getLogger(__name__)


@shared_task(bind=True, max_retries=3, default_retry_delay=60)
def calculate_trending_content(self, content_type: str = "all", limit: int = 50) -> Dict[str, Any]:
def calculate_trending_content(
self, content_type: str = "all", limit: int = 50
) -> Dict[str, Any]:
"""
Calculate trending content using real analytics data.

@@ -53,7 +54,7 @@ def calculate_trending_content(self, content_type: str = "all", limit: int = 50)
park_items = _calculate_trending_parks(
current_period_hours,
previous_period_hours,
limit if content_type == "parks" else limit * 2
limit if content_type == "parks" else limit * 2,
)
trending_items.extend(park_items)

@@ -61,7 +62,7 @@ def calculate_trending_content(self, content_type: str = "all", limit: int = 50)
ride_items = _calculate_trending_rides(
current_period_hours,
previous_period_hours,
limit if content_type == "rides" else limit * 2
limit if content_type == "rides" else limit * 2,
)
trending_items.extend(ride_items)

@@ -71,14 +72,16 @@ def calculate_trending_content(self, content_type: str = "all", limit: int = 50)

# Format results for API consumption
formatted_results = _format_trending_results(
trending_items, current_period_hours, previous_period_hours)
trending_items, current_period_hours, previous_period_hours
)

# Cache results
cache_key = f"trending:calculated:{content_type}:{limit}"
cache.set(cache_key, formatted_results, 3600) # Cache for 1 hour

logger.info(
f"Calculated {len(formatted_results)} trending items for {content_type}")
f"Calculated {len(formatted_results)} trending items for {content_type}"
)

return {
"success": True,
@@ -95,7 +98,9 @@ def calculate_trending_content(self, content_type: str = "all", limit: int = 50)


@shared_task(bind=True, max_retries=3, default_retry_delay=30)
def calculate_new_content(self, content_type: str = "all", days_back: int = 30, limit: int = 50) -> Dict[str, Any]:
def calculate_new_content(
self, content_type: str = "all", days_back: int = 30, limit: int = 50
) -> Dict[str, Any]:
"""
Calculate new content based on opening dates and creation dates.

@@ -115,12 +120,14 @@ def calculate_new_content(self, content_type: str = "all", days_back: int = 30,

if content_type in ["all", "parks"]:
parks = _get_new_parks(
cutoff_date, limit if content_type == "parks" else limit * 2)
cutoff_date, limit if content_type == "parks" else limit * 2
)
new_items.extend(parks)

if content_type in ["all", "rides"]:
rides = _get_new_rides(
cutoff_date, limit if content_type == "rides" else limit * 2)
cutoff_date, limit if content_type == "rides" else limit * 2
)
new_items.extend(rides)

# Sort by date added (most recent first) and apply limit
@@ -177,7 +184,9 @@ def warm_trending_cache(self) -> Dict[str, Any]:
calculate_new_content.delay(**query)

results[f"trending_{query['content_type']}_{query['limit']}"] = "scheduled"
results[f"new_content_{query['content_type']}_{query['limit']}"] = "scheduled"
results[f"new_content_{query['content_type']}_{query['limit']}"] = (
"scheduled"
)

logger.info("Trending cache warming completed")

@@ -197,70 +206,93 @@ def warm_trending_cache(self) -> Dict[str, Any]:
}


def _calculate_trending_parks(current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
def _calculate_trending_parks(
current_period_hours: int, previous_period_hours: int, limit: int
) -> List[Dict[str, Any]]:
"""Calculate trending scores for parks using real data."""
parks = Park.objects.filter(
status="OPERATING").select_related("location", "operator")
parks = Park.objects.filter(status="OPERATING").select_related(
"location", "operator"
)

trending_parks = []

for park in parks:
try:
score = _calculate_content_score(
park, "park", current_period_hours, previous_period_hours)
park, "park", current_period_hours, previous_period_hours
)
if score > 0: # Only include items with positive trending scores
trending_parks.append({
"content_object": park,
"content_type": "park",
"trending_score": score,
"id": park.id,
"name": park.name,
"slug": park.slug,
"location": park.formatted_location if hasattr(park, "location") else "",
"category": "park",
"rating": float(park.average_rating) if park.average_rating else 0.0,
})
trending_parks.append(
{
"content_object": park,
"content_type": "park",
"trending_score": score,
"id": park.id,
"name": park.name,
"slug": park.slug,
"location": (
park.formatted_location if hasattr(park, "location") else ""
),
"category": "park",
"rating": (
float(park.average_rating) if park.average_rating else 0.0
),
}
)
except Exception as e:
logger.warning(f"Error calculating score for park {park.id}: {e}")

return trending_parks


def _calculate_trending_rides(current_period_hours: int, previous_period_hours: int, limit: int) -> List[Dict[str, Any]]:
def _calculate_trending_rides(
current_period_hours: int, previous_period_hours: int, limit: int
) -> List[Dict[str, Any]]:
"""Calculate trending scores for rides using real data."""
rides = Ride.objects.filter(status="OPERATING").select_related(
"park", "park__location")
"park", "park__location"
)

trending_rides = []

for ride in rides:
try:
score = _calculate_content_score(
ride, "ride", current_period_hours, previous_period_hours)
ride, "ride", current_period_hours, previous_period_hours
)
if score > 0: # Only include items with positive trending scores
# Get location from park
location = ""
if ride.park and hasattr(ride.park, "location") and ride.park.location:
location = ride.park.formatted_location

trending_rides.append({
"content_object": ride,
"content_type": "ride",
"trending_score": score,
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"location": location,
"category": "ride",
"rating": float(ride.average_rating) if ride.average_rating else 0.0,
})
trending_rides.append(
{
"content_object": ride,
"content_type": "ride",
"trending_score": score,
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"location": location,
"category": "ride",
"rating": (
float(ride.average_rating) if ride.average_rating else 0.0
),
}
)
except Exception as e:
logger.warning(f"Error calculating score for ride {ride.pk}: {e}")

return trending_rides


def _calculate_content_score(content_obj: Any, content_type: str, current_period_hours: int, previous_period_hours: int) -> float:
def _calculate_content_score(
content_obj: Any,
content_type: str,
current_period_hours: int,
previous_period_hours: int,
) -> float:
"""
Calculate weighted trending score for content object using real analytics data.

@@ -279,7 +311,8 @@ def _calculate_content_score(content_obj: Any, content_type: str, current_period

# 1. View Growth Score (40% weight)
view_growth_score = _calculate_view_growth_score(
ct, content_obj.id, current_period_hours, previous_period_hours)
ct, content_obj.id, current_period_hours, previous_period_hours
)

# 2. Rating Score (30% weight)
rating_score = _calculate_rating_score(content_obj)
@@ -289,14 +322,15 @@ def _calculate_content_score(content_obj: Any, content_type: str, current_period

# 4. Popularity Score (10% weight)
popularity_score = _calculate_popularity_score(
ct, content_obj.id, current_period_hours)
ct, content_obj.id, current_period_hours
)

# Calculate weighted final score
final_score = (
view_growth_score * 0.4 +
rating_score * 0.3 +
recency_score * 0.2 +
popularity_score * 0.1
view_growth_score * 0.4
+ rating_score * 0.3
+ recency_score * 0.2
+ popularity_score * 0.1
)

logger.debug(
@@ -310,11 +344,17 @@ def _calculate_content_score(content_obj: Any, content_type: str, current_period

except Exception as e:
logger.error(
f"Error calculating score for {content_type} {content_obj.id}: {e}")
f"Error calculating score for {content_type} {content_obj.id}: {e}"
)
return 0.0


def _calculate_view_growth_score(content_type: ContentType, object_id: int, current_period_hours: int, previous_period_hours: int) -> float:
def _calculate_view_growth_score(
content_type: ContentType,
object_id: int,
current_period_hours: int,
previous_period_hours: int,
) -> float:
"""Calculate normalized view growth score using real PageView data."""
try:
current_views, previous_views, growth_percentage = PageView.get_views_growth(
@@ -330,8 +370,9 @@ def _calculate_view_growth_score(content_type: ContentType, object_id: int, curr

# Normalize growth percentage to 0-1 scale
# 100% growth = 0.5, 500% growth = 1.0
normalized_growth = min(growth_percentage / 500.0,
1.0) if growth_percentage > 0 else 0.0
normalized_growth = (
min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
)
return max(normalized_growth, 0.0)

except Exception as e:
@@ -389,11 +430,14 @@ def _calculate_recency_score(content_obj: Any) -> float:
return 0.5


def _calculate_popularity_score(content_type: ContentType, object_id: int, hours: int) -> float:
def _calculate_popularity_score(
content_type: ContentType, object_id: int, hours: int
) -> float:
"""Calculate popularity score based on total view count."""
try:
total_views = PageView.get_total_views_count(
content_type, object_id, hours=hours)
content_type, object_id, hours=hours
)

# Normalize views to 0-1 scale
# 0 views = 0.0, 100 views = 0.5, 1000+ views = 1.0
@@ -431,17 +475,19 @@ def _get_new_parks(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
if opening_date and isinstance(opening_date, datetime):
opening_date = opening_date.date()

results.append({
"content_object": park,
"content_type": "park",
"id": park.pk,
"name": park.name,
"slug": park.slug,
"park": park.name, # For parks, park field is the park name itself
"category": "park",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
})
results.append(
{
"content_object": park,
"content_type": "park",
"id": park.pk,
"name": park.name,
"slug": park.slug,
"park": park.name, # For parks, park field is the park name itself
"category": "park",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
}
)

return results

@@ -460,7 +506,8 @@ def _get_new_rides(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
results = []
for ride in new_rides:
date_added = getattr(ride, "opening_date", None) or getattr(
ride, "created_at", None)
ride, "created_at", None
)
if date_added:
if isinstance(date_added, datetime):
date_added = date_added.date()
@@ -469,22 +516,28 @@ def _get_new_rides(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
if opening_date and isinstance(opening_date, datetime):
opening_date = opening_date.date()

results.append({
"content_object": ride,
"content_type": "ride",
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"park": ride.park.name if ride.park else "",
"category": "ride",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
})
results.append(
{
"content_object": ride,
"content_type": "ride",
"id": ride.pk,
"name": ride.name,
"slug": ride.slug,
"park": ride.park.name if ride.park else "",
"category": "ride",
"date_added": date_added.isoformat() if date_added else "",
"date_opened": opening_date.isoformat() if opening_date else "",
}
)

return results


def _format_trending_results(trending_items: List[Dict[str, Any]], current_period_hours: int, previous_period_hours: int) -> List[Dict[str, Any]]:
def _format_trending_results(
trending_items: List[Dict[str, Any]],
current_period_hours: int,
previous_period_hours: int,
) -> List[Dict[str, Any]]:
"""Format trending results for frontend consumption."""
formatted_results = []

@@ -493,11 +546,13 @@ def _format_trending_results(trending_items: List[Dict[str, Any]], current_perio
# Get view change for display
content_obj = item["content_object"]
ct = ContentType.objects.get_for_model(content_obj)
current_views, previous_views, growth_percentage = PageView.get_views_growth(
ct,
content_obj.id,
current_period_hours,
previous_period_hours,
current_views, previous_views, growth_percentage = (
PageView.get_views_growth(
ct,
content_obj.id,
current_period_hours,
previous_period_hours,
)
)

# Format exactly as frontend expects
@@ -525,7 +580,9 @@ def _format_trending_results(trending_items: List[Dict[str, Any]], current_perio
return formatted_results


def _format_new_content_results(new_items: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
def _format_new_content_results(
new_items: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
"""Format new content results for frontend consumption."""
formatted_results = []
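Usage sketch for the reformatted commands, invoked through Django's call_command; "calculate_trending" matches the module docstring above ("Run with: python manage.py calculate_trending"), while "calculate_new_content" is assumed from the corresponding task name:

from django.core.management import call_command

# Options mirror the add_arguments definitions in the diff above.
call_command("calculate_trending", content_type="parks", limit=50, verbose=True)
call_command("calculate_new_content", content_type="rides", days_back=30, limit=50)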