Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2025-12-22 07:31:08 -05:00)
Commit: update
@@ -1,4 +0,0 @@
from .location_service import RideLocationService
from .media_service import RideMediaService

__all__ = ["RideLocationService", "RideMediaService"]
@@ -1,784 +0,0 @@
|
||||
"""
|
||||
Smart Ride Loader for Hybrid Filtering Strategy
|
||||
|
||||
This service implements intelligent data loading for rides, automatically choosing
|
||||
between client-side and server-side filtering based on data size and complexity.
|
||||
|
||||
Key Features:
|
||||
- Automatic strategy selection (≤200 records = client-side, >200 = server-side)
|
||||
- Progressive loading for large datasets
|
||||
- Intelligent caching with automatic invalidation
|
||||
- Comprehensive filter metadata generation
|
||||
- Optimized database queries with strategic prefetching
|
||||
|
||||
Architecture:
|
||||
- Client-side: Load all data once, filter in frontend
|
||||
- Server-side: Apply filters in database, paginate results
|
||||
- Hybrid: Combine both approaches based on data characteristics
|
||||
"""
|
||||
|
||||
from typing import Dict, List, Any, Optional
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.db.models import Q, Min, Max
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SmartRideLoader:
|
||||
"""
|
||||
Intelligent ride data loader that chooses optimal filtering strategy.
|
||||
|
||||
Strategy Selection:
|
||||
- ≤200 total records: Client-side filtering (load all data)
|
||||
- >200 total records: Server-side filtering (database filtering + pagination)
|
||||
|
||||
Features:
|
||||
- Progressive loading for large datasets
|
||||
- 5-minute intelligent caching
|
||||
- Comprehensive filter metadata
|
||||
- Optimized queries with prefetch_related
|
||||
"""
|
||||
|
||||
# Configuration constants
|
||||
INITIAL_LOAD_SIZE = 50
|
||||
PROGRESSIVE_LOAD_SIZE = 25
|
||||
MAX_CLIENT_SIDE_RECORDS = 200
|
||||
CACHE_TIMEOUT = 300 # 5 minutes
|
||||
|
||||
def __init__(self):
|
||||
self.cache_prefix = "rides_hybrid_"
|
||||
|
||||
def get_initial_load(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Get initial data load with automatic strategy selection.
|
||||
|
||||
Args:
|
||||
filters: Optional filter parameters
|
||||
|
||||
Returns:
|
||||
Dict containing:
|
||||
- strategy: 'client_side' or 'server_side'
|
||||
- data: List of ride records
|
||||
- total_count: Total number of records
|
||||
- has_more: Whether more data is available
|
||||
- filter_metadata: Available filter options
|
||||
"""
|
||||
|
||||
# Get total count for strategy decision
|
||||
total_count = self._get_total_count(filters)
|
||||
|
||||
# Choose strategy based on total count
|
||||
if total_count <= self.MAX_CLIENT_SIDE_RECORDS:
|
||||
return self._get_client_side_data(filters, total_count)
|
||||
else:
|
||||
return self._get_server_side_data(filters, total_count)
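    # Illustrative usage (not part of the original module; the filter keys shown are
    # examples of the ones handled in _build_filtered_queryset below):
    #
    #   loader = SmartRideLoader()
    #   payload = loader.get_initial_load({"park_slug": "example-park", "category": ["RC"]})
    #   if payload["strategy"] == "server_side" and payload["has_more"]:
    #       more = loader.get_progressive_load(payload["next_offset"], {"park_slug": "example-park"})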

    def get_progressive_load(self, offset: int, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Get additional data for progressive loading (server-side strategy only).

        Args:
            offset: Number of records to skip
            filters: Filter parameters

        Returns:
            Dict containing additional ride records
        """
        # Build queryset with filters
        queryset = self._build_filtered_queryset(filters)

        # Get total count for this filtered set
        total_count = queryset.count()

        # Get progressive batch
        rides = list(queryset[offset:offset + self.PROGRESSIVE_LOAD_SIZE])

        return {
            'rides': self._serialize_rides(rides),
            'total_count': total_count,
            'has_more': len(rides) == self.PROGRESSIVE_LOAD_SIZE,
            'next_offset': offset + len(rides) if len(rides) == self.PROGRESSIVE_LOAD_SIZE else None
        }

    def get_filter_metadata(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Get comprehensive filter metadata for dynamic filter generation.

        Args:
            filters: Optional filters to scope the metadata

        Returns:
            Dict containing all available filter options and ranges
        """
        cache_key = f"{self.cache_prefix}filter_metadata_{hash(str(filters))}"
        metadata = cache.get(cache_key)

        if metadata is None:
            metadata = self._generate_filter_metadata(filters)
            cache.set(cache_key, metadata, self.CACHE_TIMEOUT)

        return metadata
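    # Note on the cache-key scheme above (editorial observation, not original code):
    # the suffix comes from hash(str(filters)); str() of a dict depends on insertion
    # order and hash() of a str is randomized per process, so identical filters can
    # map to different keys across workers. A stable alternative would be, e.g.:
    #
    #   import hashlib, json
    #   suffix = hashlib.md5(
    #       json.dumps(filters or {}, sort_keys=True, default=str).encode()
    #   ).hexdigest()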

    def invalidate_cache(self) -> None:
        """Invalidate all cached data for rides."""
        # Note: In production, you might want to use cache versioning
        # or more sophisticated cache invalidation
        cache_keys = [
            f"{self.cache_prefix}client_side_all",
            f"{self.cache_prefix}filter_metadata",
            f"{self.cache_prefix}total_count",
        ]

        for key in cache_keys:
            cache.delete(key)

    def _get_total_count(self, filters: Optional[Dict[str, Any]] = None) -> int:
        """Get total count of rides matching filters."""
        cache_key = f"{self.cache_prefix}total_count_{hash(str(filters))}"
        count = cache.get(cache_key)

        if count is None:
            queryset = self._build_filtered_queryset(filters)
            count = queryset.count()
            cache.set(cache_key, count, self.CACHE_TIMEOUT)

        return count

    def _get_client_side_data(self, filters: Optional[Dict[str, Any]],
                              total_count: int) -> Dict[str, Any]:
        """Get all data for client-side filtering."""
        cache_key = f"{self.cache_prefix}client_side_all"
        cached_data = cache.get(cache_key)

        if cached_data is None:
            from apps.rides.models import Ride

            # Load all rides with optimized query
            queryset = Ride.objects.select_related(
                'park',
                'park__location',
                'park_area',
                'manufacturer',
                'designer',
                'ride_model',
                'ride_model__manufacturer'
            ).prefetch_related(
                'coaster_stats'
            ).order_by('name')

            rides = list(queryset)
            cached_data = self._serialize_rides(rides)
            cache.set(cache_key, cached_data, self.CACHE_TIMEOUT)

        return {
            'strategy': 'client_side',
            'rides': cached_data,
            'total_count': total_count,
            'has_more': False,
            'filter_metadata': self.get_filter_metadata(filters)
        }

    def _get_server_side_data(self, filters: Optional[Dict[str, Any]],
                              total_count: int) -> Dict[str, Any]:
        """Get initial batch for server-side filtering."""
        # Build filtered queryset
        queryset = self._build_filtered_queryset(filters)

        # Get initial batch
        rides = list(queryset[:self.INITIAL_LOAD_SIZE])

        return {
            'strategy': 'server_side',
            'rides': self._serialize_rides(rides),
            'total_count': total_count,
            'has_more': len(rides) == self.INITIAL_LOAD_SIZE,
            'next_offset': len(rides) if len(rides) == self.INITIAL_LOAD_SIZE else None
        }

    def _build_filtered_queryset(self, filters: Optional[Dict[str, Any]]):
        """Build Django queryset with applied filters."""
        from apps.rides.models import Ride

        # Start with optimized base queryset
        queryset = Ride.objects.select_related(
            'park',
            'park__location',
            'park_area',
            'manufacturer',
            'designer',
            'ride_model',
            'ride_model__manufacturer'
        ).prefetch_related(
            'coaster_stats'
        )

        if not filters:
            return queryset.order_by('name')

        # Apply filters
        q_objects = Q()

        # Text search using computed search_text field
        if 'search' in filters and filters['search']:
            search_term = filters['search'].lower()
            q_objects &= Q(search_text__icontains=search_term)

        # Park filters
        if 'park_slug' in filters and filters['park_slug']:
            q_objects &= Q(park__slug=filters['park_slug'])

        if 'park_id' in filters and filters['park_id']:
            q_objects &= Q(park_id=filters['park_id'])

        # Category filters
        if 'category' in filters and filters['category']:
            q_objects &= Q(category__in=filters['category'])

        # Status filters
        if 'status' in filters and filters['status']:
            q_objects &= Q(status__in=filters['status'])

        # Company filters
        if 'manufacturer_ids' in filters and filters['manufacturer_ids']:
            q_objects &= Q(manufacturer_id__in=filters['manufacturer_ids'])

        if 'designer_ids' in filters and filters['designer_ids']:
            q_objects &= Q(designer_id__in=filters['designer_ids'])

        # Ride model filters
        if 'ride_model_ids' in filters and filters['ride_model_ids']:
            q_objects &= Q(ride_model_id__in=filters['ride_model_ids'])

        # Opening year filters using computed opening_year field
        if 'opening_year' in filters and filters['opening_year']:
            q_objects &= Q(opening_year=filters['opening_year'])

        if 'min_opening_year' in filters and filters['min_opening_year']:
            q_objects &= Q(opening_year__gte=filters['min_opening_year'])

        if 'max_opening_year' in filters and filters['max_opening_year']:
            q_objects &= Q(opening_year__lte=filters['max_opening_year'])

        # Rating filters
        if 'min_rating' in filters and filters['min_rating']:
            q_objects &= Q(average_rating__gte=filters['min_rating'])

        if 'max_rating' in filters and filters['max_rating']:
            q_objects &= Q(average_rating__lte=filters['max_rating'])

        # Height requirement filters
        if 'min_height_requirement' in filters and filters['min_height_requirement']:
            q_objects &= Q(min_height_in__gte=filters['min_height_requirement'])

        if 'max_height_requirement' in filters and filters['max_height_requirement']:
            q_objects &= Q(max_height_in__lte=filters['max_height_requirement'])

        # Capacity filters
        if 'min_capacity' in filters and filters['min_capacity']:
            q_objects &= Q(capacity_per_hour__gte=filters['min_capacity'])

        if 'max_capacity' in filters and filters['max_capacity']:
            q_objects &= Q(capacity_per_hour__lte=filters['max_capacity'])

        # Roller coaster specific filters
        if 'roller_coaster_type' in filters and filters['roller_coaster_type']:
            q_objects &= Q(coaster_stats__roller_coaster_type__in=filters['roller_coaster_type'])

        if 'track_material' in filters and filters['track_material']:
            q_objects &= Q(coaster_stats__track_material__in=filters['track_material'])

        if 'propulsion_system' in filters and filters['propulsion_system']:
            q_objects &= Q(coaster_stats__propulsion_system__in=filters['propulsion_system'])

        # Roller coaster height filters
        if 'min_height_ft' in filters and filters['min_height_ft']:
            q_objects &= Q(coaster_stats__height_ft__gte=filters['min_height_ft'])

        if 'max_height_ft' in filters and filters['max_height_ft']:
            q_objects &= Q(coaster_stats__height_ft__lte=filters['max_height_ft'])

        # Roller coaster speed filters
        if 'min_speed_mph' in filters and filters['min_speed_mph']:
            q_objects &= Q(coaster_stats__speed_mph__gte=filters['min_speed_mph'])

        if 'max_speed_mph' in filters and filters['max_speed_mph']:
            q_objects &= Q(coaster_stats__speed_mph__lte=filters['max_speed_mph'])

        # Inversion filters
        if 'min_inversions' in filters and filters['min_inversions']:
            q_objects &= Q(coaster_stats__inversions__gte=filters['min_inversions'])

        if 'max_inversions' in filters and filters['max_inversions']:
            q_objects &= Q(coaster_stats__inversions__lte=filters['max_inversions'])

        if 'has_inversions' in filters and filters['has_inversions'] is not None:
            if filters['has_inversions']:
                q_objects &= Q(coaster_stats__inversions__gt=0)
            else:
                q_objects &= Q(coaster_stats__inversions=0)

        # Apply filters and ordering
        queryset = queryset.filter(q_objects)

        # Apply ordering
        ordering = filters.get('ordering', 'name')
        if ordering in ['height_ft', '-height_ft', 'speed_mph', '-speed_mph']:
            # For coaster stats ordering, we need to join and order by the stats
            ordering_field = ordering.replace('height_ft', 'coaster_stats__height_ft').replace('speed_mph', 'coaster_stats__speed_mph')
            queryset = queryset.order_by(ordering_field)
        else:
            queryset = queryset.order_by(ordering)

        return queryset
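    # Worked example (illustrative; the filter keys come from the branches above):
    # a request such as
    #
    #   {"park_slug": "example-park", "min_height_ft": 200, "ordering": "-height_ft"}
    #
    # builds roughly
    #
    #   Ride.objects.select_related(...).prefetch_related('coaster_stats').filter(
    #       Q(park__slug="example-park") & Q(coaster_stats__height_ft__gte=200)
    #   ).order_by('-coaster_stats__height_ft')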

    def _serialize_rides(self, rides: List) -> List[Dict[str, Any]]:
        """Serialize ride objects to dictionaries."""
        serialized = []

        for ride in rides:
            # Basic ride data
            ride_data = {
                'id': ride.id,
                'name': ride.name,
                'slug': ride.slug,
                'description': ride.description,
                'category': ride.category,
                'status': ride.status,
                'opening_date': ride.opening_date.isoformat() if ride.opening_date else None,
                'closing_date': ride.closing_date.isoformat() if ride.closing_date else None,
                'opening_year': ride.opening_year,
                'min_height_in': ride.min_height_in,
                'max_height_in': ride.max_height_in,
                'capacity_per_hour': ride.capacity_per_hour,
                'ride_duration_seconds': ride.ride_duration_seconds,
                'average_rating': float(ride.average_rating) if ride.average_rating else None,
                'url': ride.url,
                'park_url': ride.park_url,
                'created_at': ride.created_at.isoformat(),
                'updated_at': ride.updated_at.isoformat(),
            }

            # Park data
            if ride.park:
                ride_data['park'] = {
                    'id': ride.park.id,
                    'name': ride.park.name,
                    'slug': ride.park.slug,
                }

                # Park location data
                if hasattr(ride.park, 'location') and ride.park.location:
                    ride_data['park']['location'] = {
                        'city': ride.park.location.city,
                        'state': ride.park.location.state,
                        'country': ride.park.location.country,
                    }

            # Park area data
            if ride.park_area:
                ride_data['park_area'] = {
                    'id': ride.park_area.id,
                    'name': ride.park_area.name,
                    'slug': ride.park_area.slug,
                }

            # Company data
            if ride.manufacturer:
                ride_data['manufacturer'] = {
                    'id': ride.manufacturer.id,
                    'name': ride.manufacturer.name,
                    'slug': ride.manufacturer.slug,
                }

            if ride.designer:
                ride_data['designer'] = {
                    'id': ride.designer.id,
                    'name': ride.designer.name,
                    'slug': ride.designer.slug,
                }

            # Ride model data
            if ride.ride_model:
                ride_data['ride_model'] = {
                    'id': ride.ride_model.id,
                    'name': ride.ride_model.name,
                    'slug': ride.ride_model.slug,
                    'category': ride.ride_model.category,
                }

                if ride.ride_model.manufacturer:
                    ride_data['ride_model']['manufacturer'] = {
                        'id': ride.ride_model.manufacturer.id,
                        'name': ride.ride_model.manufacturer.name,
                        'slug': ride.ride_model.manufacturer.slug,
                    }

            # Roller coaster stats
            if hasattr(ride, 'coaster_stats') and ride.coaster_stats:
                stats = ride.coaster_stats
                ride_data['coaster_stats'] = {
                    'height_ft': float(stats.height_ft) if stats.height_ft else None,
                    'length_ft': float(stats.length_ft) if stats.length_ft else None,
                    'speed_mph': float(stats.speed_mph) if stats.speed_mph else None,
                    'inversions': stats.inversions,
                    'ride_time_seconds': stats.ride_time_seconds,
                    'track_type': stats.track_type,
                    'track_material': stats.track_material,
                    'roller_coaster_type': stats.roller_coaster_type,
                    'max_drop_height_ft': float(stats.max_drop_height_ft) if stats.max_drop_height_ft else None,
                    'propulsion_system': stats.propulsion_system,
                    'train_style': stats.train_style,
                    'trains_count': stats.trains_count,
                    'cars_per_train': stats.cars_per_train,
                    'seats_per_car': stats.seats_per_car,
                }

            serialized.append(ride_data)

        return serialized

    def _generate_filter_metadata(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """Generate comprehensive filter metadata."""
        from apps.rides.models import Ride, RideModel
        from apps.rides.models.company import Company
        from apps.rides.models.rides import RollerCoasterStats

        # Get unique values from database with counts
        parks_data = list(Ride.objects.exclude(
            park__isnull=True
        ).select_related('park').values(
            'park__id', 'park__name', 'park__slug'
        ).annotate(count=models.Count('id')).distinct().order_by('park__name'))

        park_areas_data = list(Ride.objects.exclude(
            park_area__isnull=True
        ).select_related('park_area').values(
            'park_area__id', 'park_area__name', 'park_area__slug'
        ).annotate(count=models.Count('id')).distinct().order_by('park_area__name'))

        manufacturers_data = list(Company.objects.filter(
            roles__contains=['MANUFACTURER']
        ).values('id', 'name', 'slug').annotate(
            count=models.Count('manufactured_rides')
        ).order_by('name'))

        designers_data = list(Company.objects.filter(
            roles__contains=['DESIGNER']
        ).values('id', 'name', 'slug').annotate(
            count=models.Count('designed_rides')
        ).order_by('name'))

        ride_models_data = list(RideModel.objects.select_related(
            'manufacturer'
        ).values(
            'id', 'name', 'slug', 'manufacturer__name', 'manufacturer__slug', 'category'
        ).annotate(count=models.Count('rides')).order_by('manufacturer__name', 'name'))

        # Get categories and statuses with counts
        categories_data = list(Ride.objects.values('category').annotate(
            count=models.Count('id')
        ).order_by('category'))

        statuses_data = list(Ride.objects.values('status').annotate(
            count=models.Count('id')
        ).order_by('status'))

        # Get roller coaster specific data with counts
        rc_types_data = list(RollerCoasterStats.objects.values('roller_coaster_type').annotate(
            count=models.Count('ride')
        ).exclude(roller_coaster_type__isnull=True).order_by('roller_coaster_type'))

        track_materials_data = list(RollerCoasterStats.objects.values('track_material').annotate(
            count=models.Count('ride')
        ).exclude(track_material__isnull=True).order_by('track_material'))

        propulsion_systems_data = list(RollerCoasterStats.objects.values('propulsion_system').annotate(
            count=models.Count('ride')
        ).exclude(propulsion_system__isnull=True).order_by('propulsion_system'))

        # Convert to frontend-expected format with value/label/count
        categories = [
            {
                'value': item['category'],
                'label': self._get_category_label(item['category']),
                'count': item['count']
            }
            for item in categories_data
        ]

        statuses = [
            {
                'value': item['status'],
                'label': self._get_status_label(item['status']),
                'count': item['count']
            }
            for item in statuses_data
        ]

        roller_coaster_types = [
            {
                'value': item['roller_coaster_type'],
                'label': self._get_rc_type_label(item['roller_coaster_type']),
                'count': item['count']
            }
            for item in rc_types_data
        ]

        track_materials = [
            {
                'value': item['track_material'],
                'label': self._get_track_material_label(item['track_material']),
                'count': item['count']
            }
            for item in track_materials_data
        ]

        propulsion_systems = [
            {
                'value': item['propulsion_system'],
                'label': self._get_propulsion_system_label(item['propulsion_system']),
                'count': item['count']
            }
            for item in propulsion_systems_data
        ]

        # Convert other data to expected format
        parks = [
            {
                'value': str(item['park__id']),
                'label': item['park__name'],
                'count': item['count']
            }
            for item in parks_data
        ]

        park_areas = [
            {
                'value': str(item['park_area__id']),
                'label': item['park_area__name'],
                'count': item['count']
            }
            for item in park_areas_data
        ]

        manufacturers = [
            {
                'value': str(item['id']),
                'label': item['name'],
                'count': item['count']
            }
            for item in manufacturers_data
        ]

        designers = [
            {
                'value': str(item['id']),
                'label': item['name'],
                'count': item['count']
            }
            for item in designers_data
        ]

        ride_models = [
            {
                'value': str(item['id']),
                'label': f"{item['manufacturer__name']} {item['name']}",
                'count': item['count']
            }
            for item in ride_models_data
        ]

        # Calculate ranges from actual data
        ride_stats = Ride.objects.aggregate(
            min_rating=Min('average_rating'),
            max_rating=Max('average_rating'),
            min_height_req=Min('min_height_in'),
            max_height_req=Max('max_height_in'),
            min_capacity=Min('capacity_per_hour'),
            max_capacity=Max('capacity_per_hour'),
            min_duration=Min('ride_duration_seconds'),
            max_duration=Max('ride_duration_seconds'),
            min_year=Min('opening_year'),
            max_year=Max('opening_year'),
        )

        # Calculate roller coaster specific ranges
        coaster_stats = RollerCoasterStats.objects.aggregate(
            min_height_ft=Min('height_ft'),
            max_height_ft=Max('height_ft'),
            min_length_ft=Min('length_ft'),
            max_length_ft=Max('length_ft'),
            min_speed_mph=Min('speed_mph'),
            max_speed_mph=Max('speed_mph'),
            min_inversions=Min('inversions'),
            max_inversions=Max('inversions'),
            min_ride_time=Min('ride_time_seconds'),
            max_ride_time=Max('ride_time_seconds'),
            min_drop_height=Min('max_drop_height_ft'),
            max_drop_height=Max('max_drop_height_ft'),
            min_trains=Min('trains_count'),
            max_trains=Max('trains_count'),
            min_cars=Min('cars_per_train'),
            max_cars=Max('cars_per_train'),
            min_seats=Min('seats_per_car'),
            max_seats=Max('seats_per_car'),
        )

        return {
            'categorical': {
                'categories': categories,
                'statuses': statuses,
                'roller_coaster_types': roller_coaster_types,
                'track_materials': track_materials,
                'propulsion_systems': propulsion_systems,
                'parks': parks,
                'park_areas': park_areas,
                'manufacturers': manufacturers,
                'designers': designers,
                'ride_models': ride_models,
            },
            'ranges': {
                'rating': {
                    'min': float(ride_stats['min_rating'] or 1),
                    'max': float(ride_stats['max_rating'] or 10),
                    'step': 0.1,
                    'unit': 'stars'
                },
                'height_requirement': {
                    'min': ride_stats['min_height_req'] or 30,
                    'max': ride_stats['max_height_req'] or 90,
                    'step': 1,
                    'unit': 'inches'
                },
                'capacity': {
                    'min': ride_stats['min_capacity'] or 0,
                    'max': ride_stats['max_capacity'] or 5000,
                    'step': 50,
                    'unit': 'riders/hour'
                },
                'ride_duration': {
                    'min': ride_stats['min_duration'] or 0,
                    'max': ride_stats['max_duration'] or 600,
                    'step': 10,
                    'unit': 'seconds'
                },
                'opening_year': {
                    'min': ride_stats['min_year'] or 1800,
                    'max': ride_stats['max_year'] or 2030,
                    'step': 1,
                    'unit': 'year'
                },
                'height_ft': {
                    'min': float(coaster_stats['min_height_ft'] or 0),
                    'max': float(coaster_stats['max_height_ft'] or 500),
                    'step': 5,
                    'unit': 'feet'
                },
                'length_ft': {
                    'min': float(coaster_stats['min_length_ft'] or 0),
                    'max': float(coaster_stats['max_length_ft'] or 10000),
                    'step': 100,
                    'unit': 'feet'
                },
                'speed_mph': {
                    'min': float(coaster_stats['min_speed_mph'] or 0),
                    'max': float(coaster_stats['max_speed_mph'] or 150),
                    'step': 5,
                    'unit': 'mph'
                },
                'inversions': {
                    'min': coaster_stats['min_inversions'] or 0,
                    'max': coaster_stats['max_inversions'] or 20,
                    'step': 1,
                    'unit': 'inversions'
                },
            },
            'total_count': Ride.objects.count(),
        }

    def _get_category_label(self, category: str) -> str:
        """Convert category code to human-readable label."""
        category_labels = {
            'RC': 'Roller Coaster',
            'DR': 'Dark Ride',
            'FR': 'Flat Ride',
            'WR': 'Water Ride',
            'TR': 'Transport Ride',
            'OT': 'Other',
        }
        if category in category_labels:
            return category_labels[category]
        else:
            raise ValueError(f"Unknown ride category: {category}")

    def _get_status_label(self, status: str) -> str:
        """Convert status code to human-readable label."""
        status_labels = {
            'OPERATING': 'Operating',
            'CLOSED_TEMP': 'Temporarily Closed',
            'SBNO': 'Standing But Not Operating',
            'CLOSING': 'Closing Soon',
            'CLOSED_PERM': 'Permanently Closed',
            'UNDER_CONSTRUCTION': 'Under Construction',
            'DEMOLISHED': 'Demolished',
            'RELOCATED': 'Relocated',
        }
        if status in status_labels:
            return status_labels[status]
        else:
            raise ValueError(f"Unknown ride status: {status}")

    def _get_rc_type_label(self, rc_type: str) -> str:
        """Convert roller coaster type to human-readable label."""
        rc_type_labels = {
            'SITDOWN': 'Sit Down',
            'INVERTED': 'Inverted',
            'SUSPENDED': 'Suspended',
            'FLOORLESS': 'Floorless',
            'FLYING': 'Flying',
            'WING': 'Wing',
            'DIVE': 'Dive',
            'SPINNING': 'Spinning',
            'WILD_MOUSE': 'Wild Mouse',
            'BOBSLED': 'Bobsled',
            'PIPELINE': 'Pipeline',
            'FOURTH_DIMENSION': '4th Dimension',
            'FAMILY': 'Family',
        }
        if rc_type in rc_type_labels:
            return rc_type_labels[rc_type]
        else:
            raise ValueError(f"Unknown roller coaster type: {rc_type}")

    def _get_track_material_label(self, material: str) -> str:
        """Convert track material to human-readable label."""
        material_labels = {
            'STEEL': 'Steel',
            'WOOD': 'Wood',
            'HYBRID': 'Hybrid (Steel/Wood)',
        }
        if material in material_labels:
            return material_labels[material]
        else:
            raise ValueError(f"Unknown track material: {material}")

    def _get_propulsion_system_label(self, propulsion_system: str) -> str:
        """Convert propulsion system to human-readable label."""
        propulsion_labels = {
            'CHAIN': 'Chain Lift',
            'LSM': 'Linear Synchronous Motor',
            'LIM': 'Linear Induction Motor',
            'HYDRAULIC': 'Hydraulic Launch',
            'PNEUMATIC': 'Pneumatic Launch',
            'CABLE': 'Cable Lift',
            'FLYWHEEL': 'Flywheel Launch',
            'GRAVITY': 'Gravity',
            'NONE': 'No Propulsion System',
        }
        if propulsion_system in propulsion_labels:
            return propulsion_labels[propulsion_system]
        else:
            raise ValueError(f"Unknown propulsion system: {propulsion_system}")
@@ -1,359 +0,0 @@
|
||||
"""
|
||||
Rides-specific location services with OpenStreetMap integration.
|
||||
Handles location management for individual rides within parks.
|
||||
"""
|
||||
|
||||
import requests
|
||||
from typing import List, Dict, Any, Optional
|
||||
from django.db import transaction
|
||||
import logging
|
||||
|
||||
from ..models import RideLocation
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RideLocationService:
|
||||
"""
|
||||
Location service specifically for rides using OpenStreetMap integration.
|
||||
Focuses on precise positioning within parks and navigation assistance.
|
||||
"""
|
||||
|
||||
NOMINATIM_BASE_URL = "https://nominatim.openstreetmap.org"
|
||||
USER_AGENT = "ThrillWiki/1.0 (https://thrillwiki.com)"
|
||||
|
||||
@classmethod
|
||||
def create_ride_location(
|
||||
cls,
|
||||
*,
|
||||
ride,
|
||||
latitude: Optional[float] = None,
|
||||
longitude: Optional[float] = None,
|
||||
park_area: str = "",
|
||||
notes: str = "",
|
||||
entrance_notes: str = "",
|
||||
accessibility_notes: str = "",
|
||||
) -> RideLocation:
|
||||
"""
|
||||
Create a location for a ride within a park.
|
||||
|
||||
Args:
|
||||
ride: Ride instance
|
||||
latitude: Latitude coordinate (optional for rides)
|
||||
longitude: Longitude coordinate (optional for rides)
|
||||
park_area: Themed area within the park
|
||||
notes: General location notes
|
||||
entrance_notes: Entrance and navigation notes
|
||||
accessibility_notes: Accessibility information
|
||||
|
||||
Returns:
|
||||
Created RideLocation instance
|
||||
"""
|
||||
with transaction.atomic():
|
||||
ride_location = RideLocation(
|
||||
ride=ride,
|
||||
park_area=park_area,
|
||||
notes=notes,
|
||||
entrance_notes=entrance_notes,
|
||||
accessibility_notes=accessibility_notes,
|
||||
)
|
||||
|
||||
# Set coordinates if provided
|
||||
if latitude is not None and longitude is not None:
|
||||
ride_location.set_coordinates(latitude, longitude)
|
||||
|
||||
ride_location.full_clean()
|
||||
ride_location.save()
|
||||
|
||||
return ride_location
|
||||
|
||||
@classmethod
|
||||
def update_ride_location(
|
||||
cls, ride_location: RideLocation, **updates
|
||||
) -> RideLocation:
|
||||
"""
|
||||
Update ride location with validation.
|
||||
|
||||
Args:
|
||||
ride_location: RideLocation instance to update
|
||||
**updates: Fields to update
|
||||
|
||||
Returns:
|
||||
Updated RideLocation instance
|
||||
"""
|
||||
with transaction.atomic():
|
||||
# Handle coordinates separately
|
||||
latitude = updates.pop("latitude", None)
|
||||
longitude = updates.pop("longitude", None)
|
||||
|
||||
# Update regular fields
|
||||
for field, value in updates.items():
|
||||
if hasattr(ride_location, field):
|
||||
setattr(ride_location, field, value)
|
||||
|
||||
# Update coordinates if provided
|
||||
if latitude is not None and longitude is not None:
|
||||
ride_location.set_coordinates(latitude, longitude)
|
||||
|
||||
ride_location.full_clean()
|
||||
ride_location.save()
|
||||
|
||||
return ride_location
|
||||
|
||||
@classmethod
|
||||
def find_rides_in_area(cls, park, park_area: str) -> List[RideLocation]:
|
||||
"""
|
||||
Find all rides in a specific park area.
|
||||
|
||||
Args:
|
||||
park: Park instance
|
||||
park_area: Name of the park area/land
|
||||
|
||||
Returns:
|
||||
List of RideLocation instances in the area
|
||||
"""
|
||||
return list(
|
||||
RideLocation.objects.filter(ride__park=park, park_area__icontains=park_area)
|
||||
.select_related("ride")
|
||||
.order_by("ride__name")
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def find_nearby_rides(
|
||||
cls, latitude: float, longitude: float, park=None, radius_meters: float = 500
|
||||
) -> List[RideLocation]:
|
||||
"""
|
||||
Find rides near given coordinates using PostGIS.
|
||||
Useful for finding rides near a specific location within a park.
|
||||
|
||||
Args:
|
||||
latitude: Center latitude
|
||||
longitude: Center longitude
|
||||
park: Optional park to limit search to
|
||||
radius_meters: Search radius in meters (default: 500m)
|
||||
|
||||
Returns:
|
||||
List of nearby RideLocation instances
|
||||
"""
|
||||
from django.contrib.gis.geos import Point
|
||||
from django.contrib.gis.measure import Distance
|
||||
|
||||
center_point = Point(longitude, latitude, srid=4326)
|
||||
|
||||
queryset = RideLocation.objects.filter(
|
||||
point__distance_lte=(center_point, Distance(m=radius_meters)),
|
||||
point__isnull=False,
|
||||
)
|
||||
|
||||
if park:
|
||||
queryset = queryset.filter(ride__park=park)
|
||||
|
||||
return list(
|
||||
queryset.select_related("ride", "ride__park").order_by("point__distance")
|
||||
)
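    # Illustrative call (not from the original file; the coordinates are made up):
    #
    #   nearby = RideLocationService.find_nearby_rides(28.4, -81.5, radius_meters=250)
    #   for loc in nearby:
    #       print(loc.ride.name, loc.distance_to_center.m)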

    @classmethod
    def get_ride_navigation_info(cls, ride_location: RideLocation) -> Dict[str, Any]:
        """
        Get comprehensive navigation information for a ride.

        Args:
            ride_location: RideLocation instance

        Returns:
            Dictionary with navigation information
        """
        info = {
            "ride_name": ride_location.ride.name,
            "park_name": ride_location.ride.park.name,
            "park_area": ride_location.park_area,
            "has_coordinates": ride_location.has_coordinates,
            "entrance_notes": ride_location.entrance_notes,
            "accessibility_notes": ride_location.accessibility_notes,
            "general_notes": ride_location.notes,
        }

        # Add coordinate information if available
        if ride_location.has_coordinates:
            info.update(
                {
                    "latitude": ride_location.latitude,
                    "longitude": ride_location.longitude,
                    "coordinates": ride_location.coordinates,
                }
            )

        # Calculate distance to park entrance if park has location
        park_location = getattr(ride_location.ride.park, "location", None)
        if park_location and park_location.point:
            distance_km = ride_location.distance_to_park_location()
            if distance_km is not None:
                info["distance_from_park_entrance_km"] = round(distance_km, 2)

        return info

    @classmethod
    def estimate_ride_coordinates_from_park(
        cls,
        ride_location: RideLocation,
        area_offset_meters: Optional[Dict[str, List[float]]] = None,
    ) -> Optional[List[float]]:
        """
        Estimate ride coordinates based on park location and area.
        Useful when exact ride coordinates are not available.

        Args:
            ride_location: RideLocation instance
            area_offset_meters: Dictionary mapping area names to [north_offset, east_offset] in meters

        Returns:
            Estimated [latitude, longitude] list or None
        """
        park_location = getattr(ride_location.ride.park, "location", None)
        if not park_location or not park_location.point:
            return None

        # Default area offsets (rough estimates for common themed areas)
        default_offsets = {
            "main street": (0, 0),  # Usually at entrance
            "fantasyland": (200, 100),  # Often north-east
            "tomorrowland": (100, 200),  # Often east
            "frontierland": (-100, -200),  # Often south-west
            "adventureland": (-200, 100),  # Often south-east
            "new orleans square": (-150, -100),
            "critter country": (-200, -200),
            "galaxy's edge": (300, 300),  # Often on periphery
            "cars land": (200, -200),
            "pixar pier": (0, 300),  # Often waterfront
        }

        offsets = area_offset_meters or default_offsets

        # Find matching area offset
        area_lower = ride_location.park_area.lower()
        offset = None

        for area_name, area_offset in offsets.items():
            if area_name in area_lower:
                offset = area_offset
                break

        if not offset:
            # Default small random offset if no specific area match
            import random

            offset = (random.randint(-100, 100), random.randint(-100, 100))

        # Convert meter offsets to coordinate offsets.
        # Rough conversion: 1 degree of latitude ≈ 111,000 meters and
        # 1 degree of longitude ≈ 111,000 * cos(latitude) meters.
        lat_offset = offset[0] / 111000  # North offset in degrees
        lon_offset = offset[1] / (
            111000 * math.cos(math.radians(park_location.latitude))
        )  # East offset in degrees

        estimated_lat = park_location.latitude + lat_offset
        estimated_lon = park_location.longitude + lon_offset

        return [estimated_lat, estimated_lon]
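    # Worked example (illustrative numbers): for a park at latitude 28.4°,
    # cos(28.4°) ≈ 0.88, so an offset of (200 m north, 100 m east) becomes roughly
    #   lat_offset = 200 / 111000           ≈ 0.0018°
    #   lon_offset = 100 / (111000 * 0.88)  ≈ 0.0010°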

    @classmethod
    def bulk_update_ride_areas_from_osm(cls, park) -> int:
        """
        Bulk update ride locations for a park using OSM data.
        Attempts to find more precise locations for rides within the park.

        Args:
            park: Park instance

        Returns:
            Number of ride locations updated
        """
        updated_count = 0
        park_location = getattr(park, "location", None)

        if not park_location or not park_location.point:
            return updated_count

        # Get all rides in the park that don't have precise coordinates
        ride_locations = RideLocation.objects.filter(
            ride__park=park, point__isnull=True
        ).select_related("ride")

        for ride_location in ride_locations:
            # Try to search for the specific ride within the park area
            search_query = f"{ride_location.ride.name} {park.name}"

            try:
                # Search for the ride specifically
                params = {
                    "q": search_query,
                    "format": "json",
                    "limit": 5,
                    "addressdetails": 1,
                    "bounded": 1,  # Restrict to viewbox
                    # Create a bounding box around the park (roughly 2km radius)
                    "viewbox": f"{park_location.longitude - 0.02},{park_location.latitude + 0.02},{park_location.longitude + 0.02},{park_location.latitude - 0.02}",
                }

                headers = {"User-Agent": cls.USER_AGENT}

                response = requests.get(
                    f"{cls.NOMINATIM_BASE_URL}/search",
                    params=params,
                    headers=headers,
                    timeout=5,
                )

                if response.status_code == 200:
                    results = response.json()

                    # Look for results that might be the ride
                    for result in results:
                        display_name = result.get("display_name", "").lower()
                        if (
                            ride_location.ride.name.lower() in display_name
                            and park.name.lower() in display_name
                        ):
                            # Update the ride location
                            ride_location.set_coordinates(
                                float(result["lat"]), float(result["lon"])
                            )
                            ride_location.save()
                            updated_count += 1
                            break

            except Exception as e:
                logger.warning(
                    f"Error updating ride location for {ride_location.ride.name}: {str(e)}"
                )
                continue

        return updated_count

    @classmethod
    def generate_park_area_map(cls, park) -> Dict[str, List[str]]:
        """
        Generate a map of park areas and the rides in each area.

        Args:
            park: Park instance

        Returns:
            Dictionary mapping area names to lists of ride names
        """
        area_map = {}

        ride_locations = (
            RideLocation.objects.filter(ride__park=park)
            .select_related("ride")
            .order_by("park_area", "ride__name")
        )

        for ride_location in ride_locations:
            area = ride_location.park_area or "Unknown Area"
            if area not in area_map:
                area_map[area] = []
            area_map[area].append(ride_location.ride.name)

        return area_map
@@ -1,305 +0,0 @@
"""
Ride-specific media service for ThrillWiki.

This module provides media management functionality specific to rides.
"""

import logging
from typing import List, Optional, Dict, Any
from django.core.files.uploadedfile import UploadedFile
from django.db import transaction
from django.contrib.auth import get_user_model
from apps.core.services.media_service import MediaService
from ..models import Ride, RidePhoto

User = get_user_model()
logger = logging.getLogger(__name__)


class RideMediaService:
    """Service for managing ride-specific media operations."""

    @staticmethod
    def upload_photo(
        ride: Ride,
        image_file: UploadedFile,
        user: User,
        caption: str = "",
        alt_text: str = "",
        photo_type: str = "exterior",
        is_primary: bool = False,
        auto_approve: bool = False,
    ) -> RidePhoto:
        """
        Upload a photo for a ride.

        Args:
            ride: Ride instance
            image_file: Uploaded image file
            user: User uploading the photo
            caption: Photo caption
            alt_text: Alt text for accessibility
            photo_type: Type of photo (exterior, queue, station, etc.)
            is_primary: Whether this should be the primary photo
            auto_approve: Whether to auto-approve the photo

        Returns:
            Created RidePhoto instance

        Raises:
            ValueError: If image validation fails
        """
        # Validate image file
        is_valid, error_message = MediaService.validate_image_file(image_file)
        if not is_valid:
            raise ValueError(error_message)

        # Process image
        processed_image = MediaService.process_image(image_file)

        with transaction.atomic():
            # Create photo instance
            photo = RidePhoto(
                ride=ride,
                image=processed_image,
                caption=caption or MediaService.generate_default_caption(user.username),
                alt_text=alt_text,
                photo_type=photo_type,
                is_primary=is_primary,
                is_approved=auto_approve,
                uploaded_by=user,
            )

            # Extract EXIF date
            photo.date_taken = MediaService.extract_exif_date(processed_image)

            photo.save()

        logger.info(f"Photo uploaded for ride {ride.slug} by user {user.username}")
        return photo
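    # Illustrative usage (not part of the original file; the request object is
    # assumed to exist in the caller, e.g. a Django view):
    #
    #   photo = RideMediaService.upload_photo(
    #       ride=ride,
    #       image_file=request.FILES["image"],
    #       user=request.user,
    #       photo_type="queue",
    #   )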

    @staticmethod
    def get_ride_photos(
        ride: Ride,
        approved_only: bool = True,
        primary_first: bool = True,
        photo_type: Optional[str] = None,
    ) -> List[RidePhoto]:
        """
        Get photos for a ride.

        Args:
            ride: Ride instance
            approved_only: Whether to only return approved photos
            primary_first: Whether to order primary photos first
            photo_type: Filter by photo type (optional)

        Returns:
            List of RidePhoto instances
        """
        queryset = ride.photos.all()

        if approved_only:
            queryset = queryset.filter(is_approved=True)

        if photo_type:
            queryset = queryset.filter(photo_type=photo_type)

        if primary_first:
            queryset = queryset.order_by("-is_primary", "-created_at")
        else:
            queryset = queryset.order_by("-created_at")

        return list(queryset)

    @staticmethod
    def get_primary_photo(ride: Ride) -> Optional[RidePhoto]:
        """
        Get the primary photo for a ride.

        Args:
            ride: Ride instance

        Returns:
            Primary RidePhoto instance or None
        """
        # .first() already returns None when nothing matches, so no exception
        # handling is required here.
        return ride.photos.filter(is_primary=True, is_approved=True).first()

    @staticmethod
    def get_photos_by_type(ride: Ride, photo_type: str) -> List[RidePhoto]:
        """
        Get photos of a specific type for a ride.

        Args:
            ride: Ride instance
            photo_type: Type of photos to retrieve

        Returns:
            List of RidePhoto instances
        """
        return list(
            ride.photos.filter(photo_type=photo_type, is_approved=True).order_by(
                "-created_at"
            )
        )

    @staticmethod
    def set_primary_photo(ride: Ride, photo: RidePhoto) -> bool:
        """
        Set a photo as the primary photo for a ride.

        Args:
            ride: Ride instance
            photo: RidePhoto to set as primary

        Returns:
            True if successful, False otherwise
        """
        if photo.ride != ride:
            return False

        with transaction.atomic():
            # Unset current primary
            ride.photos.filter(is_primary=True).update(is_primary=False)

            # Set new primary
            photo.is_primary = True
            photo.save()

        logger.info(f"Set photo {photo.pk} as primary for ride {ride.slug}")
        return True

    @staticmethod
    def approve_photo(photo: RidePhoto, approved_by: User) -> bool:
        """
        Approve a ride photo.

        Args:
            photo: RidePhoto to approve
            approved_by: User approving the photo

        Returns:
            True if successful, False otherwise
        """
        try:
            photo.is_approved = True
            photo.save()

            logger.info(f"Photo {photo.pk} approved by user {approved_by.username}")
            return True
        except Exception as e:
            logger.error(f"Failed to approve photo {photo.pk}: {str(e)}")
            return False

    @staticmethod
    def delete_photo(photo: RidePhoto, deleted_by: User) -> bool:
        """
        Delete a ride photo.

        Args:
            photo: RidePhoto to delete
            deleted_by: User deleting the photo

        Returns:
            True if successful, False otherwise
        """
        try:
            ride_slug = photo.ride.slug
            photo_id = photo.pk

            # Delete the file and database record
            if photo.image:
                photo.image.delete(save=False)
            photo.delete()

            logger.info(
                f"Photo {photo_id} deleted from ride {ride_slug} by user {deleted_by.username}"
            )
            return True
        except Exception as e:
            logger.error(f"Failed to delete photo {photo.pk}: {str(e)}")
            return False

    @staticmethod
    def get_photo_stats(ride: Ride) -> Dict[str, Any]:
        """
        Get photo statistics for a ride.

        Args:
            ride: Ride instance

        Returns:
            Dictionary with photo statistics
        """
        photos = ride.photos.all()

        # Get counts by photo type
        type_counts = {}
        for photo_type, _ in RidePhoto._meta.get_field("photo_type").choices:
            type_counts[photo_type] = photos.filter(photo_type=photo_type).count()

        return {
            "total_photos": photos.count(),
            "approved_photos": photos.filter(is_approved=True).count(),
            "pending_photos": photos.filter(is_approved=False).count(),
            "has_primary": photos.filter(is_primary=True).exists(),
            "recent_uploads": photos.order_by("-created_at")[:5].count(),
            "by_type": type_counts,
        }

    @staticmethod
    def bulk_approve_photos(photos: List[RidePhoto], approved_by: User) -> int:
        """
        Bulk approve multiple photos.

        Args:
            photos: List of RidePhoto instances to approve
            approved_by: User approving the photos

        Returns:
            Number of photos successfully approved
        """
        approved_count = 0

        with transaction.atomic():
            for photo in photos:
                if RideMediaService.approve_photo(photo, approved_by):
                    approved_count += 1

        logger.info(
            f"Bulk approved {approved_count} photos by user {approved_by.username}"
        )
        return approved_count

    @staticmethod
    def get_construction_timeline(ride: Ride) -> List[RidePhoto]:
        """
        Get construction photos ordered chronologically.

        Args:
            ride: Ride instance

        Returns:
            List of construction RidePhoto instances ordered by date taken
        """
        return list(
            ride.photos.filter(photo_type="construction", is_approved=True).order_by(
                "date_taken", "created_at"
            )
        )

    @staticmethod
    def get_onride_photos(ride: Ride) -> List[RidePhoto]:
        """
        Get on-ride photos for a ride.

        Args:
            ride: Ride instance

        Returns:
            List of on-ride RidePhoto instances
        """
        return RideMediaService.get_photos_by_type(ride, "onride")
@@ -1,550 +0,0 @@
"""
Service for calculating ride rankings using the Internet Roller Coaster Poll algorithm.

This service implements a pairwise comparison system where each ride is compared
to every other ride based on mutual riders (users who have rated both rides).
Rankings are determined by winning percentage in these comparisons.
"""

import logging
from typing import Any, Dict, List, Optional
from decimal import Decimal
from datetime import date

from django.db import transaction
from django.db.models import Avg, Count, Q
from django.utils import timezone

from apps.rides.models import (
    Ride,
    RideReview,
    RideRanking,
    RidePairComparison,
    RankingSnapshot,
)


logger = logging.getLogger(__name__)


class RideRankingService:
    """
    Calculates ride rankings using the Internet Roller Coaster Poll algorithm.

    Algorithm Overview:
    1. For each pair of rides, find users who have rated both
    2. Count how many users preferred each ride (higher rating)
    3. Calculate wins, losses, and ties for each ride
    4. Rank rides by winning percentage (ties count as 0.5 wins)
    5. Break ties by head-to-head comparison
    """

    def __init__(self):
        self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
        self.calculation_version = "1.0"

    def update_all_rankings(self, category: Optional[str] = None) -> Dict[str, Any]:
        """
        Main entry point to update all ride rankings.

        Args:
            category: Optional ride category to filter ('RC' for roller coasters, etc.)
                If None, ranks all rides.

        Returns:
            Dictionary with statistics about the ranking calculation
        """
        start_time = timezone.now()
        self.logger.info(
            f"Starting ranking calculation for category: {category or 'ALL'}"
        )

        try:
            with transaction.atomic():
                # Get rides to rank
                rides = self._get_eligible_rides(category)
                if not rides:
                    self.logger.warning("No eligible rides found for ranking")
                    return {
                        "status": "skipped",
                        "message": "No eligible rides found",
                        "duration": (timezone.now() - start_time).total_seconds(),
                    }

                self.logger.info(f"Found {len(rides)} rides to rank")

                # Calculate pairwise comparisons
                comparisons = self._calculate_all_comparisons(rides)

                # Calculate rankings from comparisons
                rankings = self._calculate_rankings_from_comparisons(rides, comparisons)

                # Save rankings
                self._save_rankings(rankings)

                # Save snapshots for historical tracking
                self._save_ranking_snapshots(rankings)

                # Clean up old data
                self._cleanup_old_data()

                duration = (timezone.now() - start_time).total_seconds()
                self.logger.info(
                    f"Ranking calculation completed in {duration:.2f} seconds"
                )

                return {
                    "status": "success",
                    "rides_ranked": len(rides),
                    "comparisons_made": len(comparisons),
                    "duration": duration,
                    "timestamp": timezone.now(),
                }

        except Exception as e:
            self.logger.error(f"Error updating rankings: {e}", exc_info=True)
            raise

    def _get_eligible_rides(self, category: Optional[str] = None) -> List[Ride]:
        """
        Get rides that are eligible for ranking.

        Only includes rides that:
        - Are currently operating
        - Have at least one review/rating
        """
        queryset = (
            Ride.objects.filter(status="OPERATING", reviews__is_published=True)
            .annotate(
                review_count=Count("reviews", filter=Q(reviews__is_published=True))
            )
            .filter(review_count__gt=0)
        )

        if category:
            queryset = queryset.filter(category=category)

        return list(queryset.distinct())

    def _calculate_all_comparisons(
        self, rides: List[Ride]
    ) -> Dict[tuple[int, int], RidePairComparison]:
        """
        Calculate pairwise comparisons for all ride pairs.

        Returns a dictionary keyed by (ride_a_id, ride_b_id) tuples.
        """
        comparisons = {}
        total_pairs = len(rides) * (len(rides) - 1) // 2
        processed = 0

        for i, ride_a in enumerate(rides):
            for ride_b in rides[i + 1 :]:
                comparison = self._calculate_pairwise_comparison(ride_a, ride_b)
                if comparison:
                    # Store both directions for easy lookup
                    comparisons[(ride_a.id, ride_b.id)] = comparison
                    comparisons[(ride_b.id, ride_a.id)] = comparison

                processed += 1
                if processed % 100 == 0:
                    self.logger.debug(
                        f"Processed {processed}/{total_pairs} comparisons"
                    )

        return comparisons

    def _calculate_pairwise_comparison(
        self, ride_a: Ride, ride_b: Ride
    ) -> Optional[RidePairComparison]:
        """
        Calculate the pairwise comparison between two rides.

        Finds users who have rated both rides and determines which ride
        they preferred based on their ratings.
        """
        # Get mutual riders (users who have rated both rides)
        ride_a_reviewers = set(
            RideReview.objects.filter(ride=ride_a, is_published=True).values_list(
                "user_id", flat=True
            )
        )

        ride_b_reviewers = set(
            RideReview.objects.filter(ride=ride_b, is_published=True).values_list(
                "user_id", flat=True
            )
        )

        mutual_riders = ride_a_reviewers & ride_b_reviewers

        if not mutual_riders:
            # No mutual riders, no comparison possible
            return None

        # Get ratings from mutual riders
        ride_a_ratings = {
            review.user_id: review.rating
            for review in RideReview.objects.filter(
                ride=ride_a, user_id__in=mutual_riders, is_published=True
            )
        }

        ride_b_ratings = {
            review.user_id: review.rating
            for review in RideReview.objects.filter(
                ride=ride_b, user_id__in=mutual_riders, is_published=True
            )
        }

        # Count wins and ties
        ride_a_wins = 0
        ride_b_wins = 0
        ties = 0

        for user_id in mutual_riders:
            rating_a = ride_a_ratings.get(user_id, 0)
            rating_b = ride_b_ratings.get(user_id, 0)

            if rating_a > rating_b:
                ride_a_wins += 1
            elif rating_b > rating_a:
                ride_b_wins += 1
            else:
                ties += 1

        # Calculate average ratings from mutual riders
        ride_a_avg = (
            sum(ride_a_ratings.values()) / len(ride_a_ratings) if ride_a_ratings else 0
        )
        ride_b_avg = (
            sum(ride_b_ratings.values()) / len(ride_b_ratings) if ride_b_ratings else 0
        )

        # Create or update comparison record
        comparison, created = RidePairComparison.objects.update_or_create(
            ride_a=ride_a if ride_a.id < ride_b.id else ride_b,
            ride_b=ride_b if ride_a.id < ride_b.id else ride_a,
            defaults={
                "ride_a_wins": ride_a_wins if ride_a.id < ride_b.id else ride_b_wins,
                "ride_b_wins": ride_b_wins if ride_a.id < ride_b.id else ride_a_wins,
                "ties": ties,
                "mutual_riders_count": len(mutual_riders),
                "ride_a_avg_rating": (
                    Decimal(str(ride_a_avg))
                    if ride_a.id < ride_b.id
                    else Decimal(str(ride_b_avg))
                ),
                "ride_b_avg_rating": (
                    Decimal(str(ride_b_avg))
                    if ride_a.id < ride_b.id
                    else Decimal(str(ride_a_avg))
                ),
            },
        )

        return comparison

    def _calculate_rankings_from_comparisons(
        self, rides: List[Ride], comparisons: Dict[tuple[int, int], RidePairComparison]
    ) -> List[Dict]:
        """
        Calculate final rankings from pairwise comparisons.

        Returns a list of dictionaries containing ranking data for each ride.
        """
        rankings = []

        for ride in rides:
            wins = 0
            losses = 0
            ties = 0
            comparison_count = 0

            # Count wins, losses, and ties
            for other_ride in rides:
                if ride.id == other_ride.id:
                    continue

                comparison_key = (
                    min(ride.id, other_ride.id),
                    max(ride.id, other_ride.id),
                )
                comparison = comparisons.get(comparison_key)

                if not comparison:
                    continue

                comparison_count += 1

                # Determine win/loss/tie for this ride
                if comparison.ride_a_id == ride.id:
                    if comparison.ride_a_wins > comparison.ride_b_wins:
                        wins += 1
                    elif comparison.ride_a_wins < comparison.ride_b_wins:
                        losses += 1
                    else:
                        ties += 1
                else:  # ride_b_id == ride.id
                    if comparison.ride_b_wins > comparison.ride_a_wins:
                        wins += 1
                    elif comparison.ride_b_wins < comparison.ride_a_wins:
                        losses += 1
                    else:
                        ties += 1

            # Calculate winning percentage (ties count as 0.5)
            total_comparisons = wins + losses + ties
            if total_comparisons > 0:
                winning_percentage = Decimal(
                    str((wins + 0.5 * ties) / total_comparisons)
                )
            else:
                winning_percentage = Decimal("0.5")
|
||||
|
||||
# Get average rating and reviewer count
|
||||
ride_stats = RideReview.objects.filter(
|
||||
ride=ride, is_published=True
|
||||
).aggregate(
|
||||
avg_rating=Avg("rating"), reviewer_count=Count("user", distinct=True)
|
||||
)
|
||||
|
||||
rankings.append(
|
||||
{
|
||||
"ride": ride,
|
||||
"wins": wins,
|
||||
"losses": losses,
|
||||
"ties": ties,
|
||||
"winning_percentage": winning_percentage,
|
||||
"comparison_count": comparison_count,
|
||||
"average_rating": ride_stats["avg_rating"],
|
||||
"mutual_riders_count": ride_stats["reviewer_count"] or 0,
|
||||
}
|
||||
)
|
||||
|
||||
# Sort by winning percentage (descending), then by mutual riders count for ties
|
||||
rankings.sort(
|
||||
key=lambda x: (
|
||||
x["winning_percentage"],
|
||||
x["mutual_riders_count"],
|
||||
x["average_rating"] or 0,
|
||||
),
|
||||
reverse=True,
|
||||
)
|
||||
|
||||
# Handle tie-breaking with head-to-head comparisons
|
||||
rankings = self._apply_tiebreakers(rankings, comparisons)
|
||||
|
||||
# Assign final ranks
|
||||
for i, ranking_data in enumerate(rankings, 1):
|
||||
ranking_data["rank"] = i
|
||||
|
||||
return rankings
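
    # Worked example of the winning-percentage formula above (hypothetical
    # numbers): a ride with wins=3, losses=1, ties=2 scores
    # (3 + 0.5 * 2) / 6 = 0.666..., while a ride with no comparisons at all
    # falls back to the neutral Decimal("0.5").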

    def _apply_tiebreakers(
        self,
        rankings: List[Dict],
        comparisons: Dict[tuple[int, int], RidePairComparison],
    ) -> List[Dict]:
        """
        Apply head-to-head tiebreaker for rides with identical winning percentages.

        If two rides have the same winning percentage, the one that beat the other
        in their head-to-head comparison gets the higher rank.
        """
        i = 0
        while i < len(rankings) - 1:
            # Find rides with same winning percentage
            tied_group = [rankings[i]]
            j = i + 1

            while (
                j < len(rankings)
                and rankings[j]["winning_percentage"]
                == rankings[i]["winning_percentage"]
            ):
                tied_group.append(rankings[j])
                j += 1

            if len(tied_group) > 1:
                # Apply head-to-head tiebreaker within the group
                tied_group = self._sort_tied_group(tied_group, comparisons)

                # Replace the tied section with sorted group
                rankings[i:j] = tied_group

            i = j

        return rankings

    def _sort_tied_group(
        self,
        tied_group: List[Dict],
        comparisons: Dict[tuple[int, int], RidePairComparison],
    ) -> List[Dict]:
        """
        Sort a group of tied rides using head-to-head comparisons.
        """
        # Create mini-rankings within the tied group
        for ride_data in tied_group:
            mini_wins = 0
            mini_losses = 0

            for other_data in tied_group:
                if ride_data["ride"].id == other_data["ride"].id:
                    continue

                comparison_key = (
                    min(ride_data["ride"].id, other_data["ride"].id),
                    max(ride_data["ride"].id, other_data["ride"].id),
                )
                comparison = comparisons.get(comparison_key)

                if comparison:
                    if comparison.ride_a_id == ride_data["ride"].id:
                        if comparison.ride_a_wins > comparison.ride_b_wins:
                            mini_wins += 1
                        elif comparison.ride_a_wins < comparison.ride_b_wins:
                            mini_losses += 1
                    else:
                        if comparison.ride_b_wins > comparison.ride_a_wins:
                            mini_wins += 1
                        elif comparison.ride_b_wins < comparison.ride_a_wins:
                            mini_losses += 1

            ride_data["tiebreaker_score"] = mini_wins - mini_losses

        # Sort by tiebreaker score, then by mutual riders count, then by average rating
        tied_group.sort(
            key=lambda x: (
                x["tiebreaker_score"],
                x["mutual_riders_count"],
                x["average_rating"] or 0,
            ),
            reverse=True,
        )

        return tied_group
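
    # Sketch of the tiebreaker above (hypothetical ids): if rides 1, 2 and 3
    # share the same winning percentage, ride 1 beat both others head-to-head
    # and ride 3 lost both, their tiebreaker_score values are +2, 0 and -2,
    # so the tied group re-sorts to [1, 2, 3].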

    def _save_rankings(self, rankings: List[Dict]):
        """Save calculated rankings to the database."""
        for ranking_data in rankings:
            RideRanking.objects.update_or_create(
                ride=ranking_data["ride"],
                defaults={
                    "rank": ranking_data["rank"],
                    "wins": ranking_data["wins"],
                    "losses": ranking_data["losses"],
                    "ties": ranking_data["ties"],
                    "winning_percentage": ranking_data["winning_percentage"],
                    "mutual_riders_count": ranking_data["mutual_riders_count"],
                    "comparison_count": ranking_data["comparison_count"],
                    "average_rating": ranking_data["average_rating"],
                    "last_calculated": timezone.now(),
                    "calculation_version": self.calculation_version,
                },
            )

    def _save_ranking_snapshots(self, rankings: List[Dict]):
        """Save ranking snapshots for historical tracking."""
        today = date.today()

        for ranking_data in rankings:
            RankingSnapshot.objects.update_or_create(
                ride=ranking_data["ride"],
                snapshot_date=today,
                defaults={
                    "rank": ranking_data["rank"],
                    "winning_percentage": ranking_data["winning_percentage"],
                },
            )

    def _cleanup_old_data(self, days_to_keep: int = 365):
        """Clean up ranking snapshots older than the retention window."""
        cutoff_date = timezone.now() - timezone.timedelta(days=days_to_keep)

        # Delete old snapshots
        deleted_snapshots = RankingSnapshot.objects.filter(
            snapshot_date__lt=cutoff_date.date()
        ).delete()

        if deleted_snapshots[0] > 0:
            self.logger.info(f"Deleted {deleted_snapshots[0]} old ranking snapshots")

    def get_ride_ranking_details(self, ride: Ride) -> Optional[Dict]:
        """
        Get detailed ranking information for a specific ride.

        Returns dictionary with ranking details or None if not ranked.
        """
        try:
            ranking = RideRanking.objects.get(ride=ride)

            # Get recent head-to-head comparisons
            comparisons = (
                RidePairComparison.objects.filter(Q(ride_a=ride) | Q(ride_b=ride))
                .select_related("ride_a", "ride_b")
                .order_by("-mutual_riders_count")[:10]
            )

            # Get ranking history
            history = RankingSnapshot.objects.filter(ride=ride).order_by(
                "-snapshot_date"
            )[:30]

            return {
                "current_rank": ranking.rank,
                "winning_percentage": ranking.winning_percentage,
                "wins": ranking.wins,
                "losses": ranking.losses,
                "ties": ranking.ties,
                "average_rating": ranking.average_rating,
                "mutual_riders_count": ranking.mutual_riders_count,
                "last_calculated": ranking.last_calculated,
                "head_to_head": [
                    {
                        "opponent": (
                            comp.ride_b if comp.ride_a_id == ride.id else comp.ride_a
                        ),
                        "result": (
                            "win"
                            if (
                                (
                                    comp.ride_a_id == ride.id
                                    and comp.ride_a_wins > comp.ride_b_wins
                                )
                                or (
                                    comp.ride_b_id == ride.id
                                    and comp.ride_b_wins > comp.ride_a_wins
                                )
                            )
                            else (
                                "loss"
                                if (
                                    (
                                        comp.ride_a_id == ride.id
                                        and comp.ride_a_wins < comp.ride_b_wins
                                    )
                                    or (
                                        comp.ride_b_id == ride.id
                                        and comp.ride_b_wins < comp.ride_a_wins
                                    )
                                )
                                else "tie"
                            )
                        ),
                        "mutual_riders": comp.mutual_riders_count,
                    }
                    for comp in comparisons
                ],
                "ranking_history": [
                    {
                        "date": snapshot.snapshot_date,
                        "rank": snapshot.rank,
                        "winning_percentage": snapshot.winning_percentage,
                    }
                    for snapshot in history
                ],
            }
        except RideRanking.DoesNotExist:
            return None
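
    # Minimal usage sketch (assumes a service instance and an existing
    # RideRanking row; names are illustrative):
    #
    #     details = service.get_ride_ranking_details(ride)
    #     if details is not None:
    #         print(details["current_rank"], details["winning_percentage"])
    #         for match in details["head_to_head"]:
    #             print(match["opponent"], match["result"], match["mutual_riders"])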
@@ -1,730 +0,0 @@
"""
Ride Search Service

Provides comprehensive search and filtering capabilities for rides using PostgreSQL's
advanced full-text search features including SearchVector, SearchQuery, SearchRank,
and TrigramSimilarity for fuzzy matching.

This service implements the filtering design specified in:
backend/docs/ride_filtering_design.md
"""

from django.contrib.postgres.search import (
    SearchVector,
    SearchQuery,
    SearchRank,
    TrigramSimilarity,
)
from django.db import models
from django.db.models import Q, F, Value
from django.db.models.functions import Greatest
from typing import Dict, List, Optional, Any

from apps.rides.models import Ride
from apps.parks.models import Park
from apps.rides.models.company import Company


class RideSearchService:
    """
    Advanced search service for rides with PostgreSQL full-text search capabilities.

    Features:
    - Full-text search with ranking and highlighting
    - Fuzzy matching with trigram similarity
    - Comprehensive filtering across 8 categories
    - Range filtering for numeric fields
    - Date range filtering
    - Multi-select filtering
    - Sorting with multiple options
    - Search suggestions and autocomplete
    """

    # Search configuration
    SEARCH_LANGUAGES = ["english"]
    TRIGRAM_SIMILARITY_THRESHOLD = 0.3
    SEARCH_RANK_WEIGHTS = [0.1, 0.2, 0.4, 1.0]  # D, C, B, A weights

    # Filter categories from our design
    FILTER_CATEGORIES = {
        "search_text": ["global_search", "name_search", "description_search"],
        "basic_info": ["category", "status", "park", "park_area"],
        "dates": ["opening_date_range", "closing_date_range", "status_since_range"],
        "height_safety": ["min_height_range", "max_height_range"],
        "performance": ["capacity_range", "duration_range", "rating_range"],
        "relationships": ["manufacturer", "designer", "ride_model"],
        "roller_coaster": [
            "height_ft_range",
            "length_ft_range",
            "speed_mph_range",
            "inversions_range",
            "track_material",
            "coaster_type",
            "propulsion_system",
        ],
        "company": ["manufacturer_roles", "designer_roles", "founded_date_range"],
    }

    # Sorting options
    SORT_OPTIONS = {
        "relevance": "search_rank",
        "name_asc": "name",
        "name_desc": "-name",
        "opening_date_asc": "opening_date",
        "opening_date_desc": "-opening_date",
        "rating_asc": "average_rating",
        "rating_desc": "-average_rating",
        "height_asc": "rollercoasterstats__height_ft",
        "height_desc": "-rollercoasterstats__height_ft",
        "speed_asc": "rollercoasterstats__speed_mph",
        "speed_desc": "-rollercoasterstats__speed_mph",
        "capacity_asc": "capacity_per_hour",
        "capacity_desc": "-capacity_per_hour",
        "created_asc": "created_at",
        "created_desc": "-created_at",
    }

    def __init__(self):
        """Initialize the search service."""
        self.base_queryset = self._get_base_queryset()

    def _get_base_queryset(self):
        """
        Get the base queryset with all necessary relationships pre-loaded
        for optimal performance.
        """
        return Ride.objects.select_related(
            "park",
            "park_area",
            "manufacturer",
            "designer",
            "ride_model",
            "rollercoasterstats",
        ).prefetch_related("manufacturer__roles", "designer__roles")

    def search_and_filter(
        self,
        filters: Dict[str, Any],
        sort_by: str = "relevance",
        page: int = 1,
        page_size: int = 20,
    ) -> Dict[str, Any]:
        """
        Main search and filter method that combines all capabilities.

        Args:
            filters: Dictionary of filter parameters
            sort_by: Sorting option key
            page: Page number for pagination
            page_size: Number of results per page

        Returns:
            Dictionary containing results, pagination info, and metadata
        """
        queryset = self.base_queryset
        search_metadata = {}

        # Apply text search with ranking
        if filters.get("global_search"):
            queryset, search_rank = self._apply_full_text_search(
                queryset, filters["global_search"]
            )
            search_metadata["search_applied"] = True
            search_metadata["search_term"] = filters["global_search"]
        else:
            search_rank = Value(0)

        # Apply all filter categories
        queryset = self._apply_basic_info_filters(queryset, filters)
        queryset = self._apply_date_filters(queryset, filters)
        queryset = self._apply_height_safety_filters(queryset, filters)
        queryset = self._apply_performance_filters(queryset, filters)
        queryset = self._apply_relationship_filters(queryset, filters)
        queryset = self._apply_roller_coaster_filters(queryset, filters)
        queryset = self._apply_company_filters(queryset, filters)

        # Add search rank to queryset for sorting
        queryset = queryset.annotate(search_rank=search_rank)

        # Apply sorting
        queryset = self._apply_sorting(queryset, sort_by)

        # Get total count before pagination
        total_count = queryset.count()

        # Apply pagination
        start_idx = (page - 1) * page_size
        end_idx = start_idx + page_size
        results = list(queryset[start_idx:end_idx])

        # Generate search highlights if search was applied
        if filters.get("global_search"):
            results = self._add_search_highlights(results, filters["global_search"])

        return {
            "results": results,
            "pagination": {
                "page": page,
                "page_size": page_size,
                "total_count": total_count,
                "total_pages": (total_count + page_size - 1) // page_size,
                "has_next": end_idx < total_count,
                "has_previous": page > 1,
            },
            "metadata": search_metadata,
            "applied_filters": self._get_applied_filters_summary(filters),
        }
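
    # Minimal usage sketch (filter values are hypothetical):
    #
    #     service = RideSearchService()
    #     page_data = service.search_and_filter(
    #         filters={
    #             "global_search": "steel coaster",
    #             "status": ["OPERATING"],
    #             "height_ft_range": {"min": 200},
    #         },
    #         sort_by="height_desc",
    #         page=1,
    #         page_size=20,
    #     )
    #     rides = page_data["results"]
    #     total = page_data["pagination"]["total_count"]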

    def _apply_full_text_search(
        self, queryset, search_term: str
    ) -> tuple[models.QuerySet, models.Expression]:
        """
        Apply PostgreSQL full-text search with ranking and fuzzy matching.
        """
        if not search_term or not search_term.strip():
            return queryset, Value(0)

        search_term = search_term.strip()

        # Create search vector combining multiple fields with different weights
        search_vector = (
            SearchVector("name", weight="A")
            + SearchVector("description", weight="B")
            + SearchVector("park__name", weight="C")
            + SearchVector("manufacturer__name", weight="C")
            + SearchVector("designer__name", weight="C")
            + SearchVector("ride_model__name", weight="D")
        )

        # Create search query - try different query types for best results
        search_query = SearchQuery(search_term, config="english")

        # Calculate search rank
        search_rank = SearchRank(
            search_vector, search_query, weights=self.SEARCH_RANK_WEIGHTS
        )

        # Apply trigram similarity for fuzzy matching on name
        trigram_similarity = TrigramSimilarity("name", search_term)

        # Combine full-text search with trigram similarity
        queryset = queryset.annotate(trigram_similarity=trigram_similarity).filter(
            Q(search_vector=search_query)
            | Q(trigram_similarity__gte=self.TRIGRAM_SIMILARITY_THRESHOLD)
        )

        # Use the greatest of search rank and trigram similarity for final ranking
        final_rank = Greatest(search_rank, F("trigram_similarity"))

        return queryset, final_rank
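
    # Note on the ranking above: SearchRank's weights list is ordered
    # [D, C, B, A], so SEARCH_RANK_WEIGHTS = [0.1, 0.2, 0.4, 1.0] makes an
    # "A"-weighted hit on ``name`` worth ten times a "D"-weighted hit on
    # ``ride_model__name``. Greatest() then lets a strong trigram match on a
    # misspelled query (e.g. "stell coaster", a hypothetical input) still
    # rank highly even when the tsquery itself misses.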

    def _apply_basic_info_filters(
        self, queryset, filters: Dict[str, Any]
    ) -> models.QuerySet:
        """Apply basic information filters."""

        # Category filter (multi-select)
        if filters.get("category"):
            categories = (
                filters["category"]
                if isinstance(filters["category"], list)
                else [filters["category"]]
            )
            queryset = queryset.filter(category__in=categories)

        # Status filter (multi-select)
        if filters.get("status"):
            statuses = (
                filters["status"]
                if isinstance(filters["status"], list)
                else [filters["status"]]
            )
            queryset = queryset.filter(status__in=statuses)

        # Park filter (multi-select)
        if filters.get("park"):
            parks = (
                filters["park"]
                if isinstance(filters["park"], list)
                else [filters["park"]]
            )
            if isinstance(parks[0], str):  # If slugs provided
                queryset = queryset.filter(park__slug__in=parks)
            else:  # If IDs provided
                queryset = queryset.filter(park_id__in=parks)

        # Park area filter (multi-select)
        if filters.get("park_area"):
            areas = (
                filters["park_area"]
                if isinstance(filters["park_area"], list)
                else [filters["park_area"]]
            )
            if isinstance(areas[0], str):  # If slugs provided
                queryset = queryset.filter(park_area__slug__in=areas)
            else:  # If IDs provided
                queryset = queryset.filter(park_area_id__in=areas)

        return queryset

    def _apply_date_filters(self, queryset, filters: Dict[str, Any]) -> models.QuerySet:
        """Apply date range filters."""

        # Opening date range
        if filters.get("opening_date_range"):
            date_range = filters["opening_date_range"]
            if date_range.get("start"):
                queryset = queryset.filter(opening_date__gte=date_range["start"])
            if date_range.get("end"):
                queryset = queryset.filter(opening_date__lte=date_range["end"])

        # Closing date range
        if filters.get("closing_date_range"):
            date_range = filters["closing_date_range"]
            if date_range.get("start"):
                queryset = queryset.filter(closing_date__gte=date_range["start"])
            if date_range.get("end"):
                queryset = queryset.filter(closing_date__lte=date_range["end"])

        # Status since range
        if filters.get("status_since_range"):
            date_range = filters["status_since_range"]
            if date_range.get("start"):
                queryset = queryset.filter(status_since__gte=date_range["start"])
            if date_range.get("end"):
                queryset = queryset.filter(status_since__lte=date_range["end"])

        return queryset

    def _apply_height_safety_filters(
        self, queryset, filters: Dict[str, Any]
    ) -> models.QuerySet:
        """Apply height and safety requirement filters."""

        # Minimum height range
        if filters.get("min_height_range"):
            height_range = filters["min_height_range"]
            if height_range.get("min") is not None:
                queryset = queryset.filter(min_height_in__gte=height_range["min"])
            if height_range.get("max") is not None:
                queryset = queryset.filter(min_height_in__lte=height_range["max"])

        # Maximum height range
        if filters.get("max_height_range"):
            height_range = filters["max_height_range"]
            if height_range.get("min") is not None:
                queryset = queryset.filter(max_height_in__gte=height_range["min"])
            if height_range.get("max") is not None:
                queryset = queryset.filter(max_height_in__lte=height_range["max"])

        return queryset

    def _apply_performance_filters(
        self, queryset, filters: Dict[str, Any]
    ) -> models.QuerySet:
        """Apply performance metric filters."""

        # Capacity range
        if filters.get("capacity_range"):
            capacity_range = filters["capacity_range"]
            if capacity_range.get("min") is not None:
                queryset = queryset.filter(capacity_per_hour__gte=capacity_range["min"])
            if capacity_range.get("max") is not None:
                queryset = queryset.filter(capacity_per_hour__lte=capacity_range["max"])

        # Duration range
        if filters.get("duration_range"):
            duration_range = filters["duration_range"]
            if duration_range.get("min") is not None:
                queryset = queryset.filter(
                    ride_duration_seconds__gte=duration_range["min"]
                )
            if duration_range.get("max") is not None:
                queryset = queryset.filter(
                    ride_duration_seconds__lte=duration_range["max"]
                )

        # Rating range
        if filters.get("rating_range"):
            rating_range = filters["rating_range"]
            if rating_range.get("min") is not None:
                queryset = queryset.filter(average_rating__gte=rating_range["min"])
            if rating_range.get("max") is not None:
                queryset = queryset.filter(average_rating__lte=rating_range["max"])

        return queryset

    def _apply_relationship_filters(
        self, queryset, filters: Dict[str, Any]
    ) -> models.QuerySet:
        """Apply relationship filters (manufacturer, designer, ride model)."""

        # Manufacturer filter (multi-select)
        if filters.get("manufacturer"):
            manufacturers = (
                filters["manufacturer"]
                if isinstance(filters["manufacturer"], list)
                else [filters["manufacturer"]]
            )
            if isinstance(manufacturers[0], str):  # If slugs provided
                queryset = queryset.filter(manufacturer__slug__in=manufacturers)
            else:  # If IDs provided
                queryset = queryset.filter(manufacturer_id__in=manufacturers)

        # Designer filter (multi-select)
        if filters.get("designer"):
            designers = (
                filters["designer"]
                if isinstance(filters["designer"], list)
                else [filters["designer"]]
            )
            if isinstance(designers[0], str):  # If slugs provided
                queryset = queryset.filter(designer__slug__in=designers)
            else:  # If IDs provided
                queryset = queryset.filter(designer_id__in=designers)

        # Ride model filter (multi-select)
        if filters.get("ride_model"):
            models_list = (
                filters["ride_model"]
                if isinstance(filters["ride_model"], list)
                else [filters["ride_model"]]
            )
            if isinstance(models_list[0], str):  # If slugs provided
                queryset = queryset.filter(ride_model__slug__in=models_list)
            else:  # If IDs provided
                queryset = queryset.filter(ride_model_id__in=models_list)

        return queryset

    def _apply_roller_coaster_filters(
        self, queryset, filters: Dict[str, Any]
    ) -> models.QuerySet:
        """Apply roller coaster specific filters."""
        queryset = self._apply_numeric_range_filter(
            queryset, filters, "height_ft_range", "rollercoasterstats__height_ft"
        )
        queryset = self._apply_numeric_range_filter(
            queryset, filters, "length_ft_range", "rollercoasterstats__length_ft"
        )
        queryset = self._apply_numeric_range_filter(
            queryset, filters, "speed_mph_range", "rollercoasterstats__speed_mph"
        )
        queryset = self._apply_numeric_range_filter(
            queryset, filters, "inversions_range", "rollercoasterstats__inversions"
        )

        # Track material filter (multi-select)
        if filters.get("track_material"):
            materials = (
                filters["track_material"]
                if isinstance(filters["track_material"], list)
                else [filters["track_material"]]
            )
            queryset = queryset.filter(rollercoasterstats__track_material__in=materials)

        # Coaster type filter (multi-select)
        if filters.get("coaster_type"):
            types = (
                filters["coaster_type"]
                if isinstance(filters["coaster_type"], list)
                else [filters["coaster_type"]]
            )
            queryset = queryset.filter(
                rollercoasterstats__roller_coaster_type__in=types
            )

        # Propulsion system filter (multi-select)
        if filters.get("propulsion_system"):
            propulsion_systems = (
                filters["propulsion_system"]
                if isinstance(filters["propulsion_system"], list)
                else [filters["propulsion_system"]]
            )
            queryset = queryset.filter(
                rollercoasterstats__propulsion_system__in=propulsion_systems
            )

        return queryset

    def _apply_numeric_range_filter(
        self,
        queryset,
        filters: Dict[str, Any],
        filter_key: str,
        field_name: str,
    ) -> models.QuerySet:
        """Apply numeric range filter to reduce complexity."""
        if filters.get(filter_key):
            range_filter = filters[filter_key]
            if range_filter.get("min") is not None:
                queryset = queryset.filter(
                    **{f"{field_name}__gte": range_filter["min"]}
                )
            if range_filter.get("max") is not None:
                queryset = queryset.filter(
                    **{f"{field_name}__lte": range_filter["max"]}
                )
        return queryset
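
    # Example of the dynamic lookup built above (hypothetical values): with
    # filter_key="height_ft_range", field_name="rollercoasterstats__height_ft"
    # and a filter of {"min": 300, "max": 500}, the kwargs unpacking expands to
    #     .filter(rollercoasterstats__height_ft__gte=300)
    #     .filter(rollercoasterstats__height_ft__lte=500)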

    def _apply_company_filters(
        self, queryset, filters: Dict[str, Any]
    ) -> models.QuerySet:
        """Apply company-related filters."""

        # Manufacturer roles filter
        if filters.get("manufacturer_roles"):
            roles = (
                filters["manufacturer_roles"]
                if isinstance(filters["manufacturer_roles"], list)
                else [filters["manufacturer_roles"]]
            )
            queryset = queryset.filter(manufacturer__roles__overlap=roles)

        # Designer roles filter
        if filters.get("designer_roles"):
            roles = (
                filters["designer_roles"]
                if isinstance(filters["designer_roles"], list)
                else [filters["designer_roles"]]
            )
            queryset = queryset.filter(designer__roles__overlap=roles)

        # Founded date range
        if filters.get("founded_date_range"):
            date_range = filters["founded_date_range"]
            if date_range.get("start"):
                queryset = queryset.filter(
                    Q(manufacturer__founded_date__gte=date_range["start"])
                    | Q(designer__founded_date__gte=date_range["start"])
                )
            if date_range.get("end"):
                queryset = queryset.filter(
                    Q(manufacturer__founded_date__lte=date_range["end"])
                    | Q(designer__founded_date__lte=date_range["end"])
                )

        return queryset
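
    # Note: ``__overlap`` is the django.contrib.postgres ArrayField lookup, so
    # a roles filter of ["MANUFACTURER", "DESIGNER"] (values shown only as an
    # illustration) matches companies whose roles array shares at least one of
    # those entries.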

    def _apply_sorting(self, queryset, sort_by: str) -> models.QuerySet:
        """Apply sorting to the queryset."""

        if sort_by not in self.SORT_OPTIONS:
            sort_by = "relevance"

        sort_field = self.SORT_OPTIONS[sort_by]

        # Handle special case for relevance sorting
        if sort_by == "relevance":
            return queryset.order_by("-search_rank", "name")

        # Apply the sorting
        return queryset.order_by(
            sort_field, "name"
        )  # Always add name as secondary sort

    def _add_search_highlights(
        self, results: List[Ride], search_term: str
    ) -> List[Ride]:
        """
        Attach highlight attributes to search results.

        SearchHeadline-based highlighting would need to be annotated at query
        time; for now the plain field values are exposed under the
        ``highlighted_*`` attributes that templates expect.
        """
        if not search_term or not results:
            return results

        for ride in results:
            # Store highlighted versions as dynamic attributes (for template use)
            setattr(ride, "highlighted_name", ride.name)
            setattr(ride, "highlighted_description", ride.description)

        return results

    def _get_applied_filters_summary(self, filters: Dict[str, Any]) -> Dict[str, Any]:
        """Generate a summary of applied filters for the frontend."""

        applied = {}

        # Count filters in each category
        for category, filter_keys in self.FILTER_CATEGORIES.items():
            category_filters = []
            for key in filter_keys:
                if filters.get(key):
                    category_filters.append(
                        {
                            "key": key,
                            "value": filters[key],
                            "display_name": self._get_filter_display_name(key),
                        }
                    )
            if category_filters:
                applied[category] = category_filters

        return applied
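
    # Sketch of the summary shape produced above (input values hypothetical):
    # for filters={"status": ["OPERATING"], "park": ["cedar-point"]} the method
    # returns
    #     {"basic_info": [
    #         {"key": "status", "value": ["OPERATING"], "display_name": "Status"},
    #         {"key": "park", "value": ["cedar-point"], "display_name": "Park"},
    #     ]}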

    def _get_filter_display_name(self, filter_key: str) -> str:
        """Convert filter key to human-readable display name."""

        display_names = {
            "global_search": "Search",
            "category": "Category",
            "status": "Status",
            "park": "Park",
            "park_area": "Park Area",
            "opening_date_range": "Opening Date",
            "closing_date_range": "Closing Date",
            "status_since_range": "Status Since",
            "min_height_range": "Minimum Height",
            "max_height_range": "Maximum Height",
            "capacity_range": "Capacity",
            "duration_range": "Duration",
            "rating_range": "Rating",
            "manufacturer": "Manufacturer",
            "designer": "Designer",
            "ride_model": "Ride Model",
            "height_ft_range": "Height (ft)",
            "length_ft_range": "Length (ft)",
            "speed_mph_range": "Speed (mph)",
            "inversions_range": "Inversions",
            "track_material": "Track Material",
            "coaster_type": "Coaster Type",
            "propulsion_system": "Propulsion System",
            "manufacturer_roles": "Manufacturer Roles",
            "designer_roles": "Designer Roles",
            "founded_date_range": "Founded Date",
        }

        # Fall back to a title-cased key for filters without an explicit label
        # (e.g. "name_search"), so summarising applied filters never raises.
        return display_names.get(filter_key, filter_key.replace("_", " ").title())

    def get_search_suggestions(
        self, query: str, limit: int = 10
    ) -> List[Dict[str, Any]]:
        """
        Get search suggestions for autocomplete functionality.
        """

        if not query or len(query) < 2:
            return []

        suggestions = []

        # Ride names with trigram similarity
        ride_suggestions = (
            Ride.objects.annotate(similarity=TrigramSimilarity("name", query))
            .filter(similarity__gte=0.1)
            .order_by("-similarity")
            .values("name", "slug", "similarity")[: limit // 2]
        )

        for ride in ride_suggestions:
            suggestions.append(
                {
                    "type": "ride",
                    "text": ride["name"],
                    "slug": ride["slug"],
                    "score": ride["similarity"],
                }
            )

        # Park names
        park_suggestions = (
            Park.objects.annotate(similarity=TrigramSimilarity("name", query))
            .filter(similarity__gte=0.1)
            .order_by("-similarity")
            .values("name", "slug", "similarity")[: limit // 4]
        )

        for park in park_suggestions:
            suggestions.append(
                {
                    "type": "park",
                    "text": park["name"],
                    "slug": park["slug"],
                    "score": park["similarity"],
                }
            )

        # Manufacturer names
        manufacturer_suggestions = (
            Company.objects.filter(roles__contains=["MANUFACTURER"])
            .annotate(similarity=TrigramSimilarity("name", query))
            .filter(similarity__gte=0.1)
            .order_by("-similarity")
            .values("name", "slug", "similarity")[: limit // 4]
        )

        for manufacturer in manufacturer_suggestions:
            suggestions.append(
                {
                    "type": "manufacturer",
                    "text": manufacturer["name"],
                    "slug": manufacturer["slug"],
                    "score": manufacturer["similarity"],
                }
            )

        # Sort by score and return top results
        suggestions.sort(key=lambda x: x["score"], reverse=True)
        return suggestions[:limit]
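
    # Example payload (hypothetical data): get_search_suggestions("millen")
    # might return entries such as
    #     {"type": "ride", "text": "Millennium Force",
    #      "slug": "millennium-force", "score": 0.45}
    # ordered by descending trigram similarity across rides, parks and
    # manufacturers.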

    def get_filter_options(
        self, filter_type: str, context_filters: Optional[Dict[str, Any]] = None
    ) -> List[Dict[str, Any]]:
        """
        Get available options for a specific filter type.
        Optionally filter options based on current context.
        """

        context_filters = context_filters or {}
        base_queryset = self.base_queryset

        # Apply context filters to narrow down options
        if context_filters:
            temp_filters = context_filters.copy()
            temp_filters.pop(
                filter_type, None
            )  # Remove the filter we're getting options for
            base_queryset = self._apply_all_filters(base_queryset, temp_filters)

        if filter_type == "park":
            return list(
                base_queryset.values("park__name", "park__slug")
                .distinct()
                .order_by("park__name")
            )

        elif filter_type == "manufacturer":
            return list(
                base_queryset.filter(manufacturer__isnull=False)
                .values("manufacturer__name", "manufacturer__slug")
                .distinct()
                .order_by("manufacturer__name")
            )

        elif filter_type == "designer":
            return list(
                base_queryset.filter(designer__isnull=False)
                .values("designer__name", "designer__slug")
                .distinct()
                .order_by("designer__name")
            )

        # Add more filter options as needed
        return []

    def _apply_all_filters(self, queryset, filters: Dict[str, Any]) -> models.QuerySet:
        """Apply all filters except search ranking."""

        queryset = self._apply_basic_info_filters(queryset, filters)
        queryset = self._apply_date_filters(queryset, filters)
        queryset = self._apply_height_safety_filters(queryset, filters)
        queryset = self._apply_performance_filters(queryset, filters)
        queryset = self._apply_relationship_filters(queryset, filters)
        queryset = self._apply_roller_coaster_filters(queryset, filters)
        queryset = self._apply_company_filters(queryset, filters)

        return queryset